Build Nightly Images (cronjob) #1680
# template file: 050.single_header.yaml
name: "Build Nightly Images (cronjob)"
on:
  schedule:
    - cron: '00 8 * * *'
  push:
    branches:
      - 'main'
    paths:
      - 'userpatches/targets-release-nightly.yaml'
  workflow_call:
    inputs:
      ref: # commit id
        required: false
        type: string
      extraParamsAllBuilds: # additional build parameters
        required: false
        type: string
    secrets:
      ORG_MEMBERS:
        required: true
  workflow_dispatch:
    inputs:
      skipImages:
        description: 'Skip building images? no = build images, yes = skip images'
        required: true
        options: [ 'yes', 'no' ]
        type: choice
        default: 'no'
      checkOci:
        description: 'Check OCI for existing artifacts? yes = check OCI, no = always build everything'
        required: true
        options: [ 'yes', 'no' ]
        type: choice
        default: 'yes'
      extraParamsAllBuilds:
        description: 'Extra params for all builds/jobs (prepare/artifact/image) (eg: DEBUG=yes)'
        required: false
        default: ''
        type: string
      branch:
        type: choice
        description: 'Framework build branch'
        options:
          # branches
          - main
          - update-maintainers
          - v25.11
          - flasher
          - update-kernel-configs
          - sunxi
          - coderabbitai/docstrings/2e13998
          - revert-8583-fix-atf-compile
          - test
          - v25.08
          - v25.05
          - revert-8484-add-and-enable-lte-em05-driver
          - igorpecovnik-patch-2
          - igorpecovnik-patch-1
          - logofix
          - coderabbitai/utg/20bTtA
          - coderabbitai/docstrings/20bTtA
          - opi5-mainline-uboot-update
          - prepre
          - fixper
        default: 'main'
      board:
        type: choice
        description: 'Board'
        options:
          # boards
          - 9tripod-x3568-v4
          - aml-a311d-cc
          - aml-c400-plus
          - aml-s805-mxq
          - aml-s905d3-cc
          - aml-s9xx-box
          - aml-t95z-plus
          - armsom-aim7-io
          - armsom-cm5-io
          - armsom-cm5-rpi-cm4-io
          - armsom-forge1
          - armsom-sige1
          - armsom-sige3
          - armsom-sige5
          - armsom-sige7
          - armsom-w3
          - avaota-a1
          - ayn-odin2
          - bananapi
          - bananapicm4io
          - bananapif3
          - bananapim1plus
          - bananapim2plus
          - bananapim2pro
          - bananapim2s
          - bananapim2ultra
          - bananapim2zero
          - bananapim3
          - bananapim4berry
          - bananapim4zero
          - bananapim5
          - bananapim5pro
          - bananapim64
          - bananapim7
          - bananapipro
          - bananapir2
          - bananapir2pro
          - bananapir4
          - beaglebone-ai64
          - beagleplay
          - beagley-ai
          - beelinkx2
          - bestv-r3300-l
          - bigtreetech-cb1
          - bigtreetech-cb2
          - cainiao-cniot-core
          - cherryba-m1
          - clearfogbase
          - clearfogpro
          - clockworkpi-a06
          - cm3588-nas
          - coolpi-cm5
          - coolpi-genbook
          - cubieboard
          - cubieboard2
          - cubietruck
          - cubox-i
          - cyber-aib-rk3588
          - dshanpi-a1
          - dshanpi-r1
          - dusun-dsom-010r
          - espressobin
          - fine3399
          - firefly-itx-3588j
          - firefly-rk3399
          - fxblox-rk1
          - gateway-gz80x
          - h96-tvbox-3566
          - helios4
          - helios64
          - hikey960
          - hinlink-h28k
          - hinlink-h66k
          - hinlink-h68k
          - hinlink-h88k
          - hinlink-hnas
          - hinlink-ht2
          - imb3588
          - indiedroid-nova
          - inovato-quadra
          - jethubj100
          - jethubj200
          - jethubj80
          - jetson-nano
          - jp-tvbox-3566
          - khadas-edge
          - khadas-edge2
          - khadas-vim1
          - khadas-vim1s
          - khadas-vim2
          - khadas-vim3
          - khadas-vim3l
          - khadas-vim4
          - kickpik2b
          - lafrite
          - lckfb-taishanpi
          - leez-p710
          - lepotato
          - lime
          - lime-a33
          - lime-a64
          - lime2
          - longanpi-3h
          - longanpi-4b
          - lubancat2
          - luckfox-core3566
          - luckfox-lyra-plus
          - luckfox-lyra-ultra-w
          - luckfox-lyra-zero-w
          - luckfox-pico-max
          - luckfox-pico-mini
          - mangopi-m28k
          - mba8mpxl
          - mba8mpxl-ras314
          - mekotronics-r58-4x4
          - mekotronics-r58-minipc
          - mekotronics-r58hd
          - mekotronics-r58x
          - mekotronics-r58x-4g
          - mekotronics-r58x-pro
          - melea1000
          - mixtile-blade3
          - mixtile-edge2
          - mk808c
          - mksklipad50
          - mkspi
          - nanopct4
          - nanopct6
          - nanopct6-lts
          - nanopi-m5
          - nanopi-m6
          - nanopi-r1
          - nanopi-r1s-h5
          - nanopi-r2c
          - nanopi-r2s
          - nanopi-r2s-plus
          - nanopi-r3s
          - nanopi-r3s-lts
          - nanopi-r4s
          - nanopi-r4se
          - nanopi-r5c
          - nanopi-r5s
          - nanopi-r6c
          - nanopi-r6s
          - nanopi-r76s
          - nanopia64
          - nanopiair
          - nanopiduo
          - nanopiduo2
          - nanopik1plus
          - nanopik2-s905
          - nanopim4
          - nanopim4v2
          - nanopineo
          - nanopineo2
          - nanopineo2black
          - nanopineo3
          - nanopineo4
          - nanopineocore2
          - nanopineoplus2
          - odroidc1
          - odroidc2
          - odroidc4
          - odroidhc4
          - odroidm1
          - odroidm1s
          - odroidm2
          - odroidn2
          - odroidn2l
          - odroidxu4
          - olimex-a20-olinuxino-micro
          - olimex-teres-a64
          - olinux-som-a13
          - onecloud
          - oneplus-kebab
          - orangepi-r1
          - orangepi-r1plus
          - orangepi-r1plus-lts
          - orangepi-rk3399
          - orangepi2
          - orangepi3
          - orangepi3-lts
          - orangepi3b
          - orangepi4
          - orangepi4-lts
          - orangepi5
          - orangepi5-max
          - orangepi5-plus
          - orangepi5-ultra
          - orangepi5b
          - orangepi5pro
          - orangepilite
          - orangepilite2
          - orangepione
          - orangepioneplus
          - orangepipc
          - orangepipc2
          - orangepipcplus
          - orangepiplus
          - orangepiplus2e
          - orangepiprime
          - orangepiwin
          - orangepizero
          - orangepizero2
          - orangepizero2w
          - orangepizero3
          - orangepizeroplus
          - orangepizeroplus2-h3
          - orangepizeroplus2-h5
          - panther-x2
          - pcduino3
          - phytiumpi
          - pine64
          - pine64so
          - pinebook-a64
          - pinebook-pro
          - pinecube
          - pineh64
          - pineh64-b
          - pocketbeagle2
          - pocketchip-sd
          - qemu-uboot-arm64
          - qemu-uboot-x86
          - qemu-uefi-x86
          - quartz64a
          - quartz64b
          - radxa-cm4-io
          - radxa-cm5-io
          - radxa-cubie-a5e
          - radxa-dragon-q6a
          - radxa-e20c
          - radxa-e25
          - radxa-e52c
          - radxa-e54c
          - radxa-nio-12l
          - radxa-rock-4d
          - radxa-zero
          - radxa-zero2
          - radxa-zero3
          - recore
          - renegade
          - retro-lite-cm5
          - retroidpocket-rp5
          - retroidpocket-rpmini
          - rk322x-box
          - rk3318-box
          - rk3328-heltec
          - rk3566-box-demo
          - roc-rk3399-pc
          - rock-2a
          - rock-2f
          - rock-3a
          - rock-3c
          - rock-4se
          - rock-5-cm-rpi-cm4-io
          - rock-5-cmio
          - rock-5-itx
          - rock-5a
          - rock-5b
          - rock-5b-plus
          - rock-5c
          - rock-5t
          - rock-s0
          - rock64
          - rockpi-4a
          - rockpi-4b
          - rockpi-4bplus
          - rockpi-4c
          - rockpi-4cplus
          - rockpi-e
          - rockpi-n10
          - rockpi-s
          - rockpro64
          - rpi4b
          - sakurapi-rk3308b
          - sk-am62b
          - sk-am62p
          - sk-am64b
          - sk-am68
          - sk-am69
          - sk-tda4vm
          - smart-am40
          - station-m1
          - station-m2
          - station-m3
          - station-p1
          - station-p2
          - sunvell-r69
          - sweet-potato
          - tanix-tx6
          - thinkpad-x13s
          - tinker-edge-r
          - tinkerboard
          - tinkerboard-2
          - tmds62levm
          - tritium-h3
          - tritium-h5
          - turing-rk1
          - udoo
          - uefi-arm64
          - uefi-loong64
          - uefi-riscv64
          - uefi-x86
          - visionfive2
          - wsl2-arm64
          - wsl2-x86
          - x96-mate
          - x96q
          - xiaobao-nas
          - xiaomi-elish
          - xpressreal-t3
          - xt-q8l-v10
          - youyeetoo-r1-v3
          - youyeetoo-yy3588
          - yy3568
          - z28pro
          - zeropi
          - all
        default: 'all'
      maintainer:
        type: choice
        description: 'Maintainer'
        options:
          # maintainers
          - "150balbes"
          - "1ubuntuuser"
          - "AGM1968"
          - "AaronNGray"
          - "CodeChenL"
          - "ColorfulRhino"
          - "DylanHP"
          - "Grippy98"
          - "Heisath"
          - "HeyMeco"
          - "IsMrX"
          - "JackHuang021"
          - "Janmcha"
          - "JohnTheCoolingFan"
          - "Kreyren"
          - "NicoD-SBC"
          - "PanderMusubi"
          - "Qvy-png"
          - "SeeleVolleri"
          - "StephenGraf"
          - "SuperKali"
          - "The-going"
          - "TheSnowfield"
          - "Tonymac32"
          - "ZazaBR"
          - "adeepn"
          - "ahoneybun"
          - "alexl83"
          - "amazingfate"
          - "andyshrk"
          - "biot"
          - "brentr"
          - "catalinii"
          - "chainsx"
          - "chraac"
          - "devdotnetorg"
          - "efectn"
          - "eliasbakken"
          - "engineer-80"
          - "fridtjof"
          - "ginkage"
          - "glneo"
          - "hoochiwetech"
          - "hqnicolas"
          - "hzyitc"
          - "igorpecovnik"
          - "janprunk"
          - "jeanrhum"
          - "joekhoobyar"
          - "jomadeto"
          - "jonaswood01"
          - "juanlufont"
          - "kamilsaigol"
          - "krachlatte"
          - "lbmendes"
          - "leggewie"
          - "libiunc"
          - "linhz0hz"
          - "mahdichi"
          - "mattx433"
          - "mhawkins-consultant"
          - "mlegenovic"
          - "paolosabatino"
          - "prahal"
          - "pyavitz"
          - "rbqvq"
          - "redrathnure"
          - "retro98boy"
          - "rpardini"
          - "schmiedelm"
          - "schwar3kat"
          - "sgjava"
          - "sicXnull"
          - "spendist"
          - "sputnik2019"
          - "teknoid"
          - "torte71"
          - "utlark"
          - "vamzii"
          - "vidplace7"
          - "wei633"
          - all
        default: 'all'
      targetsFilterInclude:
        description: 'TARGETS_FILTER_INCLUDE, example: "BOARD:odroidhc4,BOARD:odroidn2"'
        required: false
        default: ''
        type: string
      nightlybuild:
        description: 'yes = nightly, no = stable'
        required: false
        options: [ 'yes', 'no' ]
        type: choice
        default: 'yes'
      bumpversion:
        type: boolean
        description: "Bump version"
        default: true
      versionOverride:
        description: 'Version override'
        required: false
        default: ''
env:
  # For easier reuse across the multiple chunks ('armbian/build' repo)
  BUILD_REPOSITORY: "armbian/build"
  BUILD_REF: "${{ inputs.ref || inputs.branch || 'main' }}" # branch or tag or sha1
  # For easier reuse across the multiple chunks ('armbian/os' repo)
  USERPATCHES_REPOSITORY: "armbian/os"
  USERPATCHES_REF: "main" # branch or tag or sha1
  USERPATCHES_DIR: "userpatches" # folder inside USERPATCHES_REPOSITORY
  # GitHub repository for releases. Normally it's the one where the script is executed.
  RELEASE_REPOSITORY: "os"
  # Armbian envs. Adjust to your needs.
  # This makes builds faster, but only if the Docker images are up-to-date with all dependencies, Python, tools, etc. Otherwise it makes it... slower.
  DOCKER_SKIP_UPDATE: "yes" # Do not apt update/install/requirements/etc during Dockerfile build, trust that Docker images are up-to-date.
  # Added to every build, even the prepare job.
  EXTRA_PARAMS_ALL_BUILDS: "${{ inputs.extraParamsAllBuilds || 'UPLOAD_TO_OCI_ONLY=yes' }}"
  # Version management
  VERSION_OVERRIDE: "${{ github.event.inputs.versionOverride }}"
  VERSION_BUMP: "${{ github.event.inputs.bumpversion || 'true' }}"
  # To use GitHub CLI in a GitHub Actions workflow
  GH_TOKEN: "${{ secrets.ACCESS_TOKEN }}"
  # Added to every image build's arguments.
  EXTRA_PARAMS_IMAGE: "SHARE_LOG=yes "
# To ensure that only a single workflow using the same concurrency group will run at a time
concurrency:
  group: pipeline
  cancel-in-progress: false
jobs:
  # additional security check
  team_check:
    permissions:
      actions: write
    name: "Team check"
    runs-on: [ "ubuntu-latest" ]
    steps:
      - name: "Check membership"
        uses: armbian/actions/team-check@main
        with:
          ORG_MEMBERS: ${{ secrets.ORG_MEMBERS }}
          GITHUB_TOKEN: "${{ env.GH_TOKEN }}"
          TEAM: "Release manager"
  sources_prep:
    needs: team_check
    name: "Store sources hashes"
    runs-on: [ "self-hosted", "Linux", 'super' ]
    steps:
      # Clone the userpatches repo (`armbian/os`)
      - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
        uses: actions/checkout@v6
        if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
        with:
          repository: ${{ env.USERPATCHES_REPOSITORY }}
          ref: ${{ env.USERPATCHES_REF }}
          fetch-depth: 0
          clean: false # true is default
          path: os
      # clone the build system repo (`armbian/build`)
      - name: Checkout build repo
        if: ${{ ( ! github.event.inputs.versionOverride ) && ( env.VERSION_BUMP == 'true' ) && ( inputs.ref == '' ) }}
        uses: actions/checkout@v6
        with:
          repository: ${{ env.BUILD_REPOSITORY }}
          ref: ${{ env.BUILD_REF }}
          fetch-depth: 0
          clean: false # true is default. it *will* delete the host's /dev if mounted inside.
          path: build
      # clone the rkbin repo (`armbian/rkbin`)
      - name: Checkout rkbin repo
        if: ${{ ( ! github.event.inputs.versionOverride ) && ( env.VERSION_BUMP == 'true' ) && ( inputs.ref == '' ) }}
        uses: actions/checkout@v6
        with:
          repository: armbian/rkbin
          ref: master
          fetch-depth: 0
          clean: false # true is default. it *will* delete the host's /dev if mounted inside.
          path: rkbin
      # clone the build docker repo (`armbian/docker-armbian-build`)
      - name: Checkout docker-armbian-build repo
        if: ${{ ( ! github.event.inputs.versionOverride ) && ( env.VERSION_BUMP == 'true' ) && ( inputs.ref == '' ) }}
        uses: actions/checkout@v6
        with:
          repository: armbian/docker-armbian-build
          ref: main
          fetch-depth: 0
          clean: false # true is default. it *will* delete the host's /dev if mounted inside.
          path: docker-armbian-build
      # clone the documentation repo (`armbian/documentation`)
      - name: Checkout documentation repo
        if: ${{ ( ! github.event.inputs.versionOverride ) && ( env.VERSION_BUMP == 'true' ) && ( inputs.ref == '' ) }}
        uses: actions/checkout@v6
        with:
          repository: armbian/documentation
          ref: master
          fetch-depth: 0
          clean: false # true is default. it *will* delete the host's /dev if mounted inside.
          path: documentation
      # clone the armbian-config NG repo (`armbian/configng`)
      - name: Checkout configng repo
        if: ${{ ( ! github.event.inputs.versionOverride ) && ( env.VERSION_BUMP == 'true' ) && ( inputs.ref == '' ) }}
        uses: actions/checkout@v6
        with:
          repository: armbian/configng
          ref: main
          fetch-depth: 0
          clean: false # true is default. it *will* delete the host's /dev if mounted inside.
          path: configng
      - name: Prepare Git_sources JSON
        if: ${{ ( ! github.event.inputs.versionOverride ) && ( env.VERSION_BUMP == 'true' ) && ( inputs.ref == '' ) }}
        run: |
          cd build
          bash ./compile.sh targets
          cat output/info/git_sources.json
          BUILD=$(git rev-parse HEAD)
          cd ../documentation
          DOCUMENTATION=$(git rev-parse HEAD)
          cd ../docker-armbian-build
          DOCKER_ARMBIAN_BUILD=$(git rev-parse HEAD)
          cd ../rkbin
          RKBIN=$(git rev-parse HEAD)
          cd ../configng
          ARMBIANCONFIGNG=$(git rev-parse HEAD)
          cd ..
          # add build repository
          sed -i '0,/{/s//{\n "source": "https:\/\/github.com\/armbian\/build", \n "branch": "main", \n "sha1": "'$BUILD'"\n },\n &/' build/output/info/git_sources.json
          # add documentation repository
          sed -i '0,/{/s//{\n "source": "https:\/\/github.com\/armbian\/documentation", \n "branch": "master", \n "sha1": "'$DOCUMENTATION'"\n },\n &/' build/output/info/git_sources.json
          # add rkbin repository
          sed -i '0,/{/s//{\n "source": "https:\/\/github.com\/armbian\/rkbin", \n "branch": "master", \n "sha1": "'$RKBIN'"\n },\n &/' build/output/info/git_sources.json
          # add armbianconfig repository
          #sed -i '0,/{/s//{\n "source": "https:\/\/github.com\/armbian\/configng", \n "branch": "main", \n "sha1": "'$ARMBIANCONFIGNG'"\n },\n &/' build/output/info/git_sources.json
          # add docker-armbian-build repository
          sed -i '0,/{/s//{\n "source": "https:\/\/github.com\/armbian\/docker-armbian-build", \n "branch": "main", \n "sha1": "'$DOCKER_ARMBIAN_BUILD'"\n },\n &/' build/output/info/git_sources.json
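          # A jq-based sketch of the same prepend (an inactive note, not part of the workflow; it assumes
          # git_sources.json is a top-level JSON array, as the sed pattern above implies):
          #   jq --arg sha "$BUILD" '[{source: "https://github.com/armbian/build", branch: "main", sha1: $sha}] + .' \
          #     build/output/info/git_sources.json > git_sources.tmp && mv git_sources.tmp build/output/info/git_sources.json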
          cp build/output/info/git_sources.json os/
      - name: Update scripts
        if: ${{ ( ! github.event.inputs.versionOverride ) && ( env.VERSION_BUMP == 'true' ) && ( inputs.ref == '' ) }}
        run: |
          #sudo chown -R $USER:$USER .git
          cd os
          if git status --porcelain | grep .; then
            git config --global user.email "info@armbian.com"
            git config --global user.name "Armbianworker"
            git config pull.rebase false
            git pull
            git add git_sources.json
            git commit git_sources.json --allow-empty -m "Update external GIT commits"
            git push
          fi
  version_prep:
    needs: sources_prep
    name: "Bump version"
    runs-on: ubuntu-latest
    steps:
      # Clone the userpatches repo (`armbian/os`)
      - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
        uses: actions/checkout@v6
        if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
        with:
          repository: ${{ env.USERPATCHES_REPOSITORY }}
          ref: ${{ env.USERPATCHES_REF }}
          fetch-depth: 0
          clean: false # true is default.
      - name: Determine version
        id: versionfile
        run: |
          # file = where the version gets stored; differs for stable and nightly
          # skip_tag = we only upload nightly builds to GH releases
          echo "file=nightly" >> $GITHUB_OUTPUT
          echo "skip_tag=false" >> $GITHUB_OUTPUT
          echo "pre_release=true" >> $GITHUB_OUTPUT
          if [ "${{ github.event.inputs.nightlybuild || 'yes' }}" == "no" ]; then
            echo "file=stable" >> $GITHUB_OUTPUT
            echo "skip_tag=true" >> $GITHUB_OUTPUT
            echo "pre_release=false" >> $GITHUB_OUTPUT
          fi
      # Bump version automatically
      - name: Bump version
        if: ${{ ( ! github.event.inputs.versionOverride ) && ( inputs.ref == '' ) && ( env.VERSION_BUMP == 'true' ) }}
        id: changelog
        uses: TriPSs/conventional-changelog-action@v5.4.0
        with:
          github-token: ${{ secrets.ACCESS_TOKEN_ARMBIANWORKER }}
          git-message: 'Bump release to {version}'
          git-user-name: armbianworker
          git-user-email: info@armbian.com
          output-file: 'false'
          skip-version-file: 'false'
          skip-on-empty: 'false'
          skip-commit: 'false'
          skip-ci: 'false'
          skip-tag: "${{ steps.versionfile.outputs.skip_tag }}"
          version-file: "${{ steps.versionfile.outputs.file }}.json"
          pre-release: "${{ steps.versionfile.outputs.pre_release }}"
          git-branch: 'main'
          tag-prefix: ''
          pre-release-identifier: 'trunk'
      - name: Read version from file if not overridden
        if: ${{ ! github.event.inputs.versionOverride || env.VERSION_BUMP == 'false' }}
        run: |
          mkdir -p downloads
          echo "VERSION_OVERRIDE=$(jq -r '.version' "${{ steps.versionfile.outputs.file }}.json")" >> $GITHUB_ENV
          jq -r '.version' "${{ steps.versionfile.outputs.file }}.json" > downloads/version
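          # Illustrative, with a hypothetical version string: if nightly.json contains {"version": "25.11.0-trunk.42"},
          # this appends VERSION_OVERRIDE=25.11.0-trunk.42 to $GITHUB_ENV and writes the same string to downloads/version.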
      - name: 'Upload Artifact'
        uses: actions/upload-artifact@v5
        with:
          name: assets-for-download-nightly
          path: downloads
          retention-days: 5
      - name: "Generate body file"
        if: ${{ (github.event.inputs.skipImages || 'no') != 'yes' }}
        run: |
          # Make html document
          if [ -f release-headers/${{ env.RELEASE_REPOSITORY }}.sh ]; then
            bash release-headers/${{ env.RELEASE_REPOSITORY }}.sh > body.html
          fi
      - uses: ncipollo/release-action@v1
        if: ${{ (github.event.inputs.nightlybuild || 'yes') == 'yes' && (github.event.inputs.skipImages || 'no') != 'yes' }}
        with:
          repo: "${{ env.RELEASE_REPOSITORY }}"
          tag: "${{ env.VERSION_OVERRIDE }}"
          name: "${{ env.VERSION_OVERRIDE }}"
          bodyFile: "body.html"
          prerelease: "true"
          allowUpdates: true
          removeArtifacts: true
          token: ${{ env.GH_TOKEN }}
      - name: Save
        id: releases
        run: |
          echo "version=${{ env.VERSION_OVERRIDE }}" >> $GITHUB_OUTPUT
    outputs:
      # not related to matrix
      version: ${{ steps.releases.outputs.version }}
  matrix_prep:
    name: "JSON matrix: 17/16 :: 17 artifact chunks, 16 image chunks"
    if: ${{ github.repository_owner == 'armbian' }}
    needs: [ version_prep ]
    runs-on: [ "self-hosted", "Linux", 'super' ]
    steps:
      # Cleaning self-hosted runners
      - name: Runner clean
        uses: armbian/actions/runner-clean@main
      # clone the build system repo (`armbian/build`)
      - name: Checkout build repo
        uses: actions/checkout@v6
        with:
          repository: ${{ env.BUILD_REPOSITORY }}
          ref: ${{ env.BUILD_REF }}
          fetch-depth: 0
          clean: false # true is default. it *will* delete the host's /dev if mounted inside.
          path: build
      # clone the userpatches repo (`armbian/os`)
      - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
        uses: actions/checkout@v6
        if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
        with:
          repository: ${{ env.USERPATCHES_REPOSITORY }}
          ref: ${{ env.USERPATCHES_REF }}
          fetch-depth: 0
          clean: false # true is default.
          path: userpatches
      # clone the torrent lists
      - name: "Checkout torrent lists"
        uses: actions/checkout@v6
        with:
          repository: XIU2/TrackersListCollection
          clean: false # true is default
          ref: master
          path: trackerslist
          fetch-depth: 1
      - name: "grab the sha1 of the latest commit of the build repo ${{ env.BUILD_REPOSITORY }}#${{ env.BUILD_REF }}"
        id: latest-commit
        run: |
          cd build
          echo "sha1=$(git rev-parse HEAD)" >> $GITHUB_OUTPUT
          cd ..
      - name: "Put userpatches in place, and remove userpatches repo"
        if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
        run: |
          mkdir -pv build/userpatches
          rsync -av userpatches/${{env.USERPATCHES_DIR}}/. build/userpatches/
      - name: GitHub cache
        id: cache-restore
        uses: actions/cache@v4
        with:
          path: |
            cache/memoize
            cache/oci/positive
          key: ${{ runner.os }}-matrix-cache-${{ github.sha }}-${{ steps.latest-commit.outputs.sha1 }}
          restore-keys: |
            ${{ runner.os }}-matrix-cache-
      # Login to ghcr.io, we're gonna do a lot of OCI lookups.
      - name: Docker Login to GitHub Container Registry
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: "${{ github.repository_owner }}" # GitHub username or org
          password: ${{ secrets.GITHUB_TOKEN }} # GitHub actions builtin token. repo has to have pkg access.
      - name: Prepare Info JSON and Matrices
        id: prepare-matrix
        run: |
          FILTERS="${{ github.event.inputs.targetsFilterInclude }}"
          if [ -z "${FILTERS}" ] && [ "${{ github.event.inputs.board }}" != "all" ] && [ -n "${{ github.event.inputs.board }}" ]; then
            FILTERS='"BOARD:${{ github.event.inputs.board }}"'
          fi
          if [ -z "${FILTERS}" ] && [ "${{ github.event.inputs.maintainer }}" != "all" ] && [ -n "${{ github.event.inputs.maintainer }}" ]; then
            FILTERS='"BOARD_MAINTAINERS:${{ github.event.inputs.maintainer }}"'
          fi
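          # Example: board=odroidhc4 yields FILTERS='"BOARD:odroidhc4"'; the inner double quotes mirror
          # the quoted form shown in the targetsFilterInclude description above.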
          # this sets outputs "artifact-matrix" and "image-matrix"
          cd build
          bash ./compile.sh gha-matrix armbian-images \
            REVISION="${{ needs.version_prep.outputs.version }}" \
            TARGETS_FILTER_INCLUDE="${FILTERS}" \
            BETA=${{ github.event.inputs.nightlybuild || 'yes' }} \
            CLEAN_INFO=yes \
            CLEAN_MATRIX=yes \
            MATRIX_ARTIFACT_CHUNKS=17 \
            MATRIX_IMAGE_CHUNKS=16 \
            CHECK_OCI=${{ github.event.inputs.checkOci || 'yes' }} \
            TARGETS_FILENAME="targets-release-nightly.yaml" \
            SKIP_IMAGES=${{ github.event.inputs.skipImages || 'no' }} \
            ${{env.EXTRA_PARAMS_ALL_BUILDS}} SHARE_LOG=yes # IMAGES_ONLY_OUTDATED_ARTIFACTS=yes
      - name: "Logs: ${{ steps.prepare-matrix.outputs.logs_url }}"
        if: always()
        run: |
          echo "Logs: ${{ steps.prepare-matrix.outputs.logs_url }}"
      - name: Generate server lists from NetBox (JSON)
        id: prepare-urls
        run: |
          set -euo pipefail
          mkdir -p build/output/info
          # copy trackers list
          cp trackerslist/best.txt build/output/info/best-torrent-servers.txt
          BASE_URL="${{ secrets.NETBOX_API }}/virtualization/virtual-machines/?limit=500&name__empty=false&status=active"
          # One jq filter used for all kinds – same fields everywhere
          JQ_FILTER='
            .results
            | map(
                select(.name != null)
                | {
                    host: .name,
                    upload_path: (.custom_fields["path"] // ""),
                    download_path_archive:
                      ((.custom_fields["download_path_archive"] // "/archive")
                        | if startswith("/") then . else "/" + . end),
                    download_path_images:
                      ((.custom_fields["download_path_images"] // "/dl")
                        | if startswith("/") then . else "/" + . end),
                    download_path_debs:
                      ((.custom_fields["download_path_debs"] // "/apt")
                        | if startswith("/") then . else "/" + . end),
                    port: (.custom_fields["port"] // 22),
                    username: (.custom_fields["username"] // "mirror")
                  }
              )
            | sort_by(.host)
          '
          for kind in servers servers-download servers-cache servers-upload; do
            case "$kind" in
              servers)
                # All mirrors serving images
                url="$BASE_URL&device_role=Mirror&tag=images"
                outfile="build/output/info/servers.jq"
                ;;
              servers-download)
                # Mirrors used as HTTP download sources (webseeds)
                url="$BASE_URL&device_role=Mirror&tag=images"
                outfile="build/output/info/servers-download.jq"
                ;;
              servers-cache)
                # Cache mirrors (can be used for HTTP + rsync)
                url="$BASE_URL&device_role=Mirror&tag=cache"
                outfile="build/output/info/servers-cache.jq"
                ;;
              servers-upload)
                # Upload targets (SSH)
                url="$BASE_URL&tag=upload&tag=images"
                outfile="build/output/info/servers-upload.jq"
                ;;
            esac
            echo "Generating $outfile from $url"
            timeout 10 curl -s \
              -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" \
              -H "Accept: application/json; indent=4" \
              "$url" \
              | jq "$JQ_FILTER" > "$outfile"
          done
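          # Each *.jq file written above holds a JSON array shaped by JQ_FILTER, roughly:
          #   [ { "host": "...", "upload_path": "...", "download_path_archive": "/archive",
          #       "download_path_images": "/dl", "download_path_debs": "/apt", "port": 22, "username": "mirror" }, ... ]
          # (defaults shown come from the filter; real values come from NetBox custom fields)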
      # Store output/info folder in a GitHub Actions artifact
      - uses: actions/upload-artifact@v5
        name: Upload output/info as GitHub Artifact
        with:
          name: build-info-json
          path: build/output/info
      - name: chown cache memoize/oci back to normal user
        run: sudo chown -R $USER:$USER build/cache/memoize build/cache/oci/positive
    outputs:
      # not related to matrix
      build-sha1: ${{ steps.latest-commit.outputs.sha1 }}
      version: ${{ needs.version_prep.outputs.version }}
      # template file: 150.per-chunk-artifacts_prep-outputs.yaml
      # artifacts-1 of 17
      artifacts-chunk-json-1: ${{ steps.prepare-matrix.outputs.artifacts-chunk-json-1 }}
      artifacts-chunk-not-empty-1: ${{ steps.prepare-matrix.outputs.artifacts-chunk-not-empty-1 }}
      artifacts-chunk-size-1: ${{ steps.prepare-matrix.outputs.artifacts-chunk-size-1 }}
      # artifacts-2 of 17
      artifacts-chunk-json-2: ${{ steps.prepare-matrix.outputs.artifacts-chunk-json-2 }}
      artifacts-chunk-not-empty-2: ${{ steps.prepare-matrix.outputs.artifacts-chunk-not-empty-2 }}
      artifacts-chunk-size-2: ${{ steps.prepare-matrix.outputs.artifacts-chunk-size-2 }}
      # artifacts-3 of 17
      artifacts-chunk-json-3: ${{ steps.prepare-matrix.outputs.artifacts-chunk-json-3 }}
      artifacts-chunk-not-empty-3: ${{ steps.prepare-matrix.outputs.artifacts-chunk-not-empty-3 }}
      artifacts-chunk-size-3: ${{ steps.prepare-matrix.outputs.artifacts-chunk-size-3 }}
      # artifacts-4 of 17
      artifacts-chunk-json-4: ${{ steps.prepare-matrix.outputs.artifacts-chunk-json-4 }}
      artifacts-chunk-not-empty-4: ${{ steps.prepare-matrix.outputs.artifacts-chunk-not-empty-4 }}
      artifacts-chunk-size-4: ${{ steps.prepare-matrix.outputs.artifacts-chunk-size-4 }}
      # artifacts-5 of 17
      artifacts-chunk-json-5: ${{ steps.prepare-matrix.outputs.artifacts-chunk-json-5 }}
      artifacts-chunk-not-empty-5: ${{ steps.prepare-matrix.outputs.artifacts-chunk-not-empty-5 }}
      artifacts-chunk-size-5: ${{ steps.prepare-matrix.outputs.artifacts-chunk-size-5 }}
      # artifacts-6 of 17
      artifacts-chunk-json-6: ${{ steps.prepare-matrix.outputs.artifacts-chunk-json-6 }}
      artifacts-chunk-not-empty-6: ${{ steps.prepare-matrix.outputs.artifacts-chunk-not-empty-6 }}
      artifacts-chunk-size-6: ${{ steps.prepare-matrix.outputs.artifacts-chunk-size-6 }}
      # artifacts-7 of 17
      artifacts-chunk-json-7: ${{ steps.prepare-matrix.outputs.artifacts-chunk-json-7 }}
      artifacts-chunk-not-empty-7: ${{ steps.prepare-matrix.outputs.artifacts-chunk-not-empty-7 }}
      artifacts-chunk-size-7: ${{ steps.prepare-matrix.outputs.artifacts-chunk-size-7 }}
      # artifacts-8 of 17
      artifacts-chunk-json-8: ${{ steps.prepare-matrix.outputs.artifacts-chunk-json-8 }}
      artifacts-chunk-not-empty-8: ${{ steps.prepare-matrix.outputs.artifacts-chunk-not-empty-8 }}
      artifacts-chunk-size-8: ${{ steps.prepare-matrix.outputs.artifacts-chunk-size-8 }}
      # artifacts-9 of 17
      artifacts-chunk-json-9: ${{ steps.prepare-matrix.outputs.artifacts-chunk-json-9 }}
      artifacts-chunk-not-empty-9: ${{ steps.prepare-matrix.outputs.artifacts-chunk-not-empty-9 }}
      artifacts-chunk-size-9: ${{ steps.prepare-matrix.outputs.artifacts-chunk-size-9 }}
      # artifacts-10 of 17
      artifacts-chunk-json-10: ${{ steps.prepare-matrix.outputs.artifacts-chunk-json-10 }}
      artifacts-chunk-not-empty-10: ${{ steps.prepare-matrix.outputs.artifacts-chunk-not-empty-10 }}
      artifacts-chunk-size-10: ${{ steps.prepare-matrix.outputs.artifacts-chunk-size-10 }}
      # artifacts-11 of 17
      artifacts-chunk-json-11: ${{ steps.prepare-matrix.outputs.artifacts-chunk-json-11 }}
      artifacts-chunk-not-empty-11: ${{ steps.prepare-matrix.outputs.artifacts-chunk-not-empty-11 }}
      artifacts-chunk-size-11: ${{ steps.prepare-matrix.outputs.artifacts-chunk-size-11 }}
      # artifacts-12 of 17
      artifacts-chunk-json-12: ${{ steps.prepare-matrix.outputs.artifacts-chunk-json-12 }}
      artifacts-chunk-not-empty-12: ${{ steps.prepare-matrix.outputs.artifacts-chunk-not-empty-12 }}
      artifacts-chunk-size-12: ${{ steps.prepare-matrix.outputs.artifacts-chunk-size-12 }}
      # artifacts-13 of 17
      artifacts-chunk-json-13: ${{ steps.prepare-matrix.outputs.artifacts-chunk-json-13 }}
      artifacts-chunk-not-empty-13: ${{ steps.prepare-matrix.outputs.artifacts-chunk-not-empty-13 }}
      artifacts-chunk-size-13: ${{ steps.prepare-matrix.outputs.artifacts-chunk-size-13 }}
      # artifacts-14 of 17
      artifacts-chunk-json-14: ${{ steps.prepare-matrix.outputs.artifacts-chunk-json-14 }}
      artifacts-chunk-not-empty-14: ${{ steps.prepare-matrix.outputs.artifacts-chunk-not-empty-14 }}
      artifacts-chunk-size-14: ${{ steps.prepare-matrix.outputs.artifacts-chunk-size-14 }}
      # artifacts-15 of 17
      artifacts-chunk-json-15: ${{ steps.prepare-matrix.outputs.artifacts-chunk-json-15 }}
      artifacts-chunk-not-empty-15: ${{ steps.prepare-matrix.outputs.artifacts-chunk-not-empty-15 }}
      artifacts-chunk-size-15: ${{ steps.prepare-matrix.outputs.artifacts-chunk-size-15 }}
      # artifacts-16 of 17
      artifacts-chunk-json-16: ${{ steps.prepare-matrix.outputs.artifacts-chunk-json-16 }}
      artifacts-chunk-not-empty-16: ${{ steps.prepare-matrix.outputs.artifacts-chunk-not-empty-16 }}
      artifacts-chunk-size-16: ${{ steps.prepare-matrix.outputs.artifacts-chunk-size-16 }}
      # artifacts-17 of 17
      artifacts-chunk-json-17: ${{ steps.prepare-matrix.outputs.artifacts-chunk-json-17 }}
      artifacts-chunk-not-empty-17: ${{ steps.prepare-matrix.outputs.artifacts-chunk-not-empty-17 }}
      artifacts-chunk-size-17: ${{ steps.prepare-matrix.outputs.artifacts-chunk-size-17 }}
      # template file: 151.per-chunk-images_prep-outputs.yaml
      # images-1 of 16
      images-chunk-json-1: ${{ steps.prepare-matrix.outputs.images-chunk-json-1 }}
      images-chunk-not-empty-1: ${{ steps.prepare-matrix.outputs.images-chunk-not-empty-1 }}
      images-chunk-size-1: ${{ steps.prepare-matrix.outputs.images-chunk-size-1 }}
      # images-2 of 16
      images-chunk-json-2: ${{ steps.prepare-matrix.outputs.images-chunk-json-2 }}
      images-chunk-not-empty-2: ${{ steps.prepare-matrix.outputs.images-chunk-not-empty-2 }}
      images-chunk-size-2: ${{ steps.prepare-matrix.outputs.images-chunk-size-2 }}
      # images-3 of 16
      images-chunk-json-3: ${{ steps.prepare-matrix.outputs.images-chunk-json-3 }}
      images-chunk-not-empty-3: ${{ steps.prepare-matrix.outputs.images-chunk-not-empty-3 }}
      images-chunk-size-3: ${{ steps.prepare-matrix.outputs.images-chunk-size-3 }}
      # images-4 of 16
      images-chunk-json-4: ${{ steps.prepare-matrix.outputs.images-chunk-json-4 }}
      images-chunk-not-empty-4: ${{ steps.prepare-matrix.outputs.images-chunk-not-empty-4 }}
      images-chunk-size-4: ${{ steps.prepare-matrix.outputs.images-chunk-size-4 }}
      # images-5 of 16
      images-chunk-json-5: ${{ steps.prepare-matrix.outputs.images-chunk-json-5 }}
      images-chunk-not-empty-5: ${{ steps.prepare-matrix.outputs.images-chunk-not-empty-5 }}
      images-chunk-size-5: ${{ steps.prepare-matrix.outputs.images-chunk-size-5 }}
      # images-6 of 16
      images-chunk-json-6: ${{ steps.prepare-matrix.outputs.images-chunk-json-6 }}
      images-chunk-not-empty-6: ${{ steps.prepare-matrix.outputs.images-chunk-not-empty-6 }}
      images-chunk-size-6: ${{ steps.prepare-matrix.outputs.images-chunk-size-6 }}
      # images-7 of 16
      images-chunk-json-7: ${{ steps.prepare-matrix.outputs.images-chunk-json-7 }}
      images-chunk-not-empty-7: ${{ steps.prepare-matrix.outputs.images-chunk-not-empty-7 }}
      images-chunk-size-7: ${{ steps.prepare-matrix.outputs.images-chunk-size-7 }}
      # images-8 of 16
      images-chunk-json-8: ${{ steps.prepare-matrix.outputs.images-chunk-json-8 }}
      images-chunk-not-empty-8: ${{ steps.prepare-matrix.outputs.images-chunk-not-empty-8 }}
      images-chunk-size-8: ${{ steps.prepare-matrix.outputs.images-chunk-size-8 }}
      # images-9 of 16
      images-chunk-json-9: ${{ steps.prepare-matrix.outputs.images-chunk-json-9 }}
      images-chunk-not-empty-9: ${{ steps.prepare-matrix.outputs.images-chunk-not-empty-9 }}
      images-chunk-size-9: ${{ steps.prepare-matrix.outputs.images-chunk-size-9 }}
      # images-10 of 16
      images-chunk-json-10: ${{ steps.prepare-matrix.outputs.images-chunk-json-10 }}
      images-chunk-not-empty-10: ${{ steps.prepare-matrix.outputs.images-chunk-not-empty-10 }}
      images-chunk-size-10: ${{ steps.prepare-matrix.outputs.images-chunk-size-10 }}
      # images-11 of 16
      images-chunk-json-11: ${{ steps.prepare-matrix.outputs.images-chunk-json-11 }}
      images-chunk-not-empty-11: ${{ steps.prepare-matrix.outputs.images-chunk-not-empty-11 }}
      images-chunk-size-11: ${{ steps.prepare-matrix.outputs.images-chunk-size-11 }}
      # images-12 of 16
      images-chunk-json-12: ${{ steps.prepare-matrix.outputs.images-chunk-json-12 }}
      images-chunk-not-empty-12: ${{ steps.prepare-matrix.outputs.images-chunk-not-empty-12 }}
      images-chunk-size-12: ${{ steps.prepare-matrix.outputs.images-chunk-size-12 }}
      # images-13 of 16
      images-chunk-json-13: ${{ steps.prepare-matrix.outputs.images-chunk-json-13 }}
      images-chunk-not-empty-13: ${{ steps.prepare-matrix.outputs.images-chunk-not-empty-13 }}
      images-chunk-size-13: ${{ steps.prepare-matrix.outputs.images-chunk-size-13 }}
      # images-14 of 16
      images-chunk-json-14: ${{ steps.prepare-matrix.outputs.images-chunk-json-14 }}
      images-chunk-not-empty-14: ${{ steps.prepare-matrix.outputs.images-chunk-not-empty-14 }}
      images-chunk-size-14: ${{ steps.prepare-matrix.outputs.images-chunk-size-14 }}
      # images-15 of 16
      images-chunk-json-15: ${{ steps.prepare-matrix.outputs.images-chunk-json-15 }}
      images-chunk-not-empty-15: ${{ steps.prepare-matrix.outputs.images-chunk-not-empty-15 }}
      images-chunk-size-15: ${{ steps.prepare-matrix.outputs.images-chunk-size-15 }}
      # images-16 of 16
      images-chunk-json-16: ${{ steps.prepare-matrix.outputs.images-chunk-json-16 }}
      images-chunk-not-empty-16: ${{ steps.prepare-matrix.outputs.images-chunk-not-empty-16 }}
      images-chunk-size-16: ${{ steps.prepare-matrix.outputs.images-chunk-size-16 }}
  # template file: 250.single_aggr-jobs.yaml
  # ------ aggregate all artifact chunks into a single dependency -------
  all-artifacts-ready:
    name: "17 artifacts chunks ready"
    runs-on: ubuntu-latest # not going to run, anyway, but is required.
    if: ${{ !cancelled() && ( 1 == 2 ) }} # eg: never run.
    needs: [ "matrix_prep", "build-artifacts-chunk-1","build-artifacts-chunk-2","build-artifacts-chunk-3","build-artifacts-chunk-4","build-artifacts-chunk-5","build-artifacts-chunk-6","build-artifacts-chunk-7","build-artifacts-chunk-8","build-artifacts-chunk-9","build-artifacts-chunk-10","build-artifacts-chunk-11","build-artifacts-chunk-12","build-artifacts-chunk-13","build-artifacts-chunk-14","build-artifacts-chunk-15","build-artifacts-chunk-16","build-artifacts-chunk-17" ] # <-- HERE: all artifact chunk numbers.
    steps:
      - name: fake step
        run: uptime
  all-images-ready:
    name: "16 image chunks ready"
    runs-on: ubuntu-latest # not going to run, anyway, but is required.
    if: ${{ !cancelled() && ( 1 == 2 ) }} # eg: never run.
    needs: [ "matrix_prep", "build-images-chunk-1","build-images-chunk-2","build-images-chunk-3","build-images-chunk-4","build-images-chunk-5","build-images-chunk-6","build-images-chunk-7","build-images-chunk-8","build-images-chunk-9","build-images-chunk-10","build-images-chunk-11","build-images-chunk-12","build-images-chunk-13","build-images-chunk-14","build-images-chunk-15","build-images-chunk-16" ] # <-- HERE: all image chunk numbers.
    steps:
      - name: fake step
        run: uptime
  all-artifacts-and-images-ready:
    name: "17 artifacts and 16 image chunks ready"
    runs-on: ubuntu-latest # not going to run, anyway, but is required.
    if: ${{ !cancelled() && ( 1 == 2 ) }} # eg: never run.
    needs: [ "matrix_prep", "all-artifacts-ready", "all-images-ready" ]
    steps:
      - name: fake step
        run: uptime
| # template file: 550.per-chunk-artifacts_job.yaml | |
| "build-artifacts-chunk-1": # templated "build-artifacts-chunk-1" | |
| if: ${{ github.repository_owner == 'armbian' && needs.matrix_prep.outputs.artifacts-chunk-not-empty-1 == 'yes' }} # <-- HERE: Chunk number. | |
| needs: [ "matrix_prep" ] | |
| strategy: | |
| fail-fast: false # let other jobs try to complete if one fails | |
| matrix: ${{ fromJSON(needs.matrix_prep.outputs.artifacts-chunk-json-1) }} # <-- HERE: Chunk number. | |
| name: ${{ matrix.desc || 'Empty A1' }} # <-- HERE: Chunk number. | |
| runs-on: ${{ matrix.runs_on }} | |
| steps: | |
| - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}" | |
| uses: armbian/actions/runner-clean@main | |
| # Login to ghcr.io, for later uploading rootfs to ghcr.io | |
| - name: Docker Login to GitHub Container Registry | |
| uses: docker/login-action@v3 | |
| with: | |
| registry: ghcr.io | |
| username: "${{ github.repository_owner }}" # GitHub username or org | |
| password: "${{ secrets.GITHUB_TOKEN }}" # GitHub actions builtin token. repo has to have pkg access. | |
| # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later | |
| - name: Cleanup userpatches repo | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| run: rm -rf userpatches.repo | |
| - name: Checkout build repo | |
| uses: actions/checkout@v6 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners. | |
| with: | |
| repository: ${{ env.BUILD_REPOSITORY }} | |
| ref: ${{ needs.matrix_prep.outputs.build-sha1 }} | |
| fetch-depth: ${{ matrix.fdepth }} | |
| clean: false # true is default. it *will* delete the hosts /dev if mounted inside. | |
| # clone the userpatches repo (`armbian/os`) | |
| - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}" | |
| uses: actions/checkout@v6 | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| with: | |
| repository: ${{ env.USERPATCHES_REPOSITORY }} | |
| ref: ${{ env.USERPATCHES_REF }} | |
| fetch-depth: ${{ matrix.fdepth }} | |
| clean: false # true is default. | |
| path: userpatches.repo | |
| - name: "Put userpatches in place, and remove userpatches repo" | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| run: | | |
| mkdir -pv userpatches | |
| rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/ | |
| rm -rf userpatches.repo | |
| - name: Build ${{matrix.desc}} | |
| timeout-minutes: 90 | |
| id: build | |
| run: | | |
| bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" SHOW_DEBUG=yes SHARE_LOG=yes ${{env.EXTRA_PARAMS_ALL_BUILDS}} | |
| - name: "Logs: ${{ steps.build.outputs.logs_url }}" | |
| if: always() | |
| run: | | |
| echo "Logs: ${{ steps.build.outputs.logs_url }}" | |
| "build-artifacts-chunk-2": # templated "build-artifacts-chunk-2" | |
| if: ${{ github.repository_owner == 'armbian' && needs.matrix_prep.outputs.artifacts-chunk-not-empty-2 == 'yes' }} # <-- HERE: Chunk number. | |
| needs: [ "matrix_prep" ] | |
| strategy: | |
| fail-fast: false # let other jobs try to complete if one fails | |
| matrix: ${{ fromJSON(needs.matrix_prep.outputs.artifacts-chunk-json-2) }} # <-- HERE: Chunk number. | |
| name: ${{ matrix.desc || 'Empty A2' }} # <-- HERE: Chunk number. | |
| runs-on: ${{ matrix.runs_on }} | |
| steps: | |
| - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}" | |
| uses: armbian/actions/runner-clean@main | |
| # Login to ghcr.io, for later uploading rootfs to ghcr.io | |
| - name: Docker Login to GitHub Container Registry | |
| uses: docker/login-action@v3 | |
| with: | |
| registry: ghcr.io | |
| username: "${{ github.repository_owner }}" # GitHub username or org | |
| password: "${{ secrets.GITHUB_TOKEN }}" # GitHub actions builtin token. repo has to have pkg access. | |
| # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later | |
| - name: Cleanup userpatches repo | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| run: rm -rf userpatches.repo | |
| - name: Checkout build repo | |
| uses: actions/checkout@v6 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners. | |
| with: | |
| repository: ${{ env.BUILD_REPOSITORY }} | |
| ref: ${{ needs.matrix_prep.outputs.build-sha1 }} | |
| fetch-depth: ${{ matrix.fdepth }} | |
| clean: false # true is default. it *will* delete the hosts /dev if mounted inside. | |
| # clone the userpatches repo (`armbian/os`) | |
| - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}" | |
| uses: actions/checkout@v6 | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| with: | |
| repository: ${{ env.USERPATCHES_REPOSITORY }} | |
| ref: ${{ env.USERPATCHES_REF }} | |
| fetch-depth: ${{ matrix.fdepth }} | |
| clean: false # true is default. | |
| path: userpatches.repo | |
| - name: "Put userpatches in place, and remove userpatches repo" | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| run: | | |
| mkdir -pv userpatches | |
| rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/ | |
| rm -rf userpatches.repo | |
| - name: Build ${{matrix.desc}} | |
| timeout-minutes: 90 | |
| id: build | |
| run: | | |
| bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" SHOW_DEBUG=yes SHARE_LOG=yes ${{env.EXTRA_PARAMS_ALL_BUILDS}} | |
| - name: "Logs: ${{ steps.build.outputs.logs_url }}" | |
| if: always() | |
| run: | | |
| echo "Logs: ${{ steps.build.outputs.logs_url }}" | |
| "build-artifacts-chunk-3": # templated "build-artifacts-chunk-3" | |
| if: ${{ github.repository_owner == 'armbian' && needs.matrix_prep.outputs.artifacts-chunk-not-empty-3 == 'yes' }} # <-- HERE: Chunk number. | |
| needs: [ "matrix_prep" ] | |
| strategy: | |
| fail-fast: false # let other jobs try to complete if one fails | |
| matrix: ${{ fromJSON(needs.matrix_prep.outputs.artifacts-chunk-json-3) }} # <-- HERE: Chunk number. | |
| name: ${{ matrix.desc || 'Empty A3' }} # <-- HERE: Chunk number. | |
| runs-on: ${{ matrix.runs_on }} | |
| steps: | |
| - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}" | |
| uses: armbian/actions/runner-clean@main | |
| # Login to ghcr.io, for later uploading rootfs to ghcr.io | |
| - name: Docker Login to GitHub Container Registry | |
| uses: docker/login-action@v3 | |
| with: | |
| registry: ghcr.io | |
| username: "${{ github.repository_owner }}" # GitHub username or org | |
| password: "${{ secrets.GITHUB_TOKEN }}" # GitHub actions builtin token. repo has to have pkg access. | |
| # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later | |
| - name: Cleanup userpatches repo | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| run: rm -rf userpatches.repo | |
| - name: Checkout build repo | |
| uses: actions/checkout@v6 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners. | |
| with: | |
| repository: ${{ env.BUILD_REPOSITORY }} | |
| ref: ${{ needs.matrix_prep.outputs.build-sha1 }} | |
| fetch-depth: ${{ matrix.fdepth }} | |
| clean: false # true is default. it *will* delete the hosts /dev if mounted inside. | |
| # clone the userpatches repo (`armbian/os`) | |
| - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}" | |
| uses: actions/checkout@v6 | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| with: | |
| repository: ${{ env.USERPATCHES_REPOSITORY }} | |
| ref: ${{ env.USERPATCHES_REF }} | |
| fetch-depth: ${{ matrix.fdepth }} | |
| clean: false # true is default. | |
| path: userpatches.repo | |
| - name: "Put userpatches in place, and remove userpatches repo" | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| run: | | |
| mkdir -pv userpatches | |
| rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/ | |
| rm -rf userpatches.repo | |
| - name: Build ${{matrix.desc}} | |
| timeout-minutes: 90 | |
| id: build | |
| run: | | |
| bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" SHOW_DEBUG=yes SHARE_LOG=yes ${{env.EXTRA_PARAMS_ALL_BUILDS}} | |
| - name: "Logs: ${{ steps.build.outputs.logs_url }}" | |
| if: always() | |
| run: | | |
| echo "Logs: ${{ steps.build.outputs.logs_url }}" | |
| "build-artifacts-chunk-4": # templated "build-artifacts-chunk-4" | |
| if: ${{ github.repository_owner == 'armbian' && needs.matrix_prep.outputs.artifacts-chunk-not-empty-4 == 'yes' }} # <-- HERE: Chunk number. | |
| needs: [ "matrix_prep" ] | |
| strategy: | |
| fail-fast: false # let other jobs try to complete if one fails | |
| matrix: ${{ fromJSON(needs.matrix_prep.outputs.artifacts-chunk-json-4) }} # <-- HERE: Chunk number. | |
| name: ${{ matrix.desc || 'Empty A4' }} # <-- HERE: Chunk number. | |
| runs-on: ${{ matrix.runs_on }} | |
| steps: | |
| - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}" | |
| uses: armbian/actions/runner-clean@main | |
| # Login to ghcr.io, for later uploading rootfs to ghcr.io | |
| - name: Docker Login to GitHub Container Registry | |
| uses: docker/login-action@v3 | |
| with: | |
| registry: ghcr.io | |
| username: "${{ github.repository_owner }}" # GitHub username or org | |
| password: "${{ secrets.GITHUB_TOKEN }}" # GitHub actions builtin token. repo has to have pkg access. | |
| # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later | |
| - name: Cleanup userpatches repo | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| run: rm -rf userpatches.repo | |
| - name: Checkout build repo | |
| uses: actions/checkout@v6 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners. | |
| with: | |
| repository: ${{ env.BUILD_REPOSITORY }} | |
| ref: ${{ needs.matrix_prep.outputs.build-sha1 }} | |
| fetch-depth: ${{ matrix.fdepth }} | |
| clean: false # true is default. it *will* delete the hosts /dev if mounted inside. | |
| # clone the userpatches repo (`armbian/os`) | |
| - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}" | |
| uses: actions/checkout@v6 | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| with: | |
| repository: ${{ env.USERPATCHES_REPOSITORY }} | |
| ref: ${{ env.USERPATCHES_REF }} | |
| fetch-depth: ${{ matrix.fdepth }} | |
| clean: false # true is default. | |
| path: userpatches.repo | |
| - name: "Put userpatches in place, and remove userpatches repo" | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| run: | | |
| mkdir -pv userpatches | |
| rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/ | |
| rm -rf userpatches.repo | |
| - name: Build ${{matrix.desc}} | |
| timeout-minutes: 90 | |
| id: build | |
| run: | | |
| bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" SHOW_DEBUG=yes SHARE_LOG=yes ${{env.EXTRA_PARAMS_ALL_BUILDS}} | |
| - name: "Logs: ${{ steps.build.outputs.logs_url }}" | |
| if: always() | |
| run: | | |
| echo "Logs: ${{ steps.build.outputs.logs_url }}" | |
| "build-artifacts-chunk-5": # templated "build-artifacts-chunk-5" | |
| if: ${{ github.repository_owner == 'armbian' && needs.matrix_prep.outputs.artifacts-chunk-not-empty-5 == 'yes' }} # <-- HERE: Chunk number. | |
| needs: [ "matrix_prep" ] | |
| strategy: | |
| fail-fast: false # let other jobs try to complete if one fails | |
| matrix: ${{ fromJSON(needs.matrix_prep.outputs.artifacts-chunk-json-5) }} # <-- HERE: Chunk number. | |
| name: ${{ matrix.desc || 'Empty A5' }} # <-- HERE: Chunk number. | |
| runs-on: ${{ matrix.runs_on }} | |
| steps: | |
| - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}" | |
| uses: armbian/actions/runner-clean@main | |
| # Login to ghcr.io, for later uploading rootfs to ghcr.io | |
| - name: Docker Login to GitHub Container Registry | |
| uses: docker/login-action@v3 | |
| with: | |
| registry: ghcr.io | |
| username: "${{ github.repository_owner }}" # GitHub username or org | |
| password: "${{ secrets.GITHUB_TOKEN }}" # GitHub actions builtin token. repo has to have pkg access. | |
| # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later | |
| - name: Cleanup userpatches repo | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| run: rm -rf userpatches.repo | |
| - name: Checkout build repo | |
| uses: actions/checkout@v6 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners. | |
| with: | |
| repository: ${{ env.BUILD_REPOSITORY }} | |
| ref: ${{ needs.matrix_prep.outputs.build-sha1 }} | |
| fetch-depth: ${{ matrix.fdepth }} | |
| clean: false # true is default. it *will* delete the hosts /dev if mounted inside. | |
| # clone the userpatches repo (`armbian/os`) | |
| - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}" | |
| uses: actions/checkout@v6 | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| with: | |
| repository: ${{ env.USERPATCHES_REPOSITORY }} | |
| ref: ${{ env.USERPATCHES_REF }} | |
| fetch-depth: ${{ matrix.fdepth }} | |
| clean: false # true is default. | |
| path: userpatches.repo | |
| - name: "Put userpatches in place, and remove userpatches repo" | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| run: | | |
| mkdir -pv userpatches | |
| rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/ | |
| rm -rf userpatches.repo | |
| - name: Build ${{matrix.desc}} | |
| timeout-minutes: 90 | |
| id: build | |
| run: | | |
| bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" SHOW_DEBUG=yes SHARE_LOG=yes ${{env.EXTRA_PARAMS_ALL_BUILDS}} | |
| - name: "Logs: ${{ steps.build.outputs.logs_url }}" | |
| if: always() | |
| run: | | |
| echo "Logs: ${{ steps.build.outputs.logs_url }}" | |
| "build-artifacts-chunk-6": # templated "build-artifacts-chunk-6" | |
| if: ${{ github.repository_owner == 'armbian' && needs.matrix_prep.outputs.artifacts-chunk-not-empty-6 == 'yes' }} # <-- HERE: Chunk number. | |
| needs: [ "matrix_prep" ] | |
| strategy: | |
| fail-fast: false # let other jobs try to complete if one fails | |
| matrix: ${{ fromJSON(needs.matrix_prep.outputs.artifacts-chunk-json-6) }} # <-- HERE: Chunk number. | |
| name: ${{ matrix.desc || 'Empty A6' }} # <-- HERE: Chunk number. | |
| runs-on: ${{ matrix.runs_on }} | |
| steps: | |
| - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}" | |
| uses: armbian/actions/runner-clean@main | |
| # Login to ghcr.io, for later uploading rootfs to ghcr.io | |
| - name: Docker Login to GitHub Container Registry | |
| uses: docker/login-action@v3 | |
| with: | |
| registry: ghcr.io | |
| username: "${{ github.repository_owner }}" # GitHub username or org | |
| password: "${{ secrets.GITHUB_TOKEN }}" # GitHub actions builtin token. repo has to have pkg access. | |
| # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later | |
| - name: Cleanup userpatches repo | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| run: rm -rf userpatches.repo | |
| - name: Checkout build repo | |
| uses: actions/checkout@v6 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners. | |
| with: | |
| repository: ${{ env.BUILD_REPOSITORY }} | |
| ref: ${{ needs.matrix_prep.outputs.build-sha1 }} | |
| fetch-depth: ${{ matrix.fdepth }} | |
| clean: false # true is default. it *will* delete the hosts /dev if mounted inside. | |
| # clone the userpatches repo (`armbian/os`) | |
| - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}" | |
| uses: actions/checkout@v6 | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| with: | |
| repository: ${{ env.USERPATCHES_REPOSITORY }} | |
| ref: ${{ env.USERPATCHES_REF }} | |
| fetch-depth: ${{ matrix.fdepth }} | |
| clean: false # true is default. | |
| path: userpatches.repo | |
| - name: "Put userpatches in place, and remove userpatches repo" | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| run: | | |
| mkdir -pv userpatches | |
| rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/ | |
| rm -rf userpatches.repo | |
| - name: Build ${{matrix.desc}} | |
| timeout-minutes: 90 | |
| id: build | |
| run: | | |
| bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" SHOW_DEBUG=yes SHARE_LOG=yes ${{env.EXTRA_PARAMS_ALL_BUILDS}} | |
| - name: "Logs: ${{ steps.build.outputs.logs_url }}" | |
| if: always() | |
| run: | | |
| echo "Logs: ${{ steps.build.outputs.logs_url }}" | |
| "build-artifacts-chunk-7": # templated "build-artifacts-chunk-7" | |
| if: ${{ github.repository_owner == 'armbian' && needs.matrix_prep.outputs.artifacts-chunk-not-empty-7 == 'yes' }} # <-- HERE: Chunk number. | |
| needs: [ "matrix_prep" ] | |
| strategy: | |
| fail-fast: false # let other jobs try to complete if one fails | |
| matrix: ${{ fromJSON(needs.matrix_prep.outputs.artifacts-chunk-json-7) }} # <-- HERE: Chunk number. | |
| name: ${{ matrix.desc || 'Empty A7' }} # <-- HERE: Chunk number. | |
| runs-on: ${{ matrix.runs_on }} | |
| steps: | |
| - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}" | |
| uses: armbian/actions/runner-clean@main | |
| # Login to ghcr.io, for later uploading rootfs to ghcr.io | |
| - name: Docker Login to GitHub Container Registry | |
| uses: docker/login-action@v3 | |
| with: | |
| registry: ghcr.io | |
| username: "${{ github.repository_owner }}" # GitHub username or org | |
| password: "${{ secrets.GITHUB_TOKEN }}" # GitHub actions builtin token. repo has to have pkg access. | |
| # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later | |
| - name: Cleanup userpatches repo | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| run: rm -rf userpatches.repo | |
| - name: Checkout build repo | |
| uses: actions/checkout@v6 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners. | |
| with: | |
| repository: ${{ env.BUILD_REPOSITORY }} | |
| ref: ${{ needs.matrix_prep.outputs.build-sha1 }} | |
| fetch-depth: ${{ matrix.fdepth }} | |
| clean: false # true is default. it *will* delete the host's /dev if mounted inside. | |
| # clone the userpatches repo (`armbian/os`) | |
| - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}" | |
| uses: actions/checkout@v6 | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| with: | |
| repository: ${{ env.USERPATCHES_REPOSITORY }} | |
| ref: ${{ env.USERPATCHES_REF }} | |
| fetch-depth: ${{ matrix.fdepth }} | |
| clean: false # true is default. | |
| path: userpatches.repo | |
| - name: "Put userpatches in place, and remove userpatches repo" | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| run: | | |
| mkdir -pv userpatches | |
| rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/ | |
| rm -rf userpatches.repo | |
| - name: Build ${{matrix.desc}} | |
| timeout-minutes: 90 | |
| id: build | |
| run: | | |
| bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" SHOW_DEBUG=yes SHARE_LOG=yes ${{env.EXTRA_PARAMS_ALL_BUILDS}} | |
| - name: "Logs: ${{ steps.build.outputs.logs_url }}" | |
| if: always() | |
| run: | | |
| echo "Logs: ${{ steps.build.outputs.logs_url }}" | |
| "build-artifacts-chunk-8": # templated "build-artifacts-chunk-8" | |
| if: ${{ github.repository_owner == 'armbian' && needs.matrix_prep.outputs.artifacts-chunk-not-empty-8 == 'yes' }} # <-- HERE: Chunk number. | |
| needs: [ "matrix_prep" ] | |
| strategy: | |
| fail-fast: false # let other jobs try to complete if one fails | |
| matrix: ${{ fromJSON(needs.matrix_prep.outputs.artifacts-chunk-json-8) }} # <-- HERE: Chunk number. | |
| name: ${{ matrix.desc || 'Empty A8' }} # <-- HERE: Chunk number. | |
| runs-on: ${{ matrix.runs_on }} | |
| steps: | |
| - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}" | |
| uses: armbian/actions/runner-clean@main | |
| # Login to ghcr.io, for later uploading rootfs to ghcr.io | |
| - name: Docker Login to GitHub Container Registry | |
| uses: docker/login-action@v3 | |
| with: | |
| registry: ghcr.io | |
| username: "${{ github.repository_owner }}" # GitHub username or org | |
| password: "${{ secrets.GITHUB_TOKEN }}" # GitHub actions builtin token. repo has to have pkg access. | |
| # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later | |
| - name: Cleanup userpatches repo | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| run: rm -rf userpatches.repo | |
| - name: Checkout build repo | |
| uses: actions/checkout@v6 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners. | |
| with: | |
| repository: ${{ env.BUILD_REPOSITORY }} | |
| ref: ${{ needs.matrix_prep.outputs.build-sha1 }} | |
| fetch-depth: ${{ matrix.fdepth }} | |
| clean: false # true is default. it *will* delete the host's /dev if mounted inside. | |
| # clone the userpatches repo (`armbian/os`) | |
| - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}" | |
| uses: actions/checkout@v6 | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| with: | |
| repository: ${{ env.USERPATCHES_REPOSITORY }} | |
| ref: ${{ env.USERPATCHES_REF }} | |
| fetch-depth: ${{ matrix.fdepth }} | |
| clean: false # true is default. | |
| path: userpatches.repo | |
| - name: "Put userpatches in place, and remove userpatches repo" | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| run: | | |
| mkdir -pv userpatches | |
| rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/ | |
| rm -rf userpatches.repo | |
| - name: Build ${{matrix.desc}} | |
| timeout-minutes: 90 | |
| id: build | |
| run: | | |
| bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" SHOW_DEBUG=yes SHARE_LOG=yes ${{env.EXTRA_PARAMS_ALL_BUILDS}} | |
| - name: "Logs: ${{ steps.build.outputs.logs_url }}" | |
| if: always() | |
| run: | | |
| echo "Logs: ${{ steps.build.outputs.logs_url }}" | |
| "build-artifacts-chunk-9": # templated "build-artifacts-chunk-9" | |
| if: ${{ github.repository_owner == 'armbian' && needs.matrix_prep.outputs.artifacts-chunk-not-empty-9 == 'yes' }} # <-- HERE: Chunk number. | |
| needs: [ "matrix_prep" ] | |
| strategy: | |
| fail-fast: false # let other jobs try to complete if one fails | |
| matrix: ${{ fromJSON(needs.matrix_prep.outputs.artifacts-chunk-json-9) }} # <-- HERE: Chunk number. | |
| name: ${{ matrix.desc || 'Empty A9' }} # <-- HERE: Chunk number. | |
| runs-on: ${{ matrix.runs_on }} | |
| steps: | |
| - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}" | |
| uses: armbian/actions/runner-clean@main | |
| # Login to ghcr.io, for later uploading rootfs to ghcr.io | |
| - name: Docker Login to GitHub Container Registry | |
| uses: docker/login-action@v3 | |
| with: | |
| registry: ghcr.io | |
| username: "${{ github.repository_owner }}" # GitHub username or org | |
| password: "${{ secrets.GITHUB_TOKEN }}" # GitHub actions builtin token. repo has to have pkg access. | |
| # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later | |
| - name: Cleanup userpatches repo | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| run: rm -rf userpatches.repo | |
| - name: Checkout build repo | |
| uses: actions/checkout@v6 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners. | |
| with: | |
| repository: ${{ env.BUILD_REPOSITORY }} | |
| ref: ${{ needs.matrix_prep.outputs.build-sha1 }} | |
| fetch-depth: ${{ matrix.fdepth }} | |
| clean: false # true is default. it *will* delete the host's /dev if mounted inside. | |
| # clone the userpatches repo (`armbian/os`) | |
| - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}" | |
| uses: actions/checkout@v6 | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| with: | |
| repository: ${{ env.USERPATCHES_REPOSITORY }} | |
| ref: ${{ env.USERPATCHES_REF }} | |
| fetch-depth: ${{ matrix.fdepth }} | |
| clean: false # true is default. | |
| path: userpatches.repo | |
| - name: "Put userpatches in place, and remove userpatches repo" | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| run: | | |
| mkdir -pv userpatches | |
| rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/ | |
| rm -rf userpatches.repo | |
| - name: Build ${{matrix.desc}} | |
| timeout-minutes: 90 | |
| id: build | |
| run: | | |
| bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" SHOW_DEBUG=yes SHARE_LOG=yes ${{env.EXTRA_PARAMS_ALL_BUILDS}} | |
| - name: "Logs: ${{ steps.build.outputs.logs_url }}" | |
| if: always() | |
| run: | | |
| echo "Logs: ${{ steps.build.outputs.logs_url }}" | |
| "build-artifacts-chunk-10": # templated "build-artifacts-chunk-10" | |
| if: ${{ github.repository_owner == 'armbian' && needs.matrix_prep.outputs.artifacts-chunk-not-empty-10 == 'yes' }} # <-- HERE: Chunk number. | |
| needs: [ "matrix_prep" ] | |
| strategy: | |
| fail-fast: false # let other jobs try to complete if one fails | |
| matrix: ${{ fromJSON(needs.matrix_prep.outputs.artifacts-chunk-json-10) }} # <-- HERE: Chunk number. | |
| name: ${{ matrix.desc || 'Empty A10' }} # <-- HERE: Chunk number. | |
| runs-on: ${{ matrix.runs_on }} | |
| steps: | |
| - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}" | |
| uses: armbian/actions/runner-clean@main | |
| # Login to ghcr.io, for later uploading rootfs to ghcr.io | |
| - name: Docker Login to GitHub Container Registry | |
| uses: docker/login-action@v3 | |
| with: | |
| registry: ghcr.io | |
| username: "${{ github.repository_owner }}" # GitHub username or org | |
| password: "${{ secrets.GITHUB_TOKEN }}" # GitHub actions builtin token. repo has to have pkg access. | |
| # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later | |
| - name: Cleanup userpatches repo | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| run: rm -rf userpatches.repo | |
| - name: Checkout build repo | |
| uses: actions/checkout@v6 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners. | |
| with: | |
| repository: ${{ env.BUILD_REPOSITORY }} | |
| ref: ${{ needs.matrix_prep.outputs.build-sha1 }} | |
| fetch-depth: ${{ matrix.fdepth }} | |
| clean: false # true is default. it *will* delete the host's /dev if mounted inside. | |
| # clone the userpatches repo (`armbian/os`) | |
| - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}" | |
| uses: actions/checkout@v6 | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| with: | |
| repository: ${{ env.USERPATCHES_REPOSITORY }} | |
| ref: ${{ env.USERPATCHES_REF }} | |
| fetch-depth: ${{ matrix.fdepth }} | |
| clean: false # true is default. | |
| path: userpatches.repo | |
| - name: "Put userpatches in place, and remove userpatches repo" | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| run: | | |
| mkdir -pv userpatches | |
| rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/ | |
| rm -rf userpatches.repo | |
| - name: Build ${{matrix.desc}} | |
| timeout-minutes: 90 | |
| id: build | |
| run: | | |
| bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" SHOW_DEBUG=yes SHARE_LOG=yes ${{env.EXTRA_PARAMS_ALL_BUILDS}} | |
| - name: "Logs: ${{ steps.build.outputs.logs_url }}" | |
| if: always() | |
| run: | | |
| echo "Logs: ${{ steps.build.outputs.logs_url }}" | |
| "build-artifacts-chunk-11": # templated "build-artifacts-chunk-11" | |
| if: ${{ github.repository_owner == 'armbian' && needs.matrix_prep.outputs.artifacts-chunk-not-empty-11 == 'yes' }} # <-- HERE: Chunk number. | |
| needs: [ "matrix_prep" ] | |
| strategy: | |
| fail-fast: false # let other jobs try to complete if one fails | |
| matrix: ${{ fromJSON(needs.matrix_prep.outputs.artifacts-chunk-json-11) }} # <-- HERE: Chunk number. | |
| name: ${{ matrix.desc || 'Empty A11' }} # <-- HERE: Chunk number. | |
| runs-on: ${{ matrix.runs_on }} | |
| steps: | |
| - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}" | |
| uses: armbian/actions/runner-clean@main | |
| # Login to ghcr.io, for later uploading rootfs to ghcr.io | |
| - name: Docker Login to GitHub Container Registry | |
| uses: docker/login-action@v3 | |
| with: | |
| registry: ghcr.io | |
| username: "${{ github.repository_owner }}" # GitHub username or org | |
| password: "${{ secrets.GITHUB_TOKEN }}" # GitHub actions builtin token. repo has to have pkg access. | |
| # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later | |
| - name: Cleanup userpatches repo | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| run: rm -rf userpatches.repo | |
| - name: Checkout build repo | |
| uses: actions/checkout@v6 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners. | |
| with: | |
| repository: ${{ env.BUILD_REPOSITORY }} | |
| ref: ${{ needs.matrix_prep.outputs.build-sha1 }} | |
| fetch-depth: ${{ matrix.fdepth }} | |
| clean: false # true is default. it *will* delete the host's /dev if mounted inside. | |
| # clone the userpatches repo (`armbian/os`) | |
| - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}" | |
| uses: actions/checkout@v6 | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| with: | |
| repository: ${{ env.USERPATCHES_REPOSITORY }} | |
| ref: ${{ env.USERPATCHES_REF }} | |
| fetch-depth: ${{ matrix.fdepth }} | |
| clean: false # true is default. | |
| path: userpatches.repo | |
| - name: "Put userpatches in place, and remove userpatches repo" | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| run: | | |
| mkdir -pv userpatches | |
| rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/ | |
| rm -rf userpatches.repo | |
| - name: Build ${{matrix.desc}} | |
| timeout-minutes: 90 | |
| id: build | |
| run: | | |
| bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" SHOW_DEBUG=yes SHARE_LOG=yes ${{env.EXTRA_PARAMS_ALL_BUILDS}} | |
| - name: "Logs: ${{ steps.build.outputs.logs_url }}" | |
| if: always() | |
| run: | | |
| echo "Logs: ${{ steps.build.outputs.logs_url }}" | |
| "build-artifacts-chunk-12": # templated "build-artifacts-chunk-12" | |
| if: ${{ github.repository_owner == 'armbian' && needs.matrix_prep.outputs.artifacts-chunk-not-empty-12 == 'yes' }} # <-- HERE: Chunk number. | |
| needs: [ "matrix_prep" ] | |
| strategy: | |
| fail-fast: false # let other jobs try to complete if one fails | |
| matrix: ${{ fromJSON(needs.matrix_prep.outputs.artifacts-chunk-json-12) }} # <-- HERE: Chunk number. | |
| name: ${{ matrix.desc || 'Empty A12' }} # <-- HERE: Chunk number. | |
| runs-on: ${{ matrix.runs_on }} | |
| steps: | |
| - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}" | |
| uses: armbian/actions/runner-clean@main | |
| # Login to ghcr.io, for later uploading rootfs to ghcr.io | |
| - name: Docker Login to GitHub Container Registry | |
| uses: docker/login-action@v3 | |
| with: | |
| registry: ghcr.io | |
| username: "${{ github.repository_owner }}" # GitHub username or org | |
| password: "${{ secrets.GITHUB_TOKEN }}" # GitHub actions builtin token. repo has to have pkg access. | |
| # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later | |
| - name: Cleanup userpatches repo | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| run: rm -rf userpatches.repo | |
| - name: Checkout build repo | |
| uses: actions/checkout@v6 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners. | |
| with: | |
| repository: ${{ env.BUILD_REPOSITORY }} | |
| ref: ${{ needs.matrix_prep.outputs.build-sha1 }} | |
| fetch-depth: ${{ matrix.fdepth }} | |
| clean: false # true is default. it *will* delete the host's /dev if mounted inside. | |
| # clone the userpatches repo (`armbian/os`) | |
| - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}" | |
| uses: actions/checkout@v6 | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| with: | |
| repository: ${{ env.USERPATCHES_REPOSITORY }} | |
| ref: ${{ env.USERPATCHES_REF }} | |
| fetch-depth: ${{ matrix.fdepth }} | |
| clean: false # true is default. | |
| path: userpatches.repo | |
| - name: "Put userpatches in place, and remove userpatches repo" | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| run: | | |
| mkdir -pv userpatches | |
| rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/ | |
| rm -rf userpatches.repo | |
| - name: Build ${{matrix.desc}} | |
| timeout-minutes: 90 | |
| id: build | |
| run: | | |
| bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" SHOW_DEBUG=yes SHARE_LOG=yes ${{env.EXTRA_PARAMS_ALL_BUILDS}} | |
| - name: "Logs: ${{ steps.build.outputs.logs_url }}" | |
| if: always() | |
| run: | | |
| echo "Logs: ${{ steps.build.outputs.logs_url }}" | |
| "build-artifacts-chunk-13": # templated "build-artifacts-chunk-13" | |
| if: ${{ github.repository_owner == 'armbian' && needs.matrix_prep.outputs.artifacts-chunk-not-empty-13 == 'yes' }} # <-- HERE: Chunk number. | |
| needs: [ "matrix_prep" ] | |
| strategy: | |
| fail-fast: false # let other jobs try to complete if one fails | |
| matrix: ${{ fromJSON(needs.matrix_prep.outputs.artifacts-chunk-json-13) }} # <-- HERE: Chunk number. | |
| name: ${{ matrix.desc || 'Empty A13' }} # <-- HERE: Chunk number. | |
| runs-on: ${{ matrix.runs_on }} | |
| steps: | |
| - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}" | |
| uses: armbian/actions/runner-clean@main | |
| # Login to ghcr.io, for later uploading rootfs to ghcr.io | |
| - name: Docker Login to GitHub Container Registry | |
| uses: docker/login-action@v3 | |
| with: | |
| registry: ghcr.io | |
| username: "${{ github.repository_owner }}" # GitHub username or org | |
| password: "${{ secrets.GITHUB_TOKEN }}" # GitHub actions builtin token. repo has to have pkg access. | |
| # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later | |
| - name: Cleanup userpatches repo | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| run: rm -rf userpatches.repo | |
| - name: Checkout build repo | |
| uses: actions/checkout@v6 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners. | |
| with: | |
| repository: ${{ env.BUILD_REPOSITORY }} | |
| ref: ${{ needs.matrix_prep.outputs.build-sha1 }} | |
| fetch-depth: ${{ matrix.fdepth }} | |
| clean: false # true is default. it *will* delete the host's /dev if mounted inside. | |
| # clone the userpatches repo (`armbian/os`) | |
| - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}" | |
| uses: actions/checkout@v6 | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| with: | |
| repository: ${{ env.USERPATCHES_REPOSITORY }} | |
| ref: ${{ env.USERPATCHES_REF }} | |
| fetch-depth: ${{ matrix.fdepth }} | |
| clean: false # true is default. | |
| path: userpatches.repo | |
| - name: "Put userpatches in place, and remove userpatches repo" | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| run: | | |
| mkdir -pv userpatches | |
| rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/ | |
| rm -rf userpatches.repo | |
| - name: Build ${{matrix.desc}} | |
| timeout-minutes: 90 | |
| id: build | |
| run: | | |
| bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" SHOW_DEBUG=yes SHARE_LOG=yes ${{env.EXTRA_PARAMS_ALL_BUILDS}} | |
| - name: "Logs: ${{ steps.build.outputs.logs_url }}" | |
| if: always() | |
| run: | | |
| echo "Logs: ${{ steps.build.outputs.logs_url }}" | |
| "build-artifacts-chunk-14": # templated "build-artifacts-chunk-14" | |
| if: ${{ github.repository_owner == 'armbian' && needs.matrix_prep.outputs.artifacts-chunk-not-empty-14 == 'yes' }} # <-- HERE: Chunk number. | |
| needs: [ "matrix_prep" ] | |
| strategy: | |
| fail-fast: false # let other jobs try to complete if one fails | |
| matrix: ${{ fromJSON(needs.matrix_prep.outputs.artifacts-chunk-json-14) }} # <-- HERE: Chunk number. | |
| name: ${{ matrix.desc || 'Empty A14' }} # <-- HERE: Chunk number. | |
| runs-on: ${{ matrix.runs_on }} | |
| steps: | |
| - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}" | |
| uses: armbian/actions/runner-clean@main | |
| # Login to ghcr.io, for later uploading rootfs to ghcr.io | |
| - name: Docker Login to GitHub Container Registry | |
| uses: docker/login-action@v3 | |
| with: | |
| registry: ghcr.io | |
| username: "${{ github.repository_owner }}" # GitHub username or org | |
| password: "${{ secrets.GITHUB_TOKEN }}" # GitHub actions builtin token. repo has to have pkg access. | |
| # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later | |
| - name: Cleanup userpatches repo | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| run: rm -rf userpatches.repo | |
| - name: Checkout build repo | |
| uses: actions/checkout@v6 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners. | |
| with: | |
| repository: ${{ env.BUILD_REPOSITORY }} | |
| ref: ${{ needs.matrix_prep.outputs.build-sha1 }} | |
| fetch-depth: ${{ matrix.fdepth }} | |
| clean: false # true is default. it *will* delete the host's /dev if mounted inside. | |
| # clone the userpatches repo (`armbian/os`) | |
| - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}" | |
| uses: actions/checkout@v6 | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| with: | |
| repository: ${{ env.USERPATCHES_REPOSITORY }} | |
| ref: ${{ env.USERPATCHES_REF }} | |
| fetch-depth: ${{ matrix.fdepth }} | |
| clean: false # true is default. | |
| path: userpatches.repo | |
| - name: "Put userpatches in place, and remove userpatches repo" | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| run: | | |
| mkdir -pv userpatches | |
| rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/ | |
| rm -rf userpatches.repo | |
| - name: Build ${{matrix.desc}} | |
| timeout-minutes: 90 | |
| id: build | |
| run: | | |
| bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" SHOW_DEBUG=yes SHARE_LOG=yes ${{env.EXTRA_PARAMS_ALL_BUILDS}} | |
| - name: "Logs: ${{ steps.build.outputs.logs_url }}" | |
| if: always() | |
| run: | | |
| echo "Logs: ${{ steps.build.outputs.logs_url }}" | |
| "build-artifacts-chunk-15": # templated "build-artifacts-chunk-15" | |
| if: ${{ github.repository_owner == 'armbian' && needs.matrix_prep.outputs.artifacts-chunk-not-empty-15 == 'yes' }} # <-- HERE: Chunk number. | |
| needs: [ "matrix_prep" ] | |
| strategy: | |
| fail-fast: false # let other jobs try to complete if one fails | |
| matrix: ${{ fromJSON(needs.matrix_prep.outputs.artifacts-chunk-json-15) }} # <-- HERE: Chunk number. | |
| name: ${{ matrix.desc || 'Empty A15' }} # <-- HERE: Chunk number. | |
| runs-on: ${{ matrix.runs_on }} | |
| steps: | |
| - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}" | |
| uses: armbian/actions/runner-clean@main | |
| # Login to ghcr.io, for later uploading rootfs to ghcr.io | |
| - name: Docker Login to GitHub Container Registry | |
| uses: docker/login-action@v3 | |
| with: | |
| registry: ghcr.io | |
| username: "${{ github.repository_owner }}" # GitHub username or org | |
| password: "${{ secrets.GITHUB_TOKEN }}" # GitHub actions builtin token. repo has to have pkg access. | |
| # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later | |
| - name: Cleanup userpatches repo | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| run: rm -rf userpatches.repo | |
| - name: Checkout build repo | |
| uses: actions/checkout@v6 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners. | |
| with: | |
| repository: ${{ env.BUILD_REPOSITORY }} | |
| ref: ${{ needs.matrix_prep.outputs.build-sha1 }} | |
| fetch-depth: ${{ matrix.fdepth }} | |
| clean: false # true is default. it *will* delete the host's /dev if mounted inside. | |
| # clone the userpatches repo (`armbian/os`) | |
| - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}" | |
| uses: actions/checkout@v6 | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| with: | |
| repository: ${{ env.USERPATCHES_REPOSITORY }} | |
| ref: ${{ env.USERPATCHES_REF }} | |
| fetch-depth: ${{ matrix.fdepth }} | |
| clean: false # true is default. | |
| path: userpatches.repo | |
| - name: "Put userpatches in place, and remove userpatches repo" | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| run: | | |
| mkdir -pv userpatches | |
| rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/ | |
| rm -rf userpatches.repo | |
| - name: Build ${{matrix.desc}} | |
| timeout-minutes: 90 | |
| id: build | |
| run: | | |
| bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" SHOW_DEBUG=yes SHARE_LOG=yes ${{env.EXTRA_PARAMS_ALL_BUILDS}} | |
| - name: "Logs: ${{ steps.build.outputs.logs_url }}" | |
| if: always() | |
| run: | | |
| echo "Logs: ${{ steps.build.outputs.logs_url }}" | |
| "build-artifacts-chunk-16": # templated "build-artifacts-chunk-16" | |
| if: ${{ github.repository_owner == 'armbian' && needs.matrix_prep.outputs.artifacts-chunk-not-empty-16 == 'yes' }} # <-- HERE: Chunk number. | |
| needs: [ "matrix_prep" ] | |
| strategy: | |
| fail-fast: false # let other jobs try to complete if one fails | |
| matrix: ${{ fromJSON(needs.matrix_prep.outputs.artifacts-chunk-json-16) }} # <-- HERE: Chunk number. | |
| name: ${{ matrix.desc || 'Empty A16' }} # <-- HERE: Chunk number. | |
| runs-on: ${{ matrix.runs_on }} | |
| steps: | |
| - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}" | |
| uses: armbian/actions/runner-clean@main | |
| # Login to ghcr.io, for later uploading rootfs to ghcr.io | |
| - name: Docker Login to GitHub Container Registry | |
| uses: docker/login-action@v3 | |
| with: | |
| registry: ghcr.io | |
| username: "${{ github.repository_owner }}" # GitHub username or org | |
| password: "${{ secrets.GITHUB_TOKEN }}" # GitHub actions builtin token. repo has to have pkg access. | |
| # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later | |
| - name: Cleanup userpatches repo | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| run: rm -rf userpatches.repo | |
| - name: Checkout build repo | |
| uses: actions/checkout@v6 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners. | |
| with: | |
| repository: ${{ env.BUILD_REPOSITORY }} | |
| ref: ${{ needs.matrix_prep.outputs.build-sha1 }} | |
| fetch-depth: ${{ matrix.fdepth }} | |
| clean: false # true is default. it *will* delete the host's /dev if mounted inside. | |
| # clone the userpatches repo (`armbian/os`) | |
| - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}" | |
| uses: actions/checkout@v6 | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| with: | |
| repository: ${{ env.USERPATCHES_REPOSITORY }} | |
| ref: ${{ env.USERPATCHES_REF }} | |
| fetch-depth: ${{ matrix.fdepth }} | |
| clean: false # true is default. | |
| path: userpatches.repo | |
| - name: "Put userpatches in place, and remove userpatches repo" | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| run: | | |
| mkdir -pv userpatches | |
| rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/ | |
| rm -rf userpatches.repo | |
| - name: Build ${{matrix.desc}} | |
| timeout-minutes: 90 | |
| id: build | |
| run: | | |
| bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" SHOW_DEBUG=yes SHARE_LOG=yes ${{env.EXTRA_PARAMS_ALL_BUILDS}} | |
| - name: "Logs: ${{ steps.build.outputs.logs_url }}" | |
| if: always() | |
| run: | | |
| echo "Logs: ${{ steps.build.outputs.logs_url }}" | |
| "build-artifacts-chunk-17": # templated "build-artifacts-chunk-17" | |
| if: ${{ github.repository_owner == 'armbian' && needs.matrix_prep.outputs.artifacts-chunk-not-empty-17 == 'yes' }} # <-- HERE: Chunk number. | |
| needs: [ "matrix_prep" ] | |
| strategy: | |
| fail-fast: false # let other jobs try to complete if one fails | |
| matrix: ${{ fromJSON(needs.matrix_prep.outputs.artifacts-chunk-json-17) }} # <-- HERE: Chunk number. | |
| name: ${{ matrix.desc || 'Empty A17' }} # <-- HERE: Chunk number. | |
| runs-on: ${{ matrix.runs_on }} | |
| steps: | |
| - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}" | |
| uses: armbian/actions/runner-clean@main | |
| # Login to ghcr.io, for later uploading rootfs to ghcr.io | |
| - name: Docker Login to GitHub Container Registry | |
| uses: docker/login-action@v3 | |
| with: | |
| registry: ghcr.io | |
| username: "${{ github.repository_owner }}" # GitHub username or org | |
| password: "${{ secrets.GITHUB_TOKEN }}" # GitHub actions builtin token. repo has to have pkg access. | |
| # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later | |
| - name: Cleanup userpatches repo | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| run: rm -rf userpatches.repo | |
| - name: Checkout build repo | |
| uses: actions/checkout@v6 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners. | |
| with: | |
| repository: ${{ env.BUILD_REPOSITORY }} | |
| ref: ${{ needs.matrix_prep.outputs.build-sha1 }} | |
| fetch-depth: ${{ matrix.fdepth }} | |
| clean: false # true is default. it *will* delete the host's /dev if mounted inside. | |
| # clone the userpatches repo (`armbian/os`) | |
| - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}" | |
| uses: actions/checkout@v6 | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| with: | |
| repository: ${{ env.USERPATCHES_REPOSITORY }} | |
| ref: ${{ env.USERPATCHES_REF }} | |
| fetch-depth: ${{ matrix.fdepth }} | |
| clean: false # true is default. | |
| path: userpatches.repo | |
| - name: "Put userpatches in place, and remove userpatches repo" | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| run: | | |
| mkdir -pv userpatches | |
| rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/ | |
| rm -rf userpatches.repo | |
| - name: Build ${{matrix.desc}} | |
| timeout-minutes: 90 | |
| id: build | |
| run: | | |
| bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" SHOW_DEBUG=yes SHARE_LOG=yes ${{env.EXTRA_PARAMS_ALL_BUILDS}} | |
| - name: "Logs: ${{ steps.build.outputs.logs_url }}" | |
| if: always() | |
| run: | | |
| echo "Logs: ${{ steps.build.outputs.logs_url }}" | |
| # template file: 650.per-chunk-images_job.yaml | |
| "build-images-chunk-1": # templated "build-images-chunk-1" | |
| needs: [ "matrix_prep", "all-artifacts-ready" ] | |
| timeout-minutes: 240 | |
| if: ${{ !failure() && !cancelled() && ( github.repository_owner == 'armbian' ) && ( needs.matrix_prep.outputs.images-chunk-not-empty-1 == 'yes' ) }} # <-- HERE: Chunk number. | |
| strategy: | |
| fail-fast: false # let other jobs try to complete if one fails | |
| matrix: ${{ fromJSON(needs.matrix_prep.outputs.images-chunk-json-1) }} # <-- HERE: Chunk number. | |
| name: ${{ matrix.desc || 'Empty I1' }} # <-- HERE: Chunk number. | |
| runs-on: ${{ matrix.runs_on }} | |
| steps: | |
| - name: Install dependencies | |
| run: | | |
| if [ ! -e /usr/bin/mktorrent ]; then | |
| sudo apt-get update | |
| sudo apt-get install -y mktorrent | |
| fi | |
| # cleaning self hosted runners | |
| - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}" | |
| uses: armbian/actions/runner-clean@main | |
| # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later | |
| - name: Cleanup userpatches repo | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| run: rm -rf userpatches.repo | |
| - name: Checkout build repo | |
| uses: actions/checkout@v6 | |
| with: | |
| repository: ${{ env.BUILD_REPOSITORY }} | |
| ref: ${{ needs.matrix_prep.outputs.build-sha1 }} | |
| fetch-depth: ${{ matrix.fdepth }} | |
| clean: false # true is default. it *will* delete the host's /dev if mounted inside. | |
| # clone the userpatches repo (`armbian/os`) | |
| - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}" | |
| uses: actions/checkout@v6 | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| with: | |
| repository: ${{ env.USERPATCHES_REPOSITORY }} | |
| ref: ${{ env.USERPATCHES_REF }} | |
| fetch-depth: ${{ matrix.fdepth }} | |
| clean: false # true is default. | |
| path: userpatches.repo | |
| - name: "Put userpatches in place, and remove userpatches repo" | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| run: | | |
| mkdir -pv userpatches | |
| rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/ | |
| rm -rf userpatches.repo | |
| - name: "Cleanup leftover output" | |
| run: | | |
| rm -f userpatches/VERSION | |
| - name: ${{matrix.desc}} | |
| id: build-one-image | |
| timeout-minutes: 90 | |
| run: | | |
| # calculate loop from runner name | |
| if [ -z "${ImageOS}" ]; then | |
| USE_FIXED_LOOP_DEVICE=$(echo ${RUNNER_NAME} | rev | cut -d"-" -f1 | rev | sed 's/^0*//' | sed -e 's/^/\/dev\/loop/') | |
| fi | |
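| # worked example (hypothetical runner name): RUNNER_NAME="worker-fast-07" -> last "-" token "07" | |
| # -> leading zeros stripped -> USE_FIXED_LOOP_DEVICE="/dev/loop7" | |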
| bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" USE_FIXED_LOOP_DEVICE="$USE_FIXED_LOOP_DEVICE" SHARE_LOG=yes MAKE_FOLDERS="archive" IMAGE_VERSION=${{ needs.matrix_prep.outputs.version }} ${{env.EXTRA_PARAMS_IMAGE}} ${{env.EXTRA_PARAMS_ALL_BUILDS}} | |
| - name: "Logs: ${{ steps.build-one-image.outputs.logs_url }}" | |
| if: always() | |
| run: | | |
| echo "Logs: ${{ steps.build-one-image.outputs.logs_url }}" | |
| - name: Install SSH key | |
| uses: shimataro/ssh-key-action@v2 | |
| with: | |
| key: ${{ secrets.KEY_UPLOAD }} | |
| known_hosts: ${{ secrets.KNOWN_HOSTS_ARMBIAN_UPLOAD }} | |
| if_key_exists: replace | |
| - name: Check API rate limits | |
| run: | | |
| # install dependencies | |
| if ! command -v "gh" > /dev/null 2>&1; then | |
| sudo apt-get -y -qq install gh | |
| fi | |
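| # /rate_limit replies with JSON shaped like {"rate":{"limit":5000,"remaining":4321,...}} (values illustrative); | |
| # we loop until more than 20% of the allowance remains | |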
| while true | |
| do | |
| API_CALLS_TOTAL=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.limit') | |
| API_CALLS_LEFT=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.remaining') | |
| PERCENT=$(( API_CALLS_LEFT * 100 / API_CALLS_TOTAL )) | |
| if (( $PERCENT > 20 )); then | |
| echo "API rate in good shape $PERCENT % free" | |
| exit 0 | |
| fi | |
| echo "API rate lower then 20%, sleping 10m" | |
| sleep 10m | |
| done | |
| # show current api rate | |
| curl -s -H "Accept: application/vnd.github.v3+json" -H "Authorization: token ${{ secrets.ACCESS_TOKEN }}" https://api.github.com/rate_limit | |
| - name: Import GPG key | |
| env: | |
| GPG_KEY1: ${{ secrets.GPG_KEY1 }} | |
| if: env.GPG_KEY1 != null | |
| uses: crazy-max/ghaction-import-gpg@v6 | |
| with: | |
| gpg_private_key: ${{ secrets.GPG_KEY1 }} | |
| passphrase: ${{ secrets.GPG_PASSPHRASE1 }} | |
| - name: Sign | |
| env: | |
| GPG_PASSPHRASE1: ${{ secrets.GPG_PASSPHRASE1 }} | |
| if: env.GPG_PASSPHRASE1 != null | |
| run: | | |
| for extension in zip xz qcow2; do | |
| if ls -l output/images/*/archive/*.$extension &>/dev/null; then | |
| echo ${{ secrets.GPG_PASSPHRASE1 }} | gpg --passphrase-fd 0 --armor --detach-sign --pinentry-mode loopback --batch --yes output/images/*/archive/*.$extension | |
| fi | |
| done | |
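| # gpg --detach-sign --armor leaves an .asc signature next to each input, e.g. Armbian_*.img.xz -> Armbian_*.img.xz.asc | |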
| # Download the artifacts (output/info) produced by the prepare-matrix job. | |
| - name: Download artifacts | |
| uses: actions/download-artifact@v6 | |
| with: | |
| name: build-info-json | |
| path: output/info | |
| - name: Generate torrent | |
| timeout-minutes: 3 | |
| run: | | |
| set -euo pipefail | |
| # Build tracker list (ignore empty/whitespace-only lines) | |
| TRACKERS=$( | |
| grep -v '^[[:space:]]*$' output/info/best-torrent-servers.txt \ | |
| | sort -R \ | |
| | sed 's/^/ --announce=/' | |
| ) | |
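| # e.g. (hypothetical tracker) a line "udp://tracker.example.org:6969/announce" becomes " --announce=udp://tracker.example.org:6969/announce" | |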
| # Find BOARD and FILE (first zip/xz/qcow2 in output/images/*/archive/) | |
| BOARD="" | |
| FILE="" | |
| first_match="" | |
| for ext in zip xz qcow2; do | |
| if ls output/images/*/archive/*."$ext" >/dev/null 2>&1; then | |
| first_match=$(ls -1 output/images/*/archive/*."$ext" | head -n1) | |
| # first_match = output/images/BOARD/archive/file.ext | |
| BOARD=$(basename "$(dirname "$(dirname "$first_match")")") # -> BOARD | |
| FILE=$(basename "$first_match") | |
| break | |
| fi | |
| done | |
| # Safety check | |
| if [ -z "$BOARD" ] || [ -z "$FILE" ]; then | |
| echo "No torrent source file found (zip/xz/qcow2) in output/images/*/archive" >&2 | |
| exit 1 | |
| fi | |
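| # e.g. (hypothetical filename) first_match="output/images/bananapim5/archive/Armbian_x.y.z_Bananapim5_bookworm.img.xz" | |
| # -> BOARD="bananapim5" (two directories up from the file), FILE="Armbian_x.y.z_Bananapim5_bookworm.img.xz" | |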
| # Nightly / stable logic (templated) | |
| RELEASE_REPOSITORY="${{ env.RELEASE_REPOSITORY }}" | |
| nightlybuild="${{ github.event.inputs.nightlybuild }}" | |
| nightlybuild_default="yes" # plain string; a quoted "'yes'" would never match the = "yes" / = "no" tests below | |
| effective_nightlybuild="${nightlybuild:-$nightlybuild_default}" | |
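| # on schedule/push runs github.event.inputs.* is empty, so the shell default above is what applies there | |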
| WEBSEEDS="" | |
| if [ "$effective_nightlybuild" = "no" ] && [ "$RELEASE_REPOSITORY" != "distribution" ]; then | |
| ################################################################## | |
| # STABLE RELEASES | |
| # Use download mirrors (servers-download.jq) | |
| # URL: https://<host><download_path_images>/<BOARD>/archive/<FILE> | |
| # download_path_images is already normalized & defaults to /dl in JSON. | |
| ################################################################## | |
| WEBSEEDS=$( | |
| jq -r --arg board "$BOARD" --arg file "$FILE" ' | |
| .[] | |
| | "https://\(.host)\(.download_path_images)/\($board)/archive/\($file)" | |
| ' output/info/servers-download.jq | paste -sd, - | |
| ) | |
| else | |
| ################################################################## | |
| # NIGHTLY BUILDS | |
| # Use cache mirrors (servers-cache.jq) + GitHub as extra webseed | |
| # Path is FIXED: https://SERVER/cache/os/<version>/<FILE> | |
| ################################################################## | |
| WEBSEEDS=$( | |
| jq -r \ | |
| --arg repo "os" \ | |
| --arg ver "${{ needs.matrix_prep.outputs.version }}" \ | |
| --arg file "$FILE" ' | |
| .[] | |
| | "https://\(.host)/cache/\($repo)/\($ver)/\($file)" | |
| ' output/info/servers-cache.jq | paste -sd, - | |
| ) | |
| # Append GitHub webseed | |
| if [ -n "$WEBSEEDS" ]; then | |
| WEBSEEDS+="," | |
| fi | |
| WEBSEEDS+="https://github.com/armbian/os/releases/download/${{ needs.matrix_prep.outputs.version }}/${FILE}" | |
| fi | |
| echo "WEBSEEDS: $WEBSEEDS" | |
| # Go to the archive directory that contains FILE | |
| cd "$(dirname "$first_match")" || exit 1 | |
| mktorrent \ | |
| --comment="Armbian torrent for ${FILE}" \ | |
| --verbose \ | |
| ${TRACKERS} \ | |
| --web-seed="${WEBSEEDS}" \ | |
| "${FILE}" | |
| # drop .txt helper files | |
| rm -f *.txt | |
| - name: "Prepare release artifacts (exclude .asc, .sha, .torrent)" | |
| run: | | |
| # Start from a clean directory | |
| rm -rf output/release | |
| mkdir -p output/release | |
| # Copy wanted artifacts from output/images, preserving folder structure | |
| # e.g. output/images/BOARDNAME/archive/Armbian_*.img -> output/release/output/images/BOARDNAME/archive/... | |
| find output/images -type f -name 'Armbian_*.*' \ | |
| ! -name '*.asc' \ | |
| ! -name '*.sha' \ | |
| ! -name '*.torrent' \ | |
| -exec cp --parents {} output/release/ \; | |
| # debug | |
| tree output/images | |
| echo "# debug" | |
| tree output/release | |
| - name: "Upload artefacts except .asc, .sha and .torrent" | |
| timeout-minutes: 60 | |
| if: ${{ ( github.event.inputs.nightlybuild || 'yes' ) == 'yes' || env.RELEASE_REPOSITORY == 'community' || env.RELEASE_REPOSITORY == 'distribution' }} | |
| uses: ncipollo/release-action@v1 | |
| with: | |
| repo: "${{ env.RELEASE_REPOSITORY }}" | |
| tag: "${{ needs.matrix_prep.outputs.version }}" | |
| artifacts: "output/release/output/images/*/*/Armbian_*.*" | |
| omitBody: true | |
| replacesArtifacts: true | |
| omitName: true | |
| makeLatest: false | |
| omitPrereleaseDuringUpdate: true | |
| allowUpdates: true | |
| artifactErrorsFailBuild: true | |
| token: "${{ env.GH_TOKEN }}" | |
| - name: "Upload to servers" | |
| run: | | |
| # debug | |
| echo "=== servers-cache.jq ===" | |
| jq . output/info/servers-cache.jq || cat output/info/servers-cache.jq | |
| echo "=== servers-upload.jq ===" | |
| jq . output/info/servers-upload.jq || cat output/info/servers-upload.jq | |
| max_retries=3 | |
| sync_from_json() { | |
| local json_file=$1 | |
| local mode=$2 # "cache" or "upload" | |
| echo "== Processing ${json_file} (mode: ${mode}) ==" | |
| # Iterate over JSON array elements | |
| while IFS= read -r server; do | |
| # JSON structure (same for all files): | |
| # { | |
| # "host": "...", | |
| # "upload_path": "...", | |
| # "download_path_archive": "...", | |
| # "download_path_images": "...", | |
| # "download_path_debs": "...", | |
| # "port": 22, | |
| # "username": "mirror" | |
| # } | |
| SERVER_URL=$(jq -r '.host // empty' <<<"$server") | |
| SERVER_PATH=$(jq -r '.upload_path // ""' <<<"$server") | |
| SERVER_PORT=$(jq -r '.port // 22' <<<"$server") | |
| SERVER_USERNAME=$(jq -r '.username // "mirror"' <<<"$server") | |
| # skip empty host | |
| [ -z "$SERVER_URL" ] && continue | |
| echo "Processing: $SERVER_URL:$SERVER_PORT (upload_path: $SERVER_PATH)" | |
| # Clean known_hosts entry (host:port form) | |
| ssh-keygen -f "${HOME}/.ssh/known_hosts" -R "[${SERVER_URL}]:${SERVER_PORT}" 2>/dev/null || true | |
| # Select rsync filters + remote subdir | |
| if [ "$mode" = "cache" ]; then | |
| # only .sha, .asc and .torrent | |
| RSYNC_FILTER=( | |
| --include='*/' | |
| --include='*.sha' | |
| --include='*.asc' | |
| --include='*.torrent' | |
| --exclude='*' | |
| ) | |
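| # rsync applies filter rules in order: keep directories so it can recurse, keep the three suffixes, drop everything else | |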
| REMOTE_SUBDIR="cache/artifacts/" | |
| else | |
| # everything | |
| RSYNC_FILTER=( | |
| --include='*/' | |
| --include='*' | |
| ) | |
| REMOTE_SUBDIR="incoming/${GITHUB_ACTOR}/" | |
| fi | |
| # Retry loop | |
| for attempt in $(seq 1 "$max_retries"); do | |
| echo "[$SERVER_URL] rsync attempt ${attempt}/${max_retries}..." | |
| if rsync --progress \ | |
| -e "ssh -p ${SERVER_PORT} -o StrictHostKeyChecking=accept-new" \ | |
| -rvP \ | |
| "${RSYNC_FILTER[@]}" \ | |
| output/images/ \ | |
| "${SERVER_USERNAME}@${SERVER_URL}:${SERVER_PATH}/${REMOTE_SUBDIR}" | |
| then | |
| echo "[$SERVER_URL] rsync successful." | |
| break | |
| fi | |
| if [ "$attempt" -eq "$max_retries" ]; then | |
| echo "[$SERVER_URL] rsync FAILED after ${max_retries} attempts." | |
| exit 1 | |
| fi | |
| echo "[$SERVER_URL] rsync failed. Retrying in 10 seconds..." | |
| sleep 10 | |
| done | |
| done < <(jq -c '.[]' "$json_file") | |
| } | |
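| # usage sketch: sync_from_json output/info/servers-cache.jq cache  (pushes .sha/.asc/.torrent into <upload_path>/cache/artifacts/) | |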
| nightlybuild="${{ github.event.inputs.nightlybuild }}" | |
| nightlybuild_default="yes" # plain string; a quoted "'yes'" would never match the = "yes" / = "no" tests below | |
| RELEASE_REPOSITORY="${{ env.RELEASE_REPOSITORY }}" | |
| effective_nightlybuild="${nightlybuild:-$nightlybuild_default}" | |
| if [ "$effective_nightlybuild" = "yes" ] || \ | |
| [ "$RELEASE_REPOSITORY" = "community" ] || \ | |
| [ "$RELEASE_REPOSITORY" = "distribution" ]; then | |
| # Upload to cache servers: only .sha/.asc/.torrent | |
| sync_from_json output/info/servers-cache.jq cache | |
| fi | |
| if [ "$effective_nightlybuild" = "no" ] && \ | |
| [ "$RELEASE_REPOSITORY" = "os" ]; then | |
| # Upload to upload servers: everything (the filter passes all files) | |
| sync_from_json output/info/servers-upload.jq upload | |
| fi | |
| # cleaning self hosted runners | |
| - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}" | |
| if: always() | |
| uses: armbian/actions/runner-clean@main | |
| "build-images-chunk-2": # templated "build-images-chunk-2" | |
| needs: [ "matrix_prep", "all-artifacts-ready" ] | |
| timeout-minutes: 240 | |
| if: ${{ !failure() && !cancelled() && ( github.repository_owner == 'armbian' ) && ( needs.matrix_prep.outputs.images-chunk-not-empty-2 == 'yes' ) }} # <-- HERE: Chunk number. | |
| strategy: | |
| fail-fast: false # let other jobs try to complete if one fails | |
| matrix: ${{ fromJSON(needs.matrix_prep.outputs.images-chunk-json-2) }} # <-- HERE: Chunk number. | |
| name: ${{ matrix.desc || 'Empty I2' }} # <-- HERE: Chunk number. | |
| runs-on: ${{ matrix.runs_on }} | |
| steps: | |
| - name: Install dependencies | |
| run: | | |
| if [ ! -e /usr/bin/mktorrent ]; then | |
| sudo apt-get update | |
| sudo apt-get install -y mktorrent | |
| fi | |
| # cleaning self hosted runners | |
| - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}" | |
| uses: armbian/actions/runner-clean@main | |
| # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later | |
| - name: Cleanup userpatches repo | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| run: rm -rf userpatches.repo | |
| - name: Checkout build repo | |
| uses: actions/checkout@v6 | |
| with: | |
| repository: ${{ env.BUILD_REPOSITORY }} | |
| ref: ${{ needs.matrix_prep.outputs.build-sha1 }} | |
| fetch-depth: ${{ matrix.fdepth }} | |
| clean: false # true is default. it *will* delete the host's /dev if mounted inside. | |
| # clone the userpatches repo (`armbian/os`) | |
| - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}" | |
| uses: actions/checkout@v6 | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| with: | |
| repository: ${{ env.USERPATCHES_REPOSITORY }} | |
| ref: ${{ env.USERPATCHES_REF }} | |
| fetch-depth: ${{ matrix.fdepth }} | |
| clean: false # true is default. | |
| path: userpatches.repo | |
| - name: "Put userpatches in place, and remove userpatches repo" | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| run: | | |
| mkdir -pv userpatches | |
| rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/ | |
| rm -rf userpatches.repo | |
| - name: "Cleanup leftover output" | |
| run: | | |
| rm -f userpatches/VERSION | |
| - name: ${{matrix.desc}} | |
| id: build-one-image | |
| timeout-minutes: 90 | |
| run: | | |
| # calculate loop from runner name | |
| if [ -z "${ImageOS}" ]; then | |
| USE_FIXED_LOOP_DEVICE=$(echo ${RUNNER_NAME} | rev | cut -d"-" -f1 | rev | sed 's/^0*//' | sed -e 's/^/\/dev\/loop/') | |
| fi | |
| bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" USE_FIXED_LOOP_DEVICE="$USE_FIXED_LOOP_DEVICE" SHARE_LOG=yes MAKE_FOLDERS="archive" IMAGE_VERSION=${{ needs.matrix_prep.outputs.version }} ${{env.EXTRA_PARAMS_IMAGE}} ${{env.EXTRA_PARAMS_ALL_BUILDS}} | |
| - name: "Logs: ${{ steps.build-one-image.outputs.logs_url }}" | |
| if: always() | |
| run: | | |
| echo "Logs: ${{ steps.build-one-image.outputs.logs_url }}" | |
| - name: Install SSH key | |
| uses: shimataro/ssh-key-action@v2 | |
| with: | |
| key: ${{ secrets.KEY_UPLOAD }} | |
| known_hosts: ${{ secrets.KNOWN_HOSTS_ARMBIAN_UPLOAD }} | |
| if_key_exists: replace | |
| - name: Check API rate limits | |
| run: | | |
| # install dependencies | |
| if ! command -v "gh" > /dev/null 2>&1; then | |
| sudo apt-get -y -qq install gh | |
| fi | |
| while true | |
| do | |
| API_CALLS_TOTAL=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.limit') | |
| API_CALLS_LEFT=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.remaining') | |
| PERCENT=$(( API_CALLS_LEFT * 100 / API_CALLS_TOTAL )) | |
| if (( $PERCENT > 20 )); then | |
| echo "API rate in good shape $PERCENT % free" | |
| exit 0 | |
| fi | |
| echo "API rate lower then 20%, sleping 10m" | |
| sleep 10m | |
| done | |
| # show current api rate | |
| curl -s -H "Accept: application/vnd.github.v3+json" -H "Authorization: token ${{ secrets.ACCESS_TOKEN }}" https://api.github.com/rate_limit | |
| - name: Import GPG key | |
| env: | |
| GPG_KEY1: ${{ secrets.GPG_KEY1 }} | |
| if: env.GPG_KEY1 != null | |
| uses: crazy-max/ghaction-import-gpg@v6 | |
| with: | |
| gpg_private_key: ${{ secrets.GPG_KEY1 }} | |
| passphrase: ${{ secrets.GPG_PASSPHRASE1 }} | |
| - name: Sign | |
| env: | |
| GPG_PASSPHRASE1: ${{ secrets.GPG_PASSPHRASE1 }} | |
| if: env.GPG_PASSPHRASE1 != null | |
| run: | | |
| for extension in zip xz qcow2; do | |
| if ls -l output/images/*/archive/*.$extension &>/dev/null; then | |
| echo ${{ secrets.GPG_PASSPHRASE1 }} | gpg --passphrase-fd 0 --armor --detach-sign --pinentry-mode loopback --batch --yes output/images/*/archive/*.$extension | |
| fi | |
| done | |
| # Download the artifacts (output/info) produced by the prepare-matrix job. | |
| - name: Download artifacts | |
| uses: actions/download-artifact@v6 | |
| with: | |
| name: build-info-json | |
| path: output/info | |
| - name: Generate torrent | |
| timeout-minutes: 3 | |
| run: | | |
| set -euo pipefail | |
| # Build tracker list (ignore empty/whitespace-only lines) | |
| TRACKERS=$( | |
| grep -v '^[[:space:]]*$' output/info/best-torrent-servers.txt \ | |
| | sort -R \ | |
| | sed 's/^/ --announce=/' | |
| ) | |
| # Find BOARD and FILE (first zip/xz/qcow2 in output/images/*/archive/) | |
| BOARD="" | |
| FILE="" | |
| first_match="" | |
| for ext in zip xz qcow2; do | |
| if ls output/images/*/archive/*."$ext" >/dev/null 2>&1; then | |
| first_match=$(ls -1 output/images/*/archive/*."$ext" | head -n1) | |
| # first_match = output/images/BOARD/archive/file.ext | |
| BOARD=$(basename "$(dirname "$(dirname "$first_match")")") # -> BOARD | |
| FILE=$(basename "$first_match") | |
| break | |
| fi | |
| done | |
| # Safety check | |
| if [ -z "$BOARD" ] || [ -z "$FILE" ]; then | |
| echo "No torrent source file found (zip/xz/qcow2) in output/images/*/archive" >&2 | |
| exit 1 | |
| fi | |
| # Nightly / stable logic (templated) | |
| RELEASE_REPOSITORY="${{ env.RELEASE_REPOSITORY }}" | |
| nightlybuild="${{ github.event.inputs.nightlybuild }}" | |
| nightlybuild_default="yes" # plain string; a quoted "'yes'" would never match the = "yes" / = "no" tests below | |
| effective_nightlybuild="${nightlybuild:-$nightlybuild_default}" | |
| WEBSEEDS="" | |
| if [ "$effective_nightlybuild" = "no" ] && [ "$RELEASE_REPOSITORY" != "distribution" ]; then | |
| ################################################################## | |
| # STABLE RELEASES | |
| # Use download mirrors (servers-download.jq) | |
| # URL: https://<host><download_path_images>/<BOARD>/archive/<FILE> | |
| # download_path_images is already normalized & defaults to /dl in JSON. | |
| ################################################################## | |
| WEBSEEDS=$( | |
| jq -r --arg board "$BOARD" --arg file "$FILE" ' | |
| .[] | |
| | "https://\(.host)\(.download_path_images)/\($board)/archive/\($file)" | |
| ' output/info/servers-download.jq | paste -sd, - | |
| ) | |
| else | |
| ################################################################## | |
| # NIGHTLY BUILDS | |
| # Use cache mirrors (servers-cache.jq) + GitHub as extra webseed | |
| # Path is FIXED: https://SERVER/cache/os/<version>/<FILE> | |
| ################################################################## | |
| WEBSEEDS=$( | |
| jq -r \ | |
| --arg repo "os" \ | |
| --arg ver "${{ needs.matrix_prep.outputs.version }}" \ | |
| --arg file "$FILE" ' | |
| .[] | |
| | "https://\(.host)/cache/\($repo)/\($ver)/\($file)" | |
| ' output/info/servers-cache.jq | paste -sd, - | |
| ) | |
| # Append GitHub webseed | |
| if [ -n "$WEBSEEDS" ]; then | |
| WEBSEEDS+="," | |
| fi | |
| WEBSEEDS+="https://github.com/armbian/os/releases/download/${{ needs.matrix_prep.outputs.version }}/${FILE}" | |
| fi | |
| echo "WEBSEEDS: $WEBSEEDS" | |
| # Go to the archive directory that contains FILE | |
| cd "$(dirname "$first_match")" || exit 1 | |
| mktorrent \ | |
| --comment="Armbian torrent for ${FILE}" \ | |
| --verbose \ | |
| ${TRACKERS} \ | |
| --web-seed="${WEBSEEDS}" \ | |
| "${FILE}" | |
| # drop .txt helper files | |
| rm -f *.txt | |
| - name: "Prepare release artifacts (exclude .asc, .sha, .torrent)" | |
| run: | | |
| # Start from a clean directory | |
| rm -rf output/release | |
| mkdir -p output/release | |
| # Copy wanted artifacts from output/images, preserving folder structure | |
| # e.g. output/images/BOARDNAME/archive/Armbian_*.img -> output/release/output/images/BOARDNAME/archive/... | |
| find output/images -type f -name 'Armbian_*.*' \ | |
| ! -name '*.asc' \ | |
| ! -name '*.sha' \ | |
| ! -name '*.torrent' \ | |
| -exec cp --parents {} output/release/ \; | |
| # debug | |
| tree output/images | |
| echo "# debug" | |
| tree output/release | |
| - name: "Upload artefacts except .asc, .sha and .torrent" | |
| timeout-minutes: 60 | |
| if: ${{ ( github.event.inputs.nightlybuild || 'yes' ) == 'yes' || env.RELEASE_REPOSITORY == 'community' || env.RELEASE_REPOSITORY == 'distribution' }} | |
| uses: ncipollo/release-action@v1 | |
| with: | |
| repo: "${{ env.RELEASE_REPOSITORY }}" | |
| tag: "${{ needs.matrix_prep.outputs.version }}" | |
| artifacts: "output/release/output/images/*/*/Armbian_*.*" | |
| omitBody: true | |
| replacesArtifacts: true | |
| omitName: true | |
| makeLatest: false | |
| omitPrereleaseDuringUpdate: true | |
| allowUpdates: true | |
| artifactErrorsFailBuild: true | |
| token: "${{ env.GH_TOKEN }}" | |
| - name: "Upload to servers" | |
| run: | | |
| # debug | |
| echo "=== servers-cache.jq ===" | |
| jq . output/info/servers-cache.jq || cat output/info/servers-cache.jq | |
| echo "=== servers-upload.jq ===" | |
| jq . output/info/servers-upload.jq || cat output/info/servers-upload.jq | |
| max_retries=3 | |
| sync_from_json() { | |
| local json_file=$1 | |
| local mode=$2 # "cache" or "upload" | |
| echo "== Processing ${json_file} (mode: ${mode}) ==" | |
| # Iterate over JSON array elements | |
| while IFS= read -r server; do | |
| # JSON structure (same for all files): | |
| # { | |
| # "host": "...", | |
| # "upload_path": "...", | |
| # "download_path_archive": "...", | |
| # "download_path_images": "...", | |
| # "download_path_debs": "...", | |
| # "port": 22, | |
| # "username": "mirror" | |
| # } | |
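| # A worked example of one element as emitted by `jq -c '.[]'` below | |
| # (values hypothetical): | |
| #   {"host":"mirror.example.org","upload_path":"/srv/armbian","port":22,"username":"mirror"} | |
| # yields SERVER_URL=mirror.example.org, SERVER_PATH=/srv/armbian, SERVER_PORT=22, SERVER_USERNAME=mirror. | |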
| SERVER_URL=$(jq -r '.host // empty' <<<"$server") | |
| SERVER_PATH=$(jq -r '.upload_path // ""' <<<"$server") | |
| SERVER_PORT=$(jq -r '.port // 22' <<<"$server") | |
| SERVER_USERNAME=$(jq -r '.username // "mirror"' <<<"$server") | |
| # skip empty host | |
| [ -z "$SERVER_URL" ] && continue | |
| echo "Processing: $SERVER_URL:$SERVER_PORT (upload_path: $SERVER_PATH)" | |
| # Clean known_hosts entry (host:port form) | |
| ssh-keygen -f "${HOME}/.ssh/known_hosts" -R "[${SERVER_URL}]:${SERVER_PORT}" 2>/dev/null || true | |
| # Select rsync filters + remote subdir | |
| if [ "$mode" = "cache" ]; then | |
| # only .sha, .asc and .torrent | |
| RSYNC_FILTER=( | |
| --include='*/' | |
| --include='*.sha' | |
| --include='*.asc' | |
| --include='*.torrent' | |
| --exclude='*' | |
| ) | |
| REMOTE_SUBDIR="cache/artifacts/" | |
| else | |
| # everything | |
| RSYNC_FILTER=( | |
| --include='*/' | |
| --include='*' | |
| ) | |
| REMOTE_SUBDIR="incoming/${GITHUB_ACTOR}/" | |
| fi | |
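| # rsync applies --include/--exclude rules in order: in "cache" mode the | |
| # trailing --exclude='*' drops everything that is not a directory or a | |
| # .sha/.asc/.torrent file; "upload" mode matches everything. With the | |
| # hypothetical server above, a cache transfer lands in | |
| #   mirror@mirror.example.org:/srv/armbian/cache/artifacts/ | |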
| # Retry loop | |
| for attempt in $(seq 1 "$max_retries"); do | |
| echo "[$SERVER_URL] rsync attempt ${attempt}/${max_retries}..." | |
| if rsync --progress \ | |
| -e "ssh -p ${SERVER_PORT} -o StrictHostKeyChecking=accept-new" \ | |
| -rvP \ | |
| "${RSYNC_FILTER[@]}" \ | |
| output/images/ \ | |
| "${SERVER_USERNAME}@${SERVER_URL}:${SERVER_PATH}/${REMOTE_SUBDIR}" | |
| then | |
| echo "[$SERVER_URL] rsync successful." | |
| break | |
| fi | |
| if [ "$attempt" -eq "$max_retries" ]; then | |
| echo "[$SERVER_URL] rsync FAILED after ${max_retries} attempts." | |
| exit 1 | |
| fi | |
| echo "[$SERVER_URL] rsync failed. Retrying in 10 seconds..." | |
| sleep 10 | |
| done | |
| done < <(jq -c '.[]' "$json_file") | |
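| # Note: the loop reads from `< <(jq -c '.[]' ...)` (process substitution) | |
| # rather than `jq ... | while ...` so the while body runs in the current | |
| # shell and `exit 1` aborts the whole step, not just a pipeline subshell. | |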
| } | |
| nightlybuild="${{ github.event.inputs.nightlybuild }}" | |
| nightlybuild_default="'yes'" | |
| RELEASE_REPOSITORY="${{ env.RELEASE_REPOSITORY }}" | |
| effective_nightlybuild="${nightlybuild:-$nightlybuild_default}" | |
| if [ "$effective_nightlybuild" = "yes" ] || \ | |
| [ "$RELEASE_REPOSITORY" = "community" ] || \ | |
| [ "$RELEASE_REPOSITORY" = "distribution" ]; then | |
| # Upload to cache servers: only .sha/.asc/.torrent | |
| sync_from_json output/info/servers-cache.jq cache | |
| fi | |
| if [ "$effective_nightlybuild" = "no" ] && \ | |
| [ "$RELEASE_REPOSITORY" = "os" ]; then | |
| # Upload to servers: everything (filter admits all files) | |
| sync_from_json output/info/servers-upload.jq upload | |
| fi | |
| # cleaning self-hosted runners | |
| - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}" | |
| if: always() | |
| uses: armbian/actions/runner-clean@main | |
| "build-images-chunk-3": # templated "build-images-chunk-3" | |
| needs: [ "matrix_prep", "all-artifacts-ready" ] | |
| timeout-minutes: 240 | |
| if: ${{ !failure() && !cancelled() && ( github.repository_owner == 'armbian' ) && ( needs.matrix_prep.outputs.images-chunk-not-empty-3 == 'yes' ) }} # <-- HERE: Chunk number. | |
| strategy: | |
| fail-fast: false # let other jobs try to complete if one fails | |
| matrix: ${{ fromJSON(needs.matrix_prep.outputs.images-chunk-json-3) }} # <-- HERE: Chunk number. | |
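| # The chunk JSON is expected to carry the keys used below; roughly, with hypothetical values: | |
| # {"include":[{"desc":"bananapim5 bookworm","runs_on":"ubuntu-latest","invocation":"build BOARD=bananapim5 ...","fdepth":"1"}]} | |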
| name: ${{ matrix.desc || 'Empty I3' }} # <-- HERE: Chunk number. | |
| runs-on: ${{ matrix.runs_on }} | |
| steps: | |
| - name: Install dependencies | |
| run: | | |
| if [ ! -e /usr/bin/mktorrent ]; then | |
| sudo apt-get update | |
| sudo apt-get install -y mktorrent | |
| fi | |
| # cleaning self-hosted runners | |
| - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}" | |
| uses: armbian/actions/runner-clean@main | |
| # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later | |
| - name: Cleanup userpatches repo | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| run: rm -rf userpatches.repo | |
| - name: Checkout build repo | |
| uses: actions/checkout@v6 | |
| with: | |
| repository: ${{ env.BUILD_REPOSITORY }} | |
| ref: ${{ needs.matrix_prep.outputs.build-sha1 }} | |
| fetch-depth: ${{ matrix.fdepth }} | |
| clean: false # true is default. it *will* delete the host's /dev if mounted inside. | |
| # clone the userpatches repo (`armbian/os`) | |
| - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}" | |
| uses: actions/checkout@v6 | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| with: | |
| repository: ${{ env.USERPATCHES_REPOSITORY }} | |
| ref: ${{ env.USERPATCHES_REF }} | |
| fetch-depth: ${{ matrix.fdepth }} | |
| clean: false # true is default. | |
| path: userpatches.repo | |
| - name: "Put userpatches in place, and remove userpatches repo" | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| run: | | |
| mkdir -pv userpatches | |
| rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/ | |
| rm -rf userpatches.repo | |
| - name: "Cleanup leftover output" | |
| run: | | |
| rm -f userpatches/VERSION | |
| - name: ${{matrix.desc}} | |
| id: build-one-image | |
| timeout-minutes: 90 | |
| run: | | |
| # calculate loop from runner name | |
| if [ -z "${ImageOS}" ]; then | |
| USE_FIXED_LOOP_DEVICE=$(echo ${RUNNER_NAME} | rev | cut -d"-" -f1 | rev | sed 's/^0*//' | sed -e 's/^/\/dev\/loop/') | |
| fi | |
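| # Worked example (hypothetical runner name): RUNNER_NAME=armbian-fast-07 | |
| # -> rev/cut/rev takes the last "-" field "07" -> strip leading zeros -> "7" | |
| # -> USE_FIXED_LOOP_DEVICE=/dev/loop7 | |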
| bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" USE_FIXED_LOOP_DEVICE="$USE_FIXED_LOOP_DEVICE" SHARE_LOG=yes MAKE_FOLDERS="archive" IMAGE_VERSION=${{ needs.matrix_prep.outputs.version }} ${{env.EXTRA_PARAMS_IMAGE}} ${{env.EXTRA_PARAMS_ALL_BUILDS}} | |
| - name: "Logs: ${{ steps.build-one-image.outputs.logs_url }}" | |
| if: always() | |
| run: | | |
| echo "Logs: ${{ steps.build-one-image.outputs.logs_url }}" | |
| - name: Install SSH key | |
| uses: shimataro/ssh-key-action@v2 | |
| with: | |
| key: ${{ secrets.KEY_UPLOAD }} | |
| known_hosts: ${{ secrets.KNOWN_HOSTS_ARMBIAN_UPLOAD }} | |
| if_key_exists: replace | |
| - name: Check API rate limits | |
| run: | | |
| # install dependencies | |
| if ! command -v "gh" > /dev/null 2>&1; then | |
| sudo apt-get -y -qq install gh | |
| fi | |
| while true | |
| do | |
| API_CALLS_TOTAL=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.limit') | |
| API_CALLS_LEFT=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.remaining') | |
| PERCENT=$(( API_CALLS_LEFT * 100 / API_CALLS_TOTAL )) | |
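| # e.g. a limit of 5000 with 4150 calls remaining gives 4150 * 100 / 5000 = 83 (% free) | |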
| if (( PERCENT > 20 )); then | |
| echo "API rate in good shape: ${PERCENT}% free" | |
| break | |
| fi | |
| echo "API rate lower than 20%, sleeping 10m" | |
| sleep 10m | |
| done | |
| # show current api rate | |
| curl -s -H "Accept: application/vnd.github.v3+json" -H "Authorization: token ${{ secrets.ACCESS_TOKEN }}" https://api.github.com/rate_limit | |
| - name: Import GPG key | |
| env: | |
| GPG_KEY1: ${{ secrets.GPG_KEY1 }} | |
| if: env.GPG_KEY1 != null | |
| uses: crazy-max/ghaction-import-gpg@v6 | |
| with: | |
| gpg_private_key: ${{ secrets.GPG_KEY1 }} | |
| passphrase: ${{ secrets.GPG_PASSPHRASE1 }} | |
| - name: Sign | |
| env: | |
| GPG_PASSPHRASE1: ${{ secrets.GPG_PASSPHRASE1 }} | |
| if: env.GPG_PASSPHRASE1 != null | |
| run: | | |
| for extension in zip xz qcow2; do | |
| if ls -l output/images/*/archive/*.$extension &>/dev/null; then | |
| echo "${GPG_PASSPHRASE1}" | gpg --passphrase-fd 0 --armor --detach-sign --pinentry-mode loopback --batch --yes output/images/*/archive/*.$extension | |
| fi | |
| done | |
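| # --armor --detach-sign writes an ASCII-armored signature next to each image, | |
| # e.g. Armbian_25.11.0_Board_bookworm.img.xz -> Armbian_25.11.0_Board_bookworm.img.xz.asc | |
| # (filenames hypothetical); the .asc files are what the cache upload ships later. | |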
| # Download the artifacts (output/info) produced by the prepare-matrix job. | |
| - name: Download artifacts | |
| uses: actions/download-artifact@v6 | |
| with: | |
| name: build-info-json | |
| path: output/info | |
| - name: Generate torrent | |
| timeout-minutes: 3 | |
| run: | | |
| set -euo pipefail | |
| # Build tracker list (ignore empty/whitespace-only lines) | |
| TRACKERS=$( | |
| grep -v '^[[:space:]]*$' output/info/best-torrent-servers.txt \ | |
| | sort -R \ | |
| | sed 's/^/ --announce=/' | |
| ) | |
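| # Example: a (hypothetical) best-torrent-servers.txt containing | |
| #   udp://tracker-1.example.org:6969 | |
| #   udp://tracker-2.example.org:6969 | |
| # becomes, after shuffling, flags like | |
| #   --announce=udp://tracker-2.example.org:6969 --announce=udp://tracker-1.example.org:6969 | |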
| # Find BOARD and FILE (first zip/xz/qcow2 in output/images/*/archive/) | |
| BOARD="" | |
| FILE="" | |
| first_match="" | |
| for ext in zip xz qcow2; do | |
| if ls output/images/*/archive/*."$ext" >/dev/null 2>&1; then | |
| first_match=$(ls -1 output/images/*/archive/*."$ext" | head -n1) | |
| # first_match = output/images/BOARD/archive/file.ext | |
| BOARD=$(basename "$(dirname "$(dirname "$first_match")")") # -> BOARD | |
| FILE=$(basename "$first_match") | |
| break | |
| fi | |
| done | |
| # Safety check | |
| if [ -z "$BOARD" ] || [ -z "$FILE" ]; then | |
| echo "No torrent source file found (zip/xz/qcow2) in output/images/*/archive" >&2 | |
| exit 1 | |
| fi | |
| # Nightly / stable logic (templated) | |
| RELEASE_REPOSITORY="${{ env.RELEASE_REPOSITORY }}" | |
| nightlybuild="${{ github.event.inputs.nightlybuild }}" | |
| nightlybuild_default="yes" | |
| effective_nightlybuild="${nightlybuild:-$nightlybuild_default}" | |
| WEBSEEDS="" | |
| if [ "$effective_nightlybuild" = "no" ] && [ "$RELEASE_REPOSITORY" != "distribution" ]; then | |
| ################################################################## | |
| # STABLE RELEASES | |
| # Use download mirrors (servers-download.jq) | |
| # URL: https://<host><download_path_images>/<BOARD>/archive/<FILE> | |
| # download_path_images is already normalized & defaults to /dl in JSON. | |
| ################################################################## | |
| WEBSEEDS=$( | |
| jq -r --arg board "$BOARD" --arg file "$FILE" ' | |
| .[] | |
| | "https://\(.host)\(.download_path_images)/\($board)/archive/\($file)" | |
| ' output/info/servers-download.jq | paste -sd, - | |
| ) | |
| else | |
| ################################################################## | |
| # NIGHTLY BUILDS | |
| # Use cache mirrors (servers-cache.jq) + GitHub as extra webseed | |
| # Path is FIXED: https://SERVER/cache/os/<version>/<FILE> | |
| ################################################################## | |
| WEBSEEDS=$( | |
| jq -r \ | |
| --arg repo "os" \ | |
| --arg ver "${{ needs.matrix_prep.outputs.version }}" \ | |
| --arg file "$FILE" ' | |
| .[] | |
| | "https://\(.host)/cache/\($repo)/\($ver)/\($file)" | |
| ' output/info/servers-cache.jq | paste -sd, - | |
| ) | |
| # Append GitHub webseed | |
| if [ -n "$WEBSEEDS" ]; then | |
| WEBSEEDS+="," | |
| fi | |
| WEBSEEDS+="https://github.com/armbian/os/releases/download/${{ needs.matrix_prep.outputs.version }}/${FILE}" | |
| fi | |
| echo "WEBSEEDS: $WEBSEEDS" | |
| # Go to the archive directory that contains FILE | |
| cd "$(dirname "$first_match")" || exit 1 | |
| mktorrent \ | |
| --comment="Armbian torrent for ${FILE}" \ | |
| --verbose \ | |
| ${TRACKERS} \ | |
| --web-seed="${WEBSEEDS}" \ | |
| "${FILE}" | |
| # drop .txt helper files | |
| rm -f *.txt | |
| - name: "Prepare release artifacts (exclude .asc, .sha, .torrent)" | |
| run: | | |
| # Start from a clean directory | |
| rm -rf output/release | |
| mkdir -p output/release | |
| # Copy wanted artifacts from output/images, preserving folder structure | |
| # e.g. output/images/BOARDNAME/archive/Armbian_*.img -> output/release/output/images/BOARDNAME/archive/... | |
| find output/images -type f -name 'Armbian_*.*' \ | |
| ! -name '*.asc' \ | |
| ! -name '*.sha' \ | |
| ! -name '*.torrent' \ | |
| -exec cp --parents {} output/release/ \; | |
| # debug | |
| tree output/images | |
| echo "# debug" | |
| tree output/release | |
| - name: "Upload artefacts except .asc, .sha and .torrent" | |
| timeout-minutes: 60 | |
| if: ${{ ( github.event.inputs.nightlybuild || 'yes' ) == 'yes' || env.RELEASE_REPOSITORY == 'community' || env.RELEASE_REPOSITORY == 'distribution' }} | |
| uses: ncipollo/release-action@v1 | |
| with: | |
| repo: "${{ env.RELEASE_REPOSITORY }}" | |
| tag: "${{ needs.matrix_prep.outputs.version }}" | |
| artifacts: "output/release/output/images/*/*/Armbian_*.*" | |
| omitBody: true | |
| replacesArtifacts: true | |
| omitName: true | |
| makeLatest: false | |
| omitPrereleaseDuringUpdate: true | |
| allowUpdates: true | |
| artifactErrorsFailBuild: true | |
| token: "${{ env.GH_TOKEN }}" | |
| - name: "Upload to servers" | |
| run: | | |
| # debug | |
| echo "=== servers-cache.jq ===" | |
| jq . output/info/servers-cache.jq || cat output/info/servers-cache.jq | |
| echo "=== servers-upload.jq ===" | |
| jq . output/info/servers-upload.jq || cat output/info/servers-upload.jq | |
| max_retries=3 | |
| sync_from_json() { | |
| local json_file=$1 | |
| local mode=$2 # "cache" or "upload" | |
| echo "== Processing ${json_file} (mode: ${mode}) ==" | |
| # Iterate over JSON array elements | |
| while IFS= read -r server; do | |
| # JSON structure (same for all files): | |
| # { | |
| # "host": "...", | |
| # "upload_path": "...", | |
| # "download_path_archive": "...", | |
| # "download_path_images": "...", | |
| # "download_path_debs": "...", | |
| # "port": 22, | |
| # "username": "mirror" | |
| # } | |
| SERVER_URL=$(jq -r '.host // empty' <<<"$server") | |
| SERVER_PATH=$(jq -r '.upload_path // ""' <<<"$server") | |
| SERVER_PORT=$(jq -r '.port // 22' <<<"$server") | |
| SERVER_USERNAME=$(jq -r '.username // "mirror"' <<<"$server") | |
| # skip empty host | |
| [ -z "$SERVER_URL" ] && continue | |
| echo "Processing: $SERVER_URL:$SERVER_PORT (upload_path: $SERVER_PATH)" | |
| # Clean known_hosts entry (host:port form) | |
| ssh-keygen -f "${HOME}/.ssh/known_hosts" -R "[${SERVER_URL}]:${SERVER_PORT}" 2>/dev/null || true | |
| # Select rsync filters + remote subdir | |
| if [ "$mode" = "cache" ]; then | |
| # only .sha, .asc and .torrent | |
| RSYNC_FILTER=( | |
| --include='*/' | |
| --include='*.sha' | |
| --include='*.asc' | |
| --include='*.torrent' | |
| --exclude='*' | |
| ) | |
| REMOTE_SUBDIR="cache/artifacts/" | |
| else | |
| # everything | |
| RSYNC_FILTER=( | |
| --include='*/' | |
| --include='*' | |
| ) | |
| REMOTE_SUBDIR="incoming/${GITHUB_ACTOR}/" | |
| fi | |
| # Retry loop | |
| for attempt in $(seq 1 "$max_retries"); do | |
| echo "[$SERVER_URL] rsync attempt ${attempt}/${max_retries}..." | |
| if rsync --progress \ | |
| -e "ssh -p ${SERVER_PORT} -o StrictHostKeyChecking=accept-new" \ | |
| -rvP \ | |
| "${RSYNC_FILTER[@]}" \ | |
| output/images/ \ | |
| "${SERVER_USERNAME}@${SERVER_URL}:${SERVER_PATH}/${REMOTE_SUBDIR}" | |
| then | |
| echo "[$SERVER_URL] rsync successful." | |
| break | |
| fi | |
| if [ "$attempt" -eq "$max_retries" ]; then | |
| echo "[$SERVER_URL] rsync FAILED after ${max_retries} attempts." | |
| exit 1 | |
| fi | |
| echo "[$SERVER_URL] rsync failed. Retrying in 10 seconds..." | |
| sleep 10 | |
| done | |
| done < <(jq -c '.[]' "$json_file") | |
| } | |
| nightlybuild="${{ github.event.inputs.nightlybuild }}" | |
| nightlybuild_default="yes" | |
| RELEASE_REPOSITORY="${{ env.RELEASE_REPOSITORY }}" | |
| effective_nightlybuild="${nightlybuild:-$nightlybuild_default}" | |
| if [ "$effective_nightlybuild" = "yes" ] || \ | |
| [ "$RELEASE_REPOSITORY" = "community" ] || \ | |
| [ "$RELEASE_REPOSITORY" = "distribution" ]; then | |
| # Upload to cache servers: only .sha/.asc/.torrent | |
| sync_from_json output/info/servers-cache.jq cache | |
| fi | |
| if [ "$effective_nightlybuild" = "no" ] && \ | |
| [ "$RELEASE_REPOSITORY" = "os" ]; then | |
| # Upload to servers: everything (filter admits all files) | |
| sync_from_json output/info/servers-upload.jq upload | |
| fi | |
| # cleaning self-hosted runners | |
| - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}" | |
| if: always() | |
| uses: armbian/actions/runner-clean@main | |
| "build-images-chunk-4": # templated "build-images-chunk-4" | |
| needs: [ "matrix_prep", "all-artifacts-ready" ] | |
| timeout-minutes: 240 | |
| if: ${{ !failure() && !cancelled() && ( github.repository_owner == 'armbian' ) && ( needs.matrix_prep.outputs.images-chunk-not-empty-4 == 'yes' ) }} # <-- HERE: Chunk number. | |
| strategy: | |
| fail-fast: false # let other jobs try to complete if one fails | |
| matrix: ${{ fromJSON(needs.matrix_prep.outputs.images-chunk-json-4) }} # <-- HERE: Chunk number. | |
| name: ${{ matrix.desc || 'Empty I4' }} # <-- HERE: Chunk number. | |
| runs-on: ${{ matrix.runs_on }} | |
| steps: | |
| - name: Install dependencies | |
| run: | | |
| if [ ! -e /usr/bin/mktorrent ]; then | |
| sudo apt-get update | |
| sudo apt-get install -y mktorrent | |
| fi | |
| # cleaning self-hosted runners | |
| - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}" | |
| uses: armbian/actions/runner-clean@main | |
| # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later | |
| - name: Cleanup userpatches repo | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| run: rm -rf userpatches.repo | |
| - name: Checkout build repo | |
| uses: actions/checkout@v6 | |
| with: | |
| repository: ${{ env.BUILD_REPOSITORY }} | |
| ref: ${{ needs.matrix_prep.outputs.build-sha1 }} | |
| fetch-depth: ${{ matrix.fdepth }} | |
| clean: false # true is default. it *will* delete the host's /dev if mounted inside. | |
| # clone the userpatches repo (`armbian/os`) | |
| - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}" | |
| uses: actions/checkout@v6 | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| with: | |
| repository: ${{ env.USERPATCHES_REPOSITORY }} | |
| ref: ${{ env.USERPATCHES_REF }} | |
| fetch-depth: ${{ matrix.fdepth }} | |
| clean: false # true is default. | |
| path: userpatches.repo | |
| - name: "Put userpatches in place, and remove userpatches repo" | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| run: | | |
| mkdir -pv userpatches | |
| rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/ | |
| rm -rf userpatches.repo | |
| - name: "Cleanup leftover output" | |
| run: | | |
| rm -f userpatches/VERSION | |
| - name: ${{matrix.desc}} | |
| id: build-one-image | |
| timeout-minutes: 90 | |
| run: | | |
| # calculate loop from runner name | |
| if [ -z "${ImageOS}" ]; then | |
| USE_FIXED_LOOP_DEVICE=$(echo ${RUNNER_NAME} | rev | cut -d"-" -f1 | rev | sed 's/^0*//' | sed -e 's/^/\/dev\/loop/') | |
| fi | |
| bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" USE_FIXED_LOOP_DEVICE="$USE_FIXED_LOOP_DEVICE" SHARE_LOG=yes MAKE_FOLDERS="archive" IMAGE_VERSION=${{ needs.matrix_prep.outputs.version }} ${{env.EXTRA_PARAMS_IMAGE}} ${{env.EXTRA_PARAMS_ALL_BUILDS}} | |
| - name: "Logs: ${{ steps.build-one-image.outputs.logs_url }}" | |
| if: always() | |
| run: | | |
| echo "Logs: ${{ steps.build-one-image.outputs.logs_url }}" | |
| - name: Install SSH key | |
| uses: shimataro/ssh-key-action@v2 | |
| with: | |
| key: ${{ secrets.KEY_UPLOAD }} | |
| known_hosts: ${{ secrets.KNOWN_HOSTS_ARMBIAN_UPLOAD }} | |
| if_key_exists: replace | |
| - name: Check API rate limits | |
| run: | | |
| # install dependencies | |
| if ! command -v "gh" > /dev/null 2>&1; then | |
| sudo apt-get -y -qq install gh | |
| fi | |
| while true | |
| do | |
| API_CALLS_TOTAL=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.limit') | |
| API_CALLS_LEFT=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.remaining') | |
| PERCENT=$(( API_CALLS_LEFT * 100 / API_CALLS_TOTAL )) | |
| if (( PERCENT > 20 )); then | |
| echo "API rate in good shape: ${PERCENT}% free" | |
| break | |
| fi | |
| echo "API rate lower than 20%, sleeping 10m" | |
| sleep 10m | |
| done | |
| # show current api rate | |
| curl -s -H "Accept: application/vnd.github.v3+json" -H "Authorization: token ${{ secrets.ACCESS_TOKEN }}" https://api.github.com/rate_limit | |
| - name: Import GPG key | |
| env: | |
| GPG_KEY1: ${{ secrets.GPG_KEY1 }} | |
| if: env.GPG_KEY1 != null | |
| uses: crazy-max/ghaction-import-gpg@v6 | |
| with: | |
| gpg_private_key: ${{ secrets.GPG_KEY1 }} | |
| passphrase: ${{ secrets.GPG_PASSPHRASE1 }} | |
| - name: Sign | |
| env: | |
| GPG_PASSPHRASE1: ${{ secrets.GPG_PASSPHRASE1 }} | |
| if: env.GPG_PASSPHRASE1 != null | |
| run: | | |
| for extension in zip xz qcow2; do | |
| if ls -l output/images/*/archive/*.$extension &>/dev/null; then | |
| echo "${GPG_PASSPHRASE1}" | gpg --passphrase-fd 0 --armor --detach-sign --pinentry-mode loopback --batch --yes output/images/*/archive/*.$extension | |
| fi | |
| done | |
| # Download the artifacts (output/info) produced by the prepare-matrix job. | |
| - name: Download artifacts | |
| uses: actions/download-artifact@v6 | |
| with: | |
| name: build-info-json | |
| path: output/info | |
| - name: Generate torrent | |
| timeout-minutes: 3 | |
| run: | | |
| set -euo pipefail | |
| # Build tracker list (ignore empty/whitespace-only lines) | |
| TRACKERS=$( | |
| grep -v '^[[:space:]]*$' output/info/best-torrent-servers.txt \ | |
| | sort -R \ | |
| | sed 's/^/ --announce=/' | |
| ) | |
| # Find BOARD and FILE (first zip/xz/qcow2 in output/images/*/archive/) | |
| BOARD="" | |
| FILE="" | |
| first_match="" | |
| for ext in zip xz qcow2; do | |
| if ls output/images/*/archive/*."$ext" >/dev/null 2>&1; then | |
| first_match=$(ls -1 output/images/*/archive/*."$ext" | head -n1) | |
| # first_match = output/images/BOARD/archive/file.ext | |
| BOARD=$(basename "$(dirname "$(dirname "$first_match")")") # -> BOARD | |
| FILE=$(basename "$first_match") | |
| break | |
| fi | |
| done | |
| # Safety check | |
| if [ -z "$BOARD" ] || [ -z "$FILE" ]; then | |
| echo "No torrent source file found (zip/xz/qcow2) in output/images/*/archive" >&2 | |
| exit 1 | |
| fi | |
| # Nightly / stable logic (templated) | |
| RELEASE_REPOSITORY="${{ env.RELEASE_REPOSITORY }}" | |
| nightlybuild="${{ github.event.inputs.nightlybuild }}" | |
| nightlybuild_default="yes" | |
| effective_nightlybuild="${nightlybuild:-$nightlybuild_default}" | |
| WEBSEEDS="" | |
| if [ "$effective_nightlybuild" = "no" ] && [ "$RELEASE_REPOSITORY" != "distribution" ]; then | |
| ################################################################## | |
| # STABLE RELEASES | |
| # Use download mirrors (servers-download.jq) | |
| # URL: https://<host><download_path_images>/<BOARD>/archive/<FILE> | |
| # download_path_images is already normalized & defaults to /dl in JSON. | |
| ################################################################## | |
| WEBSEEDS=$( | |
| jq -r --arg board "$BOARD" --arg file "$FILE" ' | |
| .[] | |
| | "https://\(.host)\(.download_path_images)/\($board)/archive/\($file)" | |
| ' output/info/servers-download.jq | paste -sd, - | |
| ) | |
| else | |
| ################################################################## | |
| # NIGHTLY BUILDS | |
| # Use cache mirrors (servers-cache.jq) + GitHub as extra webseed | |
| # Path is FIXED: https://SERVER/cache/os/<version>/<FILE> | |
| ################################################################## | |
| WEBSEEDS=$( | |
| jq -r \ | |
| --arg repo "os" \ | |
| --arg ver "${{ needs.matrix_prep.outputs.version }}" \ | |
| --arg file "$FILE" ' | |
| .[] | |
| | "https://\(.host)/cache/\($repo)/\($ver)/\($file)" | |
| ' output/info/servers-cache.jq | paste -sd, - | |
| ) | |
| # Append GitHub webseed | |
| if [ -n "$WEBSEEDS" ]; then | |
| WEBSEEDS+="," | |
| fi | |
| WEBSEEDS+="https://github.com/armbian/os/releases/download/${{ needs.matrix_prep.outputs.version }}/${FILE}" | |
| fi | |
| echo "WEBSEEDS: $WEBSEEDS" | |
| # Go to the archive directory that contains FILE | |
| cd "$(dirname "$first_match")" || exit 1 | |
| mktorrent \ | |
| --comment="Armbian torrent for ${FILE}" \ | |
| --verbose \ | |
| ${TRACKERS} \ | |
| --web-seed="${WEBSEEDS}" \ | |
| "${FILE}" | |
| # drop .txt helper files | |
| rm -f *.txt | |
| - name: "Prepare release artifacts (exclude .asc, .sha, .torrent)" | |
| run: | | |
| # Start from a clean directory | |
| rm -rf output/release | |
| mkdir -p output/release | |
| # Copy wanted artifacts from output/images, preserving folder structure | |
| # e.g. output/images/BOARDNAME/archive/Armbian_*.img -> output/release/output/images/BOARDNAME/archive/... | |
| find output/images -type f -name 'Armbian_*.*' \ | |
| ! -name '*.asc' \ | |
| ! -name '*.sha' \ | |
| ! -name '*.torrent' \ | |
| -exec cp --parents {} output/release/ \; | |
| # debug | |
| tree output/images | |
| echo "# debug" | |
| tree output/release | |
| - name: "Upload artefacts except .asc, .sha and .torrent" | |
| timeout-minutes: 60 | |
| if: ${{ ( github.event.inputs.nightlybuild || 'yes' ) == 'yes' || env.RELEASE_REPOSITORY == 'community' || env.RELEASE_REPOSITORY == 'distribution' }} | |
| uses: ncipollo/release-action@v1 | |
| with: | |
| repo: "${{ env.RELEASE_REPOSITORY }}" | |
| tag: "${{ needs.matrix_prep.outputs.version }}" | |
| artifacts: "output/release/output/images/*/*/Armbian_*.*" | |
| omitBody: true | |
| replacesArtifacts: true | |
| omitName: true | |
| makeLatest: false | |
| omitPrereleaseDuringUpdate: true | |
| allowUpdates: true | |
| artifactErrorsFailBuild: true | |
| token: "${{ env.GH_TOKEN }}" | |
| - name: "Upload to servers" | |
| run: | | |
| # debug | |
| echo "=== servers-cache.jq ===" | |
| jq . output/info/servers-cache.jq || cat output/info/servers-cache.jq | |
| echo "=== servers-upload.jq ===" | |
| jq . output/info/servers-upload.jq || cat output/info/servers-upload.jq | |
| max_retries=3 | |
| sync_from_json() { | |
| local json_file=$1 | |
| local mode=$2 # "cache" or "upload" | |
| echo "== Processing ${json_file} (mode: ${mode}) ==" | |
| # Iterate over JSON array elements | |
| while IFS= read -r server; do | |
| # JSON structure (same for all files): | |
| # { | |
| # "host": "...", | |
| # "upload_path": "...", | |
| # "download_path_archive": "...", | |
| # "download_path_images": "...", | |
| # "download_path_debs": "...", | |
| # "port": 22, | |
| # "username": "mirror" | |
| # } | |
| SERVER_URL=$(jq -r '.host // empty' <<<"$server") | |
| SERVER_PATH=$(jq -r '.upload_path // ""' <<<"$server") | |
| SERVER_PORT=$(jq -r '.port // 22' <<<"$server") | |
| SERVER_USERNAME=$(jq -r '.username // "mirror"' <<<"$server") | |
| # skip empty host | |
| [ -z "$SERVER_URL" ] && continue | |
| echo "Processing: $SERVER_URL:$SERVER_PORT (upload_path: $SERVER_PATH)" | |
| # Clean known_hosts entry (host:port form) | |
| ssh-keygen -f "${HOME}/.ssh/known_hosts" -R "[${SERVER_URL}]:${SERVER_PORT}" 2>/dev/null || true | |
| # Select rsync filters + remote subdir | |
| if [ "$mode" = "cache" ]; then | |
| # only .sha, .asc and .torrent | |
| RSYNC_FILTER=( | |
| --include='*/' | |
| --include='*.sha' | |
| --include='*.asc' | |
| --include='*.torrent' | |
| --exclude='*' | |
| ) | |
| REMOTE_SUBDIR="cache/artifacts/" | |
| else | |
| # everything | |
| RSYNC_FILTER=( | |
| --include='*/' | |
| --include='*' | |
| ) | |
| REMOTE_SUBDIR="incoming/${GITHUB_ACTOR}/" | |
| fi | |
| # Retry loop | |
| for attempt in $(seq 1 "$max_retries"); do | |
| echo "[$SERVER_URL] rsync attempt ${attempt}/${max_retries}..." | |
| if rsync --progress \ | |
| -e "ssh -p ${SERVER_PORT} -o StrictHostKeyChecking=accept-new" \ | |
| -rvP \ | |
| "${RSYNC_FILTER[@]}" \ | |
| output/images/ \ | |
| "${SERVER_USERNAME}@${SERVER_URL}:${SERVER_PATH}/${REMOTE_SUBDIR}" | |
| then | |
| echo "[$SERVER_URL] rsync successful." | |
| break | |
| fi | |
| if [ "$attempt" -eq "$max_retries" ]; then | |
| echo "[$SERVER_URL] rsync FAILED after ${max_retries} attempts." | |
| exit 1 | |
| fi | |
| echo "[$SERVER_URL] rsync failed. Retrying in 10 seconds..." | |
| sleep 10 | |
| done | |
| done < <(jq -c '.[]' "$json_file") | |
| } | |
| nightlybuild="${{ github.event.inputs.nightlybuild }}" | |
| nightlybuild_default="yes" | |
| RELEASE_REPOSITORY="${{ env.RELEASE_REPOSITORY }}" | |
| effective_nightlybuild="${nightlybuild:-$nightlybuild_default}" | |
| if [ "$effective_nightlybuild" = "yes" ] || \ | |
| [ "$RELEASE_REPOSITORY" = "community" ] || \ | |
| [ "$RELEASE_REPOSITORY" = "distribution" ]; then | |
| # Upload to cache servers: only .sha/.asc/.torrent | |
| sync_from_json output/info/servers-cache.jq cache | |
| fi | |
| if [ "$effective_nightlybuild" = "no" ] && \ | |
| [ "$RELEASE_REPOSITORY" = "os" ]; then | |
| # Upload to servers: everything (filter admits all files) | |
| sync_from_json output/info/servers-upload.jq upload | |
| fi | |
| # cleaning self-hosted runners | |
| - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}" | |
| if: always() | |
| uses: armbian/actions/runner-clean@main | |
| "build-images-chunk-5": # templated "build-images-chunk-5" | |
| needs: [ "matrix_prep", "all-artifacts-ready" ] | |
| timeout-minutes: 240 | |
| if: ${{ !failure() && !cancelled() && ( github.repository_owner == 'armbian' ) && ( needs.matrix_prep.outputs.images-chunk-not-empty-5 == 'yes' ) }} # <-- HERE: Chunk number. | |
| strategy: | |
| fail-fast: false # let other jobs try to complete if one fails | |
| matrix: ${{ fromJSON(needs.matrix_prep.outputs.images-chunk-json-5) }} # <-- HERE: Chunk number. | |
| name: ${{ matrix.desc || 'Empty I5' }} # <-- HERE: Chunk number. | |
| runs-on: ${{ matrix.runs_on }} | |
| steps: | |
| - name: Install dependencies | |
| run: | | |
| if [ ! -e /usr/bin/mktorrent ]; then | |
| sudo apt-get update | |
| sudo apt-get install -y mktorrent | |
| fi | |
| # cleaning self-hosted runners | |
| - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}" | |
| uses: armbian/actions/runner-clean@main | |
| # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later | |
| - name: Cleanup userpatches repo | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| run: rm -rf userpatches.repo | |
| - name: Checkout build repo | |
| uses: actions/checkout@v6 | |
| with: | |
| repository: ${{ env.BUILD_REPOSITORY }} | |
| ref: ${{ needs.matrix_prep.outputs.build-sha1 }} | |
| fetch-depth: ${{ matrix.fdepth }} | |
| clean: false # true is default. it *will* delete the host's /dev if mounted inside. | |
| # clone the userpatches repo (`armbian/os`) | |
| - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}" | |
| uses: actions/checkout@v6 | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| with: | |
| repository: ${{ env.USERPATCHES_REPOSITORY }} | |
| ref: ${{ env.USERPATCHES_REF }} | |
| fetch-depth: ${{ matrix.fdepth }} | |
| clean: false # true is default. | |
| path: userpatches.repo | |
| - name: "Put userpatches in place, and remove userpatches repo" | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| run: | | |
| mkdir -pv userpatches | |
| rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/ | |
| rm -rf userpatches.repo | |
| - name: "Cleanup leftover output" | |
| run: | | |
| rm -f userpatches/VERSION | |
| - name: ${{matrix.desc}} | |
| id: build-one-image | |
| timeout-minutes: 90 | |
| run: | | |
| # calculate loop from runner name | |
| if [ -z "${ImageOS}" ]; then | |
| USE_FIXED_LOOP_DEVICE=$(echo ${RUNNER_NAME} | rev | cut -d"-" -f1 | rev | sed 's/^0*//' | sed -e 's/^/\/dev\/loop/') | |
| fi | |
| bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" USE_FIXED_LOOP_DEVICE="$USE_FIXED_LOOP_DEVICE" SHARE_LOG=yes MAKE_FOLDERS="archive" IMAGE_VERSION=${{ needs.matrix_prep.outputs.version }} ${{env.EXTRA_PARAMS_IMAGE}} ${{env.EXTRA_PARAMS_ALL_BUILDS}} | |
| - name: "Logs: ${{ steps.build-one-image.outputs.logs_url }}" | |
| if: always() | |
| run: | | |
| echo "Logs: ${{ steps.build-one-image.outputs.logs_url }}" | |
| - name: Install SSH key | |
| uses: shimataro/ssh-key-action@v2 | |
| with: | |
| key: ${{ secrets.KEY_UPLOAD }} | |
| known_hosts: ${{ secrets.KNOWN_HOSTS_ARMBIAN_UPLOAD }} | |
| if_key_exists: replace | |
| - name: Check API rate limits | |
| run: | | |
| # install dependencies | |
| if ! command -v "gh" > /dev/null 2>&1; then | |
| sudo apt-get -y -qq install gh | |
| fi | |
| while true | |
| do | |
| API_CALLS_TOTAL=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.limit') | |
| API_CALLS_LEFT=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.remaining') | |
| PERCENT=$(( API_CALLS_LEFT * 100 / API_CALLS_TOTAL )) | |
| if (( PERCENT > 20 )); then | |
| echo "API rate in good shape: ${PERCENT}% free" | |
| break | |
| fi | |
| echo "API rate lower than 20%, sleeping 10m" | |
| sleep 10m | |
| done | |
| # show current api rate | |
| curl -s -H "Accept: application/vnd.github.v3+json" -H "Authorization: token ${{ secrets.ACCESS_TOKEN }}" https://api.github.com/rate_limit | |
| - name: Import GPG key | |
| env: | |
| GPG_KEY1: ${{ secrets.GPG_KEY1 }} | |
| if: env.GPG_KEY1 != null | |
| uses: crazy-max/ghaction-import-gpg@v6 | |
| with: | |
| gpg_private_key: ${{ secrets.GPG_KEY1 }} | |
| passphrase: ${{ secrets.GPG_PASSPHRASE1 }} | |
| - name: Sign | |
| env: | |
| GPG_PASSPHRASE1: ${{ secrets.GPG_PASSPHRASE1 }} | |
| if: env.GPG_PASSPHRASE1 != null | |
| run: | | |
| for extension in zip xz qcow2; do | |
| if ls -l output/images/*/archive/*.$extension &>/dev/null; then | |
| echo "${GPG_PASSPHRASE1}" | gpg --passphrase-fd 0 --armor --detach-sign --pinentry-mode loopback --batch --yes output/images/*/archive/*.$extension | |
| fi | |
| done | |
| # Download the artifacts (output/info) produced by the prepare-matrix job. | |
| - name: Download artifacts | |
| uses: actions/download-artifact@v6 | |
| with: | |
| name: build-info-json | |
| path: output/info | |
| - name: Generate torrent | |
| timeout-minutes: 3 | |
| run: | | |
| set -euo pipefail | |
| # Build tracker list (ignore empty/whitespace-only lines) | |
| TRACKERS=$( | |
| grep -v '^[[:space:]]*$' output/info/best-torrent-servers.txt \ | |
| | sort -R \ | |
| | sed 's/^/ --announce=/' | |
| ) | |
| # Find BOARD and FILE (first zip/xz/qcow2 in output/images/*/archive/) | |
| BOARD="" | |
| FILE="" | |
| first_match="" | |
| for ext in zip xz qcow2; do | |
| if ls output/images/*/archive/*."$ext" >/dev/null 2>&1; then | |
| first_match=$(ls -1 output/images/*/archive/*."$ext" | head -n1) | |
| # first_match = output/images/BOARD/archive/file.ext | |
| BOARD=$(basename "$(dirname "$(dirname "$first_match")")") # -> BOARD | |
| FILE=$(basename "$first_match") | |
| break | |
| fi | |
| done | |
| # Safety check | |
| if [ -z "$BOARD" ] || [ -z "$FILE" ]; then | |
| echo "No torrent source file found (zip/xz/qcow2) in output/images/*/archive" >&2 | |
| exit 1 | |
| fi | |
| # Nightly / stable logic (templated) | |
| RELEASE_REPOSITORY="${{ env.RELEASE_REPOSITORY }}" | |
| nightlybuild="${{ github.event.inputs.nightlybuild }}" | |
| nightlybuild_default="yes" | |
| effective_nightlybuild="${nightlybuild:-$nightlybuild_default}" | |
| WEBSEEDS="" | |
| if [ "$effective_nightlybuild" = "no" ] && [ "$RELEASE_REPOSITORY" != "distribution" ]; then | |
| ################################################################## | |
| # STABLE RELEASES | |
| # Use download mirrors (servers-download.jq) | |
| # URL: https://<host><download_path_images>/<BOARD>/archive/<FILE> | |
| # download_path_images is already normalized & defaults to /dl in JSON. | |
| ################################################################## | |
| WEBSEEDS=$( | |
| jq -r --arg board "$BOARD" --arg file "$FILE" ' | |
| .[] | |
| | "https://\(.host)\(.download_path_images)/\($board)/archive/\($file)" | |
| ' output/info/servers-download.jq | paste -sd, - | |
| ) | |
| else | |
| ################################################################## | |
| # NIGHTLY BUILDS | |
| # Use cache mirrors (servers-cache.jq) + GitHub as extra webseed | |
| # Path is FIXED: https://SERVER/cache/os/<version>/<FILE> | |
| ################################################################## | |
| WEBSEEDS=$( | |
| jq -r \ | |
| --arg repo "os" \ | |
| --arg ver "${{ needs.matrix_prep.outputs.version }}" \ | |
| --arg file "$FILE" ' | |
| .[] | |
| | "https://\(.host)/cache/\($repo)/\($ver)/\($file)" | |
| ' output/info/servers-cache.jq | paste -sd, - | |
| ) | |
| # Append GitHub webseed | |
| if [ -n "$WEBSEEDS" ]; then | |
| WEBSEEDS+="," | |
| fi | |
| WEBSEEDS+="https://github.com/armbian/os/releases/download/${{ needs.matrix_prep.outputs.version }}/${FILE}" | |
| fi | |
| echo "WEBSEEDS: $WEBSEEDS" | |
| # Go to the archive directory that contains FILE | |
| cd "$(dirname "$first_match")" || exit 1 | |
| mktorrent \ | |
| --comment="Armbian torrent for ${FILE}" \ | |
| --verbose \ | |
| ${TRACKERS} \ | |
| --web-seed="${WEBSEEDS}" \ | |
| "${FILE}" | |
| # drop .txt helper files | |
| rm -f *.txt | |
| - name: "Prepare release artifacts (exclude .asc, .sha, .torrent)" | |
| run: | | |
| # Start from a clean directory | |
| rm -rf output/release | |
| mkdir -p output/release | |
| # Copy wanted artifacts from output/images, preserving folder structure | |
| # e.g. output/images/BOARDNAME/archive/Armbian_*.img -> output/release/output/images/BOARDNAME/archive/... | |
| find output/images -type f -name 'Armbian_*.*' \ | |
| ! -name '*.asc' \ | |
| ! -name '*.sha' \ | |
| ! -name '*.torrent' \ | |
| -exec cp --parents {} output/release/ \; | |
| # debug | |
| tree output/images | |
| echo "# debug" | |
| tree output/release | |
| - name: "Upload artefacts except .asc, .sha and .torrent" | |
| timeout-minutes: 60 | |
| if: ${{ ( github.event.inputs.nightlybuild || 'yes' ) == 'yes' || env.RELEASE_REPOSITORY == 'community' || env.RELEASE_REPOSITORY == 'distribution' }} | |
| uses: ncipollo/release-action@v1 | |
| with: | |
| repo: "${{ env.RELEASE_REPOSITORY }}" | |
| tag: "${{ needs.matrix_prep.outputs.version }}" | |
| artifacts: "output/release/output/images/*/*/Armbian_*.*" | |
| omitBody: true | |
| replacesArtifacts: true | |
| omitName: true | |
| makeLatest: false | |
| omitPrereleaseDuringUpdate: true | |
| allowUpdates: true | |
| artifactErrorsFailBuild: true | |
| token: "${{ env.GH_TOKEN }}" | |
| - name: "Upload to servers" | |
| run: | | |
| # debug | |
| echo "=== servers-cache.jq ===" | |
| jq . output/info/servers-cache.jq || cat output/info/servers-cache.jq | |
| echo "=== servers-upload.jq ===" | |
| jq . output/info/servers-upload.jq || cat output/info/servers-upload.jq | |
| max_retries=3 | |
| sync_from_json() { | |
| local json_file=$1 | |
| local mode=$2 # "cache" or "upload" | |
| echo "== Processing ${json_file} (mode: ${mode}) ==" | |
| # Iterate over JSON array elements | |
| while IFS= read -r server; do | |
| # JSON structure (same for all files): | |
| # { | |
| # "host": "...", | |
| # "upload_path": "...", | |
| # "download_path_archive": "...", | |
| # "download_path_images": "...", | |
| # "download_path_debs": "...", | |
| # "port": 22, | |
| # "username": "mirror" | |
| # } | |
| SERVER_URL=$(jq -r '.host // empty' <<<"$server") | |
| SERVER_PATH=$(jq -r '.upload_path // ""' <<<"$server") | |
| SERVER_PORT=$(jq -r '.port // 22' <<<"$server") | |
| SERVER_USERNAME=$(jq -r '.username // "mirror"' <<<"$server") | |
| # skip empty host | |
| [ -z "$SERVER_URL" ] && continue | |
| echo "Processing: $SERVER_URL:$SERVER_PORT (upload_path: $SERVER_PATH)" | |
| # Clean known_hosts entry (host:port form) | |
| ssh-keygen -f "${HOME}/.ssh/known_hosts" -R "[${SERVER_URL}]:${SERVER_PORT}" 2>/dev/null || true | |
| # Select rsync filters + remote subdir | |
| if [ "$mode" = "cache" ]; then | |
| # only .sha, .asc and .torrent | |
| RSYNC_FILTER=( | |
| --include='*/' | |
| --include='*.sha' | |
| --include='*.asc' | |
| --include='*.torrent' | |
| --exclude='*' | |
| ) | |
| REMOTE_SUBDIR="cache/artifacts/" | |
| else | |
| # everything | |
| RSYNC_FILTER=( | |
| --include='*/' | |
| --include='*' | |
| ) | |
| REMOTE_SUBDIR="incoming/${GITHUB_ACTOR}/" | |
| fi | |
| # Retry loop | |
| for attempt in $(seq 1 "$max_retries"); do | |
| echo "[$SERVER_URL] rsync attempt ${attempt}/${max_retries}..." | |
| if rsync --progress \ | |
| -e "ssh -p ${SERVER_PORT} -o StrictHostKeyChecking=accept-new" \ | |
| -rvP \ | |
| "${RSYNC_FILTER[@]}" \ | |
| output/images/ \ | |
| "${SERVER_USERNAME}@${SERVER_URL}:${SERVER_PATH}/${REMOTE_SUBDIR}" | |
| then | |
| echo "[$SERVER_URL] rsync successful." | |
| break | |
| fi | |
| if [ "$attempt" -eq "$max_retries" ]; then | |
| echo "[$SERVER_URL] rsync FAILED after ${max_retries} attempts." | |
| exit 1 | |
| fi | |
| echo "[$SERVER_URL] rsync failed. Retrying in 10 seconds..." | |
| sleep 10 | |
| done | |
| done < <(jq -c '.[]' "$json_file") | |
| } | |
| nightlybuild="${{ github.event.inputs.nightlybuild }}" | |
| nightlybuild_default="yes" | |
| RELEASE_REPOSITORY="${{ env.RELEASE_REPOSITORY }}" | |
| effective_nightlybuild="${nightlybuild:-$nightlybuild_default}" | |
| if [ "$effective_nightlybuild" = "yes" ] || \ | |
| [ "$RELEASE_REPOSITORY" = "community" ] || \ | |
| [ "$RELEASE_REPOSITORY" = "distribution" ]; then | |
| # Upload to cache servers: only .sha/.asc/.torrent | |
| sync_from_json output/info/servers-cache.jq cache | |
| fi | |
| if [ "$effective_nightlybuild" = "no" ] && \ | |
| [ "$RELEASE_REPOSITORY" = "os" ]; then | |
| # Upload to servers: everything (filter admits all files) | |
| sync_from_json output/info/servers-upload.jq upload | |
| fi | |
| # cleaning self-hosted runners | |
| - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}" | |
| if: always() | |
| uses: armbian/actions/runner-clean@main | |
| "build-images-chunk-6": # templated "build-images-chunk-6" | |
| needs: [ "matrix_prep", "all-artifacts-ready" ] | |
| timeout-minutes: 240 | |
| if: ${{ !failure() && !cancelled() && ( github.repository_owner == 'armbian' ) && ( needs.matrix_prep.outputs.images-chunk-not-empty-6 == 'yes' ) }} # <-- HERE: Chunk number. | |
| strategy: | |
| fail-fast: false # let other jobs try to complete if one fails | |
| matrix: ${{ fromJSON(needs.matrix_prep.outputs.images-chunk-json-6) }} # <-- HERE: Chunk number. | |
| name: ${{ matrix.desc || 'Empty I6' }} # <-- HERE: Chunk number. | |
| runs-on: ${{ matrix.runs_on }} | |
| steps: | |
| - name: Install dependencies | |
| run: | | |
| if [ ! -e /usr/bin/mktorrent ]; then | |
| sudo apt-get update | |
| sudo apt-get install -y mktorrent | |
| fi | |
| # cleaning self-hosted runners | |
| - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}" | |
| uses: armbian/actions/runner-clean@main | |
| # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later | |
| - name: Cleanup userpatches repo | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| run: rm -rf userpatches.repo | |
| - name: Checkout build repo | |
| uses: actions/checkout@v6 | |
| with: | |
| repository: ${{ env.BUILD_REPOSITORY }} | |
| ref: ${{ needs.matrix_prep.outputs.build-sha1 }} | |
| fetch-depth: ${{ matrix.fdepth }} | |
| clean: false # true is default. it *will* delete the host's /dev if mounted inside. | |
| # clone the userpatches repo (`armbian/os`) | |
| - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}" | |
| uses: actions/checkout@v6 | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| with: | |
| repository: ${{ env.USERPATCHES_REPOSITORY }} | |
| ref: ${{ env.USERPATCHES_REF }} | |
| fetch-depth: ${{ matrix.fdepth }} | |
| clean: false # true is default. | |
| path: userpatches.repo | |
| - name: "Put userpatches in place, and remove userpatches repo" | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| run: | | |
| mkdir -pv userpatches | |
| rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/ | |
| rm -rf userpatches.repo | |
| - name: "Cleanup leftover output" | |
| run: | | |
| rm -f userpatches/VERSION | |
| - name: ${{matrix.desc}} | |
| id: build-one-image | |
| timeout-minutes: 90 | |
| run: | | |
| # calculate loop from runner name | |
| if [ -z "${ImageOS}" ]; then | |
| USE_FIXED_LOOP_DEVICE=$(echo ${RUNNER_NAME} | rev | cut -d"-" -f1 | rev | sed 's/^0*//' | sed -e 's/^/\/dev\/loop/') | |
| fi | |
| bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" USE_FIXED_LOOP_DEVICE="$USE_FIXED_LOOP_DEVICE" SHARE_LOG=yes MAKE_FOLDERS="archive" IMAGE_VERSION=${{ needs.matrix_prep.outputs.version }} ${{env.EXTRA_PARAMS_IMAGE}} ${{env.EXTRA_PARAMS_ALL_BUILDS}} | |
| - name: "Logs: ${{ steps.build-one-image.outputs.logs_url }}" | |
| if: always() | |
| run: | | |
| echo "Logs: ${{ steps.build-one-image.outputs.logs_url }}" | |
| - name: Install SSH key | |
| uses: shimataro/ssh-key-action@v2 | |
| with: | |
| key: ${{ secrets.KEY_UPLOAD }} | |
| known_hosts: ${{ secrets.KNOWN_HOSTS_ARMBIAN_UPLOAD }} | |
| if_key_exists: replace | |
| - name: Check API rate limits | |
| run: | | |
| # install dependencies | |
| if ! command -v "gh" > /dev/null 2>&1; then | |
| sudo apt-get -y -qq install gh | |
| fi | |
| while true | |
| do | |
| API_CALLS_TOTAL=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.limit') | |
| API_CALLS_LEFT=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.remaining') | |
| PERCENT=$(( API_CALLS_LEFT * 100 / API_CALLS_TOTAL )) | |
| if (( PERCENT > 20 )); then | |
| echo "API rate in good shape: ${PERCENT}% free" | |
| break | |
| fi | |
| echo "API rate lower than 20%, sleeping 10m" | |
| sleep 10m | |
| done | |
| # show current api rate | |
| curl -s -H "Accept: application/vnd.github.v3+json" -H "Authorization: token ${{ secrets.ACCESS_TOKEN }}" https://api.github.com/rate_limit | |
| - name: Import GPG key | |
| env: | |
| GPG_KEY1: ${{ secrets.GPG_KEY1 }} | |
| if: env.GPG_KEY1 != null | |
| uses: crazy-max/ghaction-import-gpg@v6 | |
| with: | |
| gpg_private_key: ${{ secrets.GPG_KEY1 }} | |
| passphrase: ${{ secrets.GPG_PASSPHRASE1 }} | |
| - name: Sign | |
| env: | |
| GPG_PASSPHRASE1: ${{ secrets.GPG_PASSPHRASE1 }} | |
| if: env.GPG_PASSPHRASE1 != null | |
| run: | | |
| for extension in zip xz qcow2; do | |
| if ls -l output/images/*/archive/*.$extension &>/dev/null; then | |
| echo "${GPG_PASSPHRASE1}" | gpg --passphrase-fd 0 --armor --detach-sign --pinentry-mode loopback --batch --yes output/images/*/archive/*.$extension | |
| fi | |
| done | |
| # Download the artifacts (output/info) produced by the prepare-matrix job. | |
| - name: Download artifacts | |
| uses: actions/download-artifact@v6 | |
| with: | |
| name: build-info-json | |
| path: output/info | |
| - name: Generate torrent | |
| timeout-minutes: 3 | |
| run: | | |
| set -euo pipefail | |
| # Build tracker list (ignore empty/whitespace-only lines) | |
| TRACKERS=$( | |
| grep -v '^[[:space:]]*$' output/info/best-torrent-servers.txt \ | |
| | sort -R \ | |
| | sed 's/^/ --announce=/' | |
| ) | |
| # Find BOARD and FILE (first zip/xz/qcow2 in output/images/*/archive/) | |
| BOARD="" | |
| FILE="" | |
| first_match="" | |
| for ext in zip xz qcow2; do | |
| if ls output/images/*/archive/*."$ext" >/dev/null 2>&1; then | |
| first_match=$(ls -1 output/images/*/archive/*."$ext" | head -n1) | |
| # first_match = output/images/BOARD/archive/file.ext | |
| BOARD=$(basename "$(dirname "$(dirname "$first_match")")") # -> BOARD | |
| FILE=$(basename "$first_match") | |
| break | |
| fi | |
| done | |
| # Safety check | |
| if [ -z "$BOARD" ] || [ -z "$FILE" ]; then | |
| echo "No torrent source file found (zip/xz/qcow2) in output/images/*/archive" >&2 | |
| exit 1 | |
| fi | |
| # Nightly / stable logic (templated) | |
| RELEASE_REPOSITORY="${{ env.RELEASE_REPOSITORY }}" | |
| nightlybuild="${{ github.event.inputs.nightlybuild }}" | |
| nightlybuild_default="yes" | |
| effective_nightlybuild="${nightlybuild:-$nightlybuild_default}" | |
| WEBSEEDS="" | |
| if [ "$effective_nightlybuild" = "no" ] && [ "$RELEASE_REPOSITORY" != "distribution" ]; then | |
| ################################################################## | |
| # STABLE RELEASES | |
| # Use download mirrors (servers-download.jq) | |
| # URL: https://<host><download_path_images>/<BOARD>/archive/<FILE> | |
| # download_path_images is already normalized & defaults to /dl in JSON. | |
| ################################################################## | |
| WEBSEEDS=$( | |
| jq -r --arg board "$BOARD" --arg file "$FILE" ' | |
| .[] | |
| | "https://\(.host)\(.download_path_images)/\($board)/archive/\($file)" | |
| ' output/info/servers-download.jq | paste -sd, - | |
| ) | |
| else | |
| ################################################################## | |
| # NIGHTLY BUILDS | |
| # Use cache mirrors (servers-cache.jq) + GitHub as extra webseed | |
| # Path is FIXED: https://SERVER/cache/os/<version>/<FILE> | |
| ################################################################## | |
| WEBSEEDS=$( | |
| jq -r \ | |
| --arg repo "os" \ | |
| --arg ver "${{ needs.matrix_prep.outputs.version }}" \ | |
| --arg file "$FILE" ' | |
| .[] | |
| | "https://\(.host)/cache/\($repo)/\($ver)/\($file)" | |
| ' output/info/servers-cache.jq | paste -sd, - | |
| ) | |
| # Append GitHub webseed | |
| if [ -n "$WEBSEEDS" ]; then | |
| WEBSEEDS+="," | |
| fi | |
| WEBSEEDS+="https://github.com/armbian/os/releases/download/${{ needs.matrix_prep.outputs.version }}/${FILE}" | |
| fi | |
| echo "WEBSEEDS: $WEBSEEDS" | |
| # Go to the archive directory that contains FILE | |
| cd "$(dirname "$first_match")" || exit 1 | |
| mktorrent \ | |
| --comment="Armbian torrent for ${FILE}" \ | |
| --verbose \ | |
| ${TRACKERS} \ | |
| --web-seed="${WEBSEEDS}" \ | |
| "${FILE}" | |
| # drop .txt helper files | |
| rm -f *.txt | |
| - name: "Prepare release artifacts (exclude .asc, .sha, .torrent)" | |
| run: | | |
| # Start from a clean directory | |
| rm -rf output/release | |
| mkdir -p output/release | |
| # Copy wanted artifacts from output/images, preserving folder structure | |
| # e.g. output/images/BOARDNAME/archive/Armbian_*.img -> output/release/output/images/BOARDNAME/archive/... | |
| find output/images -type f -name 'Armbian_*.*' \ | |
| ! -name '*.asc' \ | |
| ! -name '*.sha' \ | |
| ! -name '*.torrent' \ | |
| -exec cp --parents {} output/release/ \; | |
| # debug | |
| tree output/images | |
| echo "# debug" | |
| tree output/release | |
| - name: "Upload artefacts except .asc, .sha and .torrent" | |
| timeout-minutes: 60 | |
| if: ${{ ( github.event.inputs.nightlybuild || 'yes' ) == 'yes' || env.RELEASE_REPOSITORY == 'community' || env.RELEASE_REPOSITORY == 'distribution' }} | |
| uses: ncipollo/release-action@v1 | |
| with: | |
| repo: "${{ env.RELEASE_REPOSITORY }}" | |
| tag: "${{ needs.matrix_prep.outputs.version }}" | |
| artifacts: "output/release/output/images/*/*/Armbian_*.*" | |
| omitBody: true | |
| replacesArtifacts: true | |
| omitName: true | |
| makeLatest: false | |
| omitPrereleaseDuringUpdate: true | |
| allowUpdates: true | |
| artifactErrorsFailBuild: true | |
| token: "${{ env.GH_TOKEN }}" | |
| - name: "Upload to servers" | |
| run: | | |
| # debug | |
| echo "=== servers-cache.jq ===" | |
| jq . output/info/servers-cache.jq || cat output/info/servers-cache.jq | |
| echo "=== servers-upload.jq ===" | |
| jq . output/info/servers-upload.jq || cat output/info/servers-upload.jq | |
| max_retries=3 | |
| sync_from_json() { | |
| local json_file=$1 | |
| local mode=$2 # "cache" or "upload" | |
| echo "== Processing ${json_file} (mode: ${mode}) ==" | |
| # Iterate over JSON array elements | |
| while IFS= read -r server; do | |
| # JSON structure (same for all files): | |
| # { | |
| # "host": "...", | |
| # "upload_path": "...", | |
| # "download_path_archive": "...", | |
| # "download_path_images": "...", | |
| # "download_path_debs": "...", | |
| # "port": 22, | |
| # "username": "mirror" | |
| # } | |
| SERVER_URL=$(jq -r '.host // empty' <<<"$server") | |
| SERVER_PATH=$(jq -r '.upload_path // ""' <<<"$server") | |
| SERVER_PORT=$(jq -r '.port // 22' <<<"$server") | |
| SERVER_USERNAME=$(jq -r '.username // "mirror"' <<<"$server") | |
| # skip empty host | |
| [ -z "$SERVER_URL" ] && continue | |
| echo "Processing: $SERVER_URL:$SERVER_PORT (upload_path: $SERVER_PATH)" | |
| # Clean known_hosts entry (host:port form) | |
| ssh-keygen -f "${HOME}/.ssh/known_hosts" -R "[${SERVER_URL}]:${SERVER_PORT}" 2>/dev/null || true | |
| # Select rsync filters + remote subdir | |
| if [ "$mode" = "cache" ]; then | |
| # only .sha, .asc and .torrent | |
| RSYNC_FILTER=( | |
| --include='*/' | |
| --include='*.sha' | |
| --include='*.asc' | |
| --include='*.torrent' | |
| --exclude='*' | |
| ) | |
| REMOTE_SUBDIR="cache/artifacts/" | |
| else | |
| # everything | |
| RSYNC_FILTER=( | |
| --include='*/' | |
| --include='*' | |
| ) | |
| REMOTE_SUBDIR="incoming/${GITHUB_ACTOR}/" | |
| fi | |
| # Retry loop | |
| for attempt in $(seq 1 "$max_retries"); do | |
| echo "[$SERVER_URL] rsync attempt ${attempt}/${max_retries}..." | |
| if rsync --progress \ | |
| -e "ssh -p ${SERVER_PORT} -o StrictHostKeyChecking=accept-new" \ | |
| -rvP \ | |
| "${RSYNC_FILTER[@]}" \ | |
| output/images/ \ | |
| "${SERVER_USERNAME}@${SERVER_URL}:${SERVER_PATH}/${REMOTE_SUBDIR}" | |
| then | |
| echo "[$SERVER_URL] rsync successful." | |
| break | |
| fi | |
| if [ "$attempt" -eq "$max_retries" ]; then | |
| echo "[$SERVER_URL] rsync FAILED after ${max_retries} attempts." | |
| exit 1 | |
| fi | |
| echo "[$SERVER_URL] rsync failed. Retrying in 10 seconds..." | |
| sleep 10 | |
| done | |
| done < <(jq -c '.[]' "$json_file") | |
| } | |
| nightlybuild="${{ github.event.inputs.nightlybuild }}" | |
| nightlybuild_default="yes" | |
| RELEASE_REPOSITORY="${{ env.RELEASE_REPOSITORY }}" | |
| effective_nightlybuild="${nightlybuild:-$nightlybuild_default}" | |
| if [ "$effective_nightlybuild" = "yes" ] || \ | |
| [ "$RELEASE_REPOSITORY" = "community" ] || \ | |
| [ "$RELEASE_REPOSITORY" = "distribution" ]; then | |
| # Upload to cache servers: only .sha/.asc/.torrent | |
| sync_from_json output/info/servers-cache.jq cache | |
| fi | |
| if [ "$effective_nightlybuild" = "no" ] && \ | |
| [ "$RELEASE_REPOSITORY" = "os" ]; then | |
| # Upload to servers: everything (filter admits all files) | |
| sync_from_json output/info/servers-upload.jq upload | |
| fi | |
| # cleaning self-hosted runners | |
| - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}" | |
| if: always() | |
| uses: armbian/actions/runner-clean@main | |
| "build-images-chunk-7": # templated "build-images-chunk-7" | |
| needs: [ "matrix_prep", "all-artifacts-ready" ] | |
| timeout-minutes: 240 | |
| if: ${{ !failure() && !cancelled() && ( github.repository_owner == 'armbian' ) && ( needs.matrix_prep.outputs.images-chunk-not-empty-7 == 'yes' ) }} # <-- HERE: Chunk number. | |
| strategy: | |
| fail-fast: false # let other jobs try to complete if one fails | |
| matrix: ${{ fromJSON(needs.matrix_prep.outputs.images-chunk-json-7) }} # <-- HERE: Chunk number. | |
| name: ${{ matrix.desc || 'Empty I7' }} # <-- HERE: Chunk number. | |
| runs-on: ${{ matrix.runs_on }} | |
| steps: | |
| - name: Install dependencies | |
| run: | | |
| if [ ! -e /usr/bin/mktorrent ]; then | |
| sudo apt-get update | |
| sudo apt-get install -y mktorrent | |
| fi | |
| # cleaning self-hosted runners | |
| - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}" | |
| uses: armbian/actions/runner-clean@main | |
| # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later | |
| - name: Cleanup userpatches repo | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| run: rm -rf userpatches.repo | |
| - name: Checkout build repo | |
| uses: actions/checkout@v6 | |
| with: | |
| repository: ${{ env.BUILD_REPOSITORY }} | |
| ref: ${{ needs.matrix_prep.outputs.build-sha1 }} | |
| fetch-depth: ${{ matrix.fdepth }} | |
| clean: false # true is default. it *will* delete the host's /dev if mounted inside. | |
| # clone the userpatches repo (`armbian/os`) | |
| - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}" | |
| uses: actions/checkout@v6 | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| with: | |
| repository: ${{ env.USERPATCHES_REPOSITORY }} | |
| ref: ${{ env.USERPATCHES_REF }} | |
| fetch-depth: ${{ matrix.fdepth }} | |
| clean: false # true is default. | |
| path: userpatches.repo | |
| - name: "Put userpatches in place, and remove userpatches repo" | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| run: | | |
| mkdir -pv userpatches | |
| rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/ | |
| rm -rf userpatches.repo | |
| - name: "Cleanup leftover output" | |
| run: | | |
| rm -f userpatches/VERSION | |
| - name: ${{matrix.desc}} | |
| id: build-one-image | |
| timeout-minutes: 90 | |
| run: | | |
| # calculate loop from runner name | |
| if [ -z "${ImageOS}" ]; then | |
| USE_FIXED_LOOP_DEVICE=$(echo "${RUNNER_NAME}" | rev | cut -d"-" -f1 | rev | sed 's/^0*//' | sed -e 's/^/\/dev\/loop/') | |
| fi | |
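| # Example (hypothetical runner name): RUNNER_NAME="builder-03" -> last | |
| # "-"-separated field "03" -> leading zeros stripped -> /dev/loop3 | |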
| bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" USE_FIXED_LOOP_DEVICE="$USE_FIXED_LOOP_DEVICE" SHARE_LOG=yes MAKE_FOLDERS="archive" IMAGE_VERSION=${{ needs.matrix_prep.outputs.version }} ${{env.EXTRA_PARAMS_IMAGE}} ${{env.EXTRA_PARAMS_ALL_BUILDS}} | |
| - name: "Logs: ${{ steps.build-one-image.outputs.logs_url }}" | |
| if: always() | |
| run: | | |
| echo "Logs: ${{ steps.build-one-image.outputs.logs_url }}" | |
| - name: Install SSH key | |
| uses: shimataro/ssh-key-action@v2 | |
| with: | |
| key: ${{ secrets.KEY_UPLOAD }} | |
| known_hosts: ${{ secrets.KNOWN_HOSTS_ARMBIAN_UPLOAD }} | |
| if_key_exists: replace | |
| - name: Check API rate limits | |
| run: | | |
| # install dependencies | |
| if ! command -v "gh" > /dev/null 2>&1; then | |
| sudo apt-get -y -qq install gh | |
| fi | |
| while true | |
| do | |
| API_CALLS_TOTAL=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.limit') | |
| API_CALLS_LEFT=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.remaining') | |
| PERCENT=$(( API_CALLS_LEFT * 100 / API_CALLS_TOTAL )) | |
| if (( PERCENT > 20 )); then | |
| echo "API rate in good shape: ${PERCENT}% free" | |
| break | |
| fi | |
| echo "API rate lower than 20%, sleeping 10m" | |
| sleep 10m | |
| done | |
| # show current api rate | |
| curl -s -H "Accept: application/vnd.github.v3+json" -H "Authorization: token ${{ secrets.ACCESS_TOKEN }}" https://api.github.com/rate_limit | |
| - name: Import GPG key | |
| env: | |
| GPG_KEY1: ${{ secrets.GPG_KEY1 }} | |
| if: env.GPG_KEY1 != null | |
| uses: crazy-max/ghaction-import-gpg@v6 | |
| with: | |
| gpg_private_key: ${{ secrets.GPG_KEY1 }} | |
| passphrase: ${{ secrets.GPG_PASSPHRASE1 }} | |
| - name: Sign | |
| env: | |
| GPG_PASSPHRASE1: ${{ secrets.GPG_PASSPHRASE1 }} | |
| if: env.GPG_PASSPHRASE1 != null | |
| run: | | |
| for extension in zip xz qcow2; do | |
| if ls -l output/images/*/archive/*.$extension &>/dev/null; then | |
| echo ${{ secrets.GPG_PASSPHRASE1 }} | gpg --passphrase-fd 0 --armor --detach-sign --pinentry-mode loopback --batch --yes output/images/*/archive/*.$extension | |
| fi | |
| done | |
| # Download the artifacts (output/info) produced by the prepare-matrix job. | |
| - name: Download artifacts | |
| uses: actions/download-artifact@v6 | |
| with: | |
| name: build-info-json | |
| path: output/info | |
| - name: Generate torrent | |
| timeout-minutes: 3 | |
| run: | | |
| set -euo pipefail | |
| # Build tracker list (ignore empty/whitespace-only lines) | |
| TRACKERS=$( | |
| grep -v '^[[:space:]]*$' output/info/best-torrent-servers.txt \ | |
| | sort -R \ | |
| | sed 's/^/ --announce=/' | |
| ) | |
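| # Each surviving line becomes one announce argument, e.g. (hypothetical | |
| # tracker) " --announce=udp://tracker.example.org:6969/announce". | |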
| # Find BOARD and FILE (first zip/xz/qcow2 in output/images/*/archive/) | |
| BOARD="" | |
| FILE="" | |
| first_match="" | |
| for ext in zip xz qcow2; do | |
| if ls output/images/*/archive/*."$ext" >/dev/null 2>&1; then | |
| first_match=$(ls -1 output/images/*/archive/*."$ext" | head -n1) | |
| # first_match = output/images/BOARD/archive/file.ext | |
| BOARD=$(basename "$(dirname "$(dirname "$first_match")")") # -> BOARD | |
| FILE=$(basename "$first_match") | |
| break | |
| fi | |
| done | |
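| # Example (hypothetical file): output/images/bananapim5/archive/Armbian_x.y_Bananapim5.img.xz | |
| # -> BOARD=bananapim5, FILE=Armbian_x.y_Bananapim5.img.xz | |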
| # Safety check | |
| if [ -z "$BOARD" ] || [ -z "$FILE" ]; then | |
| echo "No torrent source file found (zip/xz/qcow2) in output/images/*/archive" >&2 | |
| exit 1 | |
| fi | |
| # Nightly / stable logic (templated) | |
| RELEASE_REPOSITORY="${{ env.RELEASE_REPOSITORY }}" | |
| nightlybuild="${{ github.event.inputs.nightlybuild }}" | |
| nightlybuild_default="yes" | |
| effective_nightlybuild="${nightlybuild:-$nightlybuild_default}" | |
| WEBSEEDS="" | |
| if [ "$effective_nightlybuild" = "no" ] && [ "$RELEASE_REPOSITORY" != "distribution" ]; then | |
| ################################################################## | |
| # STABLE RELEASES | |
| # Use download mirrors (servers-download.jq) | |
| # URL: https://<host><download_path_images>/<BOARD>/archive/<FILE> | |
| # download_path_images is already normalized & defaults to /dl in JSON. | |
| ################################################################## | |
| WEBSEEDS=$( | |
| jq -r --arg board "$BOARD" --arg file "$FILE" ' | |
| .[] | |
| | "https://\(.host)\(.download_path_images)/\($board)/archive/\($file)" | |
| ' output/info/servers-download.jq | paste -sd, - | |
| ) | |
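| # Example (hypothetical mirror): {"host":"mirror.example.org","download_path_images":"/dl"} | |
| # -> https://mirror.example.org/dl/$BOARD/archive/$FILE | |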
| else | |
| ################################################################## | |
| # NIGHTLY BUILDS | |
| # Use cache mirrors (servers-cache.jq) + GitHub as extra webseed | |
| # Path is FIXED: https://SERVER/cache/os/<version>/<FILE> | |
| ################################################################## | |
| WEBSEEDS=$( | |
| jq -r \ | |
| --arg repo "os" \ | |
| --arg ver "${{ needs.matrix_prep.outputs.version }}" \ | |
| --arg file "$FILE" ' | |
| .[] | |
| | "https://\(.host)/cache/\($repo)/\($ver)/\($file)" | |
| ' output/info/servers-cache.jq | paste -sd, - | |
| ) | |
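| # Example (hypothetical mirror and version): host "cache.example.org", version | |
| # "25.11.0-trunk.1" -> https://cache.example.org/cache/os/25.11.0-trunk.1/$FILE | |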
| # Append GitHub webseed | |
| if [ -n "$WEBSEEDS" ]; then | |
| WEBSEEDS+="," | |
| fi | |
| WEBSEEDS+="https://github.com/armbian/os/releases/download/${{ needs.matrix_prep.outputs.version }}/${FILE}" | |
| fi | |
| echo "WEBSEEDS: $WEBSEEDS" | |
| # Go to the archive directory that contains FILE | |
| cd "$(dirname "$first_match")" || exit 1 | |
| mktorrent \ | |
| --comment="Armbian torrent for ${FILE}" \ | |
| --verbose \ | |
| ${TRACKERS} \ | |
| --web-seed="${WEBSEEDS}" \ | |
| "${FILE}" | |
| # drop .txt helper files | |
| rm -f *.txt | |
| - name: "Prepare release artifacts (exclude .asc, .sha, .torrent)" | |
| run: | | |
| # Start from a clean directory | |
| rm -rf output/release | |
| mkdir -p output/release | |
| # Copy wanted artifacts from output/images, preserving folder structure | |
| # e.g. output/images/BOARDNAME/archive/Armbian_*.img -> output/release/output/images/BOARDNAME/archive/... | |
| find output/images -type f -name 'Armbian_*.*' \ | |
| ! -name '*.asc' \ | |
| ! -name '*.sha' \ | |
| ! -name '*.torrent' \ | |
| -exec cp --parents {} output/release/ \; | |
| # debug: compare what was produced with what will be released | |
| echo "=== output/images ===" | |
| tree output/images | |
| echo "=== output/release ===" | |
| tree output/release | |
| - name: "Upload artefacts except .asc, .sha and .torrent" | |
| timeout-minutes: 60 | |
| if: ${{ ( github.event.inputs.nightlybuild || 'yes' ) == 'yes' || env.RELEASE_REPOSITORY == 'community' || env.RELEASE_REPOSITORY == 'distribution' }} | |
| uses: ncipollo/release-action@v1 | |
| with: | |
| repo: "${{ env.RELEASE_REPOSITORY }}" | |
| tag: "${{ needs.matrix_prep.outputs.version }}" | |
| artifacts: "output/release/output/images/*/*/Armbian_*.*" | |
| omitBody: true | |
| replacesArtifacts: true | |
| omitName: true | |
| makeLatest: false | |
| omitPrereleaseDuringUpdate: true | |
| allowUpdates: true | |
| artifactErrorsFailBuild: true | |
| token: "${{ env.GH_TOKEN }}" | |
| - name: "Upload to servers" | |
| run: | | |
| # debug | |
| echo "=== servers-cache.jq ===" | |
| jq . output/info/servers-cache.jq || cat output/info/servers-cache.jq | |
| echo "=== servers-upload.jq ===" | |
| jq . output/info/servers-upload.jq || cat output/info/servers-upload.jq | |
| max_retries=3 | |
| sync_from_json() { | |
| local json_file=$1 | |
| local mode=$2 # "cache" or "upload" | |
| echo "== Processing ${json_file} (mode: ${mode}) ==" | |
| # Iterate over JSON array elements | |
| while IFS= read -r server; do | |
| # JSON structure (same for all files): | |
| # { | |
| # "host": "...", | |
| # "upload_path": "...", | |
| # "download_path_archive": "...", | |
| # "download_path_images": "...", | |
| # "download_path_debs": "...", | |
| # "port": 22, | |
| # "username": "mirror" | |
| # } | |
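| # Example element (hypothetical values): | |
| # { "host": "mirror.example.org", "upload_path": "/storage", "port": 22, "username": "mirror" } | |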
| SERVER_URL=$(jq -r '.host // empty' <<<"$server") | |
| SERVER_PATH=$(jq -r '.upload_path // ""' <<<"$server") | |
| SERVER_PORT=$(jq -r '.port // 22' <<<"$server") | |
| SERVER_USERNAME=$(jq -r '.username // "mirror"' <<<"$server") | |
| # skip empty host | |
| [ -z "$SERVER_URL" ] && continue | |
| echo "Processing: $SERVER_URL:$SERVER_PORT (upload_path: $SERVER_PATH)" | |
| # Clean known_hosts entry (host:port form) | |
| ssh-keygen -f "${HOME}/.ssh/known_hosts" -R "[${SERVER_URL}]:${SERVER_PORT}" 2>/dev/null || true | |
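| # (OpenSSH stores hosts on non-default ports as "[host]:port", hence the bracket form.) | |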
| # Select rsync filters + remote subdir | |
| if [ "$mode" = "cache" ]; then | |
| # only .sha, .asc and .torrent | |
| RSYNC_FILTER=( | |
| --include='*/' | |
| --include='*.sha' | |
| --include='*.asc' | |
| --include='*.torrent' | |
| --exclude='*' | |
| ) | |
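| # rsync applies filters in order: '*/' keeps directory traversal alive, the | |
| # three includes pass the sidecar files, and the final catch-all exclude | |
| # drops everything else. | |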
| REMOTE_SUBDIR="cache/artifacts/" | |
| else | |
| # everything | |
| RSYNC_FILTER=( | |
| --include='*/' | |
| --include='*' | |
| ) | |
| REMOTE_SUBDIR="incoming/${GITHUB_ACTOR}/" | |
| fi | |
| # Retry loop | |
| for attempt in $(seq 1 "$max_retries"); do | |
| echo "[$SERVER_URL] rsync attempt ${attempt}/${max_retries}..." | |
| if rsync --progress \ | |
| -e "ssh -p ${SERVER_PORT} -o StrictHostKeyChecking=accept-new" \ | |
| -rvP \ | |
| "${RSYNC_FILTER[@]}" \ | |
| output/images/ \ | |
| "${SERVER_USERNAME}@${SERVER_URL}:${SERVER_PATH}/${REMOTE_SUBDIR}" | |
| then | |
| echo "[$SERVER_URL] rsync successful." | |
| break | |
| fi | |
| if [ "$attempt" -eq "$max_retries" ]; then | |
| echo "[$SERVER_URL] rsync FAILED after ${max_retries} attempts." | |
| exit 1 | |
| fi | |
| echo "[$SERVER_URL] rsync failed. Retrying in 10 seconds..." | |
| sleep 10 | |
| done | |
| done < <(jq -c '.[]' "$json_file") | |
| } | |
| nightlybuild="${{ github.event.inputs.nightlybuild }}" | |
| nightlybuild_default="yes" | |
| RELEASE_REPOSITORY="${{ env.RELEASE_REPOSITORY }}" | |
| effective_nightlybuild="${nightlybuild:-$nightlybuild_default}" | |
| if [ "$effective_nightlybuild" = "yes" ] || \ | |
| [ "$RELEASE_REPOSITORY" = "community" ] || \ | |
| [ "$RELEASE_REPOSITORY" = "distribution" ]; then | |
| # Upload to cache servers: only the .sha/.asc/.torrent sidecar files | |
| sync_from_json output/info/servers-cache.jq cache | |
| fi | |
| if [ "$effective_nightlybuild" = "no" ] && \ | |
| [ "$RELEASE_REPOSITORY" = "os" ]; then | |
| # Upload to the upload servers: everything (images and sidecar files) | |
| sync_from_json output/info/servers-upload.jq upload | |
| fi | |
| # cleaning self hosted runners | |
| - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}" | |
| if: always() | |
| uses: armbian/actions/runner-clean@main | |
| "build-images-chunk-8": # templated "build-images-chunk-8" | |
| needs: [ "matrix_prep", "all-artifacts-ready" ] | |
| timeout-minutes: 240 | |
| if: ${{ !failure() && !cancelled() && ( github.repository_owner == 'armbian' ) && ( needs.matrix_prep.outputs.images-chunk-not-empty-8 == 'yes' ) }} # <-- HERE: Chunk number. | |
| strategy: | |
| fail-fast: false # let other jobs try to complete if one fails | |
| matrix: ${{ fromJSON(needs.matrix_prep.outputs.images-chunk-json-8) }} # <-- HERE: Chunk number. | |
| name: ${{ matrix.desc || 'Empty I8' }} # <-- HERE: Chunk number. | |
| runs-on: ${{ matrix.runs_on }} | |
| steps: | |
| - name: Install dependencies | |
| run: | | |
| if [ ! -e /usr/bin/mktorrent ]; then | |
| sudo apt-get update | |
| sudo apt-get install -y mktorrent | |
| fi | |
| # cleaning self hosted runners | |
| - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}" | |
| uses: armbian/actions/runner-clean@main | |
| # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later | |
| - name: Cleanup userpatches repo | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| run: rm -rf userpatches.repo | |
| - name: Checkout build repo | |
| uses: actions/checkout@v6 | |
| with: | |
| repository: ${{ env.BUILD_REPOSITORY }} | |
| ref: ${{ needs.matrix_prep.outputs.build-sha1 }} | |
| fetch-depth: ${{ matrix.fdepth }} | |
| clean: false # true is the default; it *will* delete the host's /dev if mounted inside. | |
| # clone the userpatches repo (`armbian/os`) | |
| - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}" | |
| uses: actions/checkout@v6 | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| with: | |
| repository: ${{ env.USERPATCHES_REPOSITORY }} | |
| ref: ${{ env.USERPATCHES_REF }} | |
| fetch-depth: ${{ matrix.fdepth }} | |
| clean: false # true is default. | |
| path: userpatches.repo | |
| - name: "Put userpatches in place, and remove userpatches repo" | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| run: | | |
| mkdir -pv userpatches | |
| rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/ | |
| rm -rf userpatches.repo | |
| - name: "Cleanup leftover output" | |
| run: | | |
| rm -f userpatches/VERSION | |
| - name: ${{matrix.desc}} | |
| id: build-one-image | |
| timeout-minutes: 90 | |
| run: | | |
| # calculate loop from runner name | |
| if [ -z "${ImageOS}" ]; then | |
| USE_FIXED_LOOP_DEVICE=$(echo "${RUNNER_NAME}" | rev | cut -d"-" -f1 | rev | sed 's/^0*//' | sed -e 's/^/\/dev\/loop/') | |
| fi | |
| bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" USE_FIXED_LOOP_DEVICE="$USE_FIXED_LOOP_DEVICE" SHARE_LOG=yes MAKE_FOLDERS="archive" IMAGE_VERSION=${{ needs.matrix_prep.outputs.version }} ${{env.EXTRA_PARAMS_IMAGE}} ${{env.EXTRA_PARAMS_ALL_BUILDS}} | |
| - name: "Logs: ${{ steps.build-one-image.outputs.logs_url }}" | |
| if: always() | |
| run: | | |
| echo "Logs: ${{ steps.build-one-image.outputs.logs_url }}" | |
| - name: Install SSH key | |
| uses: shimataro/ssh-key-action@v2 | |
| with: | |
| key: ${{ secrets.KEY_UPLOAD }} | |
| known_hosts: ${{ secrets.KNOWN_HOSTS_ARMBIAN_UPLOAD }} | |
| if_key_exists: replace | |
| - name: Check API rate limits | |
| run: | | |
| # install dependencies | |
| if ! command -v "gh" > /dev/null 2>&1; then | |
| sudo apt-get -y -qq install gh | |
| fi | |
| while true | |
| do | |
| API_CALLS_TOTAL=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.limit') | |
| API_CALLS_LEFT=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.remaining') | |
| PERCENT=$(( API_CALLS_LEFT * 100 / API_CALLS_TOTAL )) | |
| if (( PERCENT > 20 )); then | |
| echo "API rate in good shape: ${PERCENT}% free" | |
| break | |
| fi | |
| echo "API rate lower than 20%, sleeping 10m" | |
| sleep 10m | |
| done | |
| # show current api rate | |
| curl -s -H "Accept: application/vnd.github.v3+json" -H "Authorization: token ${{ secrets.ACCESS_TOKEN }}" https://api.github.com/rate_limit | |
| - name: Import GPG key | |
| env: | |
| GPG_KEY1: ${{ secrets.GPG_KEY1 }} | |
| if: env.GPG_KEY1 != null | |
| uses: crazy-max/ghaction-import-gpg@v6 | |
| with: | |
| gpg_private_key: ${{ secrets.GPG_KEY1 }} | |
| passphrase: ${{ secrets.GPG_PASSPHRASE1 }} | |
| - name: Sign | |
| env: | |
| GPG_PASSPHRASE1: ${{ secrets.GPG_PASSPHRASE1 }} | |
| if: env.GPG_PASSPHRASE1 != null | |
| run: | | |
| for extension in zip xz qcow2; do | |
| if ls -l output/images/*/archive/*.$extension &>/dev/null; then | |
| echo ${{ secrets.GPG_PASSPHRASE1 }} | gpg --passphrase-fd 0 --armor --detach-sign --pinentry-mode loopback --batch --yes output/images/*/archive/*.$extension | |
| fi | |
| done | |
| # Download the artifacts (output/info) produced by the prepare-matrix job. | |
| - name: Download artifacts | |
| uses: actions/download-artifact@v6 | |
| with: | |
| name: build-info-json | |
| path: output/info | |
| - name: Generate torrent | |
| timeout-minutes: 3 | |
| run: | | |
| set -euo pipefail | |
| # Build tracker list (ignore empty/whitespace-only lines) | |
| TRACKERS=$( | |
| grep -v '^[[:space:]]*$' output/info/best-torrent-servers.txt \ | |
| | sort -R \ | |
| | sed 's/^/ --announce=/' | |
| ) | |
| # Find BOARD and FILE (first zip/xz/qcow2 in output/images/*/archive/) | |
| BOARD="" | |
| FILE="" | |
| first_match="" | |
| for ext in zip xz qcow2; do | |
| if ls output/images/*/archive/*."$ext" >/dev/null 2>&1; then | |
| first_match=$(ls -1 output/images/*/archive/*."$ext" | head -n1) | |
| # first_match = output/images/BOARD/archive/file.ext | |
| BOARD=$(basename "$(dirname "$(dirname "$first_match")")") # -> BOARD | |
| FILE=$(basename "$first_match") | |
| break | |
| fi | |
| done | |
| # Safety check | |
| if [ -z "$BOARD" ] || [ -z "$FILE" ]; then | |
| echo "No torrent source file found (zip/xz/qcow2) in output/images/*/archive" >&2 | |
| exit 1 | |
| fi | |
| # Nightly / stable logic (templated) | |
| RELEASE_REPOSITORY="${{ env.RELEASE_REPOSITORY }}" | |
| nightlybuild="${{ github.event.inputs.nightlybuild }}" | |
| nightlybuild_default="yes" | |
| effective_nightlybuild="${nightlybuild:-$nightlybuild_default}" | |
| WEBSEEDS="" | |
| if [ "$effective_nightlybuild" = "no" ] && [ "$RELEASE_REPOSITORY" != "distribution" ]; then | |
| ################################################################## | |
| # STABLE RELEASES | |
| # Use download mirrors (servers-download.jq) | |
| # URL: https://<host><download_path_images>/<BOARD>/archive/<FILE> | |
| # download_path_images is already normalized & defaults to /dl in JSON. | |
| ################################################################## | |
| WEBSEEDS=$( | |
| jq -r --arg board "$BOARD" --arg file "$FILE" ' | |
| .[] | |
| | "https://\(.host)\(.download_path_images)/\($board)/archive/\($file)" | |
| ' output/info/servers-download.jq | paste -sd, - | |
| ) | |
| else | |
| ################################################################## | |
| # NIGHTLY BUILDS | |
| # Use cache mirrors (servers-cache.jq) + GitHub as extra webseed | |
| # Path is FIXED: https://SERVER/cache/os/<version>/<FILE> | |
| ################################################################## | |
| WEBSEEDS=$( | |
| jq -r \ | |
| --arg repo "os" \ | |
| --arg ver "${{ needs.matrix_prep.outputs.version }}" \ | |
| --arg file "$FILE" ' | |
| .[] | |
| | "https://\(.host)/cache/\($repo)/\($ver)/\($file)" | |
| ' output/info/servers-cache.jq | paste -sd, - | |
| ) | |
| # Append GitHub webseed | |
| if [ -n "$WEBSEEDS" ]; then | |
| WEBSEEDS+="," | |
| fi | |
| WEBSEEDS+="https://github.com/armbian/os/releases/download/${{ needs.matrix_prep.outputs.version }}/${FILE}" | |
| fi | |
| echo "WEBSEEDS: $WEBSEEDS" | |
| # Go to the archive directory that contains FILE | |
| cd "$(dirname "$first_match")" || exit 1 | |
| mktorrent \ | |
| --comment="Armbian torrent for ${FILE}" \ | |
| --verbose \ | |
| ${TRACKERS} \ | |
| --web-seed="${WEBSEEDS}" \ | |
| "${FILE}" | |
| # drop .txt helper files | |
| rm -f *.txt | |
| - name: "Prepare release artifacts (exclude .asc, .sha, .torrent)" | |
| run: | | |
| # Start from a clean directory | |
| rm -rf output/release | |
| mkdir -p output/release | |
| # Copy wanted artifacts from output/images, preserving folder structure | |
| # e.g. output/images/BOARDNAME/archive/Armbian_*.img -> output/release/output/images/BOARDNAME/archive/... | |
| find output/images -type f -name 'Armbian_*.*' \ | |
| ! -name '*.asc' \ | |
| ! -name '*.sha' \ | |
| ! -name '*.torrent' \ | |
| -exec cp --parents {} output/release/ \; | |
| # debug: compare what was produced with what will be released | |
| echo "=== output/images ===" | |
| tree output/images | |
| echo "=== output/release ===" | |
| tree output/release | |
| - name: "Upload artefacts except .asc, .sha and .torrent" | |
| timeout-minutes: 60 | |
| if: ${{ ( github.event.inputs.nightlybuild || 'yes' ) == 'yes' || env.RELEASE_REPOSITORY == 'community' || env.RELEASE_REPOSITORY == 'distribution' }} | |
| uses: ncipollo/release-action@v1 | |
| with: | |
| repo: "${{ env.RELEASE_REPOSITORY }}" | |
| tag: "${{ needs.matrix_prep.outputs.version }}" | |
| artifacts: "output/release/output/images/*/*/Armbian_*.*" | |
| omitBody: true | |
| replacesArtifacts: true | |
| omitName: true | |
| makeLatest: false | |
| omitPrereleaseDuringUpdate: true | |
| allowUpdates: true | |
| artifactErrorsFailBuild: true | |
| token: "${{ env.GH_TOKEN }}" | |
| - name: "Upload to servers" | |
| run: | | |
| # debug | |
| echo "=== servers-cache.jq ===" | |
| jq . output/info/servers-cache.jq || cat output/info/servers-cache.jq | |
| echo "=== servers-upload.jq ===" | |
| jq . output/info/servers-upload.jq || cat output/info/servers-upload.jq | |
| max_retries=3 | |
| sync_from_json() { | |
| local json_file=$1 | |
| local mode=$2 # "cache" or "upload" | |
| echo "== Processing ${json_file} (mode: ${mode}) ==" | |
| # Iterate over JSON array elements | |
| while IFS= read -r server; do | |
| # JSON structure (same for all files): | |
| # { | |
| # "host": "...", | |
| # "upload_path": "...", | |
| # "download_path_archive": "...", | |
| # "download_path_images": "...", | |
| # "download_path_debs": "...", | |
| # "port": 22, | |
| # "username": "mirror" | |
| # } | |
| SERVER_URL=$(jq -r '.host // empty' <<<"$server") | |
| SERVER_PATH=$(jq -r '.upload_path // ""' <<<"$server") | |
| SERVER_PORT=$(jq -r '.port // 22' <<<"$server") | |
| SERVER_USERNAME=$(jq -r '.username // "mirror"' <<<"$server") | |
| # skip empty host | |
| [ -z "$SERVER_URL" ] && continue | |
| echo "Processing: $SERVER_URL:$SERVER_PORT (upload_path: $SERVER_PATH)" | |
| # Clean known_hosts entry (host:port form) | |
| ssh-keygen -f "${HOME}/.ssh/known_hosts" -R "[${SERVER_URL}]:${SERVER_PORT}" 2>/dev/null || true | |
| # Select rsync filters + remote subdir | |
| if [ "$mode" = "cache" ]; then | |
| # only .sha, .asc and .torrent | |
| RSYNC_FILTER=( | |
| --include='*/' | |
| --include='*.sha' | |
| --include='*.asc' | |
| --include='*.torrent' | |
| --exclude='*' | |
| ) | |
| REMOTE_SUBDIR="cache/artifacts/" | |
| else | |
| # everything | |
| RSYNC_FILTER=( | |
| --include='*/' | |
| --include='*' | |
| ) | |
| REMOTE_SUBDIR="incoming/${GITHUB_ACTOR}/" | |
| fi | |
| # Retry loop | |
| for attempt in $(seq 1 "$max_retries"); do | |
| echo "[$SERVER_URL] rsync attempt ${attempt}/${max_retries}..." | |
| if rsync --progress \ | |
| -e "ssh -p ${SERVER_PORT} -o StrictHostKeyChecking=accept-new" \ | |
| -rvP \ | |
| "${RSYNC_FILTER[@]}" \ | |
| output/images/ \ | |
| "${SERVER_USERNAME}@${SERVER_URL}:${SERVER_PATH}/${REMOTE_SUBDIR}" | |
| then | |
| echo "[$SERVER_URL] rsync successful." | |
| break | |
| fi | |
| if [ "$attempt" -eq "$max_retries" ]; then | |
| echo "[$SERVER_URL] rsync FAILED after ${max_retries} attempts." | |
| exit 1 | |
| fi | |
| echo "[$SERVER_URL] rsync failed. Retrying in 10 seconds..." | |
| sleep 10 | |
| done | |
| done < <(jq -c '.[]' "$json_file") | |
| } | |
| nightlybuild="${{ github.event.inputs.nightlybuild }}" | |
| nightlybuild_default="yes" | |
| RELEASE_REPOSITORY="${{ env.RELEASE_REPOSITORY }}" | |
| effective_nightlybuild="${nightlybuild:-$nightlybuild_default}" | |
| if [ "$effective_nightlybuild" = "yes" ] || \ | |
| [ "$RELEASE_REPOSITORY" = "community" ] || \ | |
| [ "$RELEASE_REPOSITORY" = "distribution" ]; then | |
| # Upload to cache servers: only the .sha/.asc/.torrent sidecar files | |
| sync_from_json output/info/servers-cache.jq cache | |
| fi | |
| if [ "$effective_nightlybuild" = "no" ] && \ | |
| [ "$RELEASE_REPOSITORY" = "os" ]; then | |
| # Upload to the upload servers: everything (images and sidecar files) | |
| sync_from_json output/info/servers-upload.jq upload | |
| fi | |
| # cleaning self hosted runners | |
| - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}" | |
| if: always() | |
| uses: armbian/actions/runner-clean@main | |
| "build-images-chunk-9": # templated "build-images-chunk-9" | |
| needs: [ "matrix_prep", "all-artifacts-ready" ] | |
| timeout-minutes: 240 | |
| if: ${{ !failure() && !cancelled() && ( github.repository_owner == 'armbian' ) && ( needs.matrix_prep.outputs.images-chunk-not-empty-9 == 'yes' ) }} # <-- HERE: Chunk number. | |
| strategy: | |
| fail-fast: false # let other jobs try to complete if one fails | |
| matrix: ${{ fromJSON(needs.matrix_prep.outputs.images-chunk-json-9) }} # <-- HERE: Chunk number. | |
| name: ${{ matrix.desc || 'Empty I9' }} # <-- HERE: Chunk number. | |
| runs-on: ${{ matrix.runs_on }} | |
| steps: | |
| - name: Install dependencies | |
| run: | | |
| if [ ! -e /usr/bin/mktorrent ]; then | |
| sudo apt-get update | |
| sudo apt-get install -y mktorrent | |
| fi | |
| # cleaning self hosted runners | |
| - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}" | |
| uses: armbian/actions/runner-clean@main | |
| # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later | |
| - name: Cleanup userpatches repo | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| run: rm -rf userpatches.repo | |
| - name: Checkout build repo | |
| uses: actions/checkout@v6 | |
| with: | |
| repository: ${{ env.BUILD_REPOSITORY }} | |
| ref: ${{ needs.matrix_prep.outputs.build-sha1 }} | |
| fetch-depth: ${{ matrix.fdepth }} | |
| clean: false # true is the default; it *will* delete the host's /dev if mounted inside. | |
| # clone the userpatches repo (`armbian/os`) | |
| - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}" | |
| uses: actions/checkout@v6 | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| with: | |
| repository: ${{ env.USERPATCHES_REPOSITORY }} | |
| ref: ${{ env.USERPATCHES_REF }} | |
| fetch-depth: ${{ matrix.fdepth }} | |
| clean: false # true is default. | |
| path: userpatches.repo | |
| - name: "Put userpatches in place, and remove userpatches repo" | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| run: | | |
| mkdir -pv userpatches | |
| rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/ | |
| rm -rf userpatches.repo | |
| - name: "Cleanup leftover output" | |
| run: | | |
| rm -f userpatches/VERSION | |
| - name: ${{matrix.desc}} | |
| id: build-one-image | |
| timeout-minutes: 90 | |
| run: | | |
| # calculate loop from runner name | |
| if [ -z "${ImageOS}" ]; then | |
| USE_FIXED_LOOP_DEVICE=$(echo "${RUNNER_NAME}" | rev | cut -d"-" -f1 | rev | sed 's/^0*//' | sed -e 's/^/\/dev\/loop/') | |
| fi | |
| bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" USE_FIXED_LOOP_DEVICE="$USE_FIXED_LOOP_DEVICE" SHARE_LOG=yes MAKE_FOLDERS="archive" IMAGE_VERSION=${{ needs.matrix_prep.outputs.version }} ${{env.EXTRA_PARAMS_IMAGE}} ${{env.EXTRA_PARAMS_ALL_BUILDS}} | |
| - name: "Logs: ${{ steps.build-one-image.outputs.logs_url }}" | |
| if: always() | |
| run: | | |
| echo "Logs: ${{ steps.build-one-image.outputs.logs_url }}" | |
| - name: Install SSH key | |
| uses: shimataro/ssh-key-action@v2 | |
| with: | |
| key: ${{ secrets.KEY_UPLOAD }} | |
| known_hosts: ${{ secrets.KNOWN_HOSTS_ARMBIAN_UPLOAD }} | |
| if_key_exists: replace | |
| - name: Check API rate limits | |
| run: | | |
| # install dependencies | |
| if ! command -v "gh" > /dev/null 2>&1; then | |
| sudo apt-get -y -qq install gh | |
| fi | |
| while true | |
| do | |
| API_CALLS_TOTAL=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.limit') | |
| API_CALLS_LEFT=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.remaining') | |
| PERCENT=$(( API_CALLS_LEFT * 100 / API_CALLS_TOTAL )) | |
| if (( PERCENT > 20 )); then | |
| echo "API rate in good shape: ${PERCENT}% free" | |
| break | |
| fi | |
| echo "API rate lower than 20%, sleeping 10m" | |
| sleep 10m | |
| done | |
| # show current api rate | |
| curl -s -H "Accept: application/vnd.github.v3+json" -H "Authorization: token ${{ secrets.ACCESS_TOKEN }}" https://api.github.com/rate_limit | |
| - name: Import GPG key | |
| env: | |
| GPG_KEY1: ${{ secrets.GPG_KEY1 }} | |
| if: env.GPG_KEY1 != null | |
| uses: crazy-max/ghaction-import-gpg@v6 | |
| with: | |
| gpg_private_key: ${{ secrets.GPG_KEY1 }} | |
| passphrase: ${{ secrets.GPG_PASSPHRASE1 }} | |
| - name: Sign | |
| env: | |
| GPG_PASSPHRASE1: ${{ secrets.GPG_PASSPHRASE1 }} | |
| if: env.GPG_PASSPHRASE1 != null | |
| run: | | |
| for extension in zip xz qcow2; do | |
| if ls -l output/images/*/archive/*.$extension &>/dev/null; then | |
| echo ${{ secrets.GPG_PASSPHRASE1 }} | gpg --passphrase-fd 0 --armor --detach-sign --pinentry-mode loopback --batch --yes output/images/*/archive/*.$extension | |
| fi | |
| done | |
| # Download the artifacts (output/info) produced by the prepare-matrix job. | |
| - name: Download artifacts | |
| uses: actions/download-artifact@v6 | |
| with: | |
| name: build-info-json | |
| path: output/info | |
| - name: Generate torrent | |
| timeout-minutes: 3 | |
| run: | | |
| set -euo pipefail | |
| # Build tracker list (ignore empty/whitespace-only lines) | |
| TRACKERS=$( | |
| grep -v '^[[:space:]]*$' output/info/best-torrent-servers.txt \ | |
| | sort -R \ | |
| | sed 's/^/ --announce=/' | |
| ) | |
| # Find BOARD and FILE (first zip/xz/qcow2 in output/images/*/archive/) | |
| BOARD="" | |
| FILE="" | |
| first_match="" | |
| for ext in zip xz qcow2; do | |
| if ls output/images/*/archive/*."$ext" >/dev/null 2>&1; then | |
| first_match=$(ls -1 output/images/*/archive/*."$ext" | head -n1) | |
| # first_match = output/images/BOARD/archive/file.ext | |
| BOARD=$(basename "$(dirname "$(dirname "$first_match")")") # -> BOARD | |
| FILE=$(basename "$first_match") | |
| break | |
| fi | |
| done | |
| # Safety check | |
| if [ -z "$BOARD" ] || [ -z "$FILE" ]; then | |
| echo "No torrent source file found (zip/xz/qcow2) in output/images/*/archive" >&2 | |
| exit 1 | |
| fi | |
| # Nightly / stable logic (templated) | |
| RELEASE_REPOSITORY="${{ env.RELEASE_REPOSITORY }}" | |
| nightlybuild="${{ github.event.inputs.nightlybuild }}" | |
| nightlybuild_default="yes" | |
| effective_nightlybuild="${nightlybuild:-$nightlybuild_default}" | |
| WEBSEEDS="" | |
| if [ "$effective_nightlybuild" = "no" ] && [ "$RELEASE_REPOSITORY" != "distribution" ]; then | |
| ################################################################## | |
| # STABLE RELEASES | |
| # Use download mirrors (servers-download.jq) | |
| # URL: https://<host><download_path_images>/<BOARD>/archive/<FILE> | |
| # download_path_images is already normalized & defaults to /dl in JSON. | |
| ################################################################## | |
| WEBSEEDS=$( | |
| jq -r --arg board "$BOARD" --arg file "$FILE" ' | |
| .[] | |
| | "https://\(.host)\(.download_path_images)/\($board)/archive/\($file)" | |
| ' output/info/servers-download.jq | paste -sd, - | |
| ) | |
| else | |
| ################################################################## | |
| # NIGHTLY BUILDS | |
| # Use cache mirrors (servers-cache.jq) + GitHub as extra webseed | |
| # Path is FIXED: https://SERVER/cache/os/<version>/<FILE> | |
| ################################################################## | |
| WEBSEEDS=$( | |
| jq -r \ | |
| --arg repo "os" \ | |
| --arg ver "${{ needs.matrix_prep.outputs.version }}" \ | |
| --arg file "$FILE" ' | |
| .[] | |
| | "https://\(.host)/cache/\($repo)/\($ver)/\($file)" | |
| ' output/info/servers-cache.jq | paste -sd, - | |
| ) | |
| # Append GitHub webseed | |
| if [ -n "$WEBSEEDS" ]; then | |
| WEBSEEDS+="," | |
| fi | |
| WEBSEEDS+="https://github.com/armbian/os/releases/download/${{ needs.matrix_prep.outputs.version }}/${FILE}" | |
| fi | |
| echo "WEBSEEDS: $WEBSEEDS" | |
| # Go to the archive directory that contains FILE | |
| cd "$(dirname "$first_match")" || exit 1 | |
| mktorrent \ | |
| --comment="Armbian torrent for ${FILE}" \ | |
| --verbose \ | |
| ${TRACKERS} \ | |
| --web-seed="${WEBSEEDS}" \ | |
| "${FILE}" | |
| # drop .txt helper files | |
| rm -f *.txt | |
| - name: "Prepare release artifacts (exclude .asc, .sha, .torrent)" | |
| run: | | |
| # Start from a clean directory | |
| rm -rf output/release | |
| mkdir -p output/release | |
| # Copy wanted artifacts from output/images, preserving folder structure | |
| # e.g. output/images/BOARDNAME/archive/Armbian_*.img -> output/release/output/images/BOARDNAME/archive/... | |
| find output/images -type f -name 'Armbian_*.*' \ | |
| ! -name '*.asc' \ | |
| ! -name '*.sha' \ | |
| ! -name '*.torrent' \ | |
| -exec cp --parents {} output/release/ \; | |
| # debug: compare what was produced with what will be released | |
| echo "=== output/images ===" | |
| tree output/images | |
| echo "=== output/release ===" | |
| tree output/release | |
| - name: "Upload artefacts except .asc, .sha and .torrent" | |
| timeout-minutes: 60 | |
| if: ${{ ( github.event.inputs.nightlybuild || 'yes' ) == 'yes' || env.RELEASE_REPOSITORY == 'community' || env.RELEASE_REPOSITORY == 'distribution' }} | |
| uses: ncipollo/release-action@v1 | |
| with: | |
| repo: "${{ env.RELEASE_REPOSITORY }}" | |
| tag: "${{ needs.matrix_prep.outputs.version }}" | |
| artifacts: "output/release/output/images/*/*/Armbian_*.*" | |
| omitBody: true | |
| replacesArtifacts: true | |
| omitName: true | |
| makeLatest: false | |
| omitPrereleaseDuringUpdate: true | |
| allowUpdates: true | |
| artifactErrorsFailBuild: true | |
| token: "${{ env.GH_TOKEN }}" | |
| - name: "Upload to servers" | |
| run: | | |
| # debug | |
| echo "=== servers-cache.jq ===" | |
| jq . output/info/servers-cache.jq || cat output/info/servers-cache.jq | |
| echo "=== servers-upload.jq ===" | |
| jq . output/info/servers-upload.jq || cat output/info/servers-upload.jq | |
| max_retries=3 | |
| sync_from_json() { | |
| local json_file=$1 | |
| local mode=$2 # "cache" or "upload" | |
| echo "== Processing ${json_file} (mode: ${mode}) ==" | |
| # Iterate over JSON array elements | |
| while IFS= read -r server; do | |
| # JSON structure (same for all files): | |
| # { | |
| # "host": "...", | |
| # "upload_path": "...", | |
| # "download_path_archive": "...", | |
| # "download_path_images": "...", | |
| # "download_path_debs": "...", | |
| # "port": 22, | |
| # "username": "mirror" | |
| # } | |
| SERVER_URL=$(jq -r '.host // empty' <<<"$server") | |
| SERVER_PATH=$(jq -r '.upload_path // ""' <<<"$server") | |
| SERVER_PORT=$(jq -r '.port // 22' <<<"$server") | |
| SERVER_USERNAME=$(jq -r '.username // "mirror"' <<<"$server") | |
| # skip empty host | |
| [ -z "$SERVER_URL" ] && continue | |
| echo "Processing: $SERVER_URL:$SERVER_PORT (upload_path: $SERVER_PATH)" | |
| # Clean known_hosts entry (host:port form) | |
| ssh-keygen -f "${HOME}/.ssh/known_hosts" -R "[${SERVER_URL}]:${SERVER_PORT}" 2>/dev/null || true | |
| # Select rsync filters + remote subdir | |
| if [ "$mode" = "cache" ]; then | |
| # only .sha, .asc and .torrent | |
| RSYNC_FILTER=( | |
| --include='*/' | |
| --include='*.sha' | |
| --include='*.asc' | |
| --include='*.torrent' | |
| --exclude='*' | |
| ) | |
| REMOTE_SUBDIR="cache/artifacts/" | |
| else | |
| # everything | |
| RSYNC_FILTER=( | |
| --include='*/' | |
| --include='*' | |
| ) | |
| REMOTE_SUBDIR="incoming/${GITHUB_ACTOR}/" | |
| fi | |
| # Retry loop | |
| for attempt in $(seq 1 "$max_retries"); do | |
| echo "[$SERVER_URL] rsync attempt ${attempt}/${max_retries}..." | |
| if rsync --progress \ | |
| -e "ssh -p ${SERVER_PORT} -o StrictHostKeyChecking=accept-new" \ | |
| -rvP \ | |
| "${RSYNC_FILTER[@]}" \ | |
| output/images/ \ | |
| "${SERVER_USERNAME}@${SERVER_URL}:${SERVER_PATH}/${REMOTE_SUBDIR}" | |
| then | |
| echo "[$SERVER_URL] rsync successful." | |
| break | |
| fi | |
| if [ "$attempt" -eq "$max_retries" ]; then | |
| echo "[$SERVER_URL] rsync FAILED after ${max_retries} attempts." | |
| exit 1 | |
| fi | |
| echo "[$SERVER_URL] rsync failed. Retrying in 10 seconds..." | |
| sleep 10 | |
| done | |
| done < <(jq -c '.[]' "$json_file") | |
| } | |
| nightlybuild="${{ github.event.inputs.nightlybuild }}" | |
| nightlybuild_default="yes" | |
| RELEASE_REPOSITORY="${{ env.RELEASE_REPOSITORY }}" | |
| effective_nightlybuild="${nightlybuild:-$nightlybuild_default}" | |
| if [ "$effective_nightlybuild" = "yes" ] || \ | |
| [ "$RELEASE_REPOSITORY" = "community" ] || \ | |
| [ "$RELEASE_REPOSITORY" = "distribution" ]; then | |
| # Upload to cache servers: only the .sha/.asc/.torrent sidecar files | |
| sync_from_json output/info/servers-cache.jq cache | |
| fi | |
| if [ "$effective_nightlybuild" = "no" ] && \ | |
| [ "$RELEASE_REPOSITORY" = "os" ]; then | |
| # Upload to the upload servers: everything (images and sidecar files) | |
| sync_from_json output/info/servers-upload.jq upload | |
| fi | |
| # cleaning self hosted runners | |
| - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}" | |
| if: always() | |
| uses: armbian/actions/runner-clean@main | |
| "build-images-chunk-10": # templated "build-images-chunk-10" | |
| needs: [ "matrix_prep", "all-artifacts-ready" ] | |
| timeout-minutes: 240 | |
| if: ${{ !failure() && !cancelled() && ( github.repository_owner == 'armbian' ) && ( needs.matrix_prep.outputs.images-chunk-not-empty-10 == 'yes' ) }} # <-- HERE: Chunk number. | |
| strategy: | |
| fail-fast: false # let other jobs try to complete if one fails | |
| matrix: ${{ fromJSON(needs.matrix_prep.outputs.images-chunk-json-10) }} # <-- HERE: Chunk number. | |
| name: ${{ matrix.desc || 'Empty I10' }} # <-- HERE: Chunk number. | |
| runs-on: ${{ matrix.runs_on }} | |
| steps: | |
| - name: Install dependencies | |
| run: | | |
| if [ ! -e /usr/bin/mktorrent ]; then | |
| sudo apt-get update | |
| sudo apt-get install -y mktorrent | |
| fi | |
| # cleaning self hosted runners | |
| - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}" | |
| uses: armbian/actions/runner-clean@main | |
| # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later | |
| - name: Cleanup userpatches repo | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| run: rm -rf userpatches.repo | |
| - name: Checkout build repo | |
| uses: actions/checkout@v6 | |
| with: | |
| repository: ${{ env.BUILD_REPOSITORY }} | |
| ref: ${{ needs.matrix_prep.outputs.build-sha1 }} | |
| fetch-depth: ${{ matrix.fdepth }} | |
| clean: false # true is the default; it *will* delete the host's /dev if mounted inside. | |
| # clone the userpatches repo (`armbian/os`) | |
| - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}" | |
| uses: actions/checkout@v6 | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| with: | |
| repository: ${{ env.USERPATCHES_REPOSITORY }} | |
| ref: ${{ env.USERPATCHES_REF }} | |
| fetch-depth: ${{ matrix.fdepth }} | |
| clean: false # true is default. | |
| path: userpatches.repo | |
| - name: "Put userpatches in place, and remove userpatches repo" | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| run: | | |
| mkdir -pv userpatches | |
| rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/ | |
| rm -rf userpatches.repo | |
| - name: "Cleanup leftover output" | |
| run: | | |
| rm -f userpatches/VERSION | |
| - name: ${{matrix.desc}} | |
| id: build-one-image | |
| timeout-minutes: 90 | |
| run: | | |
| # calculate loop from runner name | |
| if [ -z "${ImageOS}" ]; then | |
| USE_FIXED_LOOP_DEVICE=$(echo "${RUNNER_NAME}" | rev | cut -d"-" -f1 | rev | sed 's/^0*//' | sed -e 's/^/\/dev\/loop/') | |
| fi | |
| bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" USE_FIXED_LOOP_DEVICE="$USE_FIXED_LOOP_DEVICE" SHARE_LOG=yes MAKE_FOLDERS="archive" IMAGE_VERSION=${{ needs.matrix_prep.outputs.version }} ${{env.EXTRA_PARAMS_IMAGE}} ${{env.EXTRA_PARAMS_ALL_BUILDS}} | |
| - name: "Logs: ${{ steps.build-one-image.outputs.logs_url }}" | |
| if: always() | |
| run: | | |
| echo "Logs: ${{ steps.build-one-image.outputs.logs_url }}" | |
| - name: Install SSH key | |
| uses: shimataro/ssh-key-action@v2 | |
| with: | |
| key: ${{ secrets.KEY_UPLOAD }} | |
| known_hosts: ${{ secrets.KNOWN_HOSTS_ARMBIAN_UPLOAD }} | |
| if_key_exists: replace | |
| - name: Check API rate limits | |
| run: | | |
| # install dependencies | |
| if ! command -v "gh" > /dev/null 2>&1; then | |
| sudo apt-get -y -qq install gh | |
| fi | |
| while true | |
| do | |
| API_CALLS_TOTAL=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.limit') | |
| API_CALLS_LEFT=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.remaining') | |
| PERCENT=$(( API_CALLS_LEFT * 100 / API_CALLS_TOTAL )) | |
| if (( PERCENT > 20 )); then | |
| echo "API rate in good shape: ${PERCENT}% free" | |
| break | |
| fi | |
| echo "API rate lower than 20%, sleeping 10m" | |
| sleep 10m | |
| done | |
| # show current api rate | |
| curl -s -H "Accept: application/vnd.github.v3+json" -H "Authorization: token ${{ secrets.ACCESS_TOKEN }}" https://api.github.com/rate_limit | |
| - name: Import GPG key | |
| env: | |
| GPG_KEY1: ${{ secrets.GPG_KEY1 }} | |
| if: env.GPG_KEY1 != null | |
| uses: crazy-max/ghaction-import-gpg@v6 | |
| with: | |
| gpg_private_key: ${{ secrets.GPG_KEY1 }} | |
| passphrase: ${{ secrets.GPG_PASSPHRASE1 }} | |
| - name: Sign | |
| env: | |
| GPG_PASSPHRASE1: ${{ secrets.GPG_PASSPHRASE1 }} | |
| if: env.GPG_PASSPHRASE1 != null | |
| run: | | |
| for extension in zip xz qcow2; do | |
| if ls -l output/images/*/archive/*.$extension &>/dev/null; then | |
| echo ${{ secrets.GPG_PASSPHRASE1 }} | gpg --passphrase-fd 0 --armor --detach-sign --pinentry-mode loopback --batch --yes output/images/*/archive/*.$extension | |
| fi | |
| done | |
| # Download the artifacts (output/info) produced by the prepare-matrix job. | |
| - name: Download artifacts | |
| uses: actions/download-artifact@v6 | |
| with: | |
| name: build-info-json | |
| path: output/info | |
| - name: Generate torrent | |
| timeout-minutes: 3 | |
| run: | | |
| set -euo pipefail | |
| # Build tracker list (ignore empty/whitespace-only lines) | |
| TRACKERS=$( | |
| grep -v '^[[:space:]]*$' output/info/best-torrent-servers.txt \ | |
| | sort -R \ | |
| | sed 's/^/ --announce=/' | |
| ) | |
| # Find BOARD and FILE (first zip/xz/qcow2 in output/images/*/archive/) | |
| BOARD="" | |
| FILE="" | |
| first_match="" | |
| for ext in zip xz qcow2; do | |
| if ls output/images/*/archive/*."$ext" >/dev/null 2>&1; then | |
| first_match=$(ls -1 output/images/*/archive/*."$ext" | head -n1) | |
| # first_match = output/images/BOARD/archive/file.ext | |
| BOARD=$(basename "$(dirname "$(dirname "$first_match")")") # -> BOARD | |
| FILE=$(basename "$first_match") | |
| break | |
| fi | |
| done | |
| # Safety check | |
| if [ -z "$BOARD" ] || [ -z "$FILE" ]; then | |
| echo "No torrent source file found (zip/xz/qcow2) in output/images/*/archive" >&2 | |
| exit 1 | |
| fi | |
| # Nightly / stable logic (templated) | |
| RELEASE_REPOSITORY="${{ env.RELEASE_REPOSITORY }}" | |
| nightlybuild="${{ github.event.inputs.nightlybuild }}" | |
| nightlybuild_default="yes" | |
| effective_nightlybuild="${nightlybuild:-$nightlybuild_default}" | |
| WEBSEEDS="" | |
| if [ "$effective_nightlybuild" = "no" ] && [ "$RELEASE_REPOSITORY" != "distribution" ]; then | |
| ################################################################## | |
| # STABLE RELEASES | |
| # Use download mirrors (servers-download.jq) | |
| # URL: https://<host><download_path_images>/<BOARD>/archive/<FILE> | |
| # download_path_images is already normalized & defaults to /dl in JSON. | |
| ################################################################## | |
| WEBSEEDS=$( | |
| jq -r --arg board "$BOARD" --arg file "$FILE" ' | |
| .[] | |
| | "https://\(.host)\(.download_path_images)/\($board)/archive/\($file)" | |
| ' output/info/servers-download.jq | paste -sd, - | |
| ) | |
| else | |
| ################################################################## | |
| # NIGHTLY BUILDS | |
| # Use cache mirrors (servers-cache.jq) + GitHub as extra webseed | |
| # Path is FIXED: https://SERVER/cache/os/<version>/<FILE> | |
| ################################################################## | |
| WEBSEEDS=$( | |
| jq -r \ | |
| --arg repo "os" \ | |
| --arg ver "${{ needs.matrix_prep.outputs.version }}" \ | |
| --arg file "$FILE" ' | |
| .[] | |
| | "https://\(.host)/cache/\($repo)/\($ver)/\($file)" | |
| ' output/info/servers-cache.jq | paste -sd, - | |
| ) | |
| # Append GitHub webseed | |
| if [ -n "$WEBSEEDS" ]; then | |
| WEBSEEDS+="," | |
| fi | |
| WEBSEEDS+="https://github.com/armbian/os/releases/download/${{ needs.matrix_prep.outputs.version }}/${FILE}" | |
| fi | |
| echo "WEBSEEDS: $WEBSEEDS" | |
| # Go to the archive directory that contains FILE | |
| cd "$(dirname "$first_match")" || exit 1 | |
| mktorrent \ | |
| --comment="Armbian torrent for ${FILE}" \ | |
| --verbose \ | |
| ${TRACKERS} \ | |
| --web-seed="${WEBSEEDS}" \ | |
| "${FILE}" | |
| # drop .txt helper files | |
| rm -f *.txt | |
| - name: "Prepare release artifacts (exclude .asc, .sha, .torrent)" | |
| run: | | |
| # Start from a clean directory | |
| rm -rf output/release | |
| mkdir -p output/release | |
| # Copy wanted artifacts from output/images, preserving folder structure | |
| # e.g. output/images/BOARDNAME/archive/Armbian_*.img -> output/release/output/images/BOARDNAME/archive/... | |
| find output/images -type f -name 'Armbian_*.*' \ | |
| ! -name '*.asc' \ | |
| ! -name '*.sha' \ | |
| ! -name '*.torrent' \ | |
| -exec cp --parents {} output/release/ \; | |
| # debug: compare what was produced with what will be released | |
| echo "=== output/images ===" | |
| tree output/images | |
| echo "=== output/release ===" | |
| tree output/release | |
| - name: "Upload artefacts except .asc, .sha and .torrent" | |
| timeout-minutes: 60 | |
| if: ${{ ( github.event.inputs.nightlybuild || 'yes' ) == 'yes' || env.RELEASE_REPOSITORY == 'community' || env.RELEASE_REPOSITORY == 'distribution' }} | |
| uses: ncipollo/release-action@v1 | |
| with: | |
| repo: "${{ env.RELEASE_REPOSITORY }}" | |
| tag: "${{ needs.matrix_prep.outputs.version }}" | |
| artifacts: "output/release/output/images/*/*/Armbian_*.*" | |
| omitBody: true | |
| replacesArtifacts: true | |
| omitName: true | |
| makeLatest: false | |
| omitPrereleaseDuringUpdate: true | |
| allowUpdates: true | |
| artifactErrorsFailBuild: true | |
| token: "${{ env.GH_TOKEN }}" | |
| - name: "Upload to servers" | |
| run: | | |
| # debug | |
| echo "=== servers-cache.jq ===" | |
| jq . output/info/servers-cache.jq || cat output/info/servers-cache.jq | |
| echo "=== servers-upload.jq ===" | |
| jq . output/info/servers-upload.jq || cat output/info/servers-upload.jq | |
| max_retries=3 | |
| sync_from_json() { | |
| local json_file=$1 | |
| local mode=$2 # "cache" or "upload" | |
| echo "== Processing ${json_file} (mode: ${mode}) ==" | |
| # Iterate over JSON array elements | |
| while IFS= read -r server; do | |
| # JSON structure (same for all files): | |
| # { | |
| # "host": "...", | |
| # "upload_path": "...", | |
| # "download_path_archive": "...", | |
| # "download_path_images": "...", | |
| # "download_path_debs": "...", | |
| # "port": 22, | |
| # "username": "mirror" | |
| # } | |
| SERVER_URL=$(jq -r '.host // empty' <<<"$server") | |
| SERVER_PATH=$(jq -r '.upload_path // ""' <<<"$server") | |
| SERVER_PORT=$(jq -r '.port // 22' <<<"$server") | |
| SERVER_USERNAME=$(jq -r '.username // "mirror"' <<<"$server") | |
| # skip empty host | |
| [ -z "$SERVER_URL" ] && continue | |
| echo "Processing: $SERVER_URL:$SERVER_PORT (upload_path: $SERVER_PATH)" | |
| # Clean known_hosts entry (host:port form) | |
| ssh-keygen -f "${HOME}/.ssh/known_hosts" -R "[${SERVER_URL}]:${SERVER_PORT}" 2>/dev/null || true | |
| # Select rsync filters + remote subdir | |
| if [ "$mode" = "cache" ]; then | |
| # only .sha, .asc and .torrent | |
| RSYNC_FILTER=( | |
| --include='*/' | |
| --include='*.sha' | |
| --include='*.asc' | |
| --include='*.torrent' | |
| --exclude='*' | |
| ) | |
| REMOTE_SUBDIR="cache/artifacts/" | |
| else | |
| # everything | |
| RSYNC_FILTER=( | |
| --include='*/' | |
| --include='*' | |
| ) | |
| REMOTE_SUBDIR="incoming/${GITHUB_ACTOR}/" | |
| fi | |
| # Retry loop | |
| for attempt in $(seq 1 "$max_retries"); do | |
| echo "[$SERVER_URL] rsync attempt ${attempt}/${max_retries}..." | |
| if rsync --progress \ | |
| -e "ssh -p ${SERVER_PORT} -o StrictHostKeyChecking=accept-new" \ | |
| -rvP \ | |
| "${RSYNC_FILTER[@]}" \ | |
| output/images/ \ | |
| "${SERVER_USERNAME}@${SERVER_URL}:${SERVER_PATH}/${REMOTE_SUBDIR}" | |
| then | |
| echo "[$SERVER_URL] rsync successful." | |
| break | |
| fi | |
| if [ "$attempt" -eq "$max_retries" ]; then | |
| echo "[$SERVER_URL] rsync FAILED after ${max_retries} attempts." | |
| exit 1 | |
| fi | |
| echo "[$SERVER_URL] rsync failed. Retrying in 10 seconds..." | |
| sleep 10 | |
| done | |
| done < <(jq -c '.[]' "$json_file") | |
| } | |
| nightlybuild="${{ github.event.inputs.nightlybuild }}" | |
| nightlybuild_default="yes" | |
| RELEASE_REPOSITORY="${{ env.RELEASE_REPOSITORY }}" | |
| effective_nightlybuild="${nightlybuild:-$nightlybuild_default}" | |
| if [ "$effective_nightlybuild" = "yes" ] || \ | |
| [ "$RELEASE_REPOSITORY" = "community" ] || \ | |
| [ "$RELEASE_REPOSITORY" = "distribution" ]; then | |
| # Upload to cache servers: only the .sha/.asc/.torrent sidecar files | |
| sync_from_json output/info/servers-cache.jq cache | |
| fi | |
| if [ "$effective_nightlybuild" = "no" ] && \ | |
| [ "$RELEASE_REPOSITORY" = "os" ]; then | |
| # Upload to the upload servers: everything (images and sidecar files) | |
| sync_from_json output/info/servers-upload.jq upload | |
| fi | |
| # cleaning self hosted runners | |
| - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}" | |
| if: always() | |
| uses: armbian/actions/runner-clean@main | |
| "build-images-chunk-11": # templated "build-images-chunk-11" | |
| needs: [ "matrix_prep", "all-artifacts-ready" ] | |
| timeout-minutes: 240 | |
| if: ${{ !failure() && !cancelled() && ( github.repository_owner == 'armbian' ) && ( needs.matrix_prep.outputs.images-chunk-not-empty-11 == 'yes' ) }} # <-- HERE: Chunk number. | |
| strategy: | |
| fail-fast: false # let other jobs try to complete if one fails | |
| matrix: ${{ fromJSON(needs.matrix_prep.outputs.images-chunk-json-11) }} # <-- HERE: Chunk number. | |
| name: ${{ matrix.desc || 'Empty I11' }} # <-- HERE: Chunk number. | |
| runs-on: ${{ matrix.runs_on }} | |
| steps: | |
| - name: Install dependencies | |
| run: | | |
| if [ ! -e /usr/bin/mktorrent ]; then | |
| sudo apt-get update | |
| sudo apt-get install -y mktorrent | |
| fi | |
| # cleaning self hosted runners | |
| - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}" | |
| uses: armbian/actions/runner-clean@main | |
| # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later | |
| - name: Cleanup userpatches repo | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| run: rm -rf userpatches.repo | |
| - name: Checkout build repo | |
| uses: actions/checkout@v6 | |
| with: | |
| repository: ${{ env.BUILD_REPOSITORY }} | |
| ref: ${{ needs.matrix_prep.outputs.build-sha1 }} | |
| fetch-depth: ${{ matrix.fdepth }} | |
| clean: false # true is the default; it *will* delete the host's /dev if mounted inside. | |
| # clone the userpatches repo (`armbian/os`) | |
| - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}" | |
| uses: actions/checkout@v6 | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| with: | |
| repository: ${{ env.USERPATCHES_REPOSITORY }} | |
| ref: ${{ env.USERPATCHES_REF }} | |
| fetch-depth: ${{ matrix.fdepth }} | |
| clean: false # true is default. | |
| path: userpatches.repo | |
| - name: "Put userpatches in place, and remove userpatches repo" | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| run: | | |
| mkdir -pv userpatches | |
| rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/ | |
| rm -rf userpatches.repo | |
| - name: "Cleanup leftover output" | |
| run: | | |
| rm -f userpatches/VERSION | |
| - name: ${{matrix.desc}} | |
| id: build-one-image | |
| timeout-minutes: 90 | |
| run: | | |
| # calculate loop from runner name | |
| if [ -z "${ImageOS}" ]; then | |
| USE_FIXED_LOOP_DEVICE=$(echo "${RUNNER_NAME}" | rev | cut -d"-" -f1 | rev | sed 's/^0*//' | sed -e 's/^/\/dev\/loop/') | |
| fi | |
| bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" USE_FIXED_LOOP_DEVICE="$USE_FIXED_LOOP_DEVICE" SHARE_LOG=yes MAKE_FOLDERS="archive" IMAGE_VERSION=${{ needs.matrix_prep.outputs.version }} ${{env.EXTRA_PARAMS_IMAGE}} ${{env.EXTRA_PARAMS_ALL_BUILDS}} | |
| - name: "Logs: ${{ steps.build-one-image.outputs.logs_url }}" | |
| if: always() | |
| run: | | |
| echo "Logs: ${{ steps.build-one-image.outputs.logs_url }}" | |
| - name: Install SSH key | |
| uses: shimataro/ssh-key-action@v2 | |
| with: | |
| key: ${{ secrets.KEY_UPLOAD }} | |
| known_hosts: ${{ secrets.KNOWN_HOSTS_ARMBIAN_UPLOAD }} | |
| if_key_exists: replace | |
| - name: Check API rate limits | |
| run: | | |
| # install dependencies | |
| if ! command -v "gh" > /dev/null 2>&1; then | |
| sudo apt-get -y -qq install gh | |
| fi | |
| while true | |
| do | |
| API_CALLS_TOTAL=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.limit') | |
| API_CALLS_LEFT=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.remaining') | |
| PERCENT=$(( API_CALLS_LEFT * 100 / API_CALLS_TOTAL )) | |
| if (( PERCENT > 20 )); then | |
| echo "API rate in good shape: ${PERCENT}% free" | |
| break | |
| fi | |
| echo "API rate lower than 20%, sleeping 10m" | |
| sleep 10m | |
| done | |
| # show current api rate | |
| curl -s -H "Accept: application/vnd.github.v3+json" -H "Authorization: token ${{ secrets.ACCESS_TOKEN }}" https://api.github.com/rate_limit | |
| - name: Import GPG key | |
| env: | |
| GPG_KEY1: ${{ secrets.GPG_KEY1 }} | |
| if: env.GPG_KEY1 != null | |
| uses: crazy-max/ghaction-import-gpg@v6 | |
| with: | |
| gpg_private_key: ${{ secrets.GPG_KEY1 }} | |
| passphrase: ${{ secrets.GPG_PASSPHRASE1 }} | |
| - name: Sign | |
| env: | |
| GPG_PASSPHRASE1: ${{ secrets.GPG_PASSPHRASE1 }} | |
| if: env.GPG_PASSPHRASE1 != null | |
| run: | | |
| for extension in zip xz qcow2; do | |
| if ls -l output/images/*/archive/*.$extension &>/dev/null; then | |
| echo ${{ secrets.GPG_PASSPHRASE1 }} | gpg --passphrase-fd 0 --armor --detach-sign --pinentry-mode loopback --batch --yes output/images/*/archive/*.$extension | |
| fi | |
| done | |
| # Download the artifacts (output/info) produced by the prepare-matrix job. | |
| - name: Download artifacts | |
| uses: actions/download-artifact@v6 | |
| with: | |
| name: build-info-json | |
| path: output/info | |
| - name: Generate torrent | |
| timeout-minutes: 3 | |
| run: | | |
| set -euo pipefail | |
| # Build tracker list (ignore empty/whitespace-only lines) | |
| TRACKERS=$( | |
| grep -v '^[[:space:]]*$' output/info/best-torrent-servers.txt \ | |
| | sort -R \ | |
| | sed 's/^/ --announce=/' | |
| ) | |
| # Find BOARD and FILE (first zip/xz/qcow2 in output/images/*/archive/) | |
| BOARD="" | |
| FILE="" | |
| first_match="" | |
| for ext in zip xz qcow2; do | |
| if ls output/images/*/archive/*."$ext" >/dev/null 2>&1; then | |
| first_match=$(ls -1 output/images/*/archive/*."$ext" | head -n1) | |
| # first_match = output/images/BOARD/archive/file.ext | |
| BOARD=$(basename "$(dirname "$(dirname "$first_match")")") # -> BOARD | |
| FILE=$(basename "$first_match") | |
| break | |
| fi | |
| done | |
| # Safety check | |
| if [ -z "$BOARD" ] || [ -z "$FILE" ]; then | |
| echo "No torrent source file found (zip/xz/qcow2) in output/images/*/archive" >&2 | |
| exit 1 | |
| fi | |
| # Nightly / stable logic (templated) | |
| RELEASE_REPOSITORY="${{ env.RELEASE_REPOSITORY }}" | |
| nightlybuild="${{ github.event.inputs.nightlybuild }}" | |
| nightlybuild_default="yes" # plain value: inner quotes here would defeat the "yes"/"no" comparisons below | |
| effective_nightlybuild="${nightlybuild:-$nightlybuild_default}" | |
| WEBSEEDS="" | |
| if [ "$effective_nightlybuild" = "no" ] && [ "$RELEASE_REPOSITORY" != "distribution" ]; then | |
| ################################################################## | |
| # STABLE RELEASES | |
| # Use download mirrors (servers-download.jq) | |
| # URL: https://<host><download_path_images>/<BOARD>/archive/<FILE> | |
| # download_path_images is already normalized & defaults to /dl in JSON. | |
| ################################################################## | |
| WEBSEEDS=$( | |
| jq -r --arg board "$BOARD" --arg file "$FILE" ' | |
| .[] | |
| | "https://\(.host)\(.download_path_images)/\($board)/archive/\($file)" | |
| ' output/info/servers-download.jq | paste -sd, - | |
| ) | |
| else | |
| ################################################################## | |
| # NIGHTLY BUILDS | |
| # Use cache mirrors (servers-cache.jq) + GitHub as extra webseed | |
| # Path is FIXED: https://SERVER/cache/os/<version>/<FILE> | |
| ################################################################## | |
| WEBSEEDS=$( | |
| jq -r \ | |
| --arg repo "os" \ | |
| --arg ver "${{ needs.matrix_prep.outputs.version }}" \ | |
| --arg file "$FILE" ' | |
| .[] | |
| | "https://\(.host)/cache/\($repo)/\($ver)/\($file)" | |
| ' output/info/servers-cache.jq | paste -sd, - | |
| ) | |
| # Append GitHub webseed | |
| if [ -n "$WEBSEEDS" ]; then | |
| WEBSEEDS+="," | |
| fi | |
| WEBSEEDS+="https://github.com/armbian/os/releases/download/${{ needs.matrix_prep.outputs.version }}/${FILE}" | |
| fi | |
| echo "WEBSEEDS: $WEBSEEDS" | |
| # Go to the archive directory that contains FILE | |
| cd "$(dirname "$first_match")" || exit 1 | |
| mktorrent \ | |
| --comment="Armbian torrent for ${FILE}" \ | |
| --verbose \ | |
| ${TRACKERS} \ | |
| --web-seed="${WEBSEEDS}" \ | |
| "${FILE}" | |
| # drop .txt helper files | |
| rm -f *.txt | |
| - name: "Prepare release artifacts (exclude .asc, .sha, .torrent)" | |
| run: | | |
| # Start from a clean directory | |
| rm -rf output/release | |
| mkdir -p output/release | |
| # Copy wanted artifacts from output/images, preserving folder structure | |
| # e.g. output/images/BOARDNAME/archive/Armbian_*.img -> output/release/output/images/BOARDNAME/archive/... | |
| find output/images -type f -name 'Armbian_*.*' \ | |
| ! -name '*.asc' \ | |
| ! -name '*.sha' \ | |
| ! -name '*.torrent' \ | |
| -exec cp --parents {} output/release/ \; | |
| # debug | |
| tree output/images | |
| echo "# debug" | |
| tree output/release | |
| - name: "Upload artefacts except .asc, .sha and .torrent" | |
| timeout-minutes: 60 | |
| if: ${{ ( github.event.inputs.nightlybuild || 'yes' ) == 'yes' || env.RELEASE_REPOSITORY == 'community' || env.RELEASE_REPOSITORY == 'distribution' }} | |
| uses: ncipollo/release-action@v1 | |
| with: | |
| repo: "${{ env.RELEASE_REPOSITORY }}" | |
| tag: "${{ needs.matrix_prep.outputs.version }}" | |
| artifacts: "output/release/output/images/*/*/Armbian_*.*" | |
| omitBody: true | |
| replacesArtifacts: true | |
| omitName: true | |
| makeLatest: false | |
| omitPrereleaseDuringUpdate: true | |
| allowUpdates: true | |
| artifactErrorsFailBuild: true | |
| token: "${{ env.GH_TOKEN }}" | |
| - name: "Upload to servers" | |
| run: | | |
| # debug | |
| echo "=== servers-cache.jq ===" | |
| jq . output/info/servers-cache.jq || cat output/info/servers-cache.jq | |
| echo "=== servers-upload.jq ===" | |
| jq . output/info/servers-upload.jq || cat output/info/servers-upload.jq | |
| max_retries=3 | |
| sync_from_json() { | |
| local json_file=$1 | |
| local mode=$2 # "cache" or "upload" | |
| echo "== Processing ${json_file} (mode: ${mode}) ==" | |
| # Iterate over JSON array elements | |
| while IFS= read -r server; do | |
| # JSON structure (same for all files): | |
| # { | |
| # "host": "...", | |
| # "upload_path": "...", | |
| # "download_path_archive": "...", | |
| # "download_path_images": "...", | |
| # "download_path_debs": "...", | |
| # "port": 22, | |
| # "username": "mirror" | |
| # } | |
| SERVER_URL=$(jq -r '.host // empty' <<<"$server") | |
| SERVER_PATH=$(jq -r '.upload_path // ""' <<<"$server") | |
| SERVER_PORT=$(jq -r '.port // 22' <<<"$server") | |
| SERVER_USERNAME=$(jq -r '.username // "mirror"' <<<"$server") | |
| # skip empty host | |
| [ -z "$SERVER_URL" ] && continue | |
| echo "Processing: $SERVER_URL:$SERVER_PORT (upload_path: $SERVER_PATH)" | |
| # Clean known_hosts entry (host:port form) | |
| ssh-keygen -f "${HOME}/.ssh/known_hosts" -R "[${SERVER_URL}]:${SERVER_PORT}" 2>/dev/null || true | |
| # Select rsync filters + remote subdir | |
| if [ "$mode" = "cache" ]; then | |
| # only .sha, .asc and .torrent files | |
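| # rsync filter rules are first-match-wins: descend into all directories, keep the three extensions, drop everything else | |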
| RSYNC_FILTER=( | |
| --include='*/' | |
| --include='*.sha' | |
| --include='*.asc' | |
| --include='*.torrent' | |
| --exclude='*' | |
| ) | |
| REMOTE_SUBDIR="cache/artifacts/" | |
| else | |
| # everything | |
| RSYNC_FILTER=( | |
| --include='*/' | |
| --include='*' | |
| ) | |
| REMOTE_SUBDIR="incoming/${GITHUB_ACTOR}/" | |
| fi | |
| # Retry loop | |
| for attempt in $(seq 1 "$max_retries"); do | |
| echo "[$SERVER_URL] rsync attempt ${attempt}/${max_retries}..." | |
| if rsync --progress \ | |
| -e "ssh -p ${SERVER_PORT} -o StrictHostKeyChecking=accept-new" \ | |
| -rvP \ | |
| "${RSYNC_FILTER[@]}" \ | |
| output/images/ \ | |
| "${SERVER_USERNAME}@${SERVER_URL}:${SERVER_PATH}/${REMOTE_SUBDIR}" | |
| then | |
| echo "[$SERVER_URL] rsync successful." | |
| break | |
| fi | |
| if [ "$attempt" -eq "$max_retries" ]; then | |
| echo "[$SERVER_URL] rsync FAILED after ${max_retries} attempts." | |
| exit 1 | |
| fi | |
| echo "[$SERVER_URL] rsync failed. Retrying in 10 seconds..." | |
| sleep 10 | |
| done | |
| done < <(jq -c '.[]' "$json_file") | |
| } | |
| nightlybuild="${{ github.event.inputs.nightlybuild }}" | |
| nightlybuild_default="yes" # plain value: inner quotes here would defeat the "yes"/"no" comparisons below | |
| RELEASE_REPOSITORY="${{ env.RELEASE_REPOSITORY }}" | |
| effective_nightlybuild="${nightlybuild:-$nightlybuild_default}" | |
| if [ "$effective_nightlybuild" = "yes" ] || \ | |
| [ "$RELEASE_REPOSITORY" = "community" ] || \ | |
| [ "$RELEASE_REPOSITORY" = "distribution" ]; then | |
| # Upload to cache servers: only .sha/.asc/.torrent (matches the cache rsync filter) | |
| sync_from_json output/info/servers-cache.jq cache | |
| fi | |
| if [ "$effective_nightlybuild" = "no" ] && \ | |
| [ "$RELEASE_REPOSITORY" = "os" ]; then | |
| # Upload to release servers: everything (the upload filter excludes nothing) | |
| sync_from_json output/info/servers-upload.jq upload | |
| fi | |
| # cleaning self-hosted runners | |
| - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}" | |
| if: always() | |
| uses: armbian/actions/runner-clean@main | |
| "build-images-chunk-12": # templated "build-images-chunk-12" | |
| needs: [ "matrix_prep", "all-artifacts-ready" ] | |
| timeout-minutes: 240 | |
| if: ${{ !failure() && !cancelled() && ( github.repository_owner == 'armbian' ) && ( needs.matrix_prep.outputs.images-chunk-not-empty-12 == 'yes' ) }} # <-- HERE: Chunk number. | |
| strategy: | |
| fail-fast: false # let other jobs try to complete if one fails | |
| matrix: ${{ fromJSON(needs.matrix_prep.outputs.images-chunk-json-12) }} # <-- HERE: Chunk number. | |
| name: ${{ matrix.desc || 'Empty I12' }} # <-- HERE: Chunk number. | |
| runs-on: ${{ matrix.runs_on }} | |
| steps: | |
| - name: Install dependencies | |
| run: | | |
| if [ ! -e /usr/bin/mktorrent ]; then | |
| sudo apt-get update | |
| sudo apt-get install -y mktorrent | |
| fi | |
| # cleaning self-hosted runners | |
| - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}" | |
| uses: armbian/actions/runner-clean@main | |
| # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later | |
| - name: Cleanup userpatches repo | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| run: rm -rf userpatches.repo | |
| - name: Checkout build repo | |
| uses: actions/checkout@v6 | |
| with: | |
| repository: ${{ env.BUILD_REPOSITORY }} | |
| ref: ${{ needs.matrix_prep.outputs.build-sha1 }} | |
| fetch-depth: ${{ matrix.fdepth }} | |
| clean: false # true is default. it *will* delete the host's /dev if mounted inside. | |
| # clone the userpatches repo (`armbian/os`) | |
| - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}" | |
| uses: actions/checkout@v6 | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| with: | |
| repository: ${{ env.USERPATCHES_REPOSITORY }} | |
| ref: ${{ env.USERPATCHES_REF }} | |
| fetch-depth: ${{ matrix.fdepth }} | |
| clean: false # true is default. | |
| path: userpatches.repo | |
| - name: "Put userpatches in place, and remove userpatches repo" | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| run: | | |
| mkdir -pv userpatches | |
| rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/ | |
| rm -rf userpatches.repo | |
| - name: "Cleanup leftover output" | |
| run: | | |
| rm -f userpatches/VERSION | |
| - name: ${{matrix.desc}} | |
| id: build-one-image | |
| timeout-minutes: 90 | |
| run: | | |
| # calculate loop from runner name | |
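| # e.g. RUNNER_NAME="armbian-runner-07" -> last dash-separated field "07" -> leading zeros stripped -> /dev/loop7 (hypothetical runner name) | |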
| if [ -z "${ImageOS}" ]; then | |
| USE_FIXED_LOOP_DEVICE=$(echo ${RUNNER_NAME} | rev | cut -d"-" -f1 | rev | sed 's/^0*//' | sed -e 's/^/\/dev\/loop/') | |
| fi | |
| bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" USE_FIXED_LOOP_DEVICE="$USE_FIXED_LOOP_DEVICE" SHARE_LOG=yes MAKE_FOLDERS="archive" IMAGE_VERSION=${{ needs.matrix_prep.outputs.version }} ${{env.EXTRA_PARAMS_IMAGE}} ${{env.EXTRA_PARAMS_ALL_BUILDS}} | |
| - name: "Logs: ${{ steps.build-one-image.outputs.logs_url }}" | |
| if: always() | |
| run: | | |
| echo "Logs: ${{ steps.build-one-image.outputs.logs_url }}" | |
| - name: Install SSH key | |
| uses: shimataro/ssh-key-action@v2 | |
| with: | |
| key: ${{ secrets.KEY_UPLOAD }} | |
| known_hosts: ${{ secrets.KNOWN_HOSTS_ARMBIAN_UPLOAD }} | |
| if_key_exists: replace | |
| - name: Check API rate limits | |
| run: | | |
| # install dependencies | |
| if ! command -v "gh" > /dev/null 2>&1; then | |
| sudo apt-get -y -qq install gh | |
| fi | |
| while true | |
| do | |
| API_CALLS_TOTAL=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.limit') | |
| API_CALLS_LEFT=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.remaining') | |
| PERCENT=$(( API_CALLS_LEFT * 100 / API_CALLS_TOTAL )) | |
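| # e.g. API_CALLS_LEFT=4200 of API_CALLS_TOTAL=5000 -> PERCENT=84 (integer division; hypothetical numbers) | |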
| if (( $PERCENT > 20 )); then | |
| echo "API rate in good shape $PERCENT % free" | |
| exit 0 | |
| fi | |
| echo "API rate lower then 20%, sleping 10m" | |
| sleep 10m | |
| done | |
| # show current api rate (note: unreachable, since the loop above either exits 0 or keeps sleeping) | |
| curl -s -H "Accept: application/vnd.github.v3+json" -H "Authorization: token ${{ secrets.ACCESS_TOKEN }}" https://api.github.com/rate_limit | |
| - name: Import GPG key | |
| env: | |
| GPG_KEY1: ${{ secrets.GPG_KEY1 }} | |
| if: env.GPG_KEY1 != null | |
| uses: crazy-max/ghaction-import-gpg@v6 | |
| with: | |
| gpg_private_key: ${{ secrets.GPG_KEY1 }} | |
| passphrase: ${{ secrets.GPG_PASSPHRASE1 }} | |
| - name: Sign | |
| env: | |
| GPG_PASSPHRASE1: ${{ secrets.GPG_PASSPHRASE1 }} | |
| if: env.GPG_PASSPHRASE1 != null | |
| run: | | |
| for extension in zip xz qcow2; do | |
| if ls -l output/images/*/archive/*.$extension &>/dev/null; then | |
| echo "${GPG_PASSPHRASE1}" | gpg --passphrase-fd 0 --armor --detach-sign --pinentry-mode loopback --batch --yes output/images/*/archive/*.$extension | |
| fi | |
| done | |
| # Download the artifacts (output/info) produced by the prepare-matrix job. | |
| - name: Download artifacts | |
| uses: actions/download-artifact@v6 | |
| with: | |
| name: build-info-json | |
| path: output/info | |
| - name: Generate torrent | |
| timeout-minutes: 3 | |
| run: | | |
| set -euo pipefail | |
| # Build tracker list (ignore empty/whitespace-only lines) | |
| TRACKERS=$( | |
| grep -v '^[[:space:]]*$' output/info/best-torrent-servers.txt \ | |
| | sort -R \ | |
| | sed 's/^/ --announce=/' | |
| ) | |
| # Find BOARD and FILE (first zip/xz/qcow2 in output/images/*/archive/) | |
| BOARD="" | |
| FILE="" | |
| first_match="" | |
| for ext in zip xz qcow2; do | |
| if ls output/images/*/archive/*."$ext" >/dev/null 2>&1; then | |
| first_match=$(ls -1 output/images/*/archive/*."$ext" | head -n1) | |
| # first_match = output/images/BOARD/archive/file.ext | |
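| # e.g. output/images/bananapim5/archive/Armbian_example.img.xz -> BOARD=bananapim5, FILE=Armbian_example.img.xz (hypothetical name) | |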
| BOARD=$(basename "$(dirname "$(dirname "$first_match")")") # -> BOARD | |
| FILE=$(basename "$first_match") | |
| break | |
| fi | |
| done | |
| # Safety check | |
| if [ -z "$BOARD" ] || [ -z "$FILE" ]; then | |
| echo "No torrent source file found (zip/xz/qcow2) in output/images/*/archive" >&2 | |
| exit 1 | |
| fi | |
| # Nightly / stable logic (templated) | |
| RELEASE_REPOSITORY="${{ env.RELEASE_REPOSITORY }}" | |
| nightlybuild="${{ github.event.inputs.nightlybuild }}" | |
| nightlybuild_default="yes" # plain value: inner quotes here would defeat the "yes"/"no" comparisons below | |
| effective_nightlybuild="${nightlybuild:-$nightlybuild_default}" | |
| WEBSEEDS="" | |
| if [ "$effective_nightlybuild" = "no" ] && [ "$RELEASE_REPOSITORY" != "distribution" ]; then | |
| ################################################################## | |
| # STABLE RELEASES | |
| # Use download mirrors (servers-download.jq) | |
| # URL: https://<host><download_path_images>/<BOARD>/archive/<FILE> | |
| # download_path_images is already normalized & defaults to /dl in JSON. | |
| ################################################################## | |
| WEBSEEDS=$( | |
| jq -r --arg board "$BOARD" --arg file "$FILE" ' | |
| .[] | |
| | "https://\(.host)\(.download_path_images)/\($board)/archive/\($file)" | |
| ' output/info/servers-download.jq | paste -sd, - | |
| ) | |
| else | |
| ################################################################## | |
| # NIGHTLY BUILDS | |
| # Use cache mirrors (servers-cache.jq) + GitHub as extra webseed | |
| # Path is FIXED: https://SERVER/cache/os/<version>/<FILE> | |
| ################################################################## | |
| WEBSEEDS=$( | |
| jq -r \ | |
| --arg repo "os" \ | |
| --arg ver "${{ needs.matrix_prep.outputs.version }}" \ | |
| --arg file "$FILE" ' | |
| .[] | |
| | "https://\(.host)/cache/\($repo)/\($ver)/\($file)" | |
| ' output/info/servers-cache.jq | paste -sd, - | |
| ) | |
| # Append GitHub webseed | |
| if [ -n "$WEBSEEDS" ]; then | |
| WEBSEEDS+="," | |
| fi | |
| WEBSEEDS+="https://github.com/armbian/os/releases/download/${{ needs.matrix_prep.outputs.version }}/${FILE}" | |
| fi | |
| echo "WEBSEEDS: $WEBSEEDS" | |
| # Go to the archive directory that contains FILE | |
| cd "$(dirname "$first_match")" || exit 1 | |
| mktorrent \ | |
| --comment="Armbian torrent for ${FILE}" \ | |
| --verbose \ | |
| ${TRACKERS} \ | |
| --web-seed="${WEBSEEDS}" \ | |
| "${FILE}" | |
| # drop .txt helper files | |
| rm -f *.txt | |
| - name: "Prepare release artifacts (exclude .asc, .sha, .torrent)" | |
| run: | | |
| # Start from a clean directory | |
| rm -rf output/release | |
| mkdir -p output/release | |
| # Copy wanted artifacts from output/images, preserving folder structure | |
| # e.g. output/images/BOARDNAME/archive/Armbian_*.img -> output/release/output/images/BOARDNAME/archive/... | |
| find output/images -type f -name 'Armbian_*.*' \ | |
| ! -name '*.asc' \ | |
| ! -name '*.sha' \ | |
| ! -name '*.torrent' \ | |
| -exec cp --parents {} output/release/ \; | |
| # debug | |
| tree output/images | |
| echo "# debug" | |
| tree output/release | |
| - name: "Upload artefacts except .asc, .sha and .torrent" | |
| timeout-minutes: 60 | |
| if: ${{ ( github.event.inputs.nightlybuild || 'yes' ) == 'yes' || env.RELEASE_REPOSITORY == 'community' || env.RELEASE_REPOSITORY == 'distribution' }} | |
| uses: ncipollo/release-action@v1 | |
| with: | |
| repo: "${{ env.RELEASE_REPOSITORY }}" | |
| tag: "${{ needs.matrix_prep.outputs.version }}" | |
| artifacts: "output/release/output/images/*/*/Armbian_*.*" | |
| omitBody: true | |
| replacesArtifacts: true | |
| omitName: true | |
| makeLatest: false | |
| omitPrereleaseDuringUpdate: true | |
| allowUpdates: true | |
| artifactErrorsFailBuild: true | |
| token: "${{ env.GH_TOKEN }}" | |
| - name: "Upload to servers" | |
| run: | | |
| # debug | |
| echo "=== servers-cache.jq ===" | |
| jq . output/info/servers-cache.jq || cat output/info/servers-cache.jq | |
| echo "=== servers-upload.jq ===" | |
| jq . output/info/servers-upload.jq || cat output/info/servers-upload.jq | |
| max_retries=3 | |
| sync_from_json() { | |
| local json_file=$1 | |
| local mode=$2 # "cache" or "upload" | |
| echo "== Processing ${json_file} (mode: ${mode}) ==" | |
| # Iterate over JSON array elements | |
| while IFS= read -r server; do | |
| # JSON structure (same for all files): | |
| # { | |
| # "host": "...", | |
| # "upload_path": "...", | |
| # "download_path_archive": "...", | |
| # "download_path_images": "...", | |
| # "download_path_debs": "...", | |
| # "port": 22, | |
| # "username": "mirror" | |
| # } | |
| SERVER_URL=$(jq -r '.host // empty' <<<"$server") | |
| SERVER_PATH=$(jq -r '.upload_path // ""' <<<"$server") | |
| SERVER_PORT=$(jq -r '.port // 22' <<<"$server") | |
| SERVER_USERNAME=$(jq -r '.username // "mirror"' <<<"$server") | |
| # skip empty host | |
| [ -z "$SERVER_URL" ] && continue | |
| echo "Processing: $SERVER_URL:$SERVER_PORT (upload_path: $SERVER_PATH)" | |
| # Clean known_hosts entry (host:port form) | |
| ssh-keygen -f "${HOME}/.ssh/known_hosts" -R "[${SERVER_URL}]:${SERVER_PORT}" 2>/dev/null || true | |
| # Select rsync filters + remote subdir | |
| if [ "$mode" = "cache" ]; then | |
| # only .sha, .asc and .torrent files | |
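| # rsync filter rules are first-match-wins: descend into all directories, keep the three extensions, drop everything else | |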
| RSYNC_FILTER=( | |
| --include='*/' | |
| --include='*.sha' | |
| --include='*.asc' | |
| --include='*.torrent' | |
| --exclude='*' | |
| ) | |
| REMOTE_SUBDIR="cache/artifacts/" | |
| else | |
| # everything | |
| RSYNC_FILTER=( | |
| --include='*/' | |
| --include='*' | |
| ) | |
| REMOTE_SUBDIR="incoming/${GITHUB_ACTOR}/" | |
| fi | |
| # Retry loop | |
| for attempt in $(seq 1 "$max_retries"); do | |
| echo "[$SERVER_URL] rsync attempt ${attempt}/${max_retries}..." | |
| if rsync --progress \ | |
| -e "ssh -p ${SERVER_PORT} -o StrictHostKeyChecking=accept-new" \ | |
| -rvP \ | |
| "${RSYNC_FILTER[@]}" \ | |
| output/images/ \ | |
| "${SERVER_USERNAME}@${SERVER_URL}:${SERVER_PATH}/${REMOTE_SUBDIR}" | |
| then | |
| echo "[$SERVER_URL] rsync successful." | |
| break | |
| fi | |
| if [ "$attempt" -eq "$max_retries" ]; then | |
| echo "[$SERVER_URL] rsync FAILED after ${max_retries} attempts." | |
| exit 1 | |
| fi | |
| echo "[$SERVER_URL] rsync failed. Retrying in 10 seconds..." | |
| sleep 10 | |
| done | |
| done < <(jq -c '.[]' "$json_file") | |
| } | |
| nightlybuild="${{ github.event.inputs.nightlybuild }}" | |
| nightlybuild_default="yes" # plain value: inner quotes here would defeat the "yes"/"no" comparisons below | |
| RELEASE_REPOSITORY="${{ env.RELEASE_REPOSITORY }}" | |
| effective_nightlybuild="${nightlybuild:-$nightlybuild_default}" | |
| if [ "$effective_nightlybuild" = "yes" ] || \ | |
| [ "$RELEASE_REPOSITORY" = "community" ] || \ | |
| [ "$RELEASE_REPOSITORY" = "distribution" ]; then | |
| # Upload to cache servers: only .sha/.asc/.torrent (matches the cache rsync filter) | |
| sync_from_json output/info/servers-cache.jq cache | |
| fi | |
| if [ "$effective_nightlybuild" = "no" ] && \ | |
| [ "$RELEASE_REPOSITORY" = "os" ]; then | |
| # Upload to release servers: everything (the upload filter excludes nothing) | |
| sync_from_json output/info/servers-upload.jq upload | |
| fi | |
| # cleaning self-hosted runners | |
| - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}" | |
| if: always() | |
| uses: armbian/actions/runner-clean@main | |
| "build-images-chunk-13": # templated "build-images-chunk-13" | |
| needs: [ "matrix_prep", "all-artifacts-ready" ] | |
| timeout-minutes: 240 | |
| if: ${{ !failure() && !cancelled() && ( github.repository_owner == 'armbian' ) && ( needs.matrix_prep.outputs.images-chunk-not-empty-13 == 'yes' ) }} # <-- HERE: Chunk number. | |
| strategy: | |
| fail-fast: false # let other jobs try to complete if one fails | |
| matrix: ${{ fromJSON(needs.matrix_prep.outputs.images-chunk-json-13) }} # <-- HERE: Chunk number. | |
| name: ${{ matrix.desc || 'Empty I13' }} # <-- HERE: Chunk number. | |
| runs-on: ${{ matrix.runs_on }} | |
| steps: | |
| - name: Install dependencies | |
| run: | | |
| if [ ! -e /usr/bin/mktorrent ]; then | |
| sudo apt-get update | |
| sudo apt-get install -y mktorrent | |
| fi | |
| # cleaning self-hosted runners | |
| - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}" | |
| uses: armbian/actions/runner-clean@main | |
| # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later | |
| - name: Cleanup userpatches repo | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| run: rm -rf userpatches.repo | |
| - name: Checkout build repo | |
| uses: actions/checkout@v6 | |
| with: | |
| repository: ${{ env.BUILD_REPOSITORY }} | |
| ref: ${{ needs.matrix_prep.outputs.build-sha1 }} | |
| fetch-depth: ${{ matrix.fdepth }} | |
| clean: false # true is default. it *will* delete the host's /dev if mounted inside. | |
| # clone the userpatches repo (`armbian/os`) | |
| - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}" | |
| uses: actions/checkout@v6 | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| with: | |
| repository: ${{ env.USERPATCHES_REPOSITORY }} | |
| ref: ${{ env.USERPATCHES_REF }} | |
| fetch-depth: ${{ matrix.fdepth }} | |
| clean: false # true is default. | |
| path: userpatches.repo | |
| - name: "Put userpatches in place, and remove userpatches repo" | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| run: | | |
| mkdir -pv userpatches | |
| rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/ | |
| rm -rf userpatches.repo | |
| - name: "Cleanup leftover output" | |
| run: | | |
| rm -f userpatches/VERSION | |
| - name: ${{matrix.desc}} | |
| id: build-one-image | |
| timeout-minutes: 90 | |
| run: | | |
| # calculate loop from runner name | |
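| # e.g. RUNNER_NAME="armbian-runner-07" -> last dash-separated field "07" -> leading zeros stripped -> /dev/loop7 (hypothetical runner name) | |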
| if [ -z "${ImageOS}" ]; then | |
| USE_FIXED_LOOP_DEVICE=$(echo ${RUNNER_NAME} | rev | cut -d"-" -f1 | rev | sed 's/^0*//' | sed -e 's/^/\/dev\/loop/') | |
| fi | |
| bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" USE_FIXED_LOOP_DEVICE="$USE_FIXED_LOOP_DEVICE" SHARE_LOG=yes MAKE_FOLDERS="archive" IMAGE_VERSION=${{ needs.matrix_prep.outputs.version }} ${{env.EXTRA_PARAMS_IMAGE}} ${{env.EXTRA_PARAMS_ALL_BUILDS}} | |
| - name: "Logs: ${{ steps.build-one-image.outputs.logs_url }}" | |
| if: always() | |
| run: | | |
| echo "Logs: ${{ steps.build-one-image.outputs.logs_url }}" | |
| - name: Install SSH key | |
| uses: shimataro/ssh-key-action@v2 | |
| with: | |
| key: ${{ secrets.KEY_UPLOAD }} | |
| known_hosts: ${{ secrets.KNOWN_HOSTS_ARMBIAN_UPLOAD }} | |
| if_key_exists: replace | |
| - name: Check API rate limits | |
| run: | | |
| # install dependencies | |
| if ! command -v "gh" > /dev/null 2>&1; then | |
| sudo apt-get -y -qq install gh | |
| fi | |
| while true | |
| do | |
| API_CALLS_TOTAL=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.limit') | |
| API_CALLS_LEFT=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.remaining') | |
| PERCENT=$(( API_CALLS_LEFT * 100 / API_CALLS_TOTAL )) | |
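| # e.g. API_CALLS_LEFT=4200 of API_CALLS_TOTAL=5000 -> PERCENT=84 (integer division; hypothetical numbers) | |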
| if (( $PERCENT > 20 )); then | |
| echo "API rate in good shape $PERCENT % free" | |
| exit 0 | |
| fi | |
| echo "API rate lower then 20%, sleping 10m" | |
| sleep 10m | |
| done | |
| # show current api rate (note: unreachable, since the loop above either exits 0 or keeps sleeping) | |
| curl -s -H "Accept: application/vnd.github.v3+json" -H "Authorization: token ${{ secrets.ACCESS_TOKEN }}" https://api.github.com/rate_limit | |
| - name: Import GPG key | |
| env: | |
| GPG_KEY1: ${{ secrets.GPG_KEY1 }} | |
| if: env.GPG_KEY1 != null | |
| uses: crazy-max/ghaction-import-gpg@v6 | |
| with: | |
| gpg_private_key: ${{ secrets.GPG_KEY1 }} | |
| passphrase: ${{ secrets.GPG_PASSPHRASE1 }} | |
| - name: Sign | |
| env: | |
| GPG_PASSPHRASE1: ${{ secrets.GPG_PASSPHRASE1 }} | |
| if: env.GPG_PASSPHRASE1 != null | |
| run: | | |
| for extension in zip xz qcow2; do | |
| if ls -l output/images/*/archive/*.$extension &>/dev/null; then | |
| echo "${GPG_PASSPHRASE1}" | gpg --passphrase-fd 0 --armor --detach-sign --pinentry-mode loopback --batch --yes output/images/*/archive/*.$extension | |
| fi | |
| done | |
| # Download the artifacts (output/info) produced by the prepare-matrix job. | |
| - name: Download artifacts | |
| uses: actions/download-artifact@v6 | |
| with: | |
| name: build-info-json | |
| path: output/info | |
| - name: Generate torrent | |
| timeout-minutes: 3 | |
| run: | | |
| set -euo pipefail | |
| # Build tracker list (ignore empty/whitespace-only lines) | |
| TRACKERS=$( | |
| grep -v '^[[:space:]]*$' output/info/best-torrent-servers.txt \ | |
| | sort -R \ | |
| | sed 's/^/ --announce=/' | |
| ) | |
| # Find BOARD and FILE (first zip/xz/qcow2 in output/images/*/archive/) | |
| BOARD="" | |
| FILE="" | |
| first_match="" | |
| for ext in zip xz qcow2; do | |
| if ls output/images/*/archive/*."$ext" >/dev/null 2>&1; then | |
| first_match=$(ls -1 output/images/*/archive/*."$ext" | head -n1) | |
| # first_match = output/images/BOARD/archive/file.ext | |
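| # e.g. output/images/bananapim5/archive/Armbian_example.img.xz -> BOARD=bananapim5, FILE=Armbian_example.img.xz (hypothetical name) | |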
| BOARD=$(basename "$(dirname "$(dirname "$first_match")")") # -> BOARD | |
| FILE=$(basename "$first_match") | |
| break | |
| fi | |
| done | |
| # Safety check | |
| if [ -z "$BOARD" ] || [ -z "$FILE" ]; then | |
| echo "No torrent source file found (zip/xz/qcow2) in output/images/*/archive" >&2 | |
| exit 1 | |
| fi | |
| # Nightly / stable logic (templated) | |
| RELEASE_REPOSITORY="${{ env.RELEASE_REPOSITORY }}" | |
| nightlybuild="${{ github.event.inputs.nightlybuild }}" | |
| nightlybuild_default="yes" # plain value: inner quotes here would defeat the "yes"/"no" comparisons below | |
| effective_nightlybuild="${nightlybuild:-$nightlybuild_default}" | |
| WEBSEEDS="" | |
| if [ "$effective_nightlybuild" = "no" ] && [ "$RELEASE_REPOSITORY" != "distribution" ]; then | |
| ################################################################## | |
| # STABLE RELEASES | |
| # Use download mirrors (servers-download.jq) | |
| # URL: https://<host><download_path_images>/<BOARD>/archive/<FILE> | |
| # download_path_images is already normalized & defaults to /dl in JSON. | |
| ################################################################## | |
| WEBSEEDS=$( | |
| jq -r --arg board "$BOARD" --arg file "$FILE" ' | |
| .[] | |
| | "https://\(.host)\(.download_path_images)/\($board)/archive/\($file)" | |
| ' output/info/servers-download.jq | paste -sd, - | |
| ) | |
| else | |
| ################################################################## | |
| # NIGHTLY BUILDS | |
| # Use cache mirrors (servers-cache.jq) + GitHub as extra webseed | |
| # Path is FIXED: https://SERVER/cache/os/<version>/<FILE> | |
| ################################################################## | |
| WEBSEEDS=$( | |
| jq -r \ | |
| --arg repo "os" \ | |
| --arg ver "${{ needs.matrix_prep.outputs.version }}" \ | |
| --arg file "$FILE" ' | |
| .[] | |
| | "https://\(.host)/cache/\($repo)/\($ver)/\($file)" | |
| ' output/info/servers-cache.jq | paste -sd, - | |
| ) | |
| # Append GitHub webseed | |
| if [ -n "$WEBSEEDS" ]; then | |
| WEBSEEDS+="," | |
| fi | |
| WEBSEEDS+="https://github.com/armbian/os/releases/download/${{ needs.matrix_prep.outputs.version }}/${FILE}" | |
| fi | |
| echo "WEBSEEDS: $WEBSEEDS" | |
| # Go to the archive directory that contains FILE | |
| cd "$(dirname "$first_match")" || exit 1 | |
| mktorrent \ | |
| --comment="Armbian torrent for ${FILE}" \ | |
| --verbose \ | |
| ${TRACKERS} \ | |
| --web-seed="${WEBSEEDS}" \ | |
| "${FILE}" | |
| # drop .txt helper files | |
| rm -f *.txt | |
| - name: "Prepare release artifacts (exclude .asc, .sha, .torrent)" | |
| run: | | |
| # Start from a clean directory | |
| rm -rf output/release | |
| mkdir -p output/release | |
| # Copy wanted artifacts from output/images, preserving folder structure | |
| # e.g. output/images/BOARDNAME/archive/Armbian_*.img -> output/release/output/images/BOARDNAME/archive/... | |
| find output/images -type f -name 'Armbian_*.*' \ | |
| ! -name '*.asc' \ | |
| ! -name '*.sha' \ | |
| ! -name '*.torrent' \ | |
| -exec cp --parents {} output/release/ \; | |
| # debug | |
| tree output/images | |
| echo "# debug" | |
| tree output/release | |
| - name: "Upload artefacts except .asc, .sha and .torrent" | |
| timeout-minutes: 60 | |
| if: ${{ ( github.event.inputs.nightlybuild || 'yes' ) == 'yes' || env.RELEASE_REPOSITORY == 'community' || env.RELEASE_REPOSITORY == 'distribution' }} | |
| uses: ncipollo/release-action@v1 | |
| with: | |
| repo: "${{ env.RELEASE_REPOSITORY }}" | |
| tag: "${{ needs.matrix_prep.outputs.version }}" | |
| artifacts: "output/release/output/images/*/*/Armbian_*.*" | |
| omitBody: true | |
| replacesArtifacts: true | |
| omitName: true | |
| makeLatest: false | |
| omitPrereleaseDuringUpdate: true | |
| allowUpdates: true | |
| artifactErrorsFailBuild: true | |
| token: "${{ env.GH_TOKEN }}" | |
| - name: "Upload to servers" | |
| run: | | |
| # debug | |
| echo "=== servers-cache.jq ===" | |
| jq . output/info/servers-cache.jq || cat output/info/servers-cache.jq | |
| echo "=== servers-upload.jq ===" | |
| jq . output/info/servers-upload.jq || cat output/info/servers-upload.jq | |
| max_retries=3 | |
| sync_from_json() { | |
| local json_file=$1 | |
| local mode=$2 # "cache" or "upload" | |
| echo "== Processing ${json_file} (mode: ${mode}) ==" | |
| # Iterate over JSON array elements | |
| while IFS= read -r server; do | |
| # JSON structure (same for all files): | |
| # { | |
| # "host": "...", | |
| # "upload_path": "...", | |
| # "download_path_archive": "...", | |
| # "download_path_images": "...", | |
| # "download_path_debs": "...", | |
| # "port": 22, | |
| # "username": "mirror" | |
| # } | |
| SERVER_URL=$(jq -r '.host // empty' <<<"$server") | |
| SERVER_PATH=$(jq -r '.upload_path // ""' <<<"$server") | |
| SERVER_PORT=$(jq -r '.port // 22' <<<"$server") | |
| SERVER_USERNAME=$(jq -r '.username // "mirror"' <<<"$server") | |
| # skip empty host | |
| [ -z "$SERVER_URL" ] && continue | |
| echo "Processing: $SERVER_URL:$SERVER_PORT (upload_path: $SERVER_PATH)" | |
| # Clean known_hosts entry (host:port form) | |
| ssh-keygen -f "${HOME}/.ssh/known_hosts" -R "[${SERVER_URL}]:${SERVER_PORT}" 2>/dev/null || true | |
| # Select rsync filters + remote subdir | |
| if [ "$mode" = "cache" ]; then | |
| # only .sha, .asc and .torrent files | |
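| # rsync filter rules are first-match-wins: descend into all directories, keep the three extensions, drop everything else | |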
| RSYNC_FILTER=( | |
| --include='*/' | |
| --include='*.sha' | |
| --include='*.asc' | |
| --include='*.torrent' | |
| --exclude='*' | |
| ) | |
| REMOTE_SUBDIR="cache/artifacts/" | |
| else | |
| # everything | |
| RSYNC_FILTER=( | |
| --include='*/' | |
| --include='*' | |
| ) | |
| REMOTE_SUBDIR="incoming/${GITHUB_ACTOR}/" | |
| fi | |
| # Retry loop | |
| for attempt in $(seq 1 "$max_retries"); do | |
| echo "[$SERVER_URL] rsync attempt ${attempt}/${max_retries}..." | |
| if rsync --progress \ | |
| -e "ssh -p ${SERVER_PORT} -o StrictHostKeyChecking=accept-new" \ | |
| -rvP \ | |
| "${RSYNC_FILTER[@]}" \ | |
| output/images/ \ | |
| "${SERVER_USERNAME}@${SERVER_URL}:${SERVER_PATH}/${REMOTE_SUBDIR}" | |
| then | |
| echo "[$SERVER_URL] rsync successful." | |
| break | |
| fi | |
| if [ "$attempt" -eq "$max_retries" ]; then | |
| echo "[$SERVER_URL] rsync FAILED after ${max_retries} attempts." | |
| exit 1 | |
| fi | |
| echo "[$SERVER_URL] rsync failed. Retrying in 10 seconds..." | |
| sleep 10 | |
| done | |
| done < <(jq -c '.[]' "$json_file") | |
| } | |
| nightlybuild="${{ github.event.inputs.nightlybuild }}" | |
| nightlybuild_default="yes" # plain value: inner quotes here would defeat the "yes"/"no" comparisons below | |
| RELEASE_REPOSITORY="${{ env.RELEASE_REPOSITORY }}" | |
| effective_nightlybuild="${nightlybuild:-$nightlybuild_default}" | |
| if [ "$effective_nightlybuild" = "yes" ] || \ | |
| [ "$RELEASE_REPOSITORY" = "community" ] || \ | |
| [ "$RELEASE_REPOSITORY" = "distribution" ]; then | |
| # Upload to cache servers: only .sha/.asc/.torrent (matches the cache rsync filter) | |
| sync_from_json output/info/servers-cache.jq cache | |
| fi | |
| if [ "$effective_nightlybuild" = "no" ] && \ | |
| [ "$RELEASE_REPOSITORY" = "os" ]; then | |
| # Upload to release servers: everything (the upload filter excludes nothing) | |
| sync_from_json output/info/servers-upload.jq upload | |
| fi | |
| # cleaning self-hosted runners | |
| - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}" | |
| if: always() | |
| uses: armbian/actions/runner-clean@main | |
| "build-images-chunk-14": # templated "build-images-chunk-14" | |
| needs: [ "matrix_prep", "all-artifacts-ready" ] | |
| timeout-minutes: 240 | |
| if: ${{ !failure() && !cancelled() && ( github.repository_owner == 'armbian' ) && ( needs.matrix_prep.outputs.images-chunk-not-empty-14 == 'yes' ) }} # <-- HERE: Chunk number. | |
| strategy: | |
| fail-fast: false # let other jobs try to complete if one fails | |
| matrix: ${{ fromJSON(needs.matrix_prep.outputs.images-chunk-json-14) }} # <-- HERE: Chunk number. | |
| name: ${{ matrix.desc || 'Empty I14' }} # <-- HERE: Chunk number. | |
| runs-on: ${{ matrix.runs_on }} | |
| steps: | |
| - name: Install dependencies | |
| run: | | |
| if [ ! -e /usr/bin/mktorrent ]; then | |
| sudo apt-get update | |
| sudo apt-get install -y mktorrent | |
| fi | |
| # cleaning self-hosted runners | |
| - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}" | |
| uses: armbian/actions/runner-clean@main | |
| # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later | |
| - name: Cleanup userpatches repo | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| run: rm -rf userpatches.repo | |
| - name: Checkout build repo | |
| uses: actions/checkout@v6 | |
| with: | |
| repository: ${{ env.BUILD_REPOSITORY }} | |
| ref: ${{ needs.matrix_prep.outputs.build-sha1 }} | |
| fetch-depth: ${{ matrix.fdepth }} | |
| clean: false # true is default. it *will* delete the host's /dev if mounted inside. | |
| # clone the userpatches repo (`armbian/os`) | |
| - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}" | |
| uses: actions/checkout@v6 | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| with: | |
| repository: ${{ env.USERPATCHES_REPOSITORY }} | |
| ref: ${{ env.USERPATCHES_REF }} | |
| fetch-depth: ${{ matrix.fdepth }} | |
| clean: false # true is default. | |
| path: userpatches.repo | |
| - name: "Put userpatches in place, and remove userpatches repo" | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| run: | | |
| mkdir -pv userpatches | |
| rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/ | |
| rm -rf userpatches.repo | |
| - name: "Cleanup leftover output" | |
| run: | | |
| rm -f userpatches/VERSION | |
| - name: ${{matrix.desc}} | |
| id: build-one-image | |
| timeout-minutes: 90 | |
| run: | | |
| # calculate loop from runner name | |
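| # e.g. RUNNER_NAME="armbian-runner-07" -> last dash-separated field "07" -> leading zeros stripped -> /dev/loop7 (hypothetical runner name) | |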
| if [ -z "${ImageOS}" ]; then | |
| USE_FIXED_LOOP_DEVICE=$(echo ${RUNNER_NAME} | rev | cut -d"-" -f1 | rev | sed 's/^0*//' | sed -e 's/^/\/dev\/loop/') | |
| fi | |
| bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" USE_FIXED_LOOP_DEVICE="$USE_FIXED_LOOP_DEVICE" SHARE_LOG=yes MAKE_FOLDERS="archive" IMAGE_VERSION=${{ needs.matrix_prep.outputs.version }} ${{env.EXTRA_PARAMS_IMAGE}} ${{env.EXTRA_PARAMS_ALL_BUILDS}} | |
| - name: "Logs: ${{ steps.build-one-image.outputs.logs_url }}" | |
| if: always() | |
| run: | | |
| echo "Logs: ${{ steps.build-one-image.outputs.logs_url }}" | |
| - name: Install SSH key | |
| uses: shimataro/ssh-key-action@v2 | |
| with: | |
| key: ${{ secrets.KEY_UPLOAD }} | |
| known_hosts: ${{ secrets.KNOWN_HOSTS_ARMBIAN_UPLOAD }} | |
| if_key_exists: replace | |
| - name: Check API rate limits | |
| run: | | |
| # install dependencies | |
| if ! command -v "gh" > /dev/null 2>&1; then | |
| sudo apt-get -y -qq install gh | |
| fi | |
| while true | |
| do | |
| API_CALLS_TOTAL=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.limit') | |
| API_CALLS_LEFT=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.remaining') | |
| PERCENT=$(( API_CALLS_LEFT * 100 / API_CALLS_TOTAL )) | |
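| # e.g. API_CALLS_LEFT=4200 of API_CALLS_TOTAL=5000 -> PERCENT=84 (integer division; hypothetical numbers) | |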
| if (( $PERCENT > 20 )); then | |
| echo "API rate in good shape $PERCENT % free" | |
| exit 0 | |
| fi | |
| echo "API rate lower then 20%, sleping 10m" | |
| sleep 10m | |
| done | |
| # show current api rate (note: unreachable, since the loop above either exits 0 or keeps sleeping) | |
| curl -s -H "Accept: application/vnd.github.v3+json" -H "Authorization: token ${{ secrets.ACCESS_TOKEN }}" https://api.github.com/rate_limit | |
| - name: Import GPG key | |
| env: | |
| GPG_KEY1: ${{ secrets.GPG_KEY1 }} | |
| if: env.GPG_KEY1 != null | |
| uses: crazy-max/ghaction-import-gpg@v6 | |
| with: | |
| gpg_private_key: ${{ secrets.GPG_KEY1 }} | |
| passphrase: ${{ secrets.GPG_PASSPHRASE1 }} | |
| - name: Sign | |
| env: | |
| GPG_PASSPHRASE1: ${{ secrets.GPG_PASSPHRASE1 }} | |
| if: env.GPG_PASSPHRASE1 != null | |
| run: | | |
| for extension in zip xz qcow2; do | |
| if ls -l output/images/*/archive/*.$extension &>/dev/null; then | |
| echo "${GPG_PASSPHRASE1}" | gpg --passphrase-fd 0 --armor --detach-sign --pinentry-mode loopback --batch --yes output/images/*/archive/*.$extension | |
| fi | |
| done | |
| # Download the artifacts (output/info) produced by the prepare-matrix job. | |
| - name: Download artifacts | |
| uses: actions/download-artifact@v6 | |
| with: | |
| name: build-info-json | |
| path: output/info | |
| - name: Generate torrent | |
| timeout-minutes: 3 | |
| run: | | |
| set -euo pipefail | |
| # Build tracker list (ignore empty/whitespace-only lines) | |
| TRACKERS=$( | |
| grep -v '^[[:space:]]*$' output/info/best-torrent-servers.txt \ | |
| | sort -R \ | |
| | sed 's/^/ --announce=/' | |
| ) | |
| # Find BOARD and FILE (first zip/xz/qcow2 in output/images/*/archive/) | |
| BOARD="" | |
| FILE="" | |
| first_match="" | |
| for ext in zip xz qcow2; do | |
| if ls output/images/*/archive/*."$ext" >/dev/null 2>&1; then | |
| first_match=$(ls -1 output/images/*/archive/*."$ext" | head -n1) | |
| # first_match = output/images/BOARD/archive/file.ext | |
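| # e.g. output/images/bananapim5/archive/Armbian_example.img.xz -> BOARD=bananapim5, FILE=Armbian_example.img.xz (hypothetical name) | |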
| BOARD=$(basename "$(dirname "$(dirname "$first_match")")") # -> BOARD | |
| FILE=$(basename "$first_match") | |
| break | |
| fi | |
| done | |
| # Safety check | |
| if [ -z "$BOARD" ] || [ -z "$FILE" ]; then | |
| echo "No torrent source file found (zip/xz/qcow2) in output/images/*/archive" >&2 | |
| exit 1 | |
| fi | |
| # Nightly / stable logic (templated) | |
| RELEASE_REPOSITORY="${{ env.RELEASE_REPOSITORY }}" | |
| nightlybuild="${{ github.event.inputs.nightlybuild }}" | |
| nightlybuild_default="yes" # plain value: inner quotes here would defeat the "yes"/"no" comparisons below | |
| effective_nightlybuild="${nightlybuild:-$nightlybuild_default}" | |
| WEBSEEDS="" | |
| if [ "$effective_nightlybuild" = "no" ] && [ "$RELEASE_REPOSITORY" != "distribution" ]; then | |
| ################################################################## | |
| # STABLE RELEASES | |
| # Use download mirrors (servers-download.jq) | |
| # URL: https://<host><download_path_images>/<BOARD>/archive/<FILE> | |
| # download_path_images is already normalized & defaults to /dl in JSON. | |
| ################################################################## | |
| WEBSEEDS=$( | |
| jq -r --arg board "$BOARD" --arg file "$FILE" ' | |
| .[] | |
| | "https://\(.host)\(.download_path_images)/\($board)/archive/\($file)" | |
| ' output/info/servers-download.jq | paste -sd, - | |
| ) | |
| else | |
| ################################################################## | |
| # NIGHTLY BUILDS | |
| # Use cache mirrors (servers-cache.jq) + GitHub as extra webseed | |
| # Path is FIXED: https://SERVER/cache/os/<version>/<FILE> | |
| ################################################################## | |
| WEBSEEDS=$( | |
| jq -r \ | |
| --arg repo "os" \ | |
| --arg ver "${{ needs.matrix_prep.outputs.version }}" \ | |
| --arg file "$FILE" ' | |
| .[] | |
| | "https://\(.host)/cache/\($repo)/\($ver)/\($file)" | |
| ' output/info/servers-cache.jq | paste -sd, - | |
| ) | |
| # Append GitHub webseed | |
| if [ -n "$WEBSEEDS" ]; then | |
| WEBSEEDS+="," | |
| fi | |
| WEBSEEDS+="https://github.com/armbian/os/releases/download/${{ needs.matrix_prep.outputs.version }}/${FILE}" | |
| fi | |
| echo "WEBSEEDS: $WEBSEEDS" | |
| # Go to the archive directory that contains FILE | |
| cd "$(dirname "$first_match")" || exit 1 | |
| mktorrent \ | |
| --comment="Armbian torrent for ${FILE}" \ | |
| --verbose \ | |
| ${TRACKERS} \ | |
| --web-seed="${WEBSEEDS}" \ | |
| "${FILE}" | |
| # drop .txt helper files | |
| rm -f *.txt | |
| - name: "Prepare release artifacts (exclude .asc, .sha, .torrent)" | |
| run: | | |
| # Start from a clean directory | |
| rm -rf output/release | |
| mkdir -p output/release | |
| # Copy wanted artifacts from output/images, preserving folder structure | |
| # e.g. output/images/BOARDNAME/archive/Armbian_*.img -> output/release/output/images/BOARDNAME/archive/... | |
| find output/images -type f -name 'Armbian_*.*' \ | |
| ! -name '*.asc' \ | |
| ! -name '*.sha' \ | |
| ! -name '*.torrent' \ | |
| -exec cp --parents {} output/release/ \; | |
| # debug | |
| tree output/images | |
| echo "# debug" | |
| tree output/release | |
| - name: "Upload artefacts except .asc, .sha and .torrent" | |
| timeout-minutes: 60 | |
| if: ${{ ( github.event.inputs.nightlybuild || 'yes' ) == 'yes' || env.RELEASE_REPOSITORY == 'community' || env.RELEASE_REPOSITORY == 'distribution' }} | |
| uses: ncipollo/release-action@v1 | |
| with: | |
| repo: "${{ env.RELEASE_REPOSITORY }}" | |
| tag: "${{ needs.matrix_prep.outputs.version }}" | |
| artifacts: "output/release/output/images/*/*/Armbian_*.*" | |
| omitBody: true | |
| replacesArtifacts: true | |
| omitName: true | |
| makeLatest: false | |
| omitPrereleaseDuringUpdate: true | |
| allowUpdates: true | |
| artifactErrorsFailBuild: true | |
| token: "${{ env.GH_TOKEN }}" | |
| - name: "Upload to servers" | |
| run: | | |
| # debug | |
| echo "=== servers-cache.jq ===" | |
| jq . output/info/servers-cache.jq || cat output/info/servers-cache.jq | |
| echo "=== servers-upload.jq ===" | |
| jq . output/info/servers-upload.jq || cat output/info/servers-upload.jq | |
| max_retries=3 | |
| sync_from_json() { | |
| local json_file=$1 | |
| local mode=$2 # "cache" or "upload" | |
| echo "== Processing ${json_file} (mode: ${mode}) ==" | |
| # Iterate over JSON array elements | |
| while IFS= read -r server; do | |
| # JSON structure (same for all files): | |
| # { | |
| # "host": "...", | |
| # "upload_path": "...", | |
| # "download_path_archive": "...", | |
| # "download_path_images": "...", | |
| # "download_path_debs": "...", | |
| # "port": 22, | |
| # "username": "mirror" | |
| # } | |
| SERVER_URL=$(jq -r '.host // empty' <<<"$server") | |
| SERVER_PATH=$(jq -r '.upload_path // ""' <<<"$server") | |
| SERVER_PORT=$(jq -r '.port // 22' <<<"$server") | |
| SERVER_USERNAME=$(jq -r '.username // "mirror"' <<<"$server") | |
| # skip empty host | |
| [ -z "$SERVER_URL" ] && continue | |
| echo "Processing: $SERVER_URL:$SERVER_PORT (upload_path: $SERVER_PATH)" | |
| # Clean known_hosts entry (host:port form) | |
| ssh-keygen -f "${HOME}/.ssh/known_hosts" -R "[${SERVER_URL}]:${SERVER_PORT}" 2>/dev/null || true | |
| # Select rsync filters + remote subdir | |
| if [ "$mode" = "cache" ]; then | |
| # only .sha, .asc and .torrent files | |
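| # rsync filter rules are first-match-wins: descend into all directories, keep the three extensions, drop everything else | |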
| RSYNC_FILTER=( | |
| --include='*/' | |
| --include='*.sha' | |
| --include='*.asc' | |
| --include='*.torrent' | |
| --exclude='*' | |
| ) | |
| REMOTE_SUBDIR="cache/artifacts/" | |
| else | |
| # everything | |
| RSYNC_FILTER=( | |
| --include='*/' | |
| --include='*' | |
| ) | |
| REMOTE_SUBDIR="incoming/${GITHUB_ACTOR}/" | |
| fi | |
| # Retry loop | |
| for attempt in $(seq 1 "$max_retries"); do | |
| echo "[$SERVER_URL] rsync attempt ${attempt}/${max_retries}..." | |
| if rsync --progress \ | |
| -e "ssh -p ${SERVER_PORT} -o StrictHostKeyChecking=accept-new" \ | |
| -rvP \ | |
| "${RSYNC_FILTER[@]}" \ | |
| output/images/ \ | |
| "${SERVER_USERNAME}@${SERVER_URL}:${SERVER_PATH}/${REMOTE_SUBDIR}" | |
| then | |
| echo "[$SERVER_URL] rsync successful." | |
| break | |
| fi | |
| if [ "$attempt" -eq "$max_retries" ]; then | |
| echo "[$SERVER_URL] rsync FAILED after ${max_retries} attempts." | |
| exit 1 | |
| fi | |
| echo "[$SERVER_URL] rsync failed. Retrying in 10 seconds..." | |
| sleep 10 | |
| done | |
| done < <(jq -c '.[]' "$json_file") | |
| } | |
| nightlybuild="${{ github.event.inputs.nightlybuild }}" | |
| nightlybuild_default="yes" # plain value: inner quotes here would defeat the "yes"/"no" comparisons below | |
| RELEASE_REPOSITORY="${{ env.RELEASE_REPOSITORY }}" | |
| effective_nightlybuild="${nightlybuild:-$nightlybuild_default}" | |
| if [ "$effective_nightlybuild" = "yes" ] || \ | |
| [ "$RELEASE_REPOSITORY" = "community" ] || \ | |
| [ "$RELEASE_REPOSITORY" = "distribution" ]; then | |
| # Upload to cache servers: only .sha/.asc/.torrent (matches the cache rsync filter) | |
| sync_from_json output/info/servers-cache.jq cache | |
| fi | |
| if [ "$effective_nightlybuild" = "no" ] && \ | |
| [ "$RELEASE_REPOSITORY" = "os" ]; then | |
| # Upload to release servers: everything (the upload filter excludes nothing) | |
| sync_from_json output/info/servers-upload.jq upload | |
| fi | |
| # cleaning self-hosted runners | |
| - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}" | |
| if: always() | |
| uses: armbian/actions/runner-clean@main | |
| "build-images-chunk-15": # templated "build-images-chunk-15" | |
| needs: [ "matrix_prep", "all-artifacts-ready" ] | |
| timeout-minutes: 240 | |
| if: ${{ !failure() && !cancelled() && ( github.repository_owner == 'armbian' ) && ( needs.matrix_prep.outputs.images-chunk-not-empty-15 == 'yes' ) }} # <-- HERE: Chunk number. | |
| strategy: | |
| fail-fast: false # let other jobs try to complete if one fails | |
| matrix: ${{ fromJSON(needs.matrix_prep.outputs.images-chunk-json-15) }} # <-- HERE: Chunk number. | |
| name: ${{ matrix.desc || 'Empty I15' }} # <-- HERE: Chunk number. | |
| runs-on: ${{ matrix.runs_on }} | |
| steps: | |
| - name: Install dependencies | |
| run: | | |
| if [ ! -e /usr/bin/mktorrent ]; then | |
| sudo apt-get update | |
| sudo apt-get install -y mktorrent | |
| fi | |
| # cleaning self-hosted runners | |
| - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}" | |
| uses: armbian/actions/runner-clean@main | |
| # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later | |
| - name: Cleanup userpatches repo | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| run: rm -rf userpatches.repo | |
| - name: Checkout build repo | |
| uses: actions/checkout@v6 | |
| with: | |
| repository: ${{ env.BUILD_REPOSITORY }} | |
| ref: ${{ needs.matrix_prep.outputs.build-sha1 }} | |
| fetch-depth: ${{ matrix.fdepth }} | |
| clean: false # true is default. it *will* delete the host's /dev if mounted inside. | |
| # clone the userpatches repo (`armbian/os`) | |
| - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}" | |
| uses: actions/checkout@v6 | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| with: | |
| repository: ${{ env.USERPATCHES_REPOSITORY }} | |
| ref: ${{ env.USERPATCHES_REF }} | |
| fetch-depth: ${{ matrix.fdepth }} | |
| clean: false # true is default. | |
| path: userpatches.repo | |
| - name: "Put userpatches in place, and remove userpatches repo" | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| run: | | |
| mkdir -pv userpatches | |
| rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/ | |
| rm -rf userpatches.repo | |
| - name: "Cleanup leftover output" | |
| run: | | |
| rm -f userpatches/VERSION | |
| - name: ${{matrix.desc}} | |
| id: build-one-image | |
| timeout-minutes: 90 | |
| run: | | |
| # calculate loop from runner name | |
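| # e.g. RUNNER_NAME="armbian-runner-07" -> last dash-separated field "07" -> leading zeros stripped -> /dev/loop7 (hypothetical runner name) | |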
| if [ -z "${ImageOS}" ]; then | |
| USE_FIXED_LOOP_DEVICE=$(echo ${RUNNER_NAME} | rev | cut -d"-" -f1 | rev | sed 's/^0*//' | sed -e 's/^/\/dev\/loop/') | |
| fi | |
| bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" USE_FIXED_LOOP_DEVICE="$USE_FIXED_LOOP_DEVICE" SHARE_LOG=yes MAKE_FOLDERS="archive" IMAGE_VERSION=${{ needs.matrix_prep.outputs.version }} ${{env.EXTRA_PARAMS_IMAGE}} ${{env.EXTRA_PARAMS_ALL_BUILDS}} | |
| - name: "Logs: ${{ steps.build-one-image.outputs.logs_url }}" | |
| if: always() | |
| run: | | |
| echo "Logs: ${{ steps.build-one-image.outputs.logs_url }}" | |
| - name: Install SSH key | |
| uses: shimataro/ssh-key-action@v2 | |
| with: | |
| key: ${{ secrets.KEY_UPLOAD }} | |
| known_hosts: ${{ secrets.KNOWN_HOSTS_ARMBIAN_UPLOAD }} | |
| if_key_exists: replace | |
| - name: Check API rate limits | |
| run: | | |
| # install dependencies | |
| if ! command -v "gh" > /dev/null 2>&1; then | |
| sudo apt-get -y -qq install gh | |
| fi | |
| while true | |
| do | |
| API_CALLS_TOTAL=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.limit') | |
| API_CALLS_LEFT=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.remaining') | |
| PERCENT=$(( API_CALLS_LEFT * 100 / API_CALLS_TOTAL )) | |
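| # e.g. API_CALLS_LEFT=4200 of API_CALLS_TOTAL=5000 -> PERCENT=84 (integer division; hypothetical numbers) | |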
| if (( $PERCENT > 20 )); then | |
| echo "API rate in good shape $PERCENT % free" | |
| exit 0 | |
| fi | |
| echo "API rate lower then 20%, sleping 10m" | |
| sleep 10m | |
| done | |
| # show current api rate (note: unreachable, since the loop above either exits 0 or keeps sleeping) | |
| curl -s -H "Accept: application/vnd.github.v3+json" -H "Authorization: token ${{ secrets.ACCESS_TOKEN }}" https://api.github.com/rate_limit | |
| - name: Import GPG key | |
| env: | |
| GPG_KEY1: ${{ secrets.GPG_KEY1 }} | |
| if: env.GPG_KEY1 != null | |
| uses: crazy-max/ghaction-import-gpg@v6 | |
| with: | |
| gpg_private_key: ${{ secrets.GPG_KEY1 }} | |
| passphrase: ${{ secrets.GPG_PASSPHRASE1 }} | |
| - name: Sign | |
| env: | |
| GPG_PASSPHRASE1: ${{ secrets.GPG_PASSPHRASE1 }} | |
| if: env.GPG_PASSPHRASE1 != null | |
| run: | | |
| for extension in zip xz qcow2; do | |
| if ls -l output/images/*/archive/*.$extension &>/dev/null; then | |
| echo "${GPG_PASSPHRASE1}" | gpg --passphrase-fd 0 --armor --detach-sign --pinentry-mode loopback --batch --yes output/images/*/archive/*.$extension | |
| fi | |
| done | |
| # Download the artifacts (output/info) produced by the prepare-matrix job. | |
| - name: Download artifacts | |
| uses: actions/download-artifact@v6 | |
| with: | |
| name: build-info-json | |
| path: output/info | |
| - name: Generate torrent | |
| timeout-minutes: 3 | |
| run: | | |
| set -euo pipefail | |
| # Build tracker list (ignore empty/whitespace-only lines) | |
| TRACKERS=$( | |
| grep -v '^[[:space:]]*$' output/info/best-torrent-servers.txt \ | |
| | sort -R \ | |
| | sed 's/^/ --announce=/' | |
| ) | |
| # Find BOARD and FILE (first zip/xz/qcow2 in output/images/*/archive/) | |
| BOARD="" | |
| FILE="" | |
| first_match="" | |
| for ext in zip xz qcow2; do | |
| if ls output/images/*/archive/*."$ext" >/dev/null 2>&1; then | |
| first_match=$(ls -1 output/images/*/archive/*."$ext" | head -n1) | |
| # first_match = output/images/BOARD/archive/file.ext | |
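| # e.g. output/images/bananapim5/archive/Armbian_example.img.xz -> BOARD=bananapim5, FILE=Armbian_example.img.xz (hypothetical name) | |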
| BOARD=$(basename "$(dirname "$(dirname "$first_match")")") # -> BOARD | |
| FILE=$(basename "$first_match") | |
| break | |
| fi | |
| done | |
| # Safety check | |
| if [ -z "$BOARD" ] || [ -z "$FILE" ]; then | |
| echo "No torrent source file found (zip/xz/qcow2) in output/images/*/archive" >&2 | |
| exit 1 | |
| fi | |
| # Nightly / stable logic (templated) | |
| RELEASE_REPOSITORY="${{ env.RELEASE_REPOSITORY }}" | |
| nightlybuild="${{ github.event.inputs.nightlybuild }}" | |
| nightlybuild_default="yes" # plain value: inner quotes here would defeat the "yes"/"no" comparisons below | |
| effective_nightlybuild="${nightlybuild:-$nightlybuild_default}" | |
| WEBSEEDS="" | |
| if [ "$effective_nightlybuild" = "no" ] && [ "$RELEASE_REPOSITORY" != "distribution" ]; then | |
| ################################################################## | |
| # STABLE RELEASES | |
| # Use download mirrors (servers-download.jq) | |
| # URL: https://<host><download_path_images>/<BOARD>/archive/<FILE> | |
| # download_path_images is already normalized & defaults to /dl in JSON. | |
| ################################################################## | |
| WEBSEEDS=$( | |
| jq -r --arg board "$BOARD" --arg file "$FILE" ' | |
| .[] | |
| | "https://\(.host)\(.download_path_images)/\($board)/archive/\($file)" | |
| ' output/info/servers-download.jq | paste -sd, - | |
| ) | |
| else | |
| ################################################################## | |
| # NIGHTLY BUILDS | |
| # Use cache mirrors (servers-cache.jq) + GitHub as extra webseed | |
| # Path is FIXED: https://SERVER/cache/os/<version>/<FILE> | |
| ################################################################## | |
| WEBSEEDS=$( | |
| jq -r \ | |
| --arg repo "os" \ | |
| --arg ver "${{ needs.matrix_prep.outputs.version }}" \ | |
| --arg file "$FILE" ' | |
| .[] | |
| | "https://\(.host)/cache/\($repo)/\($ver)/\($file)" | |
| ' output/info/servers-cache.jq | paste -sd, - | |
| ) | |
| # Append GitHub webseed | |
| if [ -n "$WEBSEEDS" ]; then | |
| WEBSEEDS+="," | |
| fi | |
| WEBSEEDS+="https://github.com/armbian/os/releases/download/${{ needs.matrix_prep.outputs.version }}/${FILE}" | |
| fi | |
| echo "WEBSEEDS: $WEBSEEDS" | |
| # Go to the archive directory that contains FILE | |
| cd "$(dirname "$first_match")" || exit 1 | |
| mktorrent \ | |
| --comment="Armbian torrent for ${FILE}" \ | |
| --verbose \ | |
| ${TRACKERS} \ | |
| --web-seed="${WEBSEEDS}" \ | |
| "${FILE}" | |
| # drop .txt helper files | |
| rm -f *.txt | |
| - name: "Prepare release artifacts (exclude .asc, .sha, .torrent)" | |
| run: | | |
| # Start from a clean directory | |
| rm -rf output/release | |
| mkdir -p output/release | |
| # Copy wanted artifacts from output/images, preserving folder structure | |
| # e.g. output/images/BOARDNAME/archive/Armbian_*.img -> output/release/output/images/BOARDNAME/archive/... | |
| find output/images -type f -name 'Armbian_*.*' \ | |
| ! -name '*.asc' \ | |
| ! -name '*.sha' \ | |
| ! -name '*.torrent' \ | |
| -exec cp --parents {} output/release/ \; | |
| # debug | |
| tree output/images | |
| echo "# debug" | |
| tree output/release | |
| - name: "Upload artefacts except .asc, .sha and .torrent" | |
| timeout-minutes: 60 | |
| if: ${{ ( github.event.inputs.nightlybuild || 'yes' ) == 'yes' || env.RELEASE_REPOSITORY == 'community' || env.RELEASE_REPOSITORY == 'distribution' }} | |
| uses: ncipollo/release-action@v1 | |
| with: | |
| repo: "${{ env.RELEASE_REPOSITORY }}" | |
| tag: "${{ needs.matrix_prep.outputs.version }}" | |
| artifacts: "output/release/output/images/*/*/Armbian_*.*" | |
| omitBody: true | |
| replacesArtifacts: true | |
| omitName: true | |
| makeLatest: false | |
| omitPrereleaseDuringUpdate: true | |
| allowUpdates: true | |
| artifactErrorsFailBuild: true | |
| token: "${{ env.GH_TOKEN }}" | |
| - name: "Upload to servers" | |
| run: | | |
| # debug | |
| echo "=== servers-cache.jq ===" | |
| jq . output/info/servers-cache.jq || cat output/info/servers-cache.jq | |
| echo "=== servers-upload.jq ===" | |
| jq . output/info/servers-upload.jq || cat output/info/servers-upload.jq | |
| max_retries=3 | |
| sync_from_json() { | |
| local json_file=$1 | |
| local mode=$2 # "cache" or "upload" | |
| echo "== Processing ${json_file} (mode: ${mode}) ==" | |
| # Iterate over JSON array elements | |
| while IFS= read -r server; do | |
| # JSON structure (same for all files): | |
| # { | |
| # "host": "...", | |
| # "upload_path": "...", | |
| # "download_path_archive": "...", | |
| # "download_path_images": "...", | |
| # "download_path_debs": "...", | |
| # "port": 22, | |
| # "username": "mirror" | |
| # } | |
| SERVER_URL=$(jq -r '.host // empty' <<<"$server") | |
| SERVER_PATH=$(jq -r '.upload_path // ""' <<<"$server") | |
| SERVER_PORT=$(jq -r '.port // 22' <<<"$server") | |
| SERVER_USERNAME=$(jq -r '.username // "mirror"' <<<"$server") | |
| # skip empty host | |
| [ -z "$SERVER_URL" ] && continue | |
| echo "Processing: $SERVER_URL:$SERVER_PORT (upload_path: $SERVER_PATH)" | |
| # Clean known_hosts entry (host:port form) | |
| ssh-keygen -f "${HOME}/.ssh/known_hosts" -R "[${SERVER_URL}]:${SERVER_PORT}" 2>/dev/null || true | |
| # Select rsync filters + remote subdir | |
| if [ "$mode" = "cache" ]; then | |
| # only .sha, .asc and .torrent files | |
| RSYNC_FILTER=( | |
| --include='*/' | |
| --include='*.sha' | |
| --include='*.asc' | |
| --include='*.torrent' | |
| --exclude='*' | |
| ) | |
| REMOTE_SUBDIR="cache/artifacts/" | |
| else | |
| # everything | |
| RSYNC_FILTER=( | |
| --include='*/' | |
| --include='*' | |
| ) | |
| REMOTE_SUBDIR="incoming/${GITHUB_ACTOR}/" | |
| fi | |
| # Retry loop | |
| for attempt in $(seq 1 "$max_retries"); do | |
| echo "[$SERVER_URL] rsync attempt ${attempt}/${max_retries}..." | |
| if rsync --progress \ | |
| -e "ssh -p ${SERVER_PORT} -o StrictHostKeyChecking=accept-new" \ | |
| -rvP \ | |
| "${RSYNC_FILTER[@]}" \ | |
| output/images/ \ | |
| "${SERVER_USERNAME}@${SERVER_URL}:${SERVER_PATH}/${REMOTE_SUBDIR}" | |
| then | |
| echo "[$SERVER_URL] rsync successful." | |
| break | |
| fi | |
| if [ "$attempt" -eq "$max_retries" ]; then | |
| echo "[$SERVER_URL] rsync FAILED after ${max_retries} attempts." | |
| exit 1 | |
| fi | |
| echo "[$SERVER_URL] rsync failed. Retrying in 10 seconds..." | |
| sleep 10 | |
| done | |
| done < <(jq -c '.[]' "$json_file") | |
| } | |
| nightlybuild="${{ github.event.inputs.nightlybuild }}" | |
| nightlybuild_default="yes"  # plain value; with extra quotes ("'yes'") the comparisons below would never match | |
| RELEASE_REPOSITORY="${{ env.RELEASE_REPOSITORY }}" | |
| effective_nightlybuild="${nightlybuild:-$nightlybuild_default}" | |
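| # ${nightlybuild:-$nightlybuild_default} keeps the dispatch input when set and falls back to the default on scheduled runs, mirroring the "|| 'yes'" expressions in the step conditions. | |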
| if [ "$effective_nightlybuild" = "yes" ] || \ | |
| [ "$RELEASE_REPOSITORY" = "community" ] || \ | |
| [ "$RELEASE_REPOSITORY" = "distribution" ]; then | |
| # Upload to cache servers: only .sha/.asc/.torrent | |
| sync_from_json output/info/servers-cache.jq cache | |
| fi | |
| if [ "$effective_nightlybuild" = "no" ] && \ | |
| [ "$RELEASE_REPOSITORY" = "os" ]; then | |
| # Upload to release servers: everything under output/images (no filter) | |
| sync_from_json output/info/servers-upload.jq upload | |
| fi | |
| # cleaning self-hosted runners | |
| - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}" | |
| if: always() | |
| uses: armbian/actions/runner-clean@main | |
| "build-images-chunk-16": # templated "build-images-chunk-16" | |
| needs: [ "matrix_prep", "all-artifacts-ready" ] | |
| timeout-minutes: 240 | |
| if: ${{ !failure() && !cancelled() && ( github.repository_owner == 'armbian' ) && ( needs.matrix_prep.outputs.images-chunk-not-empty-16 == 'yes' ) }} # <-- HERE: Chunk number. | |
| strategy: | |
| fail-fast: false # let other jobs try to complete if one fails | |
| matrix: ${{ fromJSON(needs.matrix_prep.outputs.images-chunk-json-16) }} # <-- HERE: Chunk number. | |
| name: ${{ matrix.desc || 'Empty I16' }} # <-- HERE: Chunk number. | |
| runs-on: ${{ matrix.runs_on }} | |
| steps: | |
| - name: Install dependencies | |
| run: | | |
| if [ ! -e /usr/bin/mktorrent ]; then | |
| sudo apt-get update | |
| sudo apt-get install -y mktorrent | |
| fi | |
| # cleaning self-hosted runners | |
| - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}" | |
| uses: armbian/actions/runner-clean@main | |
| # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later | |
| - name: Cleanup userpatches repo | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| run: rm -rf userpatches.repo | |
| - name: Checkout build repo | |
| uses: actions/checkout@v6 | |
| with: | |
| repository: ${{ env.BUILD_REPOSITORY }} | |
| ref: ${{ needs.matrix_prep.outputs.build-sha1 }} | |
| fetch-depth: ${{ matrix.fdepth }} | |
| clean: false # true is default. it *will* delete the host's /dev if mounted inside. | |
| # clone the userpatches repo (`armbian/os`) | |
| - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}" | |
| uses: actions/checkout@v6 | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| with: | |
| repository: ${{ env.USERPATCHES_REPOSITORY }} | |
| ref: ${{ env.USERPATCHES_REF }} | |
| fetch-depth: ${{ matrix.fdepth }} | |
| clean: false # true is default. | |
| path: userpatches.repo | |
| - name: "Put userpatches in place, and remove userpatches repo" | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| run: | | |
| mkdir -pv userpatches | |
| rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/ | |
| rm -rf userpatches.repo | |
| - name: "Cleanup leftover output" | |
| run: | | |
| rm -f userpatches/VERSION | |
| - name: ${{matrix.desc}} | |
| id: build-one-image | |
| timeout-minutes: 90 | |
| run: | | |
| # calculate loop from runner name | |
| if [ -z "${ImageOS}" ]; then | |
| USE_FIXED_LOOP_DEVICE=$(echo ${RUNNER_NAME} | rev | cut -d"-" -f1 | rev | sed 's/^0*//' | sed -e 's/^/\/dev\/loop/') | |
| fi | |
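| # Illustrative: a self-hosted runner named "worker-07" (hypothetical) yields /dev/loop7; on GitHub-hosted runners ImageOS is set and the variable stays empty. | |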
| bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" USE_FIXED_LOOP_DEVICE="$USE_FIXED_LOOP_DEVICE" SHARE_LOG=yes MAKE_FOLDERS="archive" IMAGE_VERSION=${{ needs.matrix_prep.outputs.version }} ${{env.EXTRA_PARAMS_IMAGE}} ${{env.EXTRA_PARAMS_ALL_BUILDS}} | |
| - name: "Logs: ${{ steps.build-one-image.outputs.logs_url }}" | |
| if: always() | |
| run: | | |
| echo "Logs: ${{ steps.build-one-image.outputs.logs_url }}" | |
| - name: Install SSH key | |
| uses: shimataro/ssh-key-action@v2 | |
| with: | |
| key: ${{ secrets.KEY_UPLOAD }} | |
| known_hosts: ${{ secrets.KNOWN_HOSTS_ARMBIAN_UPLOAD }} | |
| if_key_exists: replace | |
| - name: Check API rate limits | |
| run: | | |
| # install dependencies | |
| if ! command -v "gh" > /dev/null 2>&1; then | |
| sudo apt-get -y -qq install gh | |
| fi | |
| while true | |
| do | |
| RATE_JSON=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit) | |
| API_CALLS_TOTAL=$(jq -r '.rate.limit' <<<"$RATE_JSON") | |
| API_CALLS_LEFT=$(jq -r '.rate.remaining' <<<"$RATE_JSON") | |
| PERCENT=$(( API_CALLS_LEFT * 100 / API_CALLS_TOTAL )) | |
| if (( PERCENT > 20 )); then | |
| echo "API rate limit in good shape: ${PERCENT}% remaining" | |
| break | |
| fi | |
| echo "API rate limit below 20%, sleeping 10 minutes" | |
| sleep 10m | |
| done | |
| # show current api rate | |
| curl -s -H "Accept: application/vnd.github.v3+json" -H "Authorization: token ${{ secrets.ACCESS_TOKEN }}" https://api.github.com/rate_limit | |
| - name: Import GPG key | |
| env: | |
| GPG_KEY1: ${{ secrets.GPG_KEY1 }} | |
| if: env.GPG_KEY1 != null | |
| uses: crazy-max/ghaction-import-gpg@v6 | |
| with: | |
| gpg_private_key: ${{ secrets.GPG_KEY1 }} | |
| passphrase: ${{ secrets.GPG_PASSPHRASE1 }} | |
| - name: Sign | |
| env: | |
| GPG_PASSPHRASE1: ${{ secrets.GPG_PASSPHRASE1 }} | |
| if: env.GPG_PASSPHRASE1 != null | |
| run: | | |
| for extension in zip xz qcow2; do | |
| if ls -l output/images/*/archive/*.$extension &>/dev/null; then | |
| echo ${{ secrets.GPG_PASSPHRASE1 }} | gpg --passphrase-fd 0 --armor --detach-sign --pinentry-mode loopback --batch --yes output/images/*/archive/*.$extension | |
| fi | |
| done | |
| # Download the artifacts (output/info) produced by the prepare-matrix job. | |
| - name: Download artifacts | |
| uses: actions/download-artifact@v6 | |
| with: | |
| name: build-info-json | |
| path: output/info | |
| - name: Generate torrent | |
| timeout-minutes: 3 | |
| run: | | |
| set -euo pipefail | |
| # Build tracker list (ignore empty/whitespace-only lines) | |
| TRACKERS=$( | |
| grep -v '^[[:space:]]*$' output/info/best-torrent-servers.txt \ | |
| | sort -R \ | |
| | sed 's/^/ --announce=/' | |
| ) | |
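| # TRACKERS now holds one "--announce=<url>" per tracker; it is deliberately left unquoted in the mktorrent call below so each entry expands to a separate argument. | |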
| # Find BOARD and FILE (first zip/xz/qcow2 in output/images/*/archive/) | |
| BOARD="" | |
| FILE="" | |
| first_match="" | |
| for ext in zip xz qcow2; do | |
| if ls output/images/*/archive/*."$ext" >/dev/null 2>&1; then | |
| first_match=$(ls -1 output/images/*/archive/*."$ext" | head -n1) | |
| # first_match = output/images/BOARD/archive/file.ext | |
| BOARD=$(basename "$(dirname "$(dirname "$first_match")")") # -> BOARD | |
| FILE=$(basename "$first_match") | |
| break | |
| fi | |
| done | |
| # Safety check | |
| if [ -z "$BOARD" ] || [ -z "$FILE" ]; then | |
| echo "No torrent source file found (zip/xz/qcow2) in output/images/*/archive" >&2 | |
| exit 1 | |
| fi | |
| # Nightly / stable logic (templated) | |
| RELEASE_REPOSITORY="${{ env.RELEASE_REPOSITORY }}" | |
| nightlybuild="${{ github.event.inputs.nightlybuild }}" | |
| nightlybuild_default="yes"  # plain value; with extra quotes ("'yes'") the comparisons below would never match | |
| effective_nightlybuild="${nightlybuild:-$nightlybuild_default}" | |
| WEBSEEDS="" | |
| if [ "$effective_nightlybuild" = "no" ] && [ "$RELEASE_REPOSITORY" != "distribution" ]; then | |
| ################################################################## | |
| # STABLE RELEASES | |
| # Use download mirrors (servers-download.jq) | |
| # URL: https://<host><download_path_images>/<BOARD>/archive/<FILE> | |
| # download_path_images is already normalized & defaults to /dl in JSON. | |
| ################################################################## | |
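| # e.g. https://mirror.example.org/dl/<BOARD>/archive/<FILE>  (illustrative only; hypothetical host) | |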
| WEBSEEDS=$( | |
| jq -r --arg board "$BOARD" --arg file "$FILE" ' | |
| .[] | |
| | "https://\(.host)\(.download_path_images)/\($board)/archive/\($file)" | |
| ' output/info/servers-download.jq | paste -sd, - | |
| ) | |
| else | |
| ################################################################## | |
| # NIGHTLY BUILDS | |
| # Use cache mirrors (servers-cache.jq) + GitHub as extra webseed | |
| # Path is FIXED: https://SERVER/cache/os/<version>/<FILE> | |
| ################################################################## | |
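| # e.g. https://cache.example.org/cache/os/25.02.0-trunk.123/<FILE>  (illustrative only; hypothetical host and version) | |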
| WEBSEEDS=$( | |
| jq -r \ | |
| --arg repo "os" \ | |
| --arg ver "${{ needs.matrix_prep.outputs.version }}" \ | |
| --arg file "$FILE" ' | |
| .[] | |
| | "https://\(.host)/cache/\($repo)/\($ver)/\($file)" | |
| ' output/info/servers-cache.jq | paste -sd, - | |
| ) | |
| # Append GitHub webseed | |
| if [ -n "$WEBSEEDS" ]; then | |
| WEBSEEDS+="," | |
| fi | |
| WEBSEEDS+="https://github.com/armbian/os/releases/download/${{ needs.matrix_prep.outputs.version }}/${FILE}" | |
| fi | |
| echo "WEBSEEDS: $WEBSEEDS" | |
| # Go to the archive directory that contains FILE | |
| cd "$(dirname "$first_match")" || exit 1 | |
| mktorrent \ | |
| --comment="Armbian torrent for ${FILE}" \ | |
| --verbose \ | |
| ${TRACKERS} \ | |
| --web-seed="${WEBSEEDS}" \ | |
| "${FILE}" | |
| # drop .txt helper files | |
| rm -f *.txt | |
| - name: "Prepare release artifacts (exclude .asc, .sha, .torrent)" | |
| run: | | |
| # Start from a clean directory | |
| rm -rf output/release | |
| mkdir -p output/release | |
| # Copy wanted artifacts from output/images, preserving folder structure | |
| # e.g. output/images/BOARDNAME/archive/Armbian_*.img -> output/release/output/images/BOARDNAME/archive/... | |
| find output/images -type f -name 'Armbian_*.*' \ | |
| ! -name '*.asc' \ | |
| ! -name '*.sha' \ | |
| ! -name '*.torrent' \ | |
| -exec cp --parents {} output/release/ \; | |
| # debug | |
| echo "# output/images:" | |
| tree output/images | |
| echo "# output/release:" | |
| tree output/release | |
| - name: "Upload artefacts except .asc, .sha and .torrent" | |
| timeout-minutes: 60 | |
| if: ${{ ( github.event.inputs.nightlybuild || 'yes' ) == 'yes' || env.RELEASE_REPOSITORY == 'community' || env.RELEASE_REPOSITORY == 'distribution' }} | |
| uses: ncipollo/release-action@v1 | |
| with: | |
| repo: "${{ env.RELEASE_REPOSITORY }}" | |
| tag: "${{ needs.matrix_prep.outputs.version }}" | |
| artifacts: "output/release/output/images/*/*/Armbian_*.*" | |
| omitBody: true | |
| replacesArtifacts: true | |
| omitName: true | |
| makeLatest: false | |
| omitPrereleaseDuringUpdate: true | |
| allowUpdates: true | |
| artifactErrorsFailBuild: true | |
| token: "${{ env.GH_TOKEN }}" | |
| - name: "Upload to servers" | |
| run: | | |
| # debug | |
| echo "=== servers-cache.jq ===" | |
| jq . output/info/servers-cache.jq || cat output/info/servers-cache.jq | |
| echo "=== servers-upload.jq ===" | |
| jq . output/info/servers-upload.jq || cat output/info/servers-upload.jq | |
| max_retries=3 | |
| sync_from_json() { | |
| local json_file=$1 | |
| local mode=$2 # "cache" or "upload" | |
| echo "== Processing ${json_file} (mode: ${mode}) ==" | |
| # Iterate over JSON array elements | |
| while IFS= read -r server; do | |
| # JSON structure (same for all files): | |
| # { | |
| # "host": "...", | |
| # "upload_path": "...", | |
| # "download_path_archive": "...", | |
| # "download_path_images": "...", | |
| # "download_path_debs": "...", | |
| # "port": 22, | |
| # "username": "mirror" | |
| # } | |
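| # Example element (illustrative, hypothetical values): | |
| #   {"host":"mirror.example.org","upload_path":"/storage","port":22,"username":"mirror"} | |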
| SERVER_URL=$(jq -r '.host // empty' <<<"$server") | |
| SERVER_PATH=$(jq -r '.upload_path // ""' <<<"$server") | |
| SERVER_PORT=$(jq -r '.port // 22' <<<"$server") | |
| SERVER_USERNAME=$(jq -r '.username // "mirror"' <<<"$server") | |
| # skip empty host | |
| [ -z "$SERVER_URL" ] && continue | |
| echo "Processing: $SERVER_URL:$SERVER_PORT (upload_path: $SERVER_PATH)" | |
| # Clean known_hosts entry (host:port form) | |
| ssh-keygen -f "${HOME}/.ssh/known_hosts" -R "[${SERVER_URL}]:${SERVER_PORT}" 2>/dev/null || true | |
| # Select rsync filters + remote subdir | |
| if [ "$mode" = "cache" ]; then | |
| # only .sha, .asc and .torrent files | |
| RSYNC_FILTER=( | |
| --include='*/' | |
| --include='*.sha' | |
| --include='*.asc' | |
| --include='*.torrent' | |
| --exclude='*' | |
| ) | |
| REMOTE_SUBDIR="cache/artifacts/" | |
| else | |
| # everything | |
| RSYNC_FILTER=( | |
| --include='*/' | |
| --include='*' | |
| ) | |
| REMOTE_SUBDIR="incoming/${GITHUB_ACTOR}/" | |
| fi | |
| # Retry loop | |
| for attempt in $(seq 1 "$max_retries"); do | |
| echo "[$SERVER_URL] rsync attempt ${attempt}/${max_retries}..." | |
| if rsync --progress \ | |
| -e "ssh -p ${SERVER_PORT} -o StrictHostKeyChecking=accept-new" \ | |
| -rvP \ | |
| "${RSYNC_FILTER[@]}" \ | |
| output/images/ \ | |
| "${SERVER_USERNAME}@${SERVER_URL}:${SERVER_PATH}/${REMOTE_SUBDIR}" | |
| then | |
| echo "[$SERVER_URL] rsync successful." | |
| break | |
| fi | |
| if [ "$attempt" -eq "$max_retries" ]; then | |
| echo "[$SERVER_URL] rsync FAILED after ${max_retries} attempts." | |
| exit 1 | |
| fi | |
| echo "[$SERVER_URL] rsync failed. Retrying in 10 seconds..." | |
| sleep 10 | |
| done | |
| done < <(jq -c '.[]' "$json_file") | |
| } | |
| nightlybuild="${{ github.event.inputs.nightlybuild }}" | |
| nightlybuild_default="yes"  # plain value; with extra quotes ("'yes'") the comparisons below would never match | |
| RELEASE_REPOSITORY="${{ env.RELEASE_REPOSITORY }}" | |
| effective_nightlybuild="${nightlybuild:-$nightlybuild_default}" | |
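| # ${nightlybuild:-$nightlybuild_default} keeps the dispatch input when set and falls back to the default on scheduled runs, mirroring the "|| 'yes'" expressions in the step conditions. | |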
| if [ "$effective_nightlybuild" = "yes" ] || \ | |
| [ "$RELEASE_REPOSITORY" = "community" ] || \ | |
| [ "$RELEASE_REPOSITORY" = "distribution" ]; then | |
| # Upload to cache servers: only .sha/.asc/.torrent | |
| sync_from_json output/info/servers-cache.jq cache | |
| fi | |
| if [ "$effective_nightlybuild" = "no" ] && \ | |
| [ "$RELEASE_REPOSITORY" = "os" ]; then | |
| # Upload to release servers: everything under output/images (no filter) | |
| sync_from_json output/info/servers-upload.jq upload | |
| fi | |
| # cleaning self-hosted runners | |
| - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}" | |
| if: always() | |
| uses: armbian/actions/runner-clean@main | |
| # template file: 750.single_repo.yaml | |
| # ------ publish packages to repository ------- | |
| publish-debs-to-repo: | |
| name: "Download artifacts from ORAS cache" | |
| runs-on: [ repository ] | |
| if: ${{ !failure() && !cancelled() && github.event.inputs.targetsFilterInclude == '' && inputs.ref == '' }} # eg: run if dependencies worked. See https://github.com/orgs/community/discussions/45058#discussioncomment-4817378 | |
| needs: [ "matrix_prep", "all-artifacts-ready" ] | |
| steps: | |
| - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}" | |
| uses: armbian/actions/runner-clean@main | |
| # Prepare dependencies. | |
| - name: Install dependencies | |
| run: | | |
| missing=() | |
| [ -x /usr/bin/gpg ] || missing+=("gnupg2") | |
| [ -x /usr/bin/reprepro ] || missing+=("reprepro") | |
| [ -x /usr/bin/lftp ] || missing+=("lftp") | |
| if [ ${#missing[@]} -gt 0 ]; then | |
| echo "Installing missing packages: ${missing[*]}" | |
| sudo apt-get update | |
| sudo apt-get install -y "${missing[@]}" | |
| else | |
| echo "All required packages already installed." | |
| fi | |
| # Login to ghcr.io, for later uploading rootfs to ghcr.io | |
| - name: Docker Login to GitHub Container Registry | |
| uses: docker/login-action@v3 | |
| with: | |
| registry: ghcr.io | |
| username: "${{ github.repository_owner }}" # GitHub username or org | |
| password: "${{ secrets.GITHUB_TOKEN }}" # GitHub actions builtin token. repo has to have pkg access. | |
| # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later | |
| - name: Cleanup userpatches repo | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| run: rm -rf userpatches.repo | |
| - name: Checkout build repo | |
| uses: actions/checkout@v6 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners. | |
| with: | |
| repository: ${{ env.BUILD_REPOSITORY }} | |
| ref: ${{ needs.matrix_prep.outputs.build-sha1 }} | |
| fetch-depth: 0 | |
| clean: false # true is default. it *will* delete the host's /dev if mounted inside. | |
| # clone the userpatches repo (`armbian/os`) | |
| - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}" | |
| uses: actions/checkout@v6 | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| with: | |
| repository: ${{ env.USERPATCHES_REPOSITORY }} | |
| ref: ${{ env.USERPATCHES_REF }} | |
| fetch-depth: 0 | |
| clean: false # true is default. | |
| path: userpatches.repo | |
| - name: "Put userpatches in place, and remove userpatches repo" | |
| if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
| run: | | |
| mkdir -pv userpatches | |
| rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/ | |
| #rm -rf userpatches.repo | |
| # Clean off output/info, if any | |
| # Clean off debs and debs-beta | |
| - name: Cleanup output/info | |
| run: | | |
| rm -rfv output/info output/debs output/debs-beta | |
| mkdir -pv output | |
| # Download the artifacts (output/info) produced by the prepare-matrix job. | |
| - name: Download artifacts | |
| uses: actions/download-artifact@v6 | |
| with: | |
| name: build-info-json | |
| path: output/info | |
| # List the artifacts we downloaded | |
| - name: List artifacts | |
| run: | | |
| ls -laht output/info | |
| - name: Download the debs | |
| id: download-debs | |
| run: | | |
| bash ./compile.sh debs-to-repo-download REVISION="${{ needs.matrix_prep.outputs.version }}" BETA=${{ github.event.inputs.nightlybuild || 'yes' }} SHARE_LOG=yes ${{env.EXTRA_PARAMS_ALL_BUILDS}} | |
| - name: Import GPG key | |
| uses: crazy-max/ghaction-import-gpg@v6 | |
| with: | |
| gpg_private_key: ${{ secrets.GPG_KEY1 }} | |
| passphrase: ${{ secrets.GPG_PASSPHRASE1 }} | |
| - name: Install SSH key | |
| uses: shimataro/ssh-key-action@v2 | |
| with: | |
| key: ${{ secrets.KEY_UPLOAD }} | |
| known_hosts: ${{ secrets.KNOWN_HOSTS_ARMBIAN_UPLOAD }} | |
| if_key_exists: replace | |
| - name: "Fix permissions and sync artifacts" | |
| run: | | |
| set -euo pipefail | |
| echo "Fix permissions on output/" | |
| sudo chown -R "$USER:$USER" output/. | |
| # Decide target subfolder (skipImages defaults to "no" when the input is unset) | |
| TARGET="" | |
| skipImages="${{ github.event.inputs.skipImages }}" | |
| if [ "${skipImages:-no}" = "no" ]; then | |
| echo "Save to user's partial folder" | |
| TARGET="partial/${{ github.actor }}/" | |
| fi | |
| if [ "$GITHUB_WORKFLOW" = "Build All Stable Artifacts (cronjob)" ]; then | |
| echo "Nightly workflow detected" | |
| TARGET="stable-daily/" | |
| fi | |
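| # Resulting TARGET (illustrative): "partial/<actor>/" for manual runs, "stable-daily/" for the stable cronjob, or empty when images are skipped. | |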
| echo "Fix permissions on /incoming" | |
| sudo chown -R "${{ secrets.HOST_UPLOAD_USER }}:${{ secrets.HOST_UPLOAD_USER }}" /incoming/. | |
| echo "Sync all parts (only debs***)" | |
| rsync -e "ssh -p ${{ secrets.HOST_UPLOAD_PORT }} -o StrictHostKeyChecking=accept-new" \ | |
| -arvc \ | |
| --include='debs***' \ | |
| --exclude='*' \ | |
| --remove-source-files \ | |
| --delete \ | |
| output/ \ | |
| "${{ secrets.HOST_UPLOAD_USER }}@${{ secrets.HOST_UPLOAD }}:storage/${TARGET}" | |
| echo "Clean up empty directories in output/" | |
| find output/. -type d -empty -delete | |
| - name: "Run repository update action" | |
| if: ${{ (github.event.inputs.skipImages || 'no') == 'yes' }} | |
| uses: peter-evans/repository-dispatch@v4 | |
| with: | |
| token: ${{ secrets.DISPATCH }} | |
| repository: armbian/os | |
| event-type: "Repository update" | |
| - name: "Logs debs-to-repo-download: ${{ steps.download-debs.outputs.logs_url }}" | |
| run: | | |
| echo "Logs debs-to-repo-download: ${{ steps.download-debs.outputs.logs_url }}" | |
| outputs: | |
| # not related to matrix | |
| version: ${{ needs.matrix_prep.outputs.version }} | |
| # template file: 950.single_footer.yaml | |
| # ------ aggregate all artifact chunks into a single dependency ------- | |
| closing: | |
| name: "Footer" | |
| runs-on: ubuntu-latest | |
| if: ${{ !failure() && !cancelled() && inputs.ref == '' && (github.event.inputs.nightlybuild || 'yes') == 'yes' }} | |
| needs: [ "matrix_prep", "all-artifacts-ready", "all-images-ready" ] | |
| steps: | |
| # Download workflow artifacts | |
| - name: "Download all workflow run artifacts" | |
| if: ${{ (github.event.inputs.skipImages || 'no') != 'yes' }} | |
| uses: actions/download-artifact@v6 | |
| with: | |
| name: assets-for-download-nightly | |
| path: downloads | |
| # Read version | |
| - name: "Read version" | |
| run: | | |
| echo "version=$(cat downloads/version 2>/dev/null || true)" >> $GITHUB_ENV | |
| # Delete artifacts | |
| - uses: geekyeggo/delete-artifact@v5 | |
| with: | |
| name: assets-for-download-nightly | |
| failOnError: false | |
| # Cleaning logs | |
| - name: "Keep only 30 days of workflow logs" | |
| uses: igorjs/gh-actions-clean-workflow@v7 | |
| with: | |
| token: "${{ env.GH_TOKEN }}" | |
| runs_older_than: 30 # optional | |
| runs_to_keep: 0 # optional | |
| # Switch pre-release to release | |
| - uses: ncipollo/release-action@v1 | |
| if: ${{ (github.event.inputs.skipImages || 'no') != 'yes' && (github.event.inputs.nightlybuild || 'yes') == 'yes' }} | |
| with: | |
| repo: "${{ env.RELEASE_REPOSITORY }}" | |
| tag: "${{ env.version }}" | |
| omitBody: true | |
| omitName: true | |
| allowUpdates: true | |
| makeLatest: true | |
| token: "${{ env.GH_TOKEN }}" | |
| # Run repository mirroring to CDN | |
| - name: "Run repository mirroring to CDN" | |
| if: ${{ (github.event.inputs.skipImages || 'no') == 'no' }} | |
| uses: peter-evans/repository-dispatch@v4 | |
| with: | |
| token: ${{ secrets.DISPATCH }} | |
| repository: armbian/armbian.github.io | |
| event-type: "Mirror" | |
| client-payload: '{"pull_repository": "armbian/${{ env.RELEASE_REPOSITORY }}", "cdn_tag": "${{ env.RELEASE_REPOSITORY }}"}' | |
| # Run webindex update action | |
| - name: "Run webindex update action" | |
| if: ${{ (github.event.inputs.skipImages || 'no') == 'no' }} | |
| uses: peter-evans/repository-dispatch@v4 | |
| with: | |
| token: ${{ secrets.DISPATCH }} | |
| repository: armbian/armbian.github.io | |
| event-type: "Webindex update" |