From 94355f09c718b0dea7fa1ae077df80b2ddcf7227 Mon Sep 17 00:00:00 2001
From: yuweiyuan8 <871282669@qq.com>
Date: Tue, 6 Feb 2024 08:48:22 +0800
Subject: [PATCH 01/10] add redmi k30 pro
---
.../sm8250-6.7/0031-add-lmi-csot-panel.patch | 467 +
.../0032-add-lmi-touchscreen-driver.patch | 38018 ++++++++++++++++
.../sm8250-6.7/dt/sm8250-xiaomi-lmi.dts | 977 +
3 files changed, 39462 insertions(+)
create mode 100644 patch/kernel/archive/sm8250-6.7/0031-add-lmi-csot-panel.patch
create mode 100644 patch/kernel/archive/sm8250-6.7/0032-add-lmi-touchscreen-driver.patch
create mode 100644 patch/kernel/archive/sm8250-6.7/dt/sm8250-xiaomi-lmi.dts
diff --git a/patch/kernel/archive/sm8250-6.7/0031-add-lmi-csot-panel.patch b/patch/kernel/archive/sm8250-6.7/0031-add-lmi-csot-panel.patch
new file mode 100644
index 000000000000..571373652a1d
--- /dev/null
+++ b/patch/kernel/archive/sm8250-6.7/0031-add-lmi-csot-panel.patch
@@ -0,0 +1,467 @@
+From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
+From: FantasyGmm <16450052+FantasyGmm@users.noreply.github.com>
+Date: Mon, 29 Jan 2024 19:05:12 +0800
+Subject: Patching kernel arm64 files drivers/gpu/drm/panel/Kconfig
+ drivers/gpu/drm/panel/Makefile drivers/gpu/drm/panel/panel-xiaomi-lmi-csot.c
+
+Signed-off-by: FantasyGmm <16450052+FantasyGmm@users.noreply.github.com>
+---
+ drivers/gpu/drm/panel/Kconfig | 9 +
+ drivers/gpu/drm/panel/Makefile | 1 +
+ drivers/gpu/drm/panel/panel-xiaomi-lmi-csot.c | 416 ++++++++++
+ 3 files changed, 426 insertions(+)
+
+diff --git a/drivers/gpu/drm/panel/Kconfig b/drivers/gpu/drm/panel/Kconfig
+index 99e14dc212ec..2ab846e5cfaf 100644
+--- a/drivers/gpu/drm/panel/Kconfig
++++ b/drivers/gpu/drm/panel/Kconfig
+@@ -8,6 +8,15 @@ config DRM_PANEL
+ menu "Display Panels"
+ depends on DRM && DRM_PANEL
+
++config DRM_PANEL_XIAOMI_LMI_CSOT
++ tristate "Redmi k30 pro (lmi) 1080x2340 CSOT AMOLED panel"
++ depends on OF
++ depends on DRM_MIPI_DSI
++ depends on BACKLIGHT_CLASS_DEVICE
++ help
++ Say Y here if you want to enable support for the CSOT 1080x2340
++ dsc cmd mode panel as found in Xiaomi Mi 10 devices.
++
+ config DRM_PANEL_ABT_Y030XX067A
+ tristate "ABT Y030XX067A 320x480 LCD panel"
+ depends on OF && SPI
+diff --git a/drivers/gpu/drm/panel/Makefile b/drivers/gpu/drm/panel/Makefile
+index d10c3de51c6d..687a239df8b1 100644
+--- a/drivers/gpu/drm/panel/Makefile
++++ b/drivers/gpu/drm/panel/Makefile
+@@ -88,3 +88,4 @@ obj-$(CONFIG_DRM_PANEL_VISIONOX_VTDR6130) += panel-visionox-vtdr6130.o
+ obj-$(CONFIG_DRM_PANEL_VISIONOX_R66451) += panel-visionox-r66451.o
+ obj-$(CONFIG_DRM_PANEL_WIDECHIPS_WS2401) += panel-widechips-ws2401.o
+ obj-$(CONFIG_DRM_PANEL_XINPENG_XPP055C272) += panel-xinpeng-xpp055c272.o
++obj-$(CONFIG_DRM_PANEL_XIAOMI_LMI_CSOT) += panel-xiaomi-lmi-csot.o
+diff --git a/drivers/gpu/drm/panel/panel-xiaomi-lmi-csot.c b/drivers/gpu/drm/panel/panel-xiaomi-lmi-csot.c
+new file mode 100644
+index 000000000000..5e1d3ec9c08d
+--- /dev/null
++++ b/drivers/gpu/drm/panel/panel-xiaomi-lmi-csot.c
+@@ -0,0 +1,416 @@
++// SPDX-License-Identifier: GPL-2.0-only
++// Copyright (c) 2024 FIXME
++// Generated with linux-mdss-dsi-panel-driver-generator from vendor device tree:
++// Copyright (c) 2013, The Linux Foundation. All rights reserved. (FIXME)
++
++#include
++#include
++#include
++#include
++#include
++
++#include
" > body.html
+
+ - uses: ncipollo/release-action@v1
+ if: ${{ (github.event.inputs.nightlybuild || 'yes') == 'yes' && (github.event.inputs.skipImages || 'yes') != 'yes' }}
+ with:
+ repo: "${{ env.RELEASE_REPOSITORY }}"
+ tag: "${{ env.VERSION_OVERRIDE }}"
+ name: "${{ env.VERSION_OVERRIDE }}"
+ bodyFile: "body.html"
+ prerelease: "true"
+ allowUpdates: true
+ removeArtifacts: true
+ token: ${{ env.GH_TOKEN }}
+
+ - name: Save
+ id: releases
+ run: |
+
+ echo "version=${{ env.VERSION_OVERRIDE }}" >> $GITHUB_OUTPUT
+
+ outputs:
+
+ # not related to matrix
+ version: ${{ steps.releases.outputs.version }}
+
+ matrix_prep:
+ name: "JSON matrix: 17/16 :: 17 artifact chunks, 16 image chunks"
+ if: ${{ github.repository_owner == 'armbian' }}
+ needs: [ version_prep ]
+ runs-on: [ "self-hosted", "Linux", 'alfa' ]
+ steps:
+
+ # Cleaning self hosted runners
+ - name: Runner clean
+ uses: armbian/actions/runner-clean@main
+
+ # clone the build system repo (`armbian/build`)
+ - name: Checkout build repo
+ uses: actions/checkout@v4
+ with:
+ repository: ${{ env.BUILD_REPOSITORY }}
+ ref: ${{ env.BUILD_REF }}
+ fetch-depth: 0
+ clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
+ path: build
+
+ # clone the userpatches repo (`armbian/os`)
+ - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
+ uses: actions/checkout@v4
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ with:
+ repository: ${{ env.USERPATCHES_REPOSITORY }}
+ ref: ${{ env.USERPATCHES_REF }}
+ fetch-depth: 0
+ clean: false # true is default.
+ path: userpatches
+
+ - name: "grab the sha1 of the latest commit of the build repo ${{ env.BUILD_REPOSITORY }}#${{ env.BUILD_REF }}"
+ id: latest-commit
+ run: |
+ cd build
+ echo "sha1=$(git rev-parse HEAD)" >> $GITHUB_OUTPUT
+ cd ..
+
+ - name: "Put userpatches in place, and remove userpatches repo"
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: |
+
+ mkdir -pv build/userpatches
+ rsync -av userpatches/${{env.USERPATCHES_DIR}}/. build/userpatches/
+
+ - name: GitHub cache
+ id: cache-restore
+ uses: actions/cache@v4
+ with:
+ path: |
+ cache/memoize
+ cache/oci/positive
+ key: ${{ runner.os }}-matrix-cache-${{ github.sha }}-${{ steps.latest-commit.outputs.sha1 }}"
+ restore-keys: |
+ ${{ runner.os }}-matrix-cache-
+
+ # Login to ghcr.io, we're gonna do a lot of OCI lookups.
+ - name: Docker Login to GitHub Container Registry
+ uses: docker/login-action@v3
+ with:
+ registry: ghcr.io
+ username: "${{ github.repository_owner }}" # GitHub username or org
+ password: ${{ secrets.GITHUB_TOKEN }} # GitHub actions builtin token. repo has to have pkg access.
+
+ - name: Prepare Info JSON and Matrices
+ id: prepare-matrix
+ run: |
+
+ FILTERS="${{ github.event.inputs.targetsFilterInclude }}"
+
+ if [ -z "${FILTERS}" ] && [ "${{ github.event.inputs.board }}" != "all" ] && [ -n "${{ github.event.inputs.board }}" ]; then
+ FILTERS='"BOARD:${{ github.event.inputs.board }}"'
+ fi
+
+ if [ -z "${FILTERS}" ] && [ "${{ github.event.inputs.maintainer }}" != "all" ] && [ -n "${{ github.event.inputs.board }}" ]; then
+ FILTERS='"BOARD_MAINTAINERS:${{ github.event.inputs.maintainer }}"'
+ fi
+
+ # this sets outputs "artifact-matrix" #and "image-matrix"
+ cd build
+ bash ./compile.sh gha-matrix armbian-images \
+ REVISION="${{ needs.version_prep.outputs.version }}" \
+ TARGETS_FILTER_INCLUDE="${FILTERS}" \
+ BETA=${{ github.event.inputs.nightlybuild || 'yes' }} \
+ CLEAN_INFO=yes \
+ CLEAN_MATRIX=yes \
+ MATRIX_ARTIFACT_CHUNKS=17 \
+ MATRIX_IMAGE_CHUNKS=16 \
+ CHECK_OCI=${{ github.event.inputs.checkOci || 'yes' }} \
+ TARGETS_FILENAME="targets-all-not-eos.yaml" \
+ SKIP_IMAGES=${{ github.event.inputs.skipImages || 'yes'}} \
+ ${{env.EXTRA_PARAMS_ALL_BUILDS}} SHARE_LOG=yes # IMAGES_ONLY_OUTDATED_ARTIFACTS=yes
+
+ - name: "Logs: ${{ steps.prepare-matrix.outputs.logs_url }}"
+ if: always()
+ run: |
+ echo "Logs: ${{ steps.prepare-matrix.outputs.logs_url }}"
+
+ # Store output/info folder in a GitHub Actions artifact
+ - uses: actions/upload-artifact@v4
+ name: Upload output/info as GitHub Artifact
+ with:
+ name: build-info-json
+ path: build/output/info
+
+ - name: chown cache memoize/oci back to normal user
+ run: sudo chown -R $USER:$USER build/cache/memoize build/cache/oci/positive
+
+ outputs:
+
+ # not related to matrix
+ build-sha1: ${{ steps.latest-commit.outputs.sha1 }}
+ version: ${{ needs.version_prep.outputs.version }}
+
+# template file: 150.per-chunk-artifacts_prep-outputs.yaml
+
+ # artifacts-1 of 17
+ artifacts-chunk-json-1: ${{ steps.prepare-matrix.outputs.artifacts-chunk-json-1 }}
+ artifacts-chunk-not-empty-1: ${{ steps.prepare-matrix.outputs.artifacts-chunk-not-empty-1 }}
+ artifacts-chunk-size-1: ${{ steps.prepare-matrix.outputs.artifacts-chunk-size-1 }}
+ # artifacts-2 of 17
+ artifacts-chunk-json-2: ${{ steps.prepare-matrix.outputs.artifacts-chunk-json-2 }}
+ artifacts-chunk-not-empty-2: ${{ steps.prepare-matrix.outputs.artifacts-chunk-not-empty-2 }}
+ artifacts-chunk-size-2: ${{ steps.prepare-matrix.outputs.artifacts-chunk-size-2 }}
+ # artifacts-3 of 17
+ artifacts-chunk-json-3: ${{ steps.prepare-matrix.outputs.artifacts-chunk-json-3 }}
+ artifacts-chunk-not-empty-3: ${{ steps.prepare-matrix.outputs.artifacts-chunk-not-empty-3 }}
+ artifacts-chunk-size-3: ${{ steps.prepare-matrix.outputs.artifacts-chunk-size-3 }}
+ # artifacts-4 of 17
+ artifacts-chunk-json-4: ${{ steps.prepare-matrix.outputs.artifacts-chunk-json-4 }}
+ artifacts-chunk-not-empty-4: ${{ steps.prepare-matrix.outputs.artifacts-chunk-not-empty-4 }}
+ artifacts-chunk-size-4: ${{ steps.prepare-matrix.outputs.artifacts-chunk-size-4 }}
+ # artifacts-5 of 17
+ artifacts-chunk-json-5: ${{ steps.prepare-matrix.outputs.artifacts-chunk-json-5 }}
+ artifacts-chunk-not-empty-5: ${{ steps.prepare-matrix.outputs.artifacts-chunk-not-empty-5 }}
+ artifacts-chunk-size-5: ${{ steps.prepare-matrix.outputs.artifacts-chunk-size-5 }}
+ # artifacts-6 of 17
+ artifacts-chunk-json-6: ${{ steps.prepare-matrix.outputs.artifacts-chunk-json-6 }}
+ artifacts-chunk-not-empty-6: ${{ steps.prepare-matrix.outputs.artifacts-chunk-not-empty-6 }}
+ artifacts-chunk-size-6: ${{ steps.prepare-matrix.outputs.artifacts-chunk-size-6 }}
+ # artifacts-7 of 17
+ artifacts-chunk-json-7: ${{ steps.prepare-matrix.outputs.artifacts-chunk-json-7 }}
+ artifacts-chunk-not-empty-7: ${{ steps.prepare-matrix.outputs.artifacts-chunk-not-empty-7 }}
+ artifacts-chunk-size-7: ${{ steps.prepare-matrix.outputs.artifacts-chunk-size-7 }}
+ # artifacts-8 of 17
+ artifacts-chunk-json-8: ${{ steps.prepare-matrix.outputs.artifacts-chunk-json-8 }}
+ artifacts-chunk-not-empty-8: ${{ steps.prepare-matrix.outputs.artifacts-chunk-not-empty-8 }}
+ artifacts-chunk-size-8: ${{ steps.prepare-matrix.outputs.artifacts-chunk-size-8 }}
+ # artifacts-9 of 17
+ artifacts-chunk-json-9: ${{ steps.prepare-matrix.outputs.artifacts-chunk-json-9 }}
+ artifacts-chunk-not-empty-9: ${{ steps.prepare-matrix.outputs.artifacts-chunk-not-empty-9 }}
+ artifacts-chunk-size-9: ${{ steps.prepare-matrix.outputs.artifacts-chunk-size-9 }}
+ # artifacts-10 of 17
+ artifacts-chunk-json-10: ${{ steps.prepare-matrix.outputs.artifacts-chunk-json-10 }}
+ artifacts-chunk-not-empty-10: ${{ steps.prepare-matrix.outputs.artifacts-chunk-not-empty-10 }}
+ artifacts-chunk-size-10: ${{ steps.prepare-matrix.outputs.artifacts-chunk-size-10 }}
+ # artifacts-11 of 17
+ artifacts-chunk-json-11: ${{ steps.prepare-matrix.outputs.artifacts-chunk-json-11 }}
+ artifacts-chunk-not-empty-11: ${{ steps.prepare-matrix.outputs.artifacts-chunk-not-empty-11 }}
+ artifacts-chunk-size-11: ${{ steps.prepare-matrix.outputs.artifacts-chunk-size-11 }}
+ # artifacts-12 of 17
+ artifacts-chunk-json-12: ${{ steps.prepare-matrix.outputs.artifacts-chunk-json-12 }}
+ artifacts-chunk-not-empty-12: ${{ steps.prepare-matrix.outputs.artifacts-chunk-not-empty-12 }}
+ artifacts-chunk-size-12: ${{ steps.prepare-matrix.outputs.artifacts-chunk-size-12 }}
+ # artifacts-13 of 17
+ artifacts-chunk-json-13: ${{ steps.prepare-matrix.outputs.artifacts-chunk-json-13 }}
+ artifacts-chunk-not-empty-13: ${{ steps.prepare-matrix.outputs.artifacts-chunk-not-empty-13 }}
+ artifacts-chunk-size-13: ${{ steps.prepare-matrix.outputs.artifacts-chunk-size-13 }}
+ # artifacts-14 of 17
+ artifacts-chunk-json-14: ${{ steps.prepare-matrix.outputs.artifacts-chunk-json-14 }}
+ artifacts-chunk-not-empty-14: ${{ steps.prepare-matrix.outputs.artifacts-chunk-not-empty-14 }}
+ artifacts-chunk-size-14: ${{ steps.prepare-matrix.outputs.artifacts-chunk-size-14 }}
+ # artifacts-15 of 17
+ artifacts-chunk-json-15: ${{ steps.prepare-matrix.outputs.artifacts-chunk-json-15 }}
+ artifacts-chunk-not-empty-15: ${{ steps.prepare-matrix.outputs.artifacts-chunk-not-empty-15 }}
+ artifacts-chunk-size-15: ${{ steps.prepare-matrix.outputs.artifacts-chunk-size-15 }}
+ # artifacts-16 of 17
+ artifacts-chunk-json-16: ${{ steps.prepare-matrix.outputs.artifacts-chunk-json-16 }}
+ artifacts-chunk-not-empty-16: ${{ steps.prepare-matrix.outputs.artifacts-chunk-not-empty-16 }}
+ artifacts-chunk-size-16: ${{ steps.prepare-matrix.outputs.artifacts-chunk-size-16 }}
+ # artifacts-17 of 17
+ artifacts-chunk-json-17: ${{ steps.prepare-matrix.outputs.artifacts-chunk-json-17 }}
+ artifacts-chunk-not-empty-17: ${{ steps.prepare-matrix.outputs.artifacts-chunk-not-empty-17 }}
+ artifacts-chunk-size-17: ${{ steps.prepare-matrix.outputs.artifacts-chunk-size-17 }}
+
+# template file: 151.per-chunk-images_prep-outputs.yaml
+
+ # artifacts-1 of 16
+ images-chunk-json-1: ${{ steps.prepare-matrix.outputs.images-chunk-json-1 }}
+ images-chunk-not-empty-1: ${{ steps.prepare-matrix.outputs.images-chunk-not-empty-1 }}
+ images-chunk-size-1: ${{ steps.prepare-matrix.outputs.images-chunk-size-1 }}
+ # artifacts-2 of 16
+ images-chunk-json-2: ${{ steps.prepare-matrix.outputs.images-chunk-json-2 }}
+ images-chunk-not-empty-2: ${{ steps.prepare-matrix.outputs.images-chunk-not-empty-2 }}
+ images-chunk-size-2: ${{ steps.prepare-matrix.outputs.images-chunk-size-2 }}
+ # artifacts-3 of 16
+ images-chunk-json-3: ${{ steps.prepare-matrix.outputs.images-chunk-json-3 }}
+ images-chunk-not-empty-3: ${{ steps.prepare-matrix.outputs.images-chunk-not-empty-3 }}
+ images-chunk-size-3: ${{ steps.prepare-matrix.outputs.images-chunk-size-3 }}
+ # artifacts-4 of 16
+ images-chunk-json-4: ${{ steps.prepare-matrix.outputs.images-chunk-json-4 }}
+ images-chunk-not-empty-4: ${{ steps.prepare-matrix.outputs.images-chunk-not-empty-4 }}
+ images-chunk-size-4: ${{ steps.prepare-matrix.outputs.images-chunk-size-4 }}
+ # artifacts-5 of 16
+ images-chunk-json-5: ${{ steps.prepare-matrix.outputs.images-chunk-json-5 }}
+ images-chunk-not-empty-5: ${{ steps.prepare-matrix.outputs.images-chunk-not-empty-5 }}
+ images-chunk-size-5: ${{ steps.prepare-matrix.outputs.images-chunk-size-5 }}
+ # artifacts-6 of 16
+ images-chunk-json-6: ${{ steps.prepare-matrix.outputs.images-chunk-json-6 }}
+ images-chunk-not-empty-6: ${{ steps.prepare-matrix.outputs.images-chunk-not-empty-6 }}
+ images-chunk-size-6: ${{ steps.prepare-matrix.outputs.images-chunk-size-6 }}
+ # artifacts-7 of 16
+ images-chunk-json-7: ${{ steps.prepare-matrix.outputs.images-chunk-json-7 }}
+ images-chunk-not-empty-7: ${{ steps.prepare-matrix.outputs.images-chunk-not-empty-7 }}
+ images-chunk-size-7: ${{ steps.prepare-matrix.outputs.images-chunk-size-7 }}
+ # artifacts-8 of 16
+ images-chunk-json-8: ${{ steps.prepare-matrix.outputs.images-chunk-json-8 }}
+ images-chunk-not-empty-8: ${{ steps.prepare-matrix.outputs.images-chunk-not-empty-8 }}
+ images-chunk-size-8: ${{ steps.prepare-matrix.outputs.images-chunk-size-8 }}
+ # artifacts-9 of 16
+ images-chunk-json-9: ${{ steps.prepare-matrix.outputs.images-chunk-json-9 }}
+ images-chunk-not-empty-9: ${{ steps.prepare-matrix.outputs.images-chunk-not-empty-9 }}
+ images-chunk-size-9: ${{ steps.prepare-matrix.outputs.images-chunk-size-9 }}
+ # artifacts-10 of 16
+ images-chunk-json-10: ${{ steps.prepare-matrix.outputs.images-chunk-json-10 }}
+ images-chunk-not-empty-10: ${{ steps.prepare-matrix.outputs.images-chunk-not-empty-10 }}
+ images-chunk-size-10: ${{ steps.prepare-matrix.outputs.images-chunk-size-10 }}
+ # artifacts-11 of 16
+ images-chunk-json-11: ${{ steps.prepare-matrix.outputs.images-chunk-json-11 }}
+ images-chunk-not-empty-11: ${{ steps.prepare-matrix.outputs.images-chunk-not-empty-11 }}
+ images-chunk-size-11: ${{ steps.prepare-matrix.outputs.images-chunk-size-11 }}
+ # artifacts-12 of 16
+ images-chunk-json-12: ${{ steps.prepare-matrix.outputs.images-chunk-json-12 }}
+ images-chunk-not-empty-12: ${{ steps.prepare-matrix.outputs.images-chunk-not-empty-12 }}
+ images-chunk-size-12: ${{ steps.prepare-matrix.outputs.images-chunk-size-12 }}
+ # artifacts-13 of 16
+ images-chunk-json-13: ${{ steps.prepare-matrix.outputs.images-chunk-json-13 }}
+ images-chunk-not-empty-13: ${{ steps.prepare-matrix.outputs.images-chunk-not-empty-13 }}
+ images-chunk-size-13: ${{ steps.prepare-matrix.outputs.images-chunk-size-13 }}
+ # artifacts-14 of 16
+ images-chunk-json-14: ${{ steps.prepare-matrix.outputs.images-chunk-json-14 }}
+ images-chunk-not-empty-14: ${{ steps.prepare-matrix.outputs.images-chunk-not-empty-14 }}
+ images-chunk-size-14: ${{ steps.prepare-matrix.outputs.images-chunk-size-14 }}
+ # artifacts-15 of 16
+ images-chunk-json-15: ${{ steps.prepare-matrix.outputs.images-chunk-json-15 }}
+ images-chunk-not-empty-15: ${{ steps.prepare-matrix.outputs.images-chunk-not-empty-15 }}
+ images-chunk-size-15: ${{ steps.prepare-matrix.outputs.images-chunk-size-15 }}
+ # artifacts-16 of 16
+ images-chunk-json-16: ${{ steps.prepare-matrix.outputs.images-chunk-json-16 }}
+ images-chunk-not-empty-16: ${{ steps.prepare-matrix.outputs.images-chunk-not-empty-16 }}
+ images-chunk-size-16: ${{ steps.prepare-matrix.outputs.images-chunk-size-16 }}
+
+# template file: 250.single_aggr-jobs.yaml
+
+ # ------ aggregate all artifact chunks into a single dependency -------
+
+ all-artifacts-ready:
+ name: "17 artifacts chunks ready"
+ runs-on: ubuntu-latest # not going to run, anyway, but is required.
+ if: ${{ !cancelled() && ( 1 == 2 ) }} # eg: never run.
+ needs: [ "matrix_prep", "build-artifacts-chunk-1","build-artifacts-chunk-2","build-artifacts-chunk-3","build-artifacts-chunk-4","build-artifacts-chunk-5","build-artifacts-chunk-6","build-artifacts-chunk-7","build-artifacts-chunk-8","build-artifacts-chunk-9","build-artifacts-chunk-10","build-artifacts-chunk-11","build-artifacts-chunk-12","build-artifacts-chunk-13","build-artifacts-chunk-14","build-artifacts-chunk-15","build-artifacts-chunk-16","build-artifacts-chunk-17" ] # <-- HERE: all artifact chunk numbers.
+ steps:
+ - name: fake step
+ run: uptime
+
+ all-images-ready:
+ name: "16 image chunks ready"
+ runs-on: ubuntu-latest # not going to run, anyway, but is required.
+ if: ${{ !cancelled() && ( 1 == 2 ) }} # eg: never run.
+ needs: [ "matrix_prep", "build-images-chunk-1","build-images-chunk-2","build-images-chunk-3","build-images-chunk-4","build-images-chunk-5","build-images-chunk-6","build-images-chunk-7","build-images-chunk-8","build-images-chunk-9","build-images-chunk-10","build-images-chunk-11","build-images-chunk-12","build-images-chunk-13","build-images-chunk-14","build-images-chunk-15","build-images-chunk-16" ] # <-- HERE: all image chunk numbers.
+ steps:
+ - name: fake step
+ run: uptime
+
+ all-artifacts-and-images-ready:
+ name: "17 artifacts and 16 image chunks ready"
+ runs-on: ubuntu-latest # not going to run, anyway, but is required.
+ if: ${{ !cancelled() && ( 1 == 2 ) }} # eg: never run.
+ needs: [ "matrix_prep", "all-artifacts-ready", "all-images-ready" ]
+ steps:
+ - name: fake step
+ run: uptime
+
+# template file: 550.per-chunk-artifacts_job.yaml
+
+ "build-artifacts-chunk-1": # templated "build-artifacts-chunk-1"
+ if: ${{ github.repository_owner == 'armbian' && needs.matrix_prep.outputs.artifacts-chunk-not-empty-1 == 'yes' }} # <-- HERE: Chunk number.
+ needs: [ "matrix_prep" ]
+ strategy:
+ fail-fast: false # let other jobs try to complete if one fails
+ matrix: ${{ fromJSON(needs.matrix_prep.outputs.artifacts-chunk-json-1) }} # <-- HERE: Chunk number.
+ name: ${{ matrix.desc || 'Empty A1' }} # <-- HERE: Chunk number.
+ runs-on: ${{ matrix.runs_on }}
+ steps:
+
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ uses: armbian/actions/runner-clean@main
+
+ # Login to ghcr.io, for later uploading rootfs to ghcr.io
+ - name: Docker Login to GitHub Container Registry
+ uses: docker/login-action@v3
+ with:
+ registry: ghcr.io
+ username: "${{ github.repository_owner }}" # GitHub username or org
+ password: "${{ secrets.GITHUB_TOKEN }}" # GitHub actions builtin token. repo has to have pkg access.
+
+ # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
+ - name: Cleanup userpatches repo
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: rm -rf userpatches.repo
+
+ - name: Checkout build repo
+ uses: actions/checkout@v4 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners.
+ with:
+ repository: ${{ env.BUILD_REPOSITORY }}
+ ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
+
+ # clone the userpatches repo (`armbian/os`)
+ - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
+ uses: actions/checkout@v4
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ with:
+ repository: ${{ env.USERPATCHES_REPOSITORY }}
+ ref: ${{ env.USERPATCHES_REF }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default.
+ path: userpatches.repo
+
+ - name: "Put userpatches in place, and remove userpatches repo"
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: |
+ mkdir -pv userpatches
+ rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
+ rm -rf userpatches.repo
+
+ - name: Build ${{matrix.desc}}
+ timeout-minutes: 60
+ id: build
+ run: |
+ bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" SHARE_LOG=yes ${{env.EXTRA_PARAMS_ALL_BUILDS}}
+
+ - name: "Logs: ${{ steps.build.outputs.logs_url }}"
+ if: always()
+ run: |
+ echo "Logs: ${{ steps.build.outputs.logs_url }}"
+ "build-artifacts-chunk-2": # templated "build-artifacts-chunk-2"
+ if: ${{ github.repository_owner == 'armbian' && needs.matrix_prep.outputs.artifacts-chunk-not-empty-2 == 'yes' }} # <-- HERE: Chunk number.
+ needs: [ "matrix_prep" ]
+ strategy:
+ fail-fast: false # let other jobs try to complete if one fails
+ matrix: ${{ fromJSON(needs.matrix_prep.outputs.artifacts-chunk-json-2) }} # <-- HERE: Chunk number.
+ name: ${{ matrix.desc || 'Empty A2' }} # <-- HERE: Chunk number.
+ runs-on: ${{ matrix.runs_on }}
+ steps:
+
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ uses: armbian/actions/runner-clean@main
+
+ # Login to ghcr.io, for later uploading rootfs to ghcr.io
+ - name: Docker Login to GitHub Container Registry
+ uses: docker/login-action@v3
+ with:
+ registry: ghcr.io
+ username: "${{ github.repository_owner }}" # GitHub username or org
+ password: "${{ secrets.GITHUB_TOKEN }}" # GitHub actions builtin token. repo has to have pkg access.
+
+ # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
+ - name: Cleanup userpatches repo
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: rm -rf userpatches.repo
+
+ - name: Checkout build repo
+ uses: actions/checkout@v4 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners.
+ with:
+ repository: ${{ env.BUILD_REPOSITORY }}
+ ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
+
+ # clone the userpatches repo (`armbian/os`)
+ - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
+ uses: actions/checkout@v4
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ with:
+ repository: ${{ env.USERPATCHES_REPOSITORY }}
+ ref: ${{ env.USERPATCHES_REF }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default.
+ path: userpatches.repo
+
+ - name: "Put userpatches in place, and remove userpatches repo"
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: |
+ mkdir -pv userpatches
+ rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
+ rm -rf userpatches.repo
+
+ - name: Build ${{matrix.desc}}
+ timeout-minutes: 60
+ id: build
+ run: |
+ bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" SHARE_LOG=yes ${{env.EXTRA_PARAMS_ALL_BUILDS}}
+
+ - name: "Logs: ${{ steps.build.outputs.logs_url }}"
+ if: always()
+ run: |
+ echo "Logs: ${{ steps.build.outputs.logs_url }}"
+ "build-artifacts-chunk-3": # templated "build-artifacts-chunk-3"
+ if: ${{ github.repository_owner == 'armbian' && needs.matrix_prep.outputs.artifacts-chunk-not-empty-3 == 'yes' }} # <-- HERE: Chunk number.
+ needs: [ "matrix_prep" ]
+ strategy:
+ fail-fast: false # let other jobs try to complete if one fails
+ matrix: ${{ fromJSON(needs.matrix_prep.outputs.artifacts-chunk-json-3) }} # <-- HERE: Chunk number.
+ name: ${{ matrix.desc || 'Empty A3' }} # <-- HERE: Chunk number.
+ runs-on: ${{ matrix.runs_on }}
+ steps:
+
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ uses: armbian/actions/runner-clean@main
+
+ # Login to ghcr.io, for later uploading rootfs to ghcr.io
+ - name: Docker Login to GitHub Container Registry
+ uses: docker/login-action@v3
+ with:
+ registry: ghcr.io
+ username: "${{ github.repository_owner }}" # GitHub username or org
+ password: "${{ secrets.GITHUB_TOKEN }}" # GitHub actions builtin token. repo has to have pkg access.
+
+ # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
+ - name: Cleanup userpatches repo
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: rm -rf userpatches.repo
+
+ - name: Checkout build repo
+ uses: actions/checkout@v4 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners.
+ with:
+ repository: ${{ env.BUILD_REPOSITORY }}
+ ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
+
+ # clone the userpatches repo (`armbian/os`)
+ - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
+ uses: actions/checkout@v4
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ with:
+ repository: ${{ env.USERPATCHES_REPOSITORY }}
+ ref: ${{ env.USERPATCHES_REF }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default.
+ path: userpatches.repo
+
+ - name: "Put userpatches in place, and remove userpatches repo"
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: |
+ mkdir -pv userpatches
+ rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
+ rm -rf userpatches.repo
+
+ - name: Build ${{matrix.desc}}
+ timeout-minutes: 60
+ id: build
+ run: |
+ bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" SHARE_LOG=yes ${{env.EXTRA_PARAMS_ALL_BUILDS}}
+
+ - name: "Logs: ${{ steps.build.outputs.logs_url }}"
+ if: always()
+ run: |
+ echo "Logs: ${{ steps.build.outputs.logs_url }}"
+ "build-artifacts-chunk-4": # templated "build-artifacts-chunk-4"
+ if: ${{ github.repository_owner == 'armbian' && needs.matrix_prep.outputs.artifacts-chunk-not-empty-4 == 'yes' }} # <-- HERE: Chunk number.
+ needs: [ "matrix_prep" ]
+ strategy:
+ fail-fast: false # let other jobs try to complete if one fails
+ matrix: ${{ fromJSON(needs.matrix_prep.outputs.artifacts-chunk-json-4) }} # <-- HERE: Chunk number.
+ name: ${{ matrix.desc || 'Empty A4' }} # <-- HERE: Chunk number.
+ runs-on: ${{ matrix.runs_on }}
+ steps:
+
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ uses: armbian/actions/runner-clean@main
+
+ # Login to ghcr.io, for later uploading rootfs to ghcr.io
+ - name: Docker Login to GitHub Container Registry
+ uses: docker/login-action@v3
+ with:
+ registry: ghcr.io
+ username: "${{ github.repository_owner }}" # GitHub username or org
+ password: "${{ secrets.GITHUB_TOKEN }}" # GitHub actions builtin token. repo has to have pkg access.
+
+ # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
+ - name: Cleanup userpatches repo
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: rm -rf userpatches.repo
+
+ - name: Checkout build repo
+ uses: actions/checkout@v4 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners.
+ with:
+ repository: ${{ env.BUILD_REPOSITORY }}
+ ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
+
+ # clone the userpatches repo (`armbian/os`)
+ - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
+ uses: actions/checkout@v4
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ with:
+ repository: ${{ env.USERPATCHES_REPOSITORY }}
+ ref: ${{ env.USERPATCHES_REF }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default.
+ path: userpatches.repo
+
+ - name: "Put userpatches in place, and remove userpatches repo"
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: |
+ mkdir -pv userpatches
+ rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
+ rm -rf userpatches.repo
+
+ - name: Build ${{matrix.desc}}
+ timeout-minutes: 60
+ id: build
+ run: |
+ bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" SHARE_LOG=yes ${{env.EXTRA_PARAMS_ALL_BUILDS}}
+
+ - name: "Logs: ${{ steps.build.outputs.logs_url }}"
+ if: always()
+ run: |
+ echo "Logs: ${{ steps.build.outputs.logs_url }}"
+ "build-artifacts-chunk-5": # templated "build-artifacts-chunk-5"
+ if: ${{ github.repository_owner == 'armbian' && needs.matrix_prep.outputs.artifacts-chunk-not-empty-5 == 'yes' }} # <-- HERE: Chunk number.
+ needs: [ "matrix_prep" ]
+ strategy:
+ fail-fast: false # let other jobs try to complete if one fails
+ matrix: ${{ fromJSON(needs.matrix_prep.outputs.artifacts-chunk-json-5) }} # <-- HERE: Chunk number.
+ name: ${{ matrix.desc || 'Empty A5' }} # <-- HERE: Chunk number.
+ runs-on: ${{ matrix.runs_on }}
+ steps:
+
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ uses: armbian/actions/runner-clean@main
+
+ # Login to ghcr.io, for later uploading rootfs to ghcr.io
+ - name: Docker Login to GitHub Container Registry
+ uses: docker/login-action@v3
+ with:
+ registry: ghcr.io
+ username: "${{ github.repository_owner }}" # GitHub username or org
+ password: "${{ secrets.GITHUB_TOKEN }}" # GitHub actions builtin token. repo has to have pkg access.
+
+ # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
+ - name: Cleanup userpatches repo
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: rm -rf userpatches.repo
+
+ - name: Checkout build repo
+ uses: actions/checkout@v4 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners.
+ with:
+ repository: ${{ env.BUILD_REPOSITORY }}
+ ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
+
+ # clone the userpatches repo (`armbian/os`)
+ - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
+ uses: actions/checkout@v4
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ with:
+ repository: ${{ env.USERPATCHES_REPOSITORY }}
+ ref: ${{ env.USERPATCHES_REF }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default.
+ path: userpatches.repo
+
+ - name: "Put userpatches in place, and remove userpatches repo"
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: |
+ mkdir -pv userpatches
+ rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
+ rm -rf userpatches.repo
+
+ - name: Build ${{matrix.desc}}
+ timeout-minutes: 60
+ id: build
+ run: |
+ bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" SHARE_LOG=yes ${{env.EXTRA_PARAMS_ALL_BUILDS}}
+
+ - name: "Logs: ${{ steps.build.outputs.logs_url }}"
+ if: always()
+ run: |
+ echo "Logs: ${{ steps.build.outputs.logs_url }}"
+ "build-artifacts-chunk-6": # templated "build-artifacts-chunk-6"
+ if: ${{ github.repository_owner == 'armbian' && needs.matrix_prep.outputs.artifacts-chunk-not-empty-6 == 'yes' }} # <-- HERE: Chunk number.
+ needs: [ "matrix_prep" ]
+ strategy:
+ fail-fast: false # let other jobs try to complete if one fails
+ matrix: ${{ fromJSON(needs.matrix_prep.outputs.artifacts-chunk-json-6) }} # <-- HERE: Chunk number.
+ name: ${{ matrix.desc || 'Empty A6' }} # <-- HERE: Chunk number.
+ runs-on: ${{ matrix.runs_on }}
+ steps:
+
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ uses: armbian/actions/runner-clean@main
+
+ # Login to ghcr.io, for later uploading rootfs to ghcr.io
+ - name: Docker Login to GitHub Container Registry
+ uses: docker/login-action@v3
+ with:
+ registry: ghcr.io
+ username: "${{ github.repository_owner }}" # GitHub username or org
+ password: "${{ secrets.GITHUB_TOKEN }}" # GitHub actions builtin token. repo has to have pkg access.
+
+ # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
+ - name: Cleanup userpatches repo
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: rm -rf userpatches.repo
+
+ - name: Checkout build repo
+ uses: actions/checkout@v4 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners.
+ with:
+ repository: ${{ env.BUILD_REPOSITORY }}
+ ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
+ fetch-depth: ${{ matrix.fdepth }}
+          clean: false # true is default. it *will* delete the host's /dev if mounted inside.
+
+ # clone the userpatches repo (`armbian/os`)
+ - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
+ uses: actions/checkout@v4
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ with:
+ repository: ${{ env.USERPATCHES_REPOSITORY }}
+ ref: ${{ env.USERPATCHES_REF }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default.
+ path: userpatches.repo
+
+ - name: "Put userpatches in place, and remove userpatches repo"
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: |
+ mkdir -pv userpatches
+ rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
+ rm -rf userpatches.repo
+
+ - name: Build ${{matrix.desc}}
+ timeout-minutes: 60
+ id: build
+ run: |
+ bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" SHARE_LOG=yes ${{env.EXTRA_PARAMS_ALL_BUILDS}}
+
+ - name: "Logs: ${{ steps.build.outputs.logs_url }}"
+ if: always()
+ run: |
+ echo "Logs: ${{ steps.build.outputs.logs_url }}"
+ "build-artifacts-chunk-7": # templated "build-artifacts-chunk-7"
+ if: ${{ github.repository_owner == 'armbian' && needs.matrix_prep.outputs.artifacts-chunk-not-empty-7 == 'yes' }} # <-- HERE: Chunk number.
+ needs: [ "matrix_prep" ]
+ strategy:
+ fail-fast: false # let other jobs try to complete if one fails
+ matrix: ${{ fromJSON(needs.matrix_prep.outputs.artifacts-chunk-json-7) }} # <-- HERE: Chunk number.
+ name: ${{ matrix.desc || 'Empty A7' }} # <-- HERE: Chunk number.
+ runs-on: ${{ matrix.runs_on }}
+ steps:
+
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ uses: armbian/actions/runner-clean@main
+
+ # Login to ghcr.io, for later uploading rootfs to ghcr.io
+ - name: Docker Login to GitHub Container Registry
+ uses: docker/login-action@v3
+ with:
+ registry: ghcr.io
+ username: "${{ github.repository_owner }}" # GitHub username or org
+ password: "${{ secrets.GITHUB_TOKEN }}" # GitHub actions builtin token. repo has to have pkg access.
+
+ # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
+ - name: Cleanup userpatches repo
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: rm -rf userpatches.repo
+
+ - name: Checkout build repo
+ uses: actions/checkout@v4 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners.
+ with:
+ repository: ${{ env.BUILD_REPOSITORY }}
+ ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
+ fetch-depth: ${{ matrix.fdepth }}
+          clean: false # true is default. it *will* delete the host's /dev if mounted inside.
+
+ # clone the userpatches repo (`armbian/os`)
+ - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
+ uses: actions/checkout@v4
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ with:
+ repository: ${{ env.USERPATCHES_REPOSITORY }}
+ ref: ${{ env.USERPATCHES_REF }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default.
+ path: userpatches.repo
+
+ - name: "Put userpatches in place, and remove userpatches repo"
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: |
+ mkdir -pv userpatches
+ rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
+ rm -rf userpatches.repo
+
+ - name: Build ${{matrix.desc}}
+ timeout-minutes: 60
+ id: build
+ run: |
+ bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" SHARE_LOG=yes ${{env.EXTRA_PARAMS_ALL_BUILDS}}
+
+ - name: "Logs: ${{ steps.build.outputs.logs_url }}"
+ if: always()
+ run: |
+ echo "Logs: ${{ steps.build.outputs.logs_url }}"
+ "build-artifacts-chunk-8": # templated "build-artifacts-chunk-8"
+ if: ${{ github.repository_owner == 'armbian' && needs.matrix_prep.outputs.artifacts-chunk-not-empty-8 == 'yes' }} # <-- HERE: Chunk number.
+ needs: [ "matrix_prep" ]
+ strategy:
+ fail-fast: false # let other jobs try to complete if one fails
+ matrix: ${{ fromJSON(needs.matrix_prep.outputs.artifacts-chunk-json-8) }} # <-- HERE: Chunk number.
+ name: ${{ matrix.desc || 'Empty A8' }} # <-- HERE: Chunk number.
+ runs-on: ${{ matrix.runs_on }}
+ steps:
+
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ uses: armbian/actions/runner-clean@main
+
+ # Login to ghcr.io, for later uploading rootfs to ghcr.io
+ - name: Docker Login to GitHub Container Registry
+ uses: docker/login-action@v3
+ with:
+ registry: ghcr.io
+ username: "${{ github.repository_owner }}" # GitHub username or org
+ password: "${{ secrets.GITHUB_TOKEN }}" # GitHub actions builtin token. repo has to have pkg access.
+
+ # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
+ - name: Cleanup userpatches repo
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: rm -rf userpatches.repo
+
+ - name: Checkout build repo
+ uses: actions/checkout@v4 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners.
+ with:
+ repository: ${{ env.BUILD_REPOSITORY }}
+ ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
+ fetch-depth: ${{ matrix.fdepth }}
+          clean: false # true is default. it *will* delete the host's /dev if mounted inside.
+
+ # clone the userpatches repo (`armbian/os`)
+ - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
+ uses: actions/checkout@v4
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ with:
+ repository: ${{ env.USERPATCHES_REPOSITORY }}
+ ref: ${{ env.USERPATCHES_REF }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default.
+ path: userpatches.repo
+
+ - name: "Put userpatches in place, and remove userpatches repo"
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: |
+ mkdir -pv userpatches
+ rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
+ rm -rf userpatches.repo
+
+ - name: Build ${{matrix.desc}}
+ timeout-minutes: 60
+ id: build
+ run: |
+ bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" SHARE_LOG=yes ${{env.EXTRA_PARAMS_ALL_BUILDS}}
+
+ - name: "Logs: ${{ steps.build.outputs.logs_url }}"
+ if: always()
+ run: |
+ echo "Logs: ${{ steps.build.outputs.logs_url }}"
+ "build-artifacts-chunk-9": # templated "build-artifacts-chunk-9"
+ if: ${{ github.repository_owner == 'armbian' && needs.matrix_prep.outputs.artifacts-chunk-not-empty-9 == 'yes' }} # <-- HERE: Chunk number.
+ needs: [ "matrix_prep" ]
+ strategy:
+ fail-fast: false # let other jobs try to complete if one fails
+ matrix: ${{ fromJSON(needs.matrix_prep.outputs.artifacts-chunk-json-9) }} # <-- HERE: Chunk number.
+ name: ${{ matrix.desc || 'Empty A9' }} # <-- HERE: Chunk number.
+ runs-on: ${{ matrix.runs_on }}
+ steps:
+
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ uses: armbian/actions/runner-clean@main
+
+ # Login to ghcr.io, for later uploading rootfs to ghcr.io
+ - name: Docker Login to GitHub Container Registry
+ uses: docker/login-action@v3
+ with:
+ registry: ghcr.io
+ username: "${{ github.repository_owner }}" # GitHub username or org
+ password: "${{ secrets.GITHUB_TOKEN }}" # GitHub actions builtin token. repo has to have pkg access.
+
+ # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
+ - name: Cleanup userpatches repo
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: rm -rf userpatches.repo
+
+ - name: Checkout build repo
+ uses: actions/checkout@v4 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners.
+ with:
+ repository: ${{ env.BUILD_REPOSITORY }}
+ ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
+ fetch-depth: ${{ matrix.fdepth }}
+          clean: false # true is default. it *will* delete the host's /dev if mounted inside.
+
+ # clone the userpatches repo (`armbian/os`)
+ - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
+ uses: actions/checkout@v4
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ with:
+ repository: ${{ env.USERPATCHES_REPOSITORY }}
+ ref: ${{ env.USERPATCHES_REF }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default.
+ path: userpatches.repo
+
+ - name: "Put userpatches in place, and remove userpatches repo"
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: |
+ mkdir -pv userpatches
+ rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
+ rm -rf userpatches.repo
+
+ - name: Build ${{matrix.desc}}
+ timeout-minutes: 60
+ id: build
+ run: |
+ bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" SHARE_LOG=yes ${{env.EXTRA_PARAMS_ALL_BUILDS}}
+
+ - name: "Logs: ${{ steps.build.outputs.logs_url }}"
+ if: always()
+ run: |
+ echo "Logs: ${{ steps.build.outputs.logs_url }}"
+ "build-artifacts-chunk-10": # templated "build-artifacts-chunk-10"
+ if: ${{ github.repository_owner == 'armbian' && needs.matrix_prep.outputs.artifacts-chunk-not-empty-10 == 'yes' }} # <-- HERE: Chunk number.
+ needs: [ "matrix_prep" ]
+ strategy:
+ fail-fast: false # let other jobs try to complete if one fails
+ matrix: ${{ fromJSON(needs.matrix_prep.outputs.artifacts-chunk-json-10) }} # <-- HERE: Chunk number.
+ name: ${{ matrix.desc || 'Empty A10' }} # <-- HERE: Chunk number.
+ runs-on: ${{ matrix.runs_on }}
+ steps:
+
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ uses: armbian/actions/runner-clean@main
+
+ # Login to ghcr.io, for later uploading rootfs to ghcr.io
+ - name: Docker Login to GitHub Container Registry
+ uses: docker/login-action@v3
+ with:
+ registry: ghcr.io
+ username: "${{ github.repository_owner }}" # GitHub username or org
+ password: "${{ secrets.GITHUB_TOKEN }}" # GitHub actions builtin token. repo has to have pkg access.
+
+ # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
+ - name: Cleanup userpatches repo
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: rm -rf userpatches.repo
+
+ - name: Checkout build repo
+ uses: actions/checkout@v4 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners.
+ with:
+ repository: ${{ env.BUILD_REPOSITORY }}
+ ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
+ fetch-depth: ${{ matrix.fdepth }}
+          clean: false # true is default. it *will* delete the host's /dev if mounted inside.
+
+ # clone the userpatches repo (`armbian/os`)
+ - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
+ uses: actions/checkout@v4
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ with:
+ repository: ${{ env.USERPATCHES_REPOSITORY }}
+ ref: ${{ env.USERPATCHES_REF }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default.
+ path: userpatches.repo
+
+ - name: "Put userpatches in place, and remove userpatches repo"
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: |
+ mkdir -pv userpatches
+ rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
+ rm -rf userpatches.repo
+
+ - name: Build ${{matrix.desc}}
+ timeout-minutes: 60
+ id: build
+ run: |
+ bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" SHARE_LOG=yes ${{env.EXTRA_PARAMS_ALL_BUILDS}}
+
+ - name: "Logs: ${{ steps.build.outputs.logs_url }}"
+ if: always()
+ run: |
+ echo "Logs: ${{ steps.build.outputs.logs_url }}"
+ "build-artifacts-chunk-11": # templated "build-artifacts-chunk-11"
+ if: ${{ github.repository_owner == 'armbian' && needs.matrix_prep.outputs.artifacts-chunk-not-empty-11 == 'yes' }} # <-- HERE: Chunk number.
+ needs: [ "matrix_prep" ]
+ strategy:
+ fail-fast: false # let other jobs try to complete if one fails
+ matrix: ${{ fromJSON(needs.matrix_prep.outputs.artifacts-chunk-json-11) }} # <-- HERE: Chunk number.
+ name: ${{ matrix.desc || 'Empty A11' }} # <-- HERE: Chunk number.
+ runs-on: ${{ matrix.runs_on }}
+ steps:
+
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ uses: armbian/actions/runner-clean@main
+
+ # Login to ghcr.io, for later uploading rootfs to ghcr.io
+ - name: Docker Login to GitHub Container Registry
+ uses: docker/login-action@v3
+ with:
+ registry: ghcr.io
+ username: "${{ github.repository_owner }}" # GitHub username or org
+ password: "${{ secrets.GITHUB_TOKEN }}" # GitHub actions builtin token. repo has to have pkg access.
+
+ # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
+ - name: Cleanup userpatches repo
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: rm -rf userpatches.repo
+
+ - name: Checkout build repo
+ uses: actions/checkout@v4 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners.
+ with:
+ repository: ${{ env.BUILD_REPOSITORY }}
+ ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
+ fetch-depth: ${{ matrix.fdepth }}
+          clean: false # true is default. it *will* delete the host's /dev if mounted inside.
+
+ # clone the userpatches repo (`armbian/os`)
+ - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
+ uses: actions/checkout@v4
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ with:
+ repository: ${{ env.USERPATCHES_REPOSITORY }}
+ ref: ${{ env.USERPATCHES_REF }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default.
+ path: userpatches.repo
+
+ - name: "Put userpatches in place, and remove userpatches repo"
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: |
+ mkdir -pv userpatches
+ rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
+ rm -rf userpatches.repo
+
+ - name: Build ${{matrix.desc}}
+ timeout-minutes: 60
+ id: build
+ run: |
+ bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" SHARE_LOG=yes ${{env.EXTRA_PARAMS_ALL_BUILDS}}
+
+ - name: "Logs: ${{ steps.build.outputs.logs_url }}"
+ if: always()
+ run: |
+ echo "Logs: ${{ steps.build.outputs.logs_url }}"
+ "build-artifacts-chunk-12": # templated "build-artifacts-chunk-12"
+ if: ${{ github.repository_owner == 'armbian' && needs.matrix_prep.outputs.artifacts-chunk-not-empty-12 == 'yes' }} # <-- HERE: Chunk number.
+ needs: [ "matrix_prep" ]
+ strategy:
+ fail-fast: false # let other jobs try to complete if one fails
+ matrix: ${{ fromJSON(needs.matrix_prep.outputs.artifacts-chunk-json-12) }} # <-- HERE: Chunk number.
+ name: ${{ matrix.desc || 'Empty A12' }} # <-- HERE: Chunk number.
+ runs-on: ${{ matrix.runs_on }}
+ steps:
+
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ uses: armbian/actions/runner-clean@main
+
+ # Login to ghcr.io, for later uploading rootfs to ghcr.io
+ - name: Docker Login to GitHub Container Registry
+ uses: docker/login-action@v3
+ with:
+ registry: ghcr.io
+ username: "${{ github.repository_owner }}" # GitHub username or org
+ password: "${{ secrets.GITHUB_TOKEN }}" # GitHub actions builtin token. repo has to have pkg access.
+
+ # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
+ - name: Cleanup userpatches repo
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: rm -rf userpatches.repo
+
+ - name: Checkout build repo
+ uses: actions/checkout@v4 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners.
+ with:
+ repository: ${{ env.BUILD_REPOSITORY }}
+ ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
+ fetch-depth: ${{ matrix.fdepth }}
+          clean: false # true is default. it *will* delete the host's /dev if mounted inside.
+
+ # clone the userpatches repo (`armbian/os`)
+ - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
+ uses: actions/checkout@v4
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ with:
+ repository: ${{ env.USERPATCHES_REPOSITORY }}
+ ref: ${{ env.USERPATCHES_REF }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default.
+ path: userpatches.repo
+
+ - name: "Put userpatches in place, and remove userpatches repo"
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: |
+ mkdir -pv userpatches
+ rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
+ rm -rf userpatches.repo
+
+ - name: Build ${{matrix.desc}}
+ timeout-minutes: 60
+ id: build
+ run: |
+ bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" SHARE_LOG=yes ${{env.EXTRA_PARAMS_ALL_BUILDS}}
+
+ - name: "Logs: ${{ steps.build.outputs.logs_url }}"
+ if: always()
+ run: |
+ echo "Logs: ${{ steps.build.outputs.logs_url }}"
+ "build-artifacts-chunk-13": # templated "build-artifacts-chunk-13"
+ if: ${{ github.repository_owner == 'armbian' && needs.matrix_prep.outputs.artifacts-chunk-not-empty-13 == 'yes' }} # <-- HERE: Chunk number.
+ needs: [ "matrix_prep" ]
+ strategy:
+ fail-fast: false # let other jobs try to complete if one fails
+ matrix: ${{ fromJSON(needs.matrix_prep.outputs.artifacts-chunk-json-13) }} # <-- HERE: Chunk number.
+ name: ${{ matrix.desc || 'Empty A13' }} # <-- HERE: Chunk number.
+ runs-on: ${{ matrix.runs_on }}
+ steps:
+
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ uses: armbian/actions/runner-clean@main
+
+ # Login to ghcr.io, for later uploading rootfs to ghcr.io
+ - name: Docker Login to GitHub Container Registry
+ uses: docker/login-action@v3
+ with:
+ registry: ghcr.io
+ username: "${{ github.repository_owner }}" # GitHub username or org
+ password: "${{ secrets.GITHUB_TOKEN }}" # GitHub actions builtin token. repo has to have pkg access.
+
+ # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
+ - name: Cleanup userpatches repo
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: rm -rf userpatches.repo
+
+ - name: Checkout build repo
+ uses: actions/checkout@v4 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners.
+ with:
+ repository: ${{ env.BUILD_REPOSITORY }}
+ ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
+ fetch-depth: ${{ matrix.fdepth }}
+          clean: false # true is default. it *will* delete the host's /dev if mounted inside.
+
+ # clone the userpatches repo (`armbian/os`)
+ - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
+ uses: actions/checkout@v4
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ with:
+ repository: ${{ env.USERPATCHES_REPOSITORY }}
+ ref: ${{ env.USERPATCHES_REF }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default.
+ path: userpatches.repo
+
+ - name: "Put userpatches in place, and remove userpatches repo"
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: |
+ mkdir -pv userpatches
+ rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
+ rm -rf userpatches.repo
+
+ - name: Build ${{matrix.desc}}
+ timeout-minutes: 60
+ id: build
+ run: |
+ bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" SHARE_LOG=yes ${{env.EXTRA_PARAMS_ALL_BUILDS}}
+
+ - name: "Logs: ${{ steps.build.outputs.logs_url }}"
+ if: always()
+ run: |
+ echo "Logs: ${{ steps.build.outputs.logs_url }}"
+ "build-artifacts-chunk-14": # templated "build-artifacts-chunk-14"
+ if: ${{ github.repository_owner == 'armbian' && needs.matrix_prep.outputs.artifacts-chunk-not-empty-14 == 'yes' }} # <-- HERE: Chunk number.
+ needs: [ "matrix_prep" ]
+ strategy:
+ fail-fast: false # let other jobs try to complete if one fails
+ matrix: ${{ fromJSON(needs.matrix_prep.outputs.artifacts-chunk-json-14) }} # <-- HERE: Chunk number.
+ name: ${{ matrix.desc || 'Empty A14' }} # <-- HERE: Chunk number.
+ runs-on: ${{ matrix.runs_on }}
+ steps:
+
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ uses: armbian/actions/runner-clean@main
+
+ # Login to ghcr.io, for later uploading rootfs to ghcr.io
+ - name: Docker Login to GitHub Container Registry
+ uses: docker/login-action@v3
+ with:
+ registry: ghcr.io
+ username: "${{ github.repository_owner }}" # GitHub username or org
+ password: "${{ secrets.GITHUB_TOKEN }}" # GitHub actions builtin token. repo has to have pkg access.
+
+ # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
+ - name: Cleanup userpatches repo
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: rm -rf userpatches.repo
+
+ - name: Checkout build repo
+ uses: actions/checkout@v4 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners.
+ with:
+ repository: ${{ env.BUILD_REPOSITORY }}
+ ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
+ fetch-depth: ${{ matrix.fdepth }}
+          clean: false # true is default. it *will* delete the host's /dev if mounted inside.
+
+ # clone the userpatches repo (`armbian/os`)
+ - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
+ uses: actions/checkout@v4
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ with:
+ repository: ${{ env.USERPATCHES_REPOSITORY }}
+ ref: ${{ env.USERPATCHES_REF }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default.
+ path: userpatches.repo
+
+ - name: "Put userpatches in place, and remove userpatches repo"
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: |
+ mkdir -pv userpatches
+ rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
+ rm -rf userpatches.repo
+
+ - name: Build ${{matrix.desc}}
+ timeout-minutes: 60
+ id: build
+ run: |
+ bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" SHARE_LOG=yes ${{env.EXTRA_PARAMS_ALL_BUILDS}}
+
+ - name: "Logs: ${{ steps.build.outputs.logs_url }}"
+ if: always()
+ run: |
+ echo "Logs: ${{ steps.build.outputs.logs_url }}"
+ "build-artifacts-chunk-15": # templated "build-artifacts-chunk-15"
+ if: ${{ github.repository_owner == 'armbian' && needs.matrix_prep.outputs.artifacts-chunk-not-empty-15 == 'yes' }} # <-- HERE: Chunk number.
+ needs: [ "matrix_prep" ]
+ strategy:
+ fail-fast: false # let other jobs try to complete if one fails
+ matrix: ${{ fromJSON(needs.matrix_prep.outputs.artifacts-chunk-json-15) }} # <-- HERE: Chunk number.
+ name: ${{ matrix.desc || 'Empty A15' }} # <-- HERE: Chunk number.
+ runs-on: ${{ matrix.runs_on }}
+ steps:
+
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ uses: armbian/actions/runner-clean@main
+
+ # Login to ghcr.io, for later uploading rootfs to ghcr.io
+ - name: Docker Login to GitHub Container Registry
+ uses: docker/login-action@v3
+ with:
+ registry: ghcr.io
+ username: "${{ github.repository_owner }}" # GitHub username or org
+ password: "${{ secrets.GITHUB_TOKEN }}" # GitHub actions builtin token. repo has to have pkg access.
+
+ # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
+ - name: Cleanup userpatches repo
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: rm -rf userpatches.repo
+
+ - name: Checkout build repo
+ uses: actions/checkout@v4 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners.
+ with:
+ repository: ${{ env.BUILD_REPOSITORY }}
+ ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
+ fetch-depth: ${{ matrix.fdepth }}
+          clean: false # true is default. it *will* delete the host's /dev if mounted inside.
+
+ # clone the userpatches repo (`armbian/os`)
+ - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
+ uses: actions/checkout@v4
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ with:
+ repository: ${{ env.USERPATCHES_REPOSITORY }}
+ ref: ${{ env.USERPATCHES_REF }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default.
+ path: userpatches.repo
+
+ - name: "Put userpatches in place, and remove userpatches repo"
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: |
+ mkdir -pv userpatches
+ rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
+ rm -rf userpatches.repo
+
+ - name: Build ${{matrix.desc}}
+ timeout-minutes: 60
+ id: build
+ run: |
+ bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" SHARE_LOG=yes ${{env.EXTRA_PARAMS_ALL_BUILDS}}
+
+ - name: "Logs: ${{ steps.build.outputs.logs_url }}"
+ if: always()
+ run: |
+ echo "Logs: ${{ steps.build.outputs.logs_url }}"
+ "build-artifacts-chunk-16": # templated "build-artifacts-chunk-16"
+ if: ${{ github.repository_owner == 'armbian' && needs.matrix_prep.outputs.artifacts-chunk-not-empty-16 == 'yes' }} # <-- HERE: Chunk number.
+ needs: [ "matrix_prep" ]
+ strategy:
+ fail-fast: false # let other jobs try to complete if one fails
+ matrix: ${{ fromJSON(needs.matrix_prep.outputs.artifacts-chunk-json-16) }} # <-- HERE: Chunk number.
+ name: ${{ matrix.desc || 'Empty A16' }} # <-- HERE: Chunk number.
+ runs-on: ${{ matrix.runs_on }}
+ steps:
+
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ uses: armbian/actions/runner-clean@main
+
+ # Login to ghcr.io, for later uploading rootfs to ghcr.io
+ - name: Docker Login to GitHub Container Registry
+ uses: docker/login-action@v3
+ with:
+ registry: ghcr.io
+ username: "${{ github.repository_owner }}" # GitHub username or org
+ password: "${{ secrets.GITHUB_TOKEN }}" # GitHub actions builtin token. repo has to have pkg access.
+
+ # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
+ - name: Cleanup userpatches repo
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: rm -rf userpatches.repo
+
+ - name: Checkout build repo
+ uses: actions/checkout@v4 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners.
+ with:
+ repository: ${{ env.BUILD_REPOSITORY }}
+ ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
+ fetch-depth: ${{ matrix.fdepth }}
+          clean: false # true is default. it *will* delete the host's /dev if mounted inside.
+
+ # clone the userpatches repo (`armbian/os`)
+ - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
+ uses: actions/checkout@v4
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ with:
+ repository: ${{ env.USERPATCHES_REPOSITORY }}
+ ref: ${{ env.USERPATCHES_REF }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default.
+ path: userpatches.repo
+
+ - name: "Put userpatches in place, and remove userpatches repo"
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: |
+ mkdir -pv userpatches
+ rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
+ rm -rf userpatches.repo
+
+ - name: Build ${{matrix.desc}}
+ timeout-minutes: 60
+ id: build
+ run: |
+ bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" SHARE_LOG=yes ${{env.EXTRA_PARAMS_ALL_BUILDS}}
+
+ - name: "Logs: ${{ steps.build.outputs.logs_url }}"
+ if: always()
+ run: |
+ echo "Logs: ${{ steps.build.outputs.logs_url }}"
+ "build-artifacts-chunk-17": # templated "build-artifacts-chunk-17"
+ if: ${{ github.repository_owner == 'armbian' && needs.matrix_prep.outputs.artifacts-chunk-not-empty-17 == 'yes' }} # <-- HERE: Chunk number.
+ needs: [ "matrix_prep" ]
+ strategy:
+ fail-fast: false # let other jobs try to complete if one fails
+ matrix: ${{ fromJSON(needs.matrix_prep.outputs.artifacts-chunk-json-17) }} # <-- HERE: Chunk number.
+ name: ${{ matrix.desc || 'Empty A17' }} # <-- HERE: Chunk number.
+ runs-on: ${{ matrix.runs_on }}
+ steps:
+
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ uses: armbian/actions/runner-clean@main
+
+ # Login to ghcr.io, for later uploading rootfs to ghcr.io
+ - name: Docker Login to GitHub Container Registry
+ uses: docker/login-action@v3
+ with:
+ registry: ghcr.io
+ username: "${{ github.repository_owner }}" # GitHub username or org
+ password: "${{ secrets.GITHUB_TOKEN }}" # GitHub actions builtin token. repo has to have pkg access.
+
+ # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
+ - name: Cleanup userpatches repo
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: rm -rf userpatches.repo
+
+ - name: Checkout build repo
+ uses: actions/checkout@v4 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners.
+ with:
+ repository: ${{ env.BUILD_REPOSITORY }}
+ ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
+
+ # clone the userpatches repo (`armbian/os`)
+ - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
+ uses: actions/checkout@v4
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ with:
+ repository: ${{ env.USERPATCHES_REPOSITORY }}
+ ref: ${{ env.USERPATCHES_REF }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default.
+ path: userpatches.repo
+
+ - name: "Put userpatches in place, and remove userpatches repo"
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: |
+ mkdir -pv userpatches
+ rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
+ rm -rf userpatches.repo
+
+ - name: Build ${{matrix.desc}}
+ timeout-minutes: 60
+ id: build
+ run: |
+ bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" SHARE_LOG=yes ${{env.EXTRA_PARAMS_ALL_BUILDS}}
+
+ - name: "Logs: ${{ steps.build.outputs.logs_url }}"
+ if: always()
+ run: |
+ echo "Logs: ${{ steps.build.outputs.logs_url }}"
+
+# template file: 650.per-chunk-images_job.yaml
+
+ "build-images-chunk-1": # templated "build-images-chunk-1"
+ needs: [ "matrix_prep", "all-artifacts-ready" ]
+ timeout-minutes: 180
+ if: ${{ !failure() && !cancelled() && ( github.repository_owner == 'armbian' ) && ( needs.matrix_prep.outputs.images-chunk-not-empty-1 == 'yes' ) }} # <-- HERE: Chunk number.
+ strategy:
+ fail-fast: false # let other jobs try to complete if one fails
+ matrix: ${{ fromJSON(needs.matrix_prep.outputs.images-chunk-json-1) }} # <-- HERE: Chunk number.
+ name: ${{ matrix.desc || 'Empty I1' }} # <-- HERE: Chunk number.
+ runs-on: ${{ matrix.runs_on }}
+ steps:
+
+ - name: Install dependencies
+ run: |
+ if [ ! -e /usr/bin/mktorrent ]; then
+ sudo apt-get update
+ sudo apt-get install -y mktorrent
+ fi
+
+ # cleaning self hosted runners
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ uses: armbian/actions/runner-clean@main
+
+ # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
+ - name: Cleanup userpatches repo
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: rm -rf userpatches.repo
+
+ - name: Checkout build repo
+ uses: actions/checkout@v4
+ with:
+ repository: ${{ env.BUILD_REPOSITORY }}
+ ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
+
+ # clone the torrent lists
+ - name: Checkout torrent lists
+ uses: actions/checkout@v4
+ with:
+ repository: XIU2/TrackersListCollection
+ clean: false
+ ref: master
+ path: trackerslist.repo
+ fetch-depth: ${{ matrix.fdepth }}
+
+ # clone the userpatches repo (`armbian/os`)
+ - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
+ uses: actions/checkout@v4
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ with:
+ repository: ${{ env.USERPATCHES_REPOSITORY }}
+ ref: ${{ env.USERPATCHES_REF }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default.
+ path: userpatches.repo
+
+ - name: "Put userpatches in place, and remove userpatches repo"
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: |
+ mkdir -pv userpatches
+ rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
+ rm -rf userpatches.repo
+
+ - name: Cleanup leftover output
+ run: |
+ rm -f userpatches/VERSION
+
+ - name: ${{matrix.desc}}
+ id: build-one-image
+ timeout-minutes: 120
+ run: |
+ # calculate loop from runner name
+ if [ -z "${ImageOS}" ]; then
+ USE_FIXED_LOOP_DEVICE=$(echo ${RUNNER_NAME} | rev | cut -d"-" -f1 | rev | sed 's/^0*//' | sed -e 's/^/\/dev\/loop/')
+ fi
+ bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" USE_FIXED_LOOP_DEVICE="$USE_FIXED_LOOP_DEVICE" SHARE_LOG=yes MAKE_FOLDERS="archive" IMAGE_VERSION=${{ needs.matrix_prep.outputs.version }} ${{env.EXTRA_PARAMS_IMAGE}} ${{env.EXTRA_PARAMS_ALL_BUILDS}}
+
+ - name: "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
+ if: always()
+ run: |
+ echo "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
+
+ - name: Install SSH key
+ uses: shimataro/ssh-key-action@v2
+ with:
+ key: ${{ secrets.KEY_UPLOAD }}
+ known_hosts: ${{ secrets.KNOWN_HOSTS_ARMBIAN_UPLOAD }}
+ if_key_exists: replace
+
+ - name: Check API rate limits
+ run: |
+
+ # install dependencies
+ if ! command -v "gh" > /dev/null 2>&1; then
+ sudo apt-get -y -qq install gh
+ fi
+
+ while true
+ do
+ API_CALLS_TOTAL=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.limit')
+ API_CALLS_LEFT=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.remaining')
+ PERCENT=$(( API_CALLS_LEFT * 100 / API_CALLS_TOTAL ))
+ if (( $PERCENT > 20 )); then
+ echo "API rate in good shape $PERCENT % free"
+ exit 0
+ fi
+ echo "API rate lower then 20%, sleping 10m"
+ sleep 10m
+ done
+
+ # show current api rate
+ curl -s -H "Accept: application/vnd.github.v3+json" -H "Authorization: token ${{ secrets.ACCESS_TOKEN }}" https://api.github.com/rate_limit
+
+ - name: Import GPG key
+ env:
+ GPG_KEY1: ${{ secrets.GPG_KEY1 }}
+ if: env.GPG_KEY1 != null
+ uses: crazy-max/ghaction-import-gpg@v6
+ with:
+ gpg_private_key: ${{ secrets.GPG_KEY1 }}
+ passphrase: ${{ secrets.GPG_PASSPHRASE1 }}
+
+ - name: Sign
+ env:
+ GPG_PASSPHRASE1: ${{ secrets.GPG_PASSPHRASE1 }}
+ if: env.GPG_PASSPHRASE1 != null
+ run: |
+
+ echo ${{ secrets.GPG_PASSPHRASE1 }} | gpg --passphrase-fd 0 --armor --detach-sign --pinentry-mode loopback --batch --yes output/images/*/archive/*.img*.xz
+
+ - name: Generate torrent
+ timeout-minutes: 3
+ run: |
+
+ TRACKERS=$(cat trackerslist.repo/best.txt | sed '/^\s*$/d' | sort -R | while read line; do printf " --announce=""${line}"; done)
+ BOARD=$(ls -1 output/images/ | head -1)
+ FILE=$(ls -1 output/images/*/archive/*.img*.xz | head -1 | rev | cut -d"/" -f1 | rev)
+ WEBSEEDS="https://github.com/armbian/os/releases/download/${{ needs.matrix_prep.outputs.version }}/${FILE}"
+ # nightly images are GH only
+ rm -f servers.txt
+ if [ "${{ github.event.inputs.nightlybuild || 'yes' }}" == "no" ]; then
+ # add seeds from different regions
+ #WEBSEEDS=$(curl -sq http://redirect.armbian.com/mirrors | jq -Mr '.' | grep http | tr -d \"," " | sort | uniq | sed "s|$|${BOARD}\/archive\/${FILE},|")
+
+ SERVERS=$(curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&device_role=Mirror&tag=images&status=active" \
+ | jq '.results[] | .name' | grep -v null | sed "s/\"//g")
+ WEBSEEDS=$(for server in $SERVERS; do
+ JSON=$(curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&name=${server}" | jq)
+ SERVER_PATH=$(echo "${JSON}" | jq '.results[] | .custom_fields["download_path_images"]' | sed "s/\"//g")
+ if [ "${SERVER_PATH}" == "null" ]; then SERVER_PATH="dl"; fi
+ echo "http://$server/$SERVER_PATH/" | sed "s|$|${BOARD}\/archive\/${FILE},|"
+ done | sed 's/ \+/\n/g')
+ fi
+ cd output/images/*/archive/
+ mktorrent --comment="Armbian torrent for ${FILE}" --verbose ${TRACKERS} --web-seed="${WEBSEEDS}" ${FILE}
+
+ #- name: Choose random user for upload
+ # run: |
+
+ #arr[0]="${{ secrets.ACCESS_TOKEN }}"
+ #arr[1]="${{ secrets.ACCESS_TOKEN_ARMBIANWORKER }}"
+ #rand=$[ $RANDOM % 2 ]
+ #echo "upload_user=${arr[$rand]}" >> $GITHUB_ENV
+
+
+ - name: "Upload artefacts"
+ timeout-minutes: 25
+ #if: ${{ github.repository_owner == 'Armbian' && github.event.inputs.release != 'stable' }}
+ #if: ${{ inputs.release != 'stable' && github.event.inputs.uploadtoserver != 'yes' }}
+ #if: (${{ github.event.inputs.uploadtoserver || 'github' }} == 'github') || (${{ github.event.inputs.uploadtoserver || 'github' }} == 'both')
+ if: ${{ ( github.event.inputs.nightlybuild || 'yes' ) == 'yes' || env.RELEASE_REPOSITORY == 'community' || env.RELEASE_REPOSITORY == 'distribution' }}
+ uses: ncipollo/release-action@v1
+ with:
+ repo: "${{ env.RELEASE_REPOSITORY }}"
+ artifacts: "output/images/*/*/Armbian_*.*"
+ tag: "${{ needs.matrix_prep.outputs.version }}"
+ omitBody: true
+ replacesArtifacts: true
+ omitName: true
+ makeLatest: false
+ omitPrereleaseDuringUpdate: true
+ allowUpdates: true
+ artifactErrorsFailBuild: true
+ token: "${{ env.GH_TOKEN }}"
+ #token: ${{ secrets.ACCESS_TOKEN }}
+ #token: ${{ env.upload_user }}
+
+ - name: Deploy to servers
+ timeout-minutes: 240
+ if: ${{ github.event.inputs.uploadtoserver == 'armbian' || github.event.inputs.uploadtoserver == 'both' }}
+ run: |
+
+ # generate control file which checks mirrors
+ sudo date +%s > output/images/control
+
+ # read servers data from NetBox database
+ curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&tag=push&tag=images&status=active" \
+ | jq '.results[] | .name,.custom_fields["path"],.custom_fields["port"],.custom_fields["username"]' | sed "s|null|beta|" | sed "s/\"//g" | xargs -n4 -d'\n' \
+ | sed "s/ /,/g" | jq -cnR '[inputs | select(length>0)]' | jq -c '.[]' | sed "s/\"//g" | sort -R | \
+ while read i; do
+ SERVER_URL=$(echo $i | cut -d "," -f1)
+ SERVER_PORT=$(echo $i | cut -d "," -f3)
+ SERVER_PATH=$(echo $i | cut -d "," -f2)
+ SERVER_USERNAME=$(echo $i | cut -d "," -f4)
+ # clean
+ ssh-keygen -f "${HOME}/.ssh/known_hosts" -R "${SERVER_URL}"
+ # upload
+ rsync --progress -e \
+ "ssh -p ${SERVER_PORT} -o StrictHostKeyChecking=accept-new" \
+ -rvP output/images/ "${SERVER_USERNAME}@${SERVER_URL}:${SERVER_PATH}/incoming/"
+ done
+
+ # cleaning self hosted runners
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ if: always()
+ uses: armbian/actions/runner-clean@main
+ "build-images-chunk-2": # templated "build-images-chunk-2"
+ needs: [ "matrix_prep", "all-artifacts-ready" ]
+ timeout-minutes: 180
+ if: ${{ !failure() && !cancelled() && ( github.repository_owner == 'armbian' ) && ( needs.matrix_prep.outputs.images-chunk-not-empty-2 == 'yes' ) }} # <-- HERE: Chunk number.
+ strategy:
+ fail-fast: false # let other jobs try to complete if one fails
+ matrix: ${{ fromJSON(needs.matrix_prep.outputs.images-chunk-json-2) }} # <-- HERE: Chunk number.
+ name: ${{ matrix.desc || 'Empty I2' }} # <-- HERE: Chunk number.
+ runs-on: ${{ matrix.runs_on }}
+ steps:
+
+ - name: Install dependencies
+ run: |
+ if [ ! -e /usr/bin/mktorrent ]; then
+ sudo apt-get update
+ sudo apt-get install -y mktorrent
+ fi
+
+ # cleaning self hosted runners
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ uses: armbian/actions/runner-clean@main
+
+ # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
+ - name: Cleanup userpatches repo
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: rm -rf userpatches.repo
+
+ - name: Checkout build repo
+ uses: actions/checkout@v4
+ with:
+ repository: ${{ env.BUILD_REPOSITORY }}
+ ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
+
+ # clone the torrent lists
+ - name: Checkout torrent lists
+ uses: actions/checkout@v4
+ with:
+ repository: XIU2/TrackersListCollection
+ clean: false
+ ref: master
+ path: trackerslist.repo
+ fetch-depth: ${{ matrix.fdepth }}
+
+ # clone the userpatches repo (`armbian/os`)
+ - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
+ uses: actions/checkout@v4
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ with:
+ repository: ${{ env.USERPATCHES_REPOSITORY }}
+ ref: ${{ env.USERPATCHES_REF }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default.
+ path: userpatches.repo
+
+ - name: "Put userpatches in place, and remove userpatches repo"
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: |
+ mkdir -pv userpatches
+ rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
+ rm -rf userpatches.repo
+
+ - name: Cleanup leftover output
+ run: |
+ rm -f userpatches/VERSION
+
+ - name: ${{matrix.desc}}
+ id: build-one-image
+ timeout-minutes: 120
+ run: |
+ # calculate loop from runner name
+ if [ -z "${ImageOS}" ]; then
+ USE_FIXED_LOOP_DEVICE=$(echo ${RUNNER_NAME} | rev | cut -d"-" -f1 | rev | sed 's/^0*//' | sed -e 's/^/\/dev\/loop/')
+ fi
+ bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" USE_FIXED_LOOP_DEVICE="$USE_FIXED_LOOP_DEVICE" SHARE_LOG=yes MAKE_FOLDERS="archive" IMAGE_VERSION=${{ needs.matrix_prep.outputs.version }} ${{env.EXTRA_PARAMS_IMAGE}} ${{env.EXTRA_PARAMS_ALL_BUILDS}}
+
+ - name: "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
+ if: always()
+ run: |
+ echo "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
+
+ - name: Install SSH key
+ uses: shimataro/ssh-key-action@v2
+ with:
+ key: ${{ secrets.KEY_UPLOAD }}
+ known_hosts: ${{ secrets.KNOWN_HOSTS_ARMBIAN_UPLOAD }}
+ if_key_exists: replace
+
+ - name: Check API rate limits
+ run: |
+
+ # install dependencies
+ if ! command -v "gh" > /dev/null 2>&1; then
+ sudo apt-get -y -qq install gh
+ fi
+
+ while true
+ do
+ API_CALLS_TOTAL=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.limit')
+ API_CALLS_LEFT=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.remaining')
+ PERCENT=$(( API_CALLS_LEFT * 100 / API_CALLS_TOTAL ))
+ if (( $PERCENT > 20 )); then
+ echo "API rate in good shape $PERCENT % free"
+ exit 0
+ fi
+ echo "API rate lower then 20%, sleping 10m"
+ sleep 10m
+ done
+
+ # show current api rate
+ curl -s -H "Accept: application/vnd.github.v3+json" -H "Authorization: token ${{ secrets.ACCESS_TOKEN }}" https://api.github.com/rate_limit
+
+ - name: Import GPG key
+ env:
+ GPG_KEY1: ${{ secrets.GPG_KEY1 }}
+ if: env.GPG_KEY1 != null
+ uses: crazy-max/ghaction-import-gpg@v6
+ with:
+ gpg_private_key: ${{ secrets.GPG_KEY1 }}
+ passphrase: ${{ secrets.GPG_PASSPHRASE1 }}
+
+ - name: Sign
+ env:
+ GPG_PASSPHRASE1: ${{ secrets.GPG_PASSPHRASE1 }}
+ if: env.GPG_PASSPHRASE1 != null
+ run: |
+
+ echo ${{ secrets.GPG_PASSPHRASE1 }} | gpg --passphrase-fd 0 --armor --detach-sign --pinentry-mode loopback --batch --yes output/images/*/archive/*.img*.xz
+
+ - name: Generate torrent
+ timeout-minutes: 3
+ run: |
+
+ TRACKERS=$(cat trackerslist.repo/best.txt | sed '/^\s*$/d' | sort -R | while read line; do printf " --announce=""${line}"; done)
+ BOARD=$(ls -1 output/images/ | head -1)
+ FILE=$(ls -1 output/images/*/archive/*.img*.xz | head -1 | rev | cut -d"/" -f1 | rev)
+ WEBSEEDS="https://github.com/armbian/os/releases/download/${{ needs.matrix_prep.outputs.version }}/${FILE}"
+ # nightly images are GH only
+ rm -f servers.txt
+ if [ "${{ github.event.inputs.nightlybuild || 'yes' }}" == "no" ]; then
+ # add seeds from different regions
+ #WEBSEEDS=$(curl -sq http://redirect.armbian.com/mirrors | jq -Mr '.' | grep http | tr -d \"," " | sort | uniq | sed "s|$|${BOARD}\/archive\/${FILE},|")
+
+ SERVERS=$(curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&device_role=Mirror&tag=images&status=active" \
+ | jq '.results[] | .name' | grep -v null | sed "s/\"//g")
+ WEBSEEDS=$(for server in $SERVERS; do
+ JSON=$(curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&name=${server}" | jq)
+ SERVER_PATH=$(echo "${JSON}" | jq '.results[] | .custom_fields["download_path_images"]' | sed "s/\"//g")
+ if [ "${SERVER_PATH}" == "null" ]; then SERVER_PATH="dl"; fi
+ echo "http://$server/$SERVER_PATH/" | sed "s|$|${BOARD}\/archive\/${FILE},|"
+ done | sed 's/ \+/\n/g')
+ fi
+ cd output/images/*/archive/
+ mktorrent --comment="Armbian torrent for ${FILE}" --verbose ${TRACKERS} --web-seed="${WEBSEEDS}" ${FILE}
+
+ #- name: Choose random user for upload
+ # run: |
+
+ #arr[0]="${{ secrets.ACCESS_TOKEN }}"
+ #arr[1]="${{ secrets.ACCESS_TOKEN_ARMBIANWORKER }}"
+ #rand=$[ $RANDOM % 2 ]
+ #echo "upload_user=${arr[$rand]}" >> $GITHUB_ENV
+
+
+ - name: "Upload artefacts"
+ timeout-minutes: 25
+ #if: ${{ github.repository_owner == 'Armbian' && github.event.inputs.release != 'stable' }}
+ #if: ${{ inputs.release != 'stable' && github.event.inputs.uploadtoserver != 'yes' }}
+ #if: (${{ github.event.inputs.uploadtoserver || 'github' }} == 'github') || (${{ github.event.inputs.uploadtoserver || 'github' }} == 'both')
+ if: ${{ ( github.event.inputs.nightlybuild || 'yes' ) == 'yes' || env.RELEASE_REPOSITORY == 'community' || env.RELEASE_REPOSITORY == 'distribution' }}
+ uses: ncipollo/release-action@v1
+ with:
+ repo: "${{ env.RELEASE_REPOSITORY }}"
+ artifacts: "output/images/*/*/Armbian_*.*"
+ tag: "${{ needs.matrix_prep.outputs.version }}"
+ omitBody: true
+ replacesArtifacts: true
+ omitName: true
+ makeLatest: false
+ omitPrereleaseDuringUpdate: true
+ allowUpdates: true
+ artifactErrorsFailBuild: true
+ token: "${{ env.GH_TOKEN }}"
+ #token: ${{ secrets.ACCESS_TOKEN }}
+ #token: ${{ env.upload_user }}
+
+ - name: Deploy to servers
+ timeout-minutes: 240
+ if: ${{ github.event.inputs.uploadtoserver == 'armbian' || github.event.inputs.uploadtoserver == 'both' }}
+ run: |
+
+ # generate control file which checks mirrors
+ sudo date +%s > output/images/control
+
+ # read servers data from NetBox database
+ curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&tag=push&tag=images&status=active" \
+ | jq '.results[] | .name,.custom_fields["path"],.custom_fields["port"],.custom_fields["username"]' | sed "s|null|beta|" | sed "s/\"//g" | xargs -n4 -d'\n' \
+ | sed "s/ /,/g" | jq -cnR '[inputs | select(length>0)]' | jq -c '.[]' | sed "s/\"//g" | sort -R | \
+ while read i; do
+ SERVER_URL=$(echo $i | cut -d "," -f1)
+ SERVER_PORT=$(echo $i | cut -d "," -f3)
+ SERVER_PATH=$(echo $i | cut -d "," -f2)
+ SERVER_USERNAME=$(echo $i | cut -d "," -f4)
+ # clean
+ ssh-keygen -f "${HOME}/.ssh/known_hosts" -R "${SERVER_URL}"
+ # upload
+ rsync --progress -e \
+ "ssh -p ${SERVER_PORT} -o StrictHostKeyChecking=accept-new" \
+ -rvP output/images/ "${SERVER_USERNAME}@${SERVER_URL}:${SERVER_PATH}/incoming/"
+ done
+
+ # cleaning self hosted runners
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ if: always()
+ uses: armbian/actions/runner-clean@main
+ "build-images-chunk-3": # templated "build-images-chunk-3"
+ needs: [ "matrix_prep", "all-artifacts-ready" ]
+ timeout-minutes: 180
+ if: ${{ !failure() && !cancelled() && ( github.repository_owner == 'armbian' ) && ( needs.matrix_prep.outputs.images-chunk-not-empty-3 == 'yes' ) }} # <-- HERE: Chunk number.
+ strategy:
+ fail-fast: false # let other jobs try to complete if one fails
+ matrix: ${{ fromJSON(needs.matrix_prep.outputs.images-chunk-json-3) }} # <-- HERE: Chunk number.
+ name: ${{ matrix.desc || 'Empty I3' }} # <-- HERE: Chunk number.
+ runs-on: ${{ matrix.runs_on }}
+ steps:
+
+ - name: Install dependencies
+ run: |
+ if [ ! -e /usr/bin/mktorrent ]; then
+ sudo apt-get update
+ sudo apt-get install -y mktorrent
+ fi
+
+ # cleaning self hosted runners
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ uses: armbian/actions/runner-clean@main
+
+ # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
+ - name: Cleanup userpatches repo
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: rm -rf userpatches.repo
+
+ - name: Checkout build repo
+ uses: actions/checkout@v4
+ with:
+ repository: ${{ env.BUILD_REPOSITORY }}
+ ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
+
+ # clone the torrent lists
+ - name: Checkout torrent lists
+ uses: actions/checkout@v4
+ with:
+ repository: XIU2/TrackersListCollection
+ clean: false
+ ref: master
+ path: trackerslist.repo
+ fetch-depth: ${{ matrix.fdepth }}
+
+ # clone the userpatches repo (`armbian/os`)
+ - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
+ uses: actions/checkout@v4
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ with:
+ repository: ${{ env.USERPATCHES_REPOSITORY }}
+ ref: ${{ env.USERPATCHES_REF }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default.
+ path: userpatches.repo
+
+ - name: "Put userpatches in place, and remove userpatches repo"
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: |
+ mkdir -pv userpatches
+ rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
+ rm -rf userpatches.repo
+
+ - name: Cleanup leftover output
+ run: |
+ rm -f userpatches/VERSION
+
+ - name: ${{matrix.desc}}
+ id: build-one-image
+ timeout-minutes: 120
+ run: |
+ # calculate loop from runner name
+ if [ -z "${ImageOS}" ]; then
+ USE_FIXED_LOOP_DEVICE=$(echo ${RUNNER_NAME} | rev | cut -d"-" -f1 | rev | sed 's/^0*//' | sed -e 's/^/\/dev\/loop/')
+ fi
+ bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" USE_FIXED_LOOP_DEVICE="$USE_FIXED_LOOP_DEVICE" SHARE_LOG=yes MAKE_FOLDERS="archive" IMAGE_VERSION=${{ needs.matrix_prep.outputs.version }} ${{env.EXTRA_PARAMS_IMAGE}} ${{env.EXTRA_PARAMS_ALL_BUILDS}}
+
+ - name: "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
+ if: always()
+ run: |
+ echo "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
+
+ - name: Install SSH key
+ uses: shimataro/ssh-key-action@v2
+ with:
+ key: ${{ secrets.KEY_UPLOAD }}
+ known_hosts: ${{ secrets.KNOWN_HOSTS_ARMBIAN_UPLOAD }}
+ if_key_exists: replace
+
+ - name: Check API rate limits
+ run: |
+
+ # install dependencies
+ if ! command -v "gh" > /dev/null 2>&1; then
+ sudo apt-get -y -qq install gh
+ fi
+
+ while true
+ do
+ API_CALLS_TOTAL=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.limit')
+ API_CALLS_LEFT=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.remaining')
+ PERCENT=$(( API_CALLS_LEFT * 100 / API_CALLS_TOTAL ))
+ if (( $PERCENT > 20 )); then
+ echo "API rate in good shape $PERCENT % free"
+ exit 0
+ fi
+ echo "API rate lower then 20%, sleping 10m"
+ sleep 10m
+ done
+
+ # show current api rate
+ curl -s -H "Accept: application/vnd.github.v3+json" -H "Authorization: token ${{ secrets.ACCESS_TOKEN }}" https://api.github.com/rate_limit
+
+ - name: Import GPG key
+ env:
+ GPG_KEY1: ${{ secrets.GPG_KEY1 }}
+ if: env.GPG_KEY1 != null
+ uses: crazy-max/ghaction-import-gpg@v6
+ with:
+ gpg_private_key: ${{ secrets.GPG_KEY1 }}
+ passphrase: ${{ secrets.GPG_PASSPHRASE1 }}
+
+ - name: Sign
+ env:
+ GPG_PASSPHRASE1: ${{ secrets.GPG_PASSPHRASE1 }}
+ if: env.GPG_PASSPHRASE1 != null
+ run: |
+
+ echo ${{ secrets.GPG_PASSPHRASE1 }} | gpg --passphrase-fd 0 --armor --detach-sign --pinentry-mode loopback --batch --yes output/images/*/archive/*.img*.xz
+
+ - name: Generate torrent
+ timeout-minutes: 3
+ run: |
+
+ TRACKERS=$(cat trackerslist.repo/best.txt | sed '/^\s*$/d' | sort -R | while read line; do printf " --announce=""${line}"; done)
+ BOARD=$(ls -1 output/images/ | head -1)
+ FILE=$(ls -1 output/images/*/archive/*.img*.xz | head -1 | rev | cut -d"/" -f1 | rev)
+ WEBSEEDS="https://github.com/armbian/os/releases/download/${{ needs.matrix_prep.outputs.version }}/${FILE}"
+ # nightly images are GH only
+ rm -f servers.txt
+ if [ "${{ github.event.inputs.nightlybuild || 'yes' }}" == "no" ]; then
+ # add seeds from different regions
+ #WEBSEEDS=$(curl -sq http://redirect.armbian.com/mirrors | jq -Mr '.' | grep http | tr -d \"," " | sort | uniq | sed "s|$|${BOARD}\/archive\/${FILE},|")
+
+ SERVERS=$(curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&device_role=Mirror&tag=images&status=active" \
+ | jq '.results[] | .name' | grep -v null | sed "s/\"//g")
+ WEBSEEDS=$(for server in $SERVERS; do
+ JSON=$(curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&name=${server}" | jq)
+ SERVER_PATH=$(echo "${JSON}" | jq '.results[] | .custom_fields["download_path_images"]' | sed "s/\"//g")
+ if [ "${SERVER_PATH}" == "null" ]; then SERVER_PATH="dl"; fi
+ echo "http://$server/$SERVER_PATH/" | sed "s|$|${BOARD}\/archive\/${FILE},|"
+ done | sed 's/ \+/\n/g')
+ fi
+ cd output/images/*/archive/
+ mktorrent --comment="Armbian torrent for ${FILE}" --verbose ${TRACKERS} --web-seed="${WEBSEEDS}" ${FILE}
+
+ #- name: Choose random user for upload
+ # run: |
+
+ #arr[0]="${{ secrets.ACCESS_TOKEN }}"
+ #arr[1]="${{ secrets.ACCESS_TOKEN_ARMBIANWORKER }}"
+ #rand=$[ $RANDOM % 2 ]
+ #echo "upload_user=${arr[$rand]}" >> $GITHUB_ENV
+
+
+ - name: "Upload artefacts"
+ timeout-minutes: 25
+ #if: ${{ github.repository_owner == 'Armbian' && github.event.inputs.release != 'stable' }}
+ #if: ${{ inputs.release != 'stable' && github.event.inputs.uploadtoserver != 'yes' }}
+ #if: (${{ github.event.inputs.uploadtoserver || 'github' }} == 'github') || (${{ github.event.inputs.uploadtoserver || 'github' }} == 'both')
+ if: ${{ ( github.event.inputs.nightlybuild || 'yes' ) == 'yes' || env.RELEASE_REPOSITORY == 'community' || env.RELEASE_REPOSITORY == 'distribution' }}
+ uses: ncipollo/release-action@v1
+ with:
+ repo: "${{ env.RELEASE_REPOSITORY }}"
+ artifacts: "output/images/*/*/Armbian_*.*"
+ tag: "${{ needs.matrix_prep.outputs.version }}"
+ omitBody: true
+ replacesArtifacts: true
+ omitName: true
+ makeLatest: false
+ omitPrereleaseDuringUpdate: true
+ allowUpdates: true
+ artifactErrorsFailBuild: true
+ token: "${{ env.GH_TOKEN }}"
+ #token: ${{ secrets.ACCESS_TOKEN }}
+ #token: ${{ env.upload_user }}
+
+ - name: Deploy to servers
+ timeout-minutes: 240
+ if: ${{ github.event.inputs.uploadtoserver == 'armbian' || github.event.inputs.uploadtoserver == 'both' }}
+ run: |
+
+ # generate control file which checks mirrors
+ sudo date +%s > output/images/control
+
+ # read servers data from NetBox database
+ curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&tag=push&tag=images&status=active" \
+ | jq '.results[] | .name,.custom_fields["path"],.custom_fields["port"],.custom_fields["username"]' | sed "s|null|beta|" | sed "s/\"//g" | xargs -n4 -d'\n' \
+ | sed "s/ /,/g" | jq -cnR '[inputs | select(length>0)]' | jq -c '.[]' | sed "s/\"//g" | sort -R | \
+ while read i; do
+ SERVER_URL=$(echo $i | cut -d "," -f1)
+ SERVER_PORT=$(echo $i | cut -d "," -f3)
+ SERVER_PATH=$(echo $i | cut -d "," -f2)
+ SERVER_USERNAME=$(echo $i | cut -d "," -f4)
+ # clean
+ ssh-keygen -f "${HOME}/.ssh/known_hosts" -R "${SERVER_URL}"
+ # upload
+ rsync --progress -e \
+ "ssh -p ${SERVER_PORT} -o StrictHostKeyChecking=accept-new" \
+ -rvP output/images/ "${SERVER_USERNAME}@${SERVER_URL}:${SERVER_PATH}/incoming/"
+ done
+
+ # cleaning self hosted runners
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ if: always()
+ uses: armbian/actions/runner-clean@main
+ "build-images-chunk-4": # templated "build-images-chunk-4"
+ needs: [ "matrix_prep", "all-artifacts-ready" ]
+ timeout-minutes: 180
+ if: ${{ !failure() && !cancelled() && ( github.repository_owner == 'armbian' ) && ( needs.matrix_prep.outputs.images-chunk-not-empty-4 == 'yes' ) }} # <-- HERE: Chunk number.
+ strategy:
+ fail-fast: false # let other jobs try to complete if one fails
+ matrix: ${{ fromJSON(needs.matrix_prep.outputs.images-chunk-json-4) }} # <-- HERE: Chunk number.
+ name: ${{ matrix.desc || 'Empty I4' }} # <-- HERE: Chunk number.
+ runs-on: ${{ matrix.runs_on }}
+ steps:
+
+ - name: Install dependencies
+ run: |
+ if [ ! -e /usr/bin/mktorrent ]; then
+ sudo apt-get update
+ sudo apt-get install -y mktorrent
+ fi
+
+ # cleaning self hosted runners
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ uses: armbian/actions/runner-clean@main
+
+ # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
+ - name: Cleanup userpatches repo
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: rm -rf userpatches.repo
+
+ - name: Checkout build repo
+ uses: actions/checkout@v4
+ with:
+ repository: ${{ env.BUILD_REPOSITORY }}
+ ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
+
+ # clone the torrent lists
+ - name: Checkout torrent lists
+ uses: actions/checkout@v4
+ with:
+ repository: XIU2/TrackersListCollection
+ clean: false
+ ref: master
+ path: trackerslist.repo
+ fetch-depth: ${{ matrix.fdepth }}
+
+ # clone the userpatches repo (`armbian/os`)
+ - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
+ uses: actions/checkout@v4
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ with:
+ repository: ${{ env.USERPATCHES_REPOSITORY }}
+ ref: ${{ env.USERPATCHES_REF }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default.
+ path: userpatches.repo
+
+ - name: "Put userpatches in place, and remove userpatches repo"
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: |
+ mkdir -pv userpatches
+ rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
+ rm -rf userpatches.repo
+
+ - name: Cleanup leftover output
+ run: |
+ rm -f userpatches/VERSION
+
+ - name: ${{matrix.desc}}
+ id: build-one-image
+ timeout-minutes: 120
+ run: |
+ # calculate loop from runner name
+ if [ -z "${ImageOS}" ]; then
+ USE_FIXED_LOOP_DEVICE=$(echo ${RUNNER_NAME} | rev | cut -d"-" -f1 | rev | sed 's/^0*//' | sed -e 's/^/\/dev\/loop/')
+ fi
+ bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" USE_FIXED_LOOP_DEVICE="$USE_FIXED_LOOP_DEVICE" SHARE_LOG=yes MAKE_FOLDERS="archive" IMAGE_VERSION=${{ needs.matrix_prep.outputs.version }} ${{env.EXTRA_PARAMS_IMAGE}} ${{env.EXTRA_PARAMS_ALL_BUILDS}}
+
+ - name: "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
+ if: always()
+ run: |
+ echo "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
+
+ - name: Install SSH key
+ uses: shimataro/ssh-key-action@v2
+ with:
+ key: ${{ secrets.KEY_UPLOAD }}
+ known_hosts: ${{ secrets.KNOWN_HOSTS_ARMBIAN_UPLOAD }}
+ if_key_exists: replace
+
+ - name: Check API rate limits
+ run: |
+
+ # install dependencies
+ if ! command -v "gh" > /dev/null 2>&1; then
+ sudo apt-get -y -qq install gh
+ fi
+
+ while true
+ do
+ API_CALLS_TOTAL=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.limit')
+ API_CALLS_LEFT=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.remaining')
+ PERCENT=$(( API_CALLS_LEFT * 100 / API_CALLS_TOTAL ))
+ if (( $PERCENT > 20 )); then
+ echo "API rate in good shape $PERCENT % free"
+ exit 0
+ fi
+ echo "API rate lower then 20%, sleping 10m"
+ sleep 10m
+ done
+
+ # show current api rate
+ curl -s -H "Accept: application/vnd.github.v3+json" -H "Authorization: token ${{ secrets.ACCESS_TOKEN }}" https://api.github.com/rate_limit
+
+ - name: Import GPG key
+ env:
+ GPG_KEY1: ${{ secrets.GPG_KEY1 }}
+ if: env.GPG_KEY1 != null
+ uses: crazy-max/ghaction-import-gpg@v6
+ with:
+ gpg_private_key: ${{ secrets.GPG_KEY1 }}
+ passphrase: ${{ secrets.GPG_PASSPHRASE1 }}
+
+ - name: Sign
+ env:
+ GPG_PASSPHRASE1: ${{ secrets.GPG_PASSPHRASE1 }}
+ if: env.GPG_PASSPHRASE1 != null
+ run: |
+
+ echo ${{ secrets.GPG_PASSPHRASE1 }} | gpg --passphrase-fd 0 --armor --detach-sign --pinentry-mode loopback --batch --yes output/images/*/archive/*.img*.xz
+
+ - name: Generate torrent
+ timeout-minutes: 3
+ run: |
+
+ TRACKERS=$(cat trackerslist.repo/best.txt | sed '/^\s*$/d' | sort -R | while read line; do printf " --announce=""${line}"; done)
+ BOARD=$(ls -1 output/images/ | head -1)
+ FILE=$(ls -1 output/images/*/archive/*.img*.xz | head -1 | rev | cut -d"/" -f1 | rev)
+ WEBSEEDS="https://github.com/armbian/os/releases/download/${{ needs.matrix_prep.outputs.version }}/${FILE}"
+          # nightly images are GH only
+ rm -f servers.txt
+ if [ "${{ github.event.inputs.nightlybuild || 'yes' }}" == "no" ]; then
+ # add seeds from different regions
+ #WEBSEEDS=$(curl -sq http://redirect.armbian.com/mirrors | jq -Mr '.' | grep http | tr -d \"," " | sort | uniq | sed "s|$|${BOARD}\/archive\/${FILE},|")
+
+ SERVERS=$(curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&device_role=Mirror&tag=images&status=active" \
+ | jq '.results[] | .name' | grep -v null | sed "s/\"//g")
+ WEBSEEDS=$(for server in $SERVERS; do
+ JSON=$(curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&name=${server}" | jq)
+ SERVER_PATH=$(echo "${JSON}" | jq '.results[] | .custom_fields["download_path_images"]' | sed "s/\"//g")
+ if [ "${SERVER_PATH}" == "null" ]; then SERVER_PATH="dl"; fi
+ echo "http://$server/$SERVER_PATH/" | sed "s|$|${BOARD}\/archive\/${FILE},|"
+ done | sed 's/ \+/\n/g')
+ fi
+ cd output/images/*/archive/
+ mktorrent --comment="Armbian torrent for ${FILE}" --verbose ${TRACKERS} --web-seed="${WEBSEEDS}" ${FILE}
+
+ #- name: Choose random user for upload
+ # run: |
+
+ #arr[0]="${{ secrets.ACCESS_TOKEN }}"
+ #arr[1]="${{ secrets.ACCESS_TOKEN_ARMBIANWORKER }}"
+ #rand=$[ $RANDOM % 2 ]
+ #echo "upload_user=${arr[$rand]}" >> $GITHUB_ENV
+
+
+ - name: "Upload artefacts"
+ timeout-minutes: 25
+ #if: ${{ github.repository_owner == 'Armbian' && github.event.inputs.release != 'stable' }}
+ #if: ${{ inputs.release != 'stable' && github.event.inputs.uploadtoserver != 'yes' }}
+ #if: (${{ github.event.inputs.uploadtoserver || 'github' }} == 'github') || (${{ github.event.inputs.uploadtoserver || 'github' }} == 'both')
+ if: ${{ ( github.event.inputs.nightlybuild || 'yes' ) == 'yes' || env.RELEASE_REPOSITORY == 'community' || env.RELEASE_REPOSITORY == 'distribution' }}
+ uses: ncipollo/release-action@v1
+ with:
+ repo: "${{ env.RELEASE_REPOSITORY }}"
+ artifacts: "output/images/*/*/Armbian_*.*"
+ tag: "${{ needs.matrix_prep.outputs.version }}"
+ omitBody: true
+ replacesArtifacts: true
+ omitName: true
+ makeLatest: false
+ omitPrereleaseDuringUpdate: true
+ allowUpdates: true
+ artifactErrorsFailBuild: true
+ token: "${{ env.GH_TOKEN }}"
+ #token: ${{ secrets.ACCESS_TOKEN }}
+ #token: ${{ env.upload_user }}
+
+ - name: Deploy to servers
+ timeout-minutes: 240
+ if: ${{ github.event.inputs.uploadtoserver == 'armbian' || github.event.inputs.uploadtoserver == 'both' }}
+ run: |
+
+ # generate control file which checks mirrors
+ sudo date +%s > output/images/control
+
+ # read servers data from NetBox database
+ curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&tag=push&tag=images&status=active" \
+ | jq '.results[] | .name,.custom_fields["path"],.custom_fields["port"],.custom_fields["username"]' | sed "s|null|beta|" | sed "s/\"//g" | xargs -n4 -d'\n' \
+ | sed "s/ /,/g" | jq -cnR '[inputs | select(length>0)]' | jq -c '.[]' | sed "s/\"//g" | sort -R | \
+ while read i; do
+ SERVER_URL=$(echo $i | cut -d "," -f1)
+ SERVER_PORT=$(echo $i | cut -d "," -f3)
+ SERVER_PATH=$(echo $i | cut -d "," -f2)
+ SERVER_USERNAME=$(echo $i | cut -d "," -f4)
+ # clean
+ ssh-keygen -f "${HOME}/.ssh/known_hosts" -R "${SERVER_URL}"
+ # upload
+ rsync --progress -e \
+ "ssh -p ${SERVER_PORT} -o StrictHostKeyChecking=accept-new" \
+ -rvP output/images/ "${SERVER_USERNAME}@${SERVER_URL}:${SERVER_PATH}/incoming/"
+ done
+
+ # cleaning self hosted runners
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ if: always()
+ uses: armbian/actions/runner-clean@main
+ "build-images-chunk-5": # templated "build-images-chunk-5"
+ needs: [ "matrix_prep", "all-artifacts-ready" ]
+ timeout-minutes: 180
+ if: ${{ !failure() && !cancelled() && ( github.repository_owner == 'armbian' ) && ( needs.matrix_prep.outputs.images-chunk-not-empty-5 == 'yes' ) }} # <-- HERE: Chunk number.
+ strategy:
+ fail-fast: false # let other jobs try to complete if one fails
+ matrix: ${{ fromJSON(needs.matrix_prep.outputs.images-chunk-json-5) }} # <-- HERE: Chunk number.
+ name: ${{ matrix.desc || 'Empty I5' }} # <-- HERE: Chunk number.
+ runs-on: ${{ matrix.runs_on }}
+ steps:
+
+ - name: Install dependencies
+ run: |
+ if [ ! -e /usr/bin/mktorrent ]; then
+ sudo apt-get update
+ sudo apt-get install -y mktorrent
+ fi
+
+ # cleaning self hosted runners
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ uses: armbian/actions/runner-clean@main
+
+ # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
+ - name: Cleanup userpatches repo
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: rm -rf userpatches.repo
+
+ - name: Checkout build repo
+ uses: actions/checkout@v4
+ with:
+ repository: ${{ env.BUILD_REPOSITORY }}
+ ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
+
+ # clone the torrent lists
+ - name: Checkout torrent lists
+ uses: actions/checkout@v4
+ with:
+ repository: XIU2/TrackersListCollection
+ clean: false
+ ref: master
+ path: trackerslist.repo
+ fetch-depth: ${{ matrix.fdepth }}
+
+ # clone the userpatches repo (`armbian/os`)
+ - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
+ uses: actions/checkout@v4
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ with:
+ repository: ${{ env.USERPATCHES_REPOSITORY }}
+ ref: ${{ env.USERPATCHES_REF }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default.
+ path: userpatches.repo
+
+ - name: "Put userpatches in place, and remove userpatches repo"
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: |
+ mkdir -pv userpatches
+ rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
+ rm -rf userpatches.repo
+
+ - name: Cleanup leftover output
+ run: |
+ rm -f userpatches/VERSION
+
+ - name: ${{matrix.desc}}
+ id: build-one-image
+ timeout-minutes: 120
+ run: |
+ # calculate loop from runner name
+ if [ -z "${ImageOS}" ]; then
+ USE_FIXED_LOOP_DEVICE=$(echo ${RUNNER_NAME} | rev | cut -d"-" -f1 | rev | sed 's/^0*//' | sed -e 's/^/\/dev\/loop/')
+ fi
+ bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" USE_FIXED_LOOP_DEVICE="$USE_FIXED_LOOP_DEVICE" SHARE_LOG=yes MAKE_FOLDERS="archive" IMAGE_VERSION=${{ needs.matrix_prep.outputs.version }} ${{env.EXTRA_PARAMS_IMAGE}} ${{env.EXTRA_PARAMS_ALL_BUILDS}}
+
+ - name: "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
+ if: always()
+ run: |
+ echo "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
+
+ - name: Install SSH key
+ uses: shimataro/ssh-key-action@v2
+ with:
+ key: ${{ secrets.KEY_UPLOAD }}
+ known_hosts: ${{ secrets.KNOWN_HOSTS_ARMBIAN_UPLOAD }}
+ if_key_exists: replace
+
+ - name: Check API rate limits
+ run: |
+
+ # install dependencies
+ if ! command -v "gh" > /dev/null 2>&1; then
+ sudo apt-get -y -qq install gh
+ fi
+
+ while true
+ do
+ API_CALLS_TOTAL=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.limit')
+ API_CALLS_LEFT=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.remaining')
+ PERCENT=$(( API_CALLS_LEFT * 100 / API_CALLS_TOTAL ))
+ if (( $PERCENT > 20 )); then
+ echo "API rate in good shape $PERCENT % free"
+ exit 0
+ fi
+ echo "API rate lower then 20%, sleping 10m"
+ sleep 10m
+ done
+
+ # show current api rate
+ curl -s -H "Accept: application/vnd.github.v3+json" -H "Authorization: token ${{ secrets.ACCESS_TOKEN }}" https://api.github.com/rate_limit
+
+ - name: Import GPG key
+ env:
+ GPG_KEY1: ${{ secrets.GPG_KEY1 }}
+ if: env.GPG_KEY1 != null
+ uses: crazy-max/ghaction-import-gpg@v6
+ with:
+ gpg_private_key: ${{ secrets.GPG_KEY1 }}
+ passphrase: ${{ secrets.GPG_PASSPHRASE1 }}
+
+ - name: Sign
+ env:
+ GPG_PASSPHRASE1: ${{ secrets.GPG_PASSPHRASE1 }}
+ if: env.GPG_PASSPHRASE1 != null
+ run: |
+
+ echo ${{ secrets.GPG_PASSPHRASE1 }} | gpg --passphrase-fd 0 --armor --detach-sign --pinentry-mode loopback --batch --yes output/images/*/archive/*.img*.xz
+
+ - name: Generate torrent
+ timeout-minutes: 3
+ run: |
+
+ TRACKERS=$(cat trackerslist.repo/best.txt | sed '/^\s*$/d' | sort -R | while read line; do printf " --announce=""${line}"; done)
+ BOARD=$(ls -1 output/images/ | head -1)
+ FILE=$(ls -1 output/images/*/archive/*.img*.xz | head -1 | rev | cut -d"/" -f1 | rev)
+ WEBSEEDS="https://github.com/armbian/os/releases/download/${{ needs.matrix_prep.outputs.version }}/${FILE}"
+          # nightly images are GH only
+ rm -f servers.txt
+ if [ "${{ github.event.inputs.nightlybuild || 'yes' }}" == "no" ]; then
+ # add seeds from different regions
+ #WEBSEEDS=$(curl -sq http://redirect.armbian.com/mirrors | jq -Mr '.' | grep http | tr -d \"," " | sort | uniq | sed "s|$|${BOARD}\/archive\/${FILE},|")
+
+ SERVERS=$(curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&device_role=Mirror&tag=images&status=active" \
+ | jq '.results[] | .name' | grep -v null | sed "s/\"//g")
+ WEBSEEDS=$(for server in $SERVERS; do
+ JSON=$(curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&name=${server}" | jq)
+ SERVER_PATH=$(echo "${JSON}" | jq '.results[] | .custom_fields["download_path_images"]' | sed "s/\"//g")
+ if [ "${SERVER_PATH}" == "null" ]; then SERVER_PATH="dl"; fi
+ echo "http://$server/$SERVER_PATH/" | sed "s|$|${BOARD}\/archive\/${FILE},|"
+ done | sed 's/ \+/\n/g')
+ fi
+ cd output/images/*/archive/
+ mktorrent --comment="Armbian torrent for ${FILE}" --verbose ${TRACKERS} --web-seed="${WEBSEEDS}" ${FILE}
+
+ #- name: Choose random user for upload
+ # run: |
+
+ #arr[0]="${{ secrets.ACCESS_TOKEN }}"
+ #arr[1]="${{ secrets.ACCESS_TOKEN_ARMBIANWORKER }}"
+ #rand=$[ $RANDOM % 2 ]
+ #echo "upload_user=${arr[$rand]}" >> $GITHUB_ENV
+
+
+ - name: "Upload artefacts"
+ timeout-minutes: 25
+ #if: ${{ github.repository_owner == 'Armbian' && github.event.inputs.release != 'stable' }}
+ #if: ${{ inputs.release != 'stable' && github.event.inputs.uploadtoserver != 'yes' }}
+ #if: (${{ github.event.inputs.uploadtoserver || 'github' }} == 'github') || (${{ github.event.inputs.uploadtoserver || 'github' }} == 'both')
+ if: ${{ ( github.event.inputs.nightlybuild || 'yes' ) == 'yes' || env.RELEASE_REPOSITORY == 'community' || env.RELEASE_REPOSITORY == 'distribution' }}
+ uses: ncipollo/release-action@v1
+ with:
+ repo: "${{ env.RELEASE_REPOSITORY }}"
+ artifacts: "output/images/*/*/Armbian_*.*"
+ tag: "${{ needs.matrix_prep.outputs.version }}"
+ omitBody: true
+ replacesArtifacts: true
+ omitName: true
+ makeLatest: false
+ omitPrereleaseDuringUpdate: true
+ allowUpdates: true
+ artifactErrorsFailBuild: true
+ token: "${{ env.GH_TOKEN }}"
+ #token: ${{ secrets.ACCESS_TOKEN }}
+ #token: ${{ env.upload_user }}
+
+ - name: Deploy to servers
+ timeout-minutes: 240
+ if: ${{ github.event.inputs.uploadtoserver == 'armbian' || github.event.inputs.uploadtoserver == 'both' }}
+ run: |
+
+ # generate control file which checks mirrors
+ sudo date +%s > output/images/control
+
+ # read servers data from NetBox database
+ curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&tag=push&tag=images&status=active" \
+ | jq '.results[] | .name,.custom_fields["path"],.custom_fields["port"],.custom_fields["username"]' | sed "s|null|beta|" | sed "s/\"//g" | xargs -n4 -d'\n' \
+ | sed "s/ /,/g" | jq -cnR '[inputs | select(length>0)]' | jq -c '.[]' | sed "s/\"//g" | sort -R | \
+ while read i; do
+ SERVER_URL=$(echo $i | cut -d "," -f1)
+ SERVER_PORT=$(echo $i | cut -d "," -f3)
+ SERVER_PATH=$(echo $i | cut -d "," -f2)
+ SERVER_USERNAME=$(echo $i | cut -d "," -f4)
+ # clean
+ ssh-keygen -f "${HOME}/.ssh/known_hosts" -R "${SERVER_URL}"
+ # upload
+ rsync --progress -e \
+ "ssh -p ${SERVER_PORT} -o StrictHostKeyChecking=accept-new" \
+ -rvP output/images/ "${SERVER_USERNAME}@${SERVER_URL}:${SERVER_PATH}/incoming/"
+ done
+
+ # cleaning self hosted runners
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ if: always()
+ uses: armbian/actions/runner-clean@main
+ "build-images-chunk-6": # templated "build-images-chunk-6"
+ needs: [ "matrix_prep", "all-artifacts-ready" ]
+ timeout-minutes: 180
+ if: ${{ !failure() && !cancelled() && ( github.repository_owner == 'armbian' ) && ( needs.matrix_prep.outputs.images-chunk-not-empty-6 == 'yes' ) }} # <-- HERE: Chunk number.
+ strategy:
+ fail-fast: false # let other jobs try to complete if one fails
+ matrix: ${{ fromJSON(needs.matrix_prep.outputs.images-chunk-json-6) }} # <-- HERE: Chunk number.
+ name: ${{ matrix.desc || 'Empty I6' }} # <-- HERE: Chunk number.
+ runs-on: ${{ matrix.runs_on }}
+ steps:
+
+ - name: Install dependencies
+ run: |
+ if [ ! -e /usr/bin/mktorrent ]; then
+ sudo apt-get update
+ sudo apt-get install -y mktorrent
+ fi
+
+ # cleaning self hosted runners
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ uses: armbian/actions/runner-clean@main
+
+ # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
+ - name: Cleanup userpatches repo
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: rm -rf userpatches.repo
+
+ - name: Checkout build repo
+ uses: actions/checkout@v4
+ with:
+ repository: ${{ env.BUILD_REPOSITORY }}
+ ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
+
+ # clone the torrent lists
+ - name: Checkout torrent lists
+ uses: actions/checkout@v4
+ with:
+ repository: XIU2/TrackersListCollection
+ clean: false
+ ref: master
+ path: trackerslist.repo
+ fetch-depth: ${{ matrix.fdepth }}
+
+ # clone the userpatches repo (`armbian/os`)
+ - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
+ uses: actions/checkout@v4
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ with:
+ repository: ${{ env.USERPATCHES_REPOSITORY }}
+ ref: ${{ env.USERPATCHES_REF }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default.
+ path: userpatches.repo
+
+ - name: "Put userpatches in place, and remove userpatches repo"
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: |
+ mkdir -pv userpatches
+ rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
+ rm -rf userpatches.repo
+
+ - name: Cleanup leftover output
+ run: |
+ rm -f userpatches/VERSION
+
+ - name: ${{matrix.desc}}
+ id: build-one-image
+ timeout-minutes: 120
+ run: |
+ # calculate loop from runner name
+ if [ -z "${ImageOS}" ]; then
+ USE_FIXED_LOOP_DEVICE=$(echo ${RUNNER_NAME} | rev | cut -d"-" -f1 | rev | sed 's/^0*//' | sed -e 's/^/\/dev\/loop/')
+ fi
+ bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" USE_FIXED_LOOP_DEVICE="$USE_FIXED_LOOP_DEVICE" SHARE_LOG=yes MAKE_FOLDERS="archive" IMAGE_VERSION=${{ needs.matrix_prep.outputs.version }} ${{env.EXTRA_PARAMS_IMAGE}} ${{env.EXTRA_PARAMS_ALL_BUILDS}}
+
+ - name: "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
+ if: always()
+ run: |
+ echo "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
+
+ - name: Install SSH key
+ uses: shimataro/ssh-key-action@v2
+ with:
+ key: ${{ secrets.KEY_UPLOAD }}
+ known_hosts: ${{ secrets.KNOWN_HOSTS_ARMBIAN_UPLOAD }}
+ if_key_exists: replace
+
+ - name: Check API rate limits
+ run: |
+
+ # install dependencies
+ if ! command -v "gh" > /dev/null 2>&1; then
+ sudo apt-get -y -qq install gh
+ fi
+
+ while true
+ do
+ API_CALLS_TOTAL=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.limit')
+ API_CALLS_LEFT=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.remaining')
+ PERCENT=$(( API_CALLS_LEFT * 100 / API_CALLS_TOTAL ))
+ if (( $PERCENT > 20 )); then
+ echo "API rate in good shape $PERCENT % free"
+ exit 0
+ fi
+ echo "API rate lower then 20%, sleping 10m"
+ sleep 10m
+ done
+
+ # show current api rate
+ curl -s -H "Accept: application/vnd.github.v3+json" -H "Authorization: token ${{ secrets.ACCESS_TOKEN }}" https://api.github.com/rate_limit
+
+ - name: Import GPG key
+ env:
+ GPG_KEY1: ${{ secrets.GPG_KEY1 }}
+ if: env.GPG_KEY1 != null
+ uses: crazy-max/ghaction-import-gpg@v6
+ with:
+ gpg_private_key: ${{ secrets.GPG_KEY1 }}
+ passphrase: ${{ secrets.GPG_PASSPHRASE1 }}
+
+ - name: Sign
+ env:
+ GPG_PASSPHRASE1: ${{ secrets.GPG_PASSPHRASE1 }}
+ if: env.GPG_PASSPHRASE1 != null
+ run: |
+
+ echo ${{ secrets.GPG_PASSPHRASE1 }} | gpg --passphrase-fd 0 --armor --detach-sign --pinentry-mode loopback --batch --yes output/images/*/archive/*.img*.xz
+
+ - name: Generate torrent
+ timeout-minutes: 3
+ run: |
+
+ TRACKERS=$(cat trackerslist.repo/best.txt | sed '/^\s*$/d' | sort -R | while read line; do printf " --announce=""${line}"; done)
+ BOARD=$(ls -1 output/images/ | head -1)
+ FILE=$(ls -1 output/images/*/archive/*.img*.xz | head -1 | rev | cut -d"/" -f1 | rev)
+ WEBSEEDS="https://github.com/armbian/os/releases/download/${{ needs.matrix_prep.outputs.version }}/${FILE}"
+          # nightly images are GH only
+ rm -f servers.txt
+ if [ "${{ github.event.inputs.nightlybuild || 'yes' }}" == "no" ]; then
+ # add seeds from different regions
+ #WEBSEEDS=$(curl -sq http://redirect.armbian.com/mirrors | jq -Mr '.' | grep http | tr -d \"," " | sort | uniq | sed "s|$|${BOARD}\/archive\/${FILE},|")
+
+ SERVERS=$(curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&device_role=Mirror&tag=images&status=active" \
+ | jq '.results[] | .name' | grep -v null | sed "s/\"//g")
+ WEBSEEDS=$(for server in $SERVERS; do
+ JSON=$(curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&name=${server}" | jq)
+ SERVER_PATH=$(echo "${JSON}" | jq '.results[] | .custom_fields["download_path_images"]' | sed "s/\"//g")
+ if [ "${SERVER_PATH}" == "null" ]; then SERVER_PATH="dl"; fi
+ echo "http://$server/$SERVER_PATH/" | sed "s|$|${BOARD}\/archive\/${FILE},|"
+ done | sed 's/ \+/\n/g')
+ fi
+ cd output/images/*/archive/
+ mktorrent --comment="Armbian torrent for ${FILE}" --verbose ${TRACKERS} --web-seed="${WEBSEEDS}" ${FILE}
+
+ #- name: Choose random user for upload
+ # run: |
+
+ #arr[0]="${{ secrets.ACCESS_TOKEN }}"
+ #arr[1]="${{ secrets.ACCESS_TOKEN_ARMBIANWORKER }}"
+ #rand=$[ $RANDOM % 2 ]
+ #echo "upload_user=${arr[$rand]}" >> $GITHUB_ENV
+
+
+ - name: "Upload artefacts"
+ timeout-minutes: 25
+ #if: ${{ github.repository_owner == 'Armbian' && github.event.inputs.release != 'stable' }}
+ #if: ${{ inputs.release != 'stable' && github.event.inputs.uploadtoserver != 'yes' }}
+ #if: (${{ github.event.inputs.uploadtoserver || 'github' }} == 'github') || (${{ github.event.inputs.uploadtoserver || 'github' }} == 'both')
+ if: ${{ ( github.event.inputs.nightlybuild || 'yes' ) == 'yes' || env.RELEASE_REPOSITORY == 'community' || env.RELEASE_REPOSITORY == 'distribution' }}
+ uses: ncipollo/release-action@v1
+ with:
+ repo: "${{ env.RELEASE_REPOSITORY }}"
+ artifacts: "output/images/*/*/Armbian_*.*"
+ tag: "${{ needs.matrix_prep.outputs.version }}"
+ omitBody: true
+ replacesArtifacts: true
+ omitName: true
+ makeLatest: false
+ omitPrereleaseDuringUpdate: true
+ allowUpdates: true
+ artifactErrorsFailBuild: true
+ token: "${{ env.GH_TOKEN }}"
+ #token: ${{ secrets.ACCESS_TOKEN }}
+ #token: ${{ env.upload_user }}
+
+ - name: Deploy to servers
+ timeout-minutes: 240
+ if: ${{ github.event.inputs.uploadtoserver == 'armbian' || github.event.inputs.uploadtoserver == 'both' }}
+ run: |
+
+ # generate control file which checks mirrors
+ sudo date +%s > output/images/control
+
+ # read servers data from NetBox database
+ curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&tag=push&tag=images&status=active" \
+ | jq '.results[] | .name,.custom_fields["path"],.custom_fields["port"],.custom_fields["username"]' | sed "s|null|beta|" | sed "s/\"//g" | xargs -n4 -d'\n' \
+ | sed "s/ /,/g" | jq -cnR '[inputs | select(length>0)]' | jq -c '.[]' | sed "s/\"//g" | sort -R | \
+ while read i; do
+ SERVER_URL=$(echo $i | cut -d "," -f1)
+ SERVER_PORT=$(echo $i | cut -d "," -f3)
+ SERVER_PATH=$(echo $i | cut -d "," -f2)
+ SERVER_USERNAME=$(echo $i | cut -d "," -f4)
+ # clean
+ ssh-keygen -f "${HOME}/.ssh/known_hosts" -R "${SERVER_URL}"
+ # upload
+ rsync --progress -e \
+ "ssh -p ${SERVER_PORT} -o StrictHostKeyChecking=accept-new" \
+ -rvP output/images/ "${SERVER_USERNAME}@${SERVER_URL}:${SERVER_PATH}/incoming/"
+ done
+
+ # cleaning self hosted runners
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ if: always()
+ uses: armbian/actions/runner-clean@main
+ "build-images-chunk-7": # templated "build-images-chunk-7"
+ needs: [ "matrix_prep", "all-artifacts-ready" ]
+ timeout-minutes: 180
+ if: ${{ !failure() && !cancelled() && ( github.repository_owner == 'armbian' ) && ( needs.matrix_prep.outputs.images-chunk-not-empty-7 == 'yes' ) }} # <-- HERE: Chunk number.
+ strategy:
+ fail-fast: false # let other jobs try to complete if one fails
+ matrix: ${{ fromJSON(needs.matrix_prep.outputs.images-chunk-json-7) }} # <-- HERE: Chunk number.
+ name: ${{ matrix.desc || 'Empty I7' }} # <-- HERE: Chunk number.
+ runs-on: ${{ matrix.runs_on }}
+ steps:
+
+ - name: Install dependencies
+ run: |
+ if [ ! -e /usr/bin/mktorrent ]; then
+ sudo apt-get update
+ sudo apt-get install -y mktorrent
+ fi
+
+ # cleaning self hosted runners
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ uses: armbian/actions/runner-clean@main
+
+ # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
+ - name: Cleanup userpatches repo
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: rm -rf userpatches.repo
+
+ - name: Checkout build repo
+ uses: actions/checkout@v4
+ with:
+ repository: ${{ env.BUILD_REPOSITORY }}
+ ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
+
+ # clone the torrent lists
+ - name: Checkout torrent lists
+ uses: actions/checkout@v4
+ with:
+ repository: XIU2/TrackersListCollection
+ clean: false
+ ref: master
+ path: trackerslist.repo
+ fetch-depth: ${{ matrix.fdepth }}
+
+ # clone the userpatches repo (`armbian/os`)
+ - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
+ uses: actions/checkout@v4
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ with:
+ repository: ${{ env.USERPATCHES_REPOSITORY }}
+ ref: ${{ env.USERPATCHES_REF }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default.
+ path: userpatches.repo
+
+ - name: "Put userpatches in place, and remove userpatches repo"
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: |
+ mkdir -pv userpatches
+ rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
+ rm -rf userpatches.repo
+
+ - name: Cleanup leftover output
+ run: |
+ rm -f userpatches/VERSION
+
+ - name: ${{matrix.desc}}
+ id: build-one-image
+ timeout-minutes: 120
+ run: |
+ # calculate loop from runner name
+ if [ -z "${ImageOS}" ]; then
+ USE_FIXED_LOOP_DEVICE=$(echo ${RUNNER_NAME} | rev | cut -d"-" -f1 | rev | sed 's/^0*//' | sed -e 's/^/\/dev\/loop/')
+ fi
+ bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" USE_FIXED_LOOP_DEVICE="$USE_FIXED_LOOP_DEVICE" SHARE_LOG=yes MAKE_FOLDERS="archive" IMAGE_VERSION=${{ needs.matrix_prep.outputs.version }} ${{env.EXTRA_PARAMS_IMAGE}} ${{env.EXTRA_PARAMS_ALL_BUILDS}}
+
+ - name: "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
+ if: always()
+ run: |
+ echo "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
+
+ - name: Install SSH key
+ uses: shimataro/ssh-key-action@v2
+ with:
+ key: ${{ secrets.KEY_UPLOAD }}
+ known_hosts: ${{ secrets.KNOWN_HOSTS_ARMBIAN_UPLOAD }}
+ if_key_exists: replace
+
+ - name: Check API rate limits
+ run: |
+
+ # install dependencies
+ if ! command -v "gh" > /dev/null 2>&1; then
+ sudo apt-get -y -qq install gh
+ fi
+
+ while true
+ do
+ API_CALLS_TOTAL=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.limit')
+ API_CALLS_LEFT=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.remaining')
+ PERCENT=$(( API_CALLS_LEFT * 100 / API_CALLS_TOTAL ))
+ if (( $PERCENT > 20 )); then
+ echo "API rate in good shape $PERCENT % free"
+ exit 0
+ fi
+ echo "API rate lower then 20%, sleping 10m"
+ sleep 10m
+ done
+
+ # show current api rate
+ curl -s -H "Accept: application/vnd.github.v3+json" -H "Authorization: token ${{ secrets.ACCESS_TOKEN }}" https://api.github.com/rate_limit
+
+ - name: Import GPG key
+ env:
+ GPG_KEY1: ${{ secrets.GPG_KEY1 }}
+ if: env.GPG_KEY1 != null
+ uses: crazy-max/ghaction-import-gpg@v6
+ with:
+ gpg_private_key: ${{ secrets.GPG_KEY1 }}
+ passphrase: ${{ secrets.GPG_PASSPHRASE1 }}
+
+ - name: Sign
+ env:
+ GPG_PASSPHRASE1: ${{ secrets.GPG_PASSPHRASE1 }}
+ if: env.GPG_PASSPHRASE1 != null
+ run: |
+
+ echo ${{ secrets.GPG_PASSPHRASE1 }} | gpg --passphrase-fd 0 --armor --detach-sign --pinentry-mode loopback --batch --yes output/images/*/archive/*.img*.xz
+
+ - name: Generate torrent
+ timeout-minutes: 3
+ run: |
+
+ TRACKERS=$(cat trackerslist.repo/best.txt | sed '/^\s*$/d' | sort -R | while read line; do printf " --announce=""${line}"; done)
+ BOARD=$(ls -1 output/images/ | head -1)
+ FILE=$(ls -1 output/images/*/archive/*.img*.xz | head -1 | rev | cut -d"/" -f1 | rev)
+ WEBSEEDS="https://github.com/armbian/os/releases/download/${{ needs.matrix_prep.outputs.version }}/${FILE}"
+          # nightly images are GH only
+ rm -f servers.txt
+ if [ "${{ github.event.inputs.nightlybuild || 'yes' }}" == "no" ]; then
+ # add seeds from different regions
+ #WEBSEEDS=$(curl -sq http://redirect.armbian.com/mirrors | jq -Mr '.' | grep http | tr -d \"," " | sort | uniq | sed "s|$|${BOARD}\/archive\/${FILE},|")
+
+ SERVERS=$(curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&device_role=Mirror&tag=images&status=active" \
+ | jq '.results[] | .name' | grep -v null | sed "s/\"//g")
+ WEBSEEDS=$(for server in $SERVERS; do
+ JSON=$(curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&name=${server}" | jq)
+ SERVER_PATH=$(echo "${JSON}" | jq '.results[] | .custom_fields["download_path_images"]' | sed "s/\"//g")
+ if [ "${SERVER_PATH}" == "null" ]; then SERVER_PATH="dl"; fi
+ echo "http://$server/$SERVER_PATH/" | sed "s|$|${BOARD}\/archive\/${FILE},|"
+ done | sed 's/ \+/\n/g')
+ fi
+ cd output/images/*/archive/
+ mktorrent --comment="Armbian torrent for ${FILE}" --verbose ${TRACKERS} --web-seed="${WEBSEEDS}" ${FILE}
+
+ #- name: Choose random user for upload
+ # run: |
+
+ #arr[0]="${{ secrets.ACCESS_TOKEN }}"
+ #arr[1]="${{ secrets.ACCESS_TOKEN_ARMBIANWORKER }}"
+ #rand=$[ $RANDOM % 2 ]
+ #echo "upload_user=${arr[$rand]}" >> $GITHUB_ENV
+
+
+ - name: "Upload artefacts"
+ timeout-minutes: 25
+ #if: ${{ github.repository_owner == 'Armbian' && github.event.inputs.release != 'stable' }}
+ #if: ${{ inputs.release != 'stable' && github.event.inputs.uploadtoserver != 'yes' }}
+ #if: (${{ github.event.inputs.uploadtoserver || 'github' }} == 'github') || (${{ github.event.inputs.uploadtoserver || 'github' }} == 'both')
+ if: ${{ ( github.event.inputs.nightlybuild || 'yes' ) == 'yes' || env.RELEASE_REPOSITORY == 'community' || env.RELEASE_REPOSITORY == 'distribution' }}
+ uses: ncipollo/release-action@v1
+ with:
+ repo: "${{ env.RELEASE_REPOSITORY }}"
+ artifacts: "output/images/*/*/Armbian_*.*"
+ tag: "${{ needs.matrix_prep.outputs.version }}"
+ omitBody: true
+ replacesArtifacts: true
+ omitName: true
+ makeLatest: false
+ omitPrereleaseDuringUpdate: true
+ allowUpdates: true
+ artifactErrorsFailBuild: true
+ token: "${{ env.GH_TOKEN }}"
+ #token: ${{ secrets.ACCESS_TOKEN }}
+ #token: ${{ env.upload_user }}
+
+ - name: Deploy to servers
+ timeout-minutes: 240
+ if: ${{ github.event.inputs.uploadtoserver == 'armbian' || github.event.inputs.uploadtoserver == 'both' }}
+ run: |
+
+ # generate control file which checks mirrors
+ sudo date +%s > output/images/control
+
+ # read servers data from NetBox database
+ curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&tag=push&tag=images&status=active" \
+ | jq '.results[] | .name,.custom_fields["path"],.custom_fields["port"],.custom_fields["username"]' | sed "s|null|beta|" | sed "s/\"//g" | xargs -n4 -d'\n' \
+ | sed "s/ /,/g" | jq -cnR '[inputs | select(length>0)]' | jq -c '.[]' | sed "s/\"//g" | sort -R | \
+ while read i; do
+ SERVER_URL=$(echo $i | cut -d "," -f1)
+ SERVER_PORT=$(echo $i | cut -d "," -f3)
+ SERVER_PATH=$(echo $i | cut -d "," -f2)
+ SERVER_USERNAME=$(echo $i | cut -d "," -f4)
+ # clean
+ ssh-keygen -f "${HOME}/.ssh/known_hosts" -R "${SERVER_URL}"
+ # upload
+ rsync --progress -e \
+ "ssh -p ${SERVER_PORT} -o StrictHostKeyChecking=accept-new" \
+ -rvP output/images/ "${SERVER_USERNAME}@${SERVER_URL}:${SERVER_PATH}/incoming/"
+ done
+
+ # cleaning self hosted runners
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ if: always()
+ uses: armbian/actions/runner-clean@main
+ "build-images-chunk-8": # templated "build-images-chunk-8"
+ needs: [ "matrix_prep", "all-artifacts-ready" ]
+ timeout-minutes: 180
+ if: ${{ !failure() && !cancelled() && ( github.repository_owner == 'armbian' ) && ( needs.matrix_prep.outputs.images-chunk-not-empty-8 == 'yes' ) }} # <-- HERE: Chunk number.
+ strategy:
+ fail-fast: false # let other jobs try to complete if one fails
+ matrix: ${{ fromJSON(needs.matrix_prep.outputs.images-chunk-json-8) }} # <-- HERE: Chunk number.
+ name: ${{ matrix.desc || 'Empty I8' }} # <-- HERE: Chunk number.
+ runs-on: ${{ matrix.runs_on }}
+ steps:
+
+ - name: Install dependencies
+ run: |
+ if [ ! -e /usr/bin/mktorrent ]; then
+ sudo apt-get update
+ sudo apt-get install -y mktorrent
+ fi
+
+ # cleaning self hosted runners
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ uses: armbian/actions/runner-clean@main
+
+ # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
+ - name: Cleanup userpatches repo
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: rm -rf userpatches.repo
+
+ - name: Checkout build repo
+ uses: actions/checkout@v4
+ with:
+ repository: ${{ env.BUILD_REPOSITORY }}
+ ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
+
+ # clone the torrent lists
+ - name: Checkout torrent lists
+ uses: actions/checkout@v4
+ with:
+ repository: XIU2/TrackersListCollection
+ clean: false
+ ref: master
+ path: trackerslist.repo
+ fetch-depth: ${{ matrix.fdepth }}
+
+ # clone the userpatches repo (`armbian/os`)
+ - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
+ uses: actions/checkout@v4
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ with:
+ repository: ${{ env.USERPATCHES_REPOSITORY }}
+ ref: ${{ env.USERPATCHES_REF }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default.
+ path: userpatches.repo
+
+ - name: "Put userpatches in place, and remove userpatches repo"
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: |
+ mkdir -pv userpatches
+ rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
+ rm -rf userpatches.repo
+
+ - name: Cleanup leftover output
+ run: |
+ rm -f userpatches/VERSION
+
+ - name: ${{matrix.desc}}
+ id: build-one-image
+ timeout-minutes: 120
+ run: |
+ # calculate loop from runner name
+ if [ -z "${ImageOS}" ]; then
+ USE_FIXED_LOOP_DEVICE=$(echo ${RUNNER_NAME} | rev | cut -d"-" -f1 | rev | sed 's/^0*//' | sed -e 's/^/\/dev\/loop/')
+ fi
+ bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" USE_FIXED_LOOP_DEVICE="$USE_FIXED_LOOP_DEVICE" SHARE_LOG=yes MAKE_FOLDERS="archive" IMAGE_VERSION=${{ needs.matrix_prep.outputs.version }} ${{env.EXTRA_PARAMS_IMAGE}} ${{env.EXTRA_PARAMS_ALL_BUILDS}}
+
+ - name: "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
+ if: always()
+ run: |
+ echo "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
+
+ - name: Install SSH key
+ uses: shimataro/ssh-key-action@v2
+ with:
+ key: ${{ secrets.KEY_UPLOAD }}
+ known_hosts: ${{ secrets.KNOWN_HOSTS_ARMBIAN_UPLOAD }}
+ if_key_exists: replace
+
+ - name: Check API rate limits
+ run: |
+
+ # install dependencies
+ if ! command -v "gh" > /dev/null 2>&1; then
+ sudo apt-get -y -qq install gh
+ fi
+
+ while true
+ do
+ API_CALLS_TOTAL=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.limit')
+ API_CALLS_LEFT=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.remaining')
+ PERCENT=$(( API_CALLS_LEFT * 100 / API_CALLS_TOTAL ))
+ if (( $PERCENT > 20 )); then
+ echo "API rate in good shape $PERCENT % free"
+ exit 0
+ fi
+          echo "API rate lower than 20%, sleeping 10m"
+ sleep 10m
+ done
+
+ # show current api rate
+ curl -s -H "Accept: application/vnd.github.v3+json" -H "Authorization: token ${{ secrets.ACCESS_TOKEN }}" https://api.github.com/rate_limit
+
+ - name: Import GPG key
+ env:
+ GPG_KEY1: ${{ secrets.GPG_KEY1 }}
+ if: env.GPG_KEY1 != null
+ uses: crazy-max/ghaction-import-gpg@v6
+ with:
+ gpg_private_key: ${{ secrets.GPG_KEY1 }}
+ passphrase: ${{ secrets.GPG_PASSPHRASE1 }}
+
+ - name: Sign
+ env:
+ GPG_PASSPHRASE1: ${{ secrets.GPG_PASSPHRASE1 }}
+ if: env.GPG_PASSPHRASE1 != null
+ run: |
+
+ echo ${{ secrets.GPG_PASSPHRASE1 }} | gpg --passphrase-fd 0 --armor --detach-sign --pinentry-mode loopback --batch --yes output/images/*/archive/*.img*.xz
+
+ - name: Generate torrent
+ timeout-minutes: 3
+ run: |
+
+ TRACKERS=$(cat trackerslist.repo/best.txt | sed '/^\s*$/d' | sort -R | while read line; do printf " --announce=""${line}"; done)
+ BOARD=$(ls -1 output/images/ | head -1)
+ FILE=$(ls -1 output/images/*/archive/*.img*.xz | head -1 | rev | cut -d"/" -f1 | rev)
+ WEBSEEDS="https://github.com/armbian/os/releases/download/${{ needs.matrix_prep.outputs.version }}/${FILE}"
+        # nightly images are GH only
+ rm -f servers.txt
+ if [ "${{ github.event.inputs.nightlybuild || 'yes' }}" == "no" ]; then
+ # add seeds from different regions
+ #WEBSEEDS=$(curl -sq http://redirect.armbian.com/mirrors | jq -Mr '.' | grep http | tr -d \"," " | sort | uniq | sed "s|$|${BOARD}\/archive\/${FILE},|")
+
+ SERVERS=$(curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&device_role=Mirror&tag=images&status=active" \
+ | jq '.results[] | .name' | grep -v null | sed "s/\"//g")
+ WEBSEEDS=$(for server in $SERVERS; do
+ JSON=$(curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&name=${server}" | jq)
+ SERVER_PATH=$(echo "${JSON}" | jq '.results[] | .custom_fields["download_path_images"]' | sed "s/\"//g")
+ if [ "${SERVER_PATH}" == "null" ]; then SERVER_PATH="dl"; fi
+ echo "http://$server/$SERVER_PATH/" | sed "s|$|${BOARD}\/archive\/${FILE},|"
+ done | sed 's/ \+/\n/g')
+ fi
+ cd output/images/*/archive/
+ mktorrent --comment="Armbian torrent for ${FILE}" --verbose ${TRACKERS} --web-seed="${WEBSEEDS}" ${FILE}
+
+ #- name: Choose random user for upload
+ # run: |
+
+ #arr[0]="${{ secrets.ACCESS_TOKEN }}"
+ #arr[1]="${{ secrets.ACCESS_TOKEN_ARMBIANWORKER }}"
+ #rand=$[ $RANDOM % 2 ]
+ #echo "upload_user=${arr[$rand]}" >> $GITHUB_ENV
+
+
+ - name: "Upload artefacts"
+ timeout-minutes: 25
+ #if: ${{ github.repository_owner == 'Armbian' && github.event.inputs.release != 'stable' }}
+ #if: ${{ inputs.release != 'stable' && github.event.inputs.uploadtoserver != 'yes' }}
+ #if: (${{ github.event.inputs.uploadtoserver || 'github' }} == 'github') || (${{ github.event.inputs.uploadtoserver || 'github' }} == 'both')
+ if: ${{ ( github.event.inputs.nightlybuild || 'yes' ) == 'yes' || env.RELEASE_REPOSITORY == 'community' || env.RELEASE_REPOSITORY == 'distribution' }}
+ uses: ncipollo/release-action@v1
+ with:
+ repo: "${{ env.RELEASE_REPOSITORY }}"
+ artifacts: "output/images/*/*/Armbian_*.*"
+ tag: "${{ needs.matrix_prep.outputs.version }}"
+ omitBody: true
+ replacesArtifacts: true
+ omitName: true
+ makeLatest: false
+ omitPrereleaseDuringUpdate: true
+ allowUpdates: true
+ artifactErrorsFailBuild: true
+ token: "${{ env.GH_TOKEN }}"
+ #token: ${{ secrets.ACCESS_TOKEN }}
+ #token: ${{ env.upload_user }}
+
+ - name: Deploy to servers
+ timeout-minutes: 240
+ if: ${{ github.event.inputs.uploadtoserver == 'armbian' || github.event.inputs.uploadtoserver == 'both' }}
+ run: |
+
+ # generate control file which checks mirrors
+ sudo date +%s > output/images/control
+
+ # read servers data from NetBox database
+ curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&tag=push&tag=images&status=active" \
+ | jq '.results[] | .name,.custom_fields["path"],.custom_fields["port"],.custom_fields["username"]' | sed "s|null|beta|" | sed "s/\"//g" | xargs -n4 -d'\n' \
+ | sed "s/ /,/g" | jq -cnR '[inputs | select(length>0)]' | jq -c '.[]' | sed "s/\"//g" | sort -R | \
+ while read i; do
+ SERVER_URL=$(echo $i | cut -d "," -f1)
+ SERVER_PORT=$(echo $i | cut -d "," -f3)
+ SERVER_PATH=$(echo $i | cut -d "," -f2)
+ SERVER_USERNAME=$(echo $i | cut -d "," -f4)
+ # clean
+ ssh-keygen -f "${HOME}/.ssh/known_hosts" -R "${SERVER_URL}"
+ # upload
+ rsync --progress -e \
+ "ssh -p ${SERVER_PORT} -o StrictHostKeyChecking=accept-new" \
+ -rvP output/images/ "${SERVER_USERNAME}@${SERVER_URL}:${SERVER_PATH}/incoming/"
+ done
+
+ # cleaning self hosted runners
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ if: always()
+ uses: armbian/actions/runner-clean@main
+ "build-images-chunk-9": # templated "build-images-chunk-9"
+ needs: [ "matrix_prep", "all-artifacts-ready" ]
+ timeout-minutes: 180
+ if: ${{ !failure() && !cancelled() && ( github.repository_owner == 'armbian' ) && ( needs.matrix_prep.outputs.images-chunk-not-empty-9 == 'yes' ) }} # <-- HERE: Chunk number.
+ strategy:
+ fail-fast: false # let other jobs try to complete if one fails
+ matrix: ${{ fromJSON(needs.matrix_prep.outputs.images-chunk-json-9) }} # <-- HERE: Chunk number.
+ name: ${{ matrix.desc || 'Empty I9' }} # <-- HERE: Chunk number.
+ runs-on: ${{ matrix.runs_on }}
+ steps:
+
+ - name: Install dependencies
+ run: |
+ if [ ! -e /usr/bin/mktorrent ]; then
+ sudo apt-get update
+ sudo apt-get install -y mktorrent
+ fi
+
+ # cleaning self hosted runners
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ uses: armbian/actions/runner-clean@main
+
+ # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
+ - name: Cleanup userpatches repo
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: rm -rf userpatches.repo
+
+ - name: Checkout build repo
+ uses: actions/checkout@v4
+ with:
+ repository: ${{ env.BUILD_REPOSITORY }}
+ ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
+
+ # clone the torrent lists
+ - name: Checkout torrent lists
+ uses: actions/checkout@v4
+ with:
+ repository: XIU2/TrackersListCollection
+ clean: false
+ ref: master
+ path: trackerslist.repo
+ fetch-depth: ${{ matrix.fdepth }}
+
+ # clone the userpatches repo (`armbian/os`)
+ - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
+ uses: actions/checkout@v4
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ with:
+ repository: ${{ env.USERPATCHES_REPOSITORY }}
+ ref: ${{ env.USERPATCHES_REF }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default.
+ path: userpatches.repo
+
+ - name: "Put userpatches in place, and remove userpatches repo"
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: |
+ mkdir -pv userpatches
+ rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
+ rm -rf userpatches.repo
+
+ - name: Cleanup leftover output
+ run: |
+ rm -f userpatches/VERSION
+
+ - name: ${{matrix.desc}}
+ id: build-one-image
+ timeout-minutes: 120
+ run: |
+ # calculate loop from runner name
+ if [ -z "${ImageOS}" ]; then
+ USE_FIXED_LOOP_DEVICE=$(echo ${RUNNER_NAME} | rev | cut -d"-" -f1 | rev | sed 's/^0*//' | sed -e 's/^/\/dev\/loop/')
+ fi
+ bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" USE_FIXED_LOOP_DEVICE="$USE_FIXED_LOOP_DEVICE" SHARE_LOG=yes MAKE_FOLDERS="archive" IMAGE_VERSION=${{ needs.matrix_prep.outputs.version }} ${{env.EXTRA_PARAMS_IMAGE}} ${{env.EXTRA_PARAMS_ALL_BUILDS}}
+
+ - name: "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
+ if: always()
+ run: |
+ echo "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
+
+ - name: Install SSH key
+ uses: shimataro/ssh-key-action@v2
+ with:
+ key: ${{ secrets.KEY_UPLOAD }}
+ known_hosts: ${{ secrets.KNOWN_HOSTS_ARMBIAN_UPLOAD }}
+ if_key_exists: replace
+
+ - name: Check API rate limits
+ run: |
+
+ # install dependencies
+ if ! command -v "gh" > /dev/null 2>&1; then
+ sudo apt-get -y -qq install gh
+ fi
+
+ while true
+ do
+ API_CALLS_TOTAL=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.limit')
+ API_CALLS_LEFT=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.remaining')
+ PERCENT=$(( API_CALLS_LEFT * 100 / API_CALLS_TOTAL ))
+ if (( $PERCENT > 20 )); then
+ echo "API rate in good shape $PERCENT % free"
+ exit 0
+ fi
+          echo "API rate lower than 20%, sleeping 10m"
+ sleep 10m
+ done
+
+ # show current api rate
+ curl -s -H "Accept: application/vnd.github.v3+json" -H "Authorization: token ${{ secrets.ACCESS_TOKEN }}" https://api.github.com/rate_limit
+
+ - name: Import GPG key
+ env:
+ GPG_KEY1: ${{ secrets.GPG_KEY1 }}
+ if: env.GPG_KEY1 != null
+ uses: crazy-max/ghaction-import-gpg@v6
+ with:
+ gpg_private_key: ${{ secrets.GPG_KEY1 }}
+ passphrase: ${{ secrets.GPG_PASSPHRASE1 }}
+
+ - name: Sign
+ env:
+ GPG_PASSPHRASE1: ${{ secrets.GPG_PASSPHRASE1 }}
+ if: env.GPG_PASSPHRASE1 != null
+ run: |
+
+ echo ${{ secrets.GPG_PASSPHRASE1 }} | gpg --passphrase-fd 0 --armor --detach-sign --pinentry-mode loopback --batch --yes output/images/*/archive/*.img*.xz
+
+ - name: Generate torrent
+ timeout-minutes: 3
+ run: |
+
+ TRACKERS=$(cat trackerslist.repo/best.txt | sed '/^\s*$/d' | sort -R | while read line; do printf " --announce=""${line}"; done)
+ BOARD=$(ls -1 output/images/ | head -1)
+ FILE=$(ls -1 output/images/*/archive/*.img*.xz | head -1 | rev | cut -d"/" -f1 | rev)
+ WEBSEEDS="https://github.com/armbian/os/releases/download/${{ needs.matrix_prep.outputs.version }}/${FILE}"
+        # nightly images are GH only
+ rm -f servers.txt
+ if [ "${{ github.event.inputs.nightlybuild || 'yes' }}" == "no" ]; then
+ # add seeds from different regions
+ #WEBSEEDS=$(curl -sq http://redirect.armbian.com/mirrors | jq -Mr '.' | grep http | tr -d \"," " | sort | uniq | sed "s|$|${BOARD}\/archive\/${FILE},|")
+
+ SERVERS=$(curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&device_role=Mirror&tag=images&status=active" \
+ | jq '.results[] | .name' | grep -v null | sed "s/\"//g")
+ WEBSEEDS=$(for server in $SERVERS; do
+ JSON=$(curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&name=${server}" | jq)
+ SERVER_PATH=$(echo "${JSON}" | jq '.results[] | .custom_fields["download_path_images"]' | sed "s/\"//g")
+ if [ "${SERVER_PATH}" == "null" ]; then SERVER_PATH="dl"; fi
+ echo "http://$server/$SERVER_PATH/" | sed "s|$|${BOARD}\/archive\/${FILE},|"
+ done | sed 's/ \+/\n/g')
+ fi
+ cd output/images/*/archive/
+ mktorrent --comment="Armbian torrent for ${FILE}" --verbose ${TRACKERS} --web-seed="${WEBSEEDS}" ${FILE}
+
+ #- name: Choose random user for upload
+ # run: |
+
+ #arr[0]="${{ secrets.ACCESS_TOKEN }}"
+ #arr[1]="${{ secrets.ACCESS_TOKEN_ARMBIANWORKER }}"
+ #rand=$[ $RANDOM % 2 ]
+ #echo "upload_user=${arr[$rand]}" >> $GITHUB_ENV
+
+
+ - name: "Upload artefacts"
+ timeout-minutes: 25
+ #if: ${{ github.repository_owner == 'Armbian' && github.event.inputs.release != 'stable' }}
+ #if: ${{ inputs.release != 'stable' && github.event.inputs.uploadtoserver != 'yes' }}
+ #if: (${{ github.event.inputs.uploadtoserver || 'github' }} == 'github') || (${{ github.event.inputs.uploadtoserver || 'github' }} == 'both')
+ if: ${{ ( github.event.inputs.nightlybuild || 'yes' ) == 'yes' || env.RELEASE_REPOSITORY == 'community' || env.RELEASE_REPOSITORY == 'distribution' }}
+ uses: ncipollo/release-action@v1
+ with:
+ repo: "${{ env.RELEASE_REPOSITORY }}"
+ artifacts: "output/images/*/*/Armbian_*.*"
+ tag: "${{ needs.matrix_prep.outputs.version }}"
+ omitBody: true
+ replacesArtifacts: true
+ omitName: true
+ makeLatest: false
+ omitPrereleaseDuringUpdate: true
+ allowUpdates: true
+ artifactErrorsFailBuild: true
+ token: "${{ env.GH_TOKEN }}"
+ #token: ${{ secrets.ACCESS_TOKEN }}
+ #token: ${{ env.upload_user }}
+
+ - name: Deploy to servers
+ timeout-minutes: 240
+ if: ${{ github.event.inputs.uploadtoserver == 'armbian' || github.event.inputs.uploadtoserver == 'both' }}
+ run: |
+
+ # generate control file which checks mirrors
+ sudo date +%s > output/images/control
+
+ # read servers data from NetBox database
+ curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&tag=push&tag=images&status=active" \
+ | jq '.results[] | .name,.custom_fields["path"],.custom_fields["port"],.custom_fields["username"]' | sed "s|null|beta|" | sed "s/\"//g" | xargs -n4 -d'\n' \
+ | sed "s/ /,/g" | jq -cnR '[inputs | select(length>0)]' | jq -c '.[]' | sed "s/\"//g" | sort -R | \
+ while read i; do
+ SERVER_URL=$(echo $i | cut -d "," -f1)
+ SERVER_PORT=$(echo $i | cut -d "," -f3)
+ SERVER_PATH=$(echo $i | cut -d "," -f2)
+ SERVER_USERNAME=$(echo $i | cut -d "," -f4)
+ # clean
+ ssh-keygen -f "${HOME}/.ssh/known_hosts" -R "${SERVER_URL}"
+ # upload
+ rsync --progress -e \
+ "ssh -p ${SERVER_PORT} -o StrictHostKeyChecking=accept-new" \
+ -rvP output/images/ "${SERVER_USERNAME}@${SERVER_URL}:${SERVER_PATH}/incoming/"
+ done
+
+ # cleaning self hosted runners
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ if: always()
+ uses: armbian/actions/runner-clean@main
+ "build-images-chunk-10": # templated "build-images-chunk-10"
+ needs: [ "matrix_prep", "all-artifacts-ready" ]
+ timeout-minutes: 180
+ if: ${{ !failure() && !cancelled() && ( github.repository_owner == 'armbian' ) && ( needs.matrix_prep.outputs.images-chunk-not-empty-10 == 'yes' ) }} # <-- HERE: Chunk number.
+ strategy:
+ fail-fast: false # let other jobs try to complete if one fails
+ matrix: ${{ fromJSON(needs.matrix_prep.outputs.images-chunk-json-10) }} # <-- HERE: Chunk number.
+ name: ${{ matrix.desc || 'Empty I10' }} # <-- HERE: Chunk number.
+ runs-on: ${{ matrix.runs_on }}
+ steps:
+
+ - name: Install dependencies
+ run: |
+ if [ ! -e /usr/bin/mktorrent ]; then
+ sudo apt-get update
+ sudo apt-get install -y mktorrent
+ fi
+
+ # cleaning self hosted runners
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ uses: armbian/actions/runner-clean@main
+
+ # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
+ - name: Cleanup userpatches repo
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: rm -rf userpatches.repo
+
+ - name: Checkout build repo
+ uses: actions/checkout@v4
+ with:
+ repository: ${{ env.BUILD_REPOSITORY }}
+ ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
+
+ # clone the torrent lists
+ - name: Checkout torrent lists
+ uses: actions/checkout@v4
+ with:
+ repository: XIU2/TrackersListCollection
+ clean: false
+ ref: master
+ path: trackerslist.repo
+ fetch-depth: ${{ matrix.fdepth }}
+
+ # clone the userpatches repo (`armbian/os`)
+ - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
+ uses: actions/checkout@v4
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ with:
+ repository: ${{ env.USERPATCHES_REPOSITORY }}
+ ref: ${{ env.USERPATCHES_REF }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default.
+ path: userpatches.repo
+
+ - name: "Put userpatches in place, and remove userpatches repo"
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: |
+ mkdir -pv userpatches
+ rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
+ rm -rf userpatches.repo
+
+ - name: Cleanup leftover output
+ run: |
+ rm -f userpatches/VERSION
+
+ - name: ${{matrix.desc}}
+ id: build-one-image
+ timeout-minutes: 120
+ run: |
+ # calculate loop from runner name
+ if [ -z "${ImageOS}" ]; then
+ USE_FIXED_LOOP_DEVICE=$(echo ${RUNNER_NAME} | rev | cut -d"-" -f1 | rev | sed 's/^0*//' | sed -e 's/^/\/dev\/loop/')
+ fi
+ bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" USE_FIXED_LOOP_DEVICE="$USE_FIXED_LOOP_DEVICE" SHARE_LOG=yes MAKE_FOLDERS="archive" IMAGE_VERSION=${{ needs.matrix_prep.outputs.version }} ${{env.EXTRA_PARAMS_IMAGE}} ${{env.EXTRA_PARAMS_ALL_BUILDS}}
+
+ - name: "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
+ if: always()
+ run: |
+ echo "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
+
+ - name: Install SSH key
+ uses: shimataro/ssh-key-action@v2
+ with:
+ key: ${{ secrets.KEY_UPLOAD }}
+ known_hosts: ${{ secrets.KNOWN_HOSTS_ARMBIAN_UPLOAD }}
+ if_key_exists: replace
+
+ - name: Check API rate limits
+ run: |
+
+ # install dependencies
+ if ! command -v "gh" > /dev/null 2>&1; then
+ sudo apt-get -y -qq install gh
+ fi
+
+ while true
+ do
+ API_CALLS_TOTAL=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.limit')
+ API_CALLS_LEFT=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.remaining')
+ PERCENT=$(( API_CALLS_LEFT * 100 / API_CALLS_TOTAL ))
+ if (( $PERCENT > 20 )); then
+ echo "API rate in good shape $PERCENT % free"
+ exit 0
+ fi
+          echo "API rate lower than 20%, sleeping 10m"
+ sleep 10m
+ done
+
+ # show current api rate
+ curl -s -H "Accept: application/vnd.github.v3+json" -H "Authorization: token ${{ secrets.ACCESS_TOKEN }}" https://api.github.com/rate_limit
+
+ - name: Import GPG key
+ env:
+ GPG_KEY1: ${{ secrets.GPG_KEY1 }}
+ if: env.GPG_KEY1 != null
+ uses: crazy-max/ghaction-import-gpg@v6
+ with:
+ gpg_private_key: ${{ secrets.GPG_KEY1 }}
+ passphrase: ${{ secrets.GPG_PASSPHRASE1 }}
+
+ - name: Sign
+ env:
+ GPG_PASSPHRASE1: ${{ secrets.GPG_PASSPHRASE1 }}
+ if: env.GPG_PASSPHRASE1 != null
+ run: |
+
+ echo ${{ secrets.GPG_PASSPHRASE1 }} | gpg --passphrase-fd 0 --armor --detach-sign --pinentry-mode loopback --batch --yes output/images/*/archive/*.img*.xz
+
+ - name: Generate torrent
+ timeout-minutes: 3
+ run: |
+
+ TRACKERS=$(cat trackerslist.repo/best.txt | sed '/^\s*$/d' | sort -R | while read line; do printf " --announce=""${line}"; done)
+ BOARD=$(ls -1 output/images/ | head -1)
+ FILE=$(ls -1 output/images/*/archive/*.img*.xz | head -1 | rev | cut -d"/" -f1 | rev)
+ WEBSEEDS="https://github.com/armbian/os/releases/download/${{ needs.matrix_prep.outputs.version }}/${FILE}"
+        # nightly images are GH only
+ rm -f servers.txt
+ if [ "${{ github.event.inputs.nightlybuild || 'yes' }}" == "no" ]; then
+ # add seeds from different regions
+ #WEBSEEDS=$(curl -sq http://redirect.armbian.com/mirrors | jq -Mr '.' | grep http | tr -d \"," " | sort | uniq | sed "s|$|${BOARD}\/archive\/${FILE},|")
+
+ SERVERS=$(curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&device_role=Mirror&tag=images&status=active" \
+ | jq '.results[] | .name' | grep -v null | sed "s/\"//g")
+ WEBSEEDS=$(for server in $SERVERS; do
+ JSON=$(curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&name=${server}" | jq)
+ SERVER_PATH=$(echo "${JSON}" | jq '.results[] | .custom_fields["download_path_images"]' | sed "s/\"//g")
+ if [ "${SERVER_PATH}" == "null" ]; then SERVER_PATH="dl"; fi
+ echo "http://$server/$SERVER_PATH/" | sed "s|$|${BOARD}\/archive\/${FILE},|"
+ done | sed 's/ \+/\n/g')
+ fi
+ cd output/images/*/archive/
+ mktorrent --comment="Armbian torrent for ${FILE}" --verbose ${TRACKERS} --web-seed="${WEBSEEDS}" ${FILE}
+
+ #- name: Choose random user for upload
+ # run: |
+
+ #arr[0]="${{ secrets.ACCESS_TOKEN }}"
+ #arr[1]="${{ secrets.ACCESS_TOKEN_ARMBIANWORKER }}"
+ #rand=$[ $RANDOM % 2 ]
+ #echo "upload_user=${arr[$rand]}" >> $GITHUB_ENV
+
+
+ - name: "Upload artefacts"
+ timeout-minutes: 25
+ #if: ${{ github.repository_owner == 'Armbian' && github.event.inputs.release != 'stable' }}
+ #if: ${{ inputs.release != 'stable' && github.event.inputs.uploadtoserver != 'yes' }}
+ #if: (${{ github.event.inputs.uploadtoserver || 'github' }} == 'github') || (${{ github.event.inputs.uploadtoserver || 'github' }} == 'both')
+ if: ${{ ( github.event.inputs.nightlybuild || 'yes' ) == 'yes' || env.RELEASE_REPOSITORY == 'community' || env.RELEASE_REPOSITORY == 'distribution' }}
+ uses: ncipollo/release-action@v1
+ with:
+ repo: "${{ env.RELEASE_REPOSITORY }}"
+ artifacts: "output/images/*/*/Armbian_*.*"
+ tag: "${{ needs.matrix_prep.outputs.version }}"
+ omitBody: true
+ replacesArtifacts: true
+ omitName: true
+ makeLatest: false
+ omitPrereleaseDuringUpdate: true
+ allowUpdates: true
+ artifactErrorsFailBuild: true
+ token: "${{ env.GH_TOKEN }}"
+ #token: ${{ secrets.ACCESS_TOKEN }}
+ #token: ${{ env.upload_user }}
+
+ - name: Deploy to servers
+ timeout-minutes: 240
+ if: ${{ github.event.inputs.uploadtoserver == 'armbian' || github.event.inputs.uploadtoserver == 'both' }}
+ run: |
+
+ # generate control file which checks mirrors
+ sudo date +%s > output/images/control
+
+ # read servers data from NetBox database
+ curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&tag=push&tag=images&status=active" \
+ | jq '.results[] | .name,.custom_fields["path"],.custom_fields["port"],.custom_fields["username"]' | sed "s|null|beta|" | sed "s/\"//g" | xargs -n4 -d'\n' \
+ | sed "s/ /,/g" | jq -cnR '[inputs | select(length>0)]' | jq -c '.[]' | sed "s/\"//g" | sort -R | \
+ while read i; do
+ SERVER_URL=$(echo $i | cut -d "," -f1)
+ SERVER_PORT=$(echo $i | cut -d "," -f3)
+ SERVER_PATH=$(echo $i | cut -d "," -f2)
+ SERVER_USERNAME=$(echo $i | cut -d "," -f4)
+ # clean
+ ssh-keygen -f "${HOME}/.ssh/known_hosts" -R "${SERVER_URL}"
+ # upload
+ rsync --progress -e \
+ "ssh -p ${SERVER_PORT} -o StrictHostKeyChecking=accept-new" \
+ -rvP output/images/ "${SERVER_USERNAME}@${SERVER_URL}:${SERVER_PATH}/incoming/"
+ done
+
+ # cleaning self hosted runners
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ if: always()
+ uses: armbian/actions/runner-clean@main
+ "build-images-chunk-11": # templated "build-images-chunk-11"
+ needs: [ "matrix_prep", "all-artifacts-ready" ]
+ timeout-minutes: 180
+ if: ${{ !failure() && !cancelled() && ( github.repository_owner == 'armbian' ) && ( needs.matrix_prep.outputs.images-chunk-not-empty-11 == 'yes' ) }} # <-- HERE: Chunk number.
+ strategy:
+ fail-fast: false # let other jobs try to complete if one fails
+ matrix: ${{ fromJSON(needs.matrix_prep.outputs.images-chunk-json-11) }} # <-- HERE: Chunk number.
+ name: ${{ matrix.desc || 'Empty I11' }} # <-- HERE: Chunk number.
+ runs-on: ${{ matrix.runs_on }}
+ steps:
+
+ - name: Install dependencies
+ run: |
+ if [ ! -e /usr/bin/mktorrent ]; then
+ sudo apt-get update
+ sudo apt-get install -y mktorrent
+ fi
+
+ # cleaning self hosted runners
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ uses: armbian/actions/runner-clean@main
+
+ # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
+ - name: Cleanup userpatches repo
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: rm -rf userpatches.repo
+
+ - name: Checkout build repo
+ uses: actions/checkout@v4
+ with:
+ repository: ${{ env.BUILD_REPOSITORY }}
+ ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
+
+ # clone the torrent lists
+ - name: Checkout torrent lists
+ uses: actions/checkout@v4
+ with:
+ repository: XIU2/TrackersListCollection
+ clean: false
+ ref: master
+ path: trackerslist.repo
+ fetch-depth: ${{ matrix.fdepth }}
-jobs:
+ # clone the userpatches repo (`armbian/os`)
+ - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
+ uses: actions/checkout@v4
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ with:
+ repository: ${{ env.USERPATCHES_REPOSITORY }}
+ ref: ${{ env.USERPATCHES_REF }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default.
+ path: userpatches.repo
+
+ - name: "Put userpatches in place, and remove userpatches repo"
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: |
+ mkdir -pv userpatches
+ rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
+ rm -rf userpatches.repo
+
+ - name: Cleanup leftover output
+ run: |
+ rm -f userpatches/VERSION
+
+ - name: ${{matrix.desc}}
+ id: build-one-image
+ timeout-minutes: 120
+ run: |
+ # calculate loop from runner name
+ if [ -z "${ImageOS}" ]; then
+ USE_FIXED_LOOP_DEVICE=$(echo ${RUNNER_NAME} | rev | cut -d"-" -f1 | rev | sed 's/^0*//' | sed -e 's/^/\/dev\/loop/')
+ fi
+ bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" USE_FIXED_LOOP_DEVICE="$USE_FIXED_LOOP_DEVICE" SHARE_LOG=yes MAKE_FOLDERS="archive" IMAGE_VERSION=${{ needs.matrix_prep.outputs.version }} ${{env.EXTRA_PARAMS_IMAGE}} ${{env.EXTRA_PARAMS_ALL_BUILDS}}
+
+ - name: "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
+ if: always()
+ run: |
+ echo "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
+
+ - name: Install SSH key
+ uses: shimataro/ssh-key-action@v2
+ with:
+ key: ${{ secrets.KEY_UPLOAD }}
+ known_hosts: ${{ secrets.KNOWN_HOSTS_ARMBIAN_UPLOAD }}
+ if_key_exists: replace
+
+ - name: Check API rate limits
+ run: |
+
+ # install dependencies
+ if ! command -v "gh" > /dev/null 2>&1; then
+ sudo apt-get -y -qq install gh
+ fi
+
+ while true
+ do
+ API_CALLS_TOTAL=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.limit')
+ API_CALLS_LEFT=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.remaining')
+ PERCENT=$(( API_CALLS_LEFT * 100 / API_CALLS_TOTAL ))
+ if (( $PERCENT > 20 )); then
+ echo "API rate in good shape $PERCENT % free"
+ exit 0
+ fi
+ echo "API rate lower then 20%, sleping 10m"
+ sleep 10m
+ done
+
+ # show current api rate
+ curl -s -H "Accept: application/vnd.github.v3+json" -H "Authorization: token ${{ secrets.ACCESS_TOKEN }}" https://api.github.com/rate_limit
+
+ - name: Import GPG key
+ env:
+ GPG_KEY1: ${{ secrets.GPG_KEY1 }}
+ if: env.GPG_KEY1 != null
+ uses: crazy-max/ghaction-import-gpg@v6
+ with:
+ gpg_private_key: ${{ secrets.GPG_KEY1 }}
+ passphrase: ${{ secrets.GPG_PASSPHRASE1 }}
+
+ - name: Sign
+ env:
+ GPG_PASSPHRASE1: ${{ secrets.GPG_PASSPHRASE1 }}
+ if: env.GPG_PASSPHRASE1 != null
+ run: |
+
+ echo ${{ secrets.GPG_PASSPHRASE1 }} | gpg --passphrase-fd 0 --armor --detach-sign --pinentry-mode loopback --batch --yes output/images/*/archive/*.img*.xz
+
+ - name: Generate torrent
+ timeout-minutes: 3
+ run: |
+
+ TRACKERS=$(cat trackerslist.repo/best.txt | sed '/^\s*$/d' | sort -R | while read line; do printf " --announce=""${line}"; done)
+ BOARD=$(ls -1 output/images/ | head -1)
+ FILE=$(ls -1 output/images/*/archive/*.img*.xz | head -1 | rev | cut -d"/" -f1 | rev)
+ WEBSEEDS="https://github.com/armbian/os/releases/download/${{ needs.matrix_prep.outputs.version }}/${FILE}"
+ # nightly images are GH only
+ rm -f servers.txt
+ if [ "${{ github.event.inputs.nightlybuild || 'yes' }}" == "no" ]; then
+ # add seeds from different regions
+ #WEBSEEDS=$(curl -sq http://redirect.armbian.com/mirrors | jq -Mr '.' | grep http | tr -d \"," " | sort | uniq | sed "s|$|${BOARD}\/archive\/${FILE},|")
+
+ SERVERS=$(curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&device_role=Mirror&tag=images&status=active" \
+ | jq '.results[] | .name' | grep -v null | sed "s/\"//g")
+ WEBSEEDS=$(for server in $SERVERS; do
+ JSON=$(curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&name=${server}" | jq)
+ SERVER_PATH=$(echo "${JSON}" | jq '.results[] | .custom_fields["download_path_images"]' | sed "s/\"//g")
+ if [ "${SERVER_PATH}" == "null" ]; then SERVER_PATH="dl"; fi
+ echo "http://$server/$SERVER_PATH/" | sed "s|$|${BOARD}\/archive\/${FILE},|"
+ done | sed 's/ \+/\n/g')
+ fi
+ cd output/images/*/archive/
+ mktorrent --comment="Armbian torrent for ${FILE}" --verbose ${TRACKERS} --web-seed="${WEBSEEDS}" ${FILE}
+
+ #- name: Choose random user for upload
+ # run: |
+
+ #arr[0]="${{ secrets.ACCESS_TOKEN }}"
+ #arr[1]="${{ secrets.ACCESS_TOKEN_ARMBIANWORKER }}"
+ #rand=$[ $RANDOM % 2 ]
+ #echo "upload_user=${arr[$rand]}" >> $GITHUB_ENV
+
+
+ - name: "Upload artefacts"
+ timeout-minutes: 25
+ #if: ${{ github.repository_owner == 'Armbian' && github.event.inputs.release != 'stable' }}
+ #if: ${{ inputs.release != 'stable' && github.event.inputs.uploadtoserver != 'yes' }}
+ #if: (${{ github.event.inputs.uploadtoserver || 'github' }} == 'github') || (${{ github.event.inputs.uploadtoserver || 'github' }} == 'both')
+ if: ${{ ( github.event.inputs.nightlybuild || 'yes' ) == 'yes' || env.RELEASE_REPOSITORY == 'community' || env.RELEASE_REPOSITORY == 'distribution' }}
+ uses: ncipollo/release-action@v1
+ with:
+ repo: "${{ env.RELEASE_REPOSITORY }}"
+ artifacts: "output/images/*/*/Armbian_*.*"
+ tag: "${{ needs.matrix_prep.outputs.version }}"
+ omitBody: true
+ replacesArtifacts: true
+ omitName: true
+ makeLatest: false
+ omitPrereleaseDuringUpdate: true
+ allowUpdates: true
+ artifactErrorsFailBuild: true
+ token: "${{ env.GH_TOKEN }}"
+ #token: ${{ secrets.ACCESS_TOKEN }}
+ #token: ${{ env.upload_user }}
+
+ - name: Deploy to servers
+ timeout-minutes: 240
+ if: ${{ github.event.inputs.uploadtoserver == 'armbian' || github.event.inputs.uploadtoserver == 'both' }}
+ run: |
+
+ # generate control file which checks mirrors
+ sudo date +%s > output/images/control
+
+ # read servers data from NetBox database
+ curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&tag=push&tag=images&status=active" \
+ | jq '.results[] | .name,.custom_fields["path"],.custom_fields["port"],.custom_fields["username"]' | sed "s|null|beta|" | sed "s/\"//g" | xargs -n4 -d'\n' \
+ | sed "s/ /,/g" | jq -cnR '[inputs | select(length>0)]' | jq -c '.[]' | sed "s/\"//g" | sort -R | \
+ while read i; do
+ SERVER_URL=$(echo $i | cut -d "," -f1)
+ SERVER_PORT=$(echo $i | cut -d "," -f3)
+ SERVER_PATH=$(echo $i | cut -d "," -f2)
+ SERVER_USERNAME=$(echo $i | cut -d "," -f4)
+ # clean
+ ssh-keygen -f "${HOME}/.ssh/known_hosts" -R "${SERVER_URL}"
+ # upload
+ rsync --progress -e \
+ "ssh -p ${SERVER_PORT} -o StrictHostKeyChecking=accept-new" \
+ -rvP output/images/ "${SERVER_USERNAME}@${SERVER_URL}:${SERVER_PATH}/incoming/"
+ done
+
+ # cleaning self hosted runners
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ if: always()
+ uses: armbian/actions/runner-clean@main
+ "build-images-chunk-12": # templated "build-images-chunk-12"
+ needs: [ "matrix_prep", "all-artifacts-ready" ]
+ timeout-minutes: 180
+ if: ${{ !failure() && !cancelled() && ( github.repository_owner == 'armbian' ) && ( needs.matrix_prep.outputs.images-chunk-not-empty-12 == 'yes' ) }} # <-- HERE: Chunk number.
+ strategy:
+ fail-fast: false # let other jobs try to complete if one fails
+ matrix: ${{ fromJSON(needs.matrix_prep.outputs.images-chunk-json-12) }} # <-- HERE: Chunk number.
+ name: ${{ matrix.desc || 'Empty I12' }} # <-- HERE: Chunk number.
+ runs-on: ${{ matrix.runs_on }}
+ steps:
+
+ - name: Install dependencies
+ run: |
+ if [ ! -e /usr/bin/mktorrent ]; then
+ sudo apt-get update
+ sudo apt-get install -y mktorrent
+ fi
+
+ # cleaning self hosted runners
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ uses: armbian/actions/runner-clean@main
+
+ # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
+ - name: Cleanup userpatches repo
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: rm -rf userpatches.repo
+
+ - name: Checkout build repo
+ uses: actions/checkout@v4
+ with:
+ repository: ${{ env.BUILD_REPOSITORY }}
+ ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
+
+ # clone the torrent lists
+ - name: Checkout torrent lists
+ uses: actions/checkout@v4
+ with:
+ repository: XIU2/TrackersListCollection
+ clean: false
+ ref: master
+ path: trackerslist.repo
+ fetch-depth: ${{ matrix.fdepth }}
+
+ # clone the userpatches repo (`armbian/os`)
+ - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
+ uses: actions/checkout@v4
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ with:
+ repository: ${{ env.USERPATCHES_REPOSITORY }}
+ ref: ${{ env.USERPATCHES_REF }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default.
+ path: userpatches.repo
+
+ - name: "Put userpatches in place, and remove userpatches repo"
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: |
+ mkdir -pv userpatches
+ rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
+ rm -rf userpatches.repo
+
+ - name: Cleanup leftover output
+ run: |
+ rm -f userpatches/VERSION
+
+ - name: ${{matrix.desc}}
+ id: build-one-image
+ timeout-minutes: 120
+ run: |
+ # calculate loop from runner name
+ if [ -z "${ImageOS}" ]; then
+ USE_FIXED_LOOP_DEVICE=$(echo ${RUNNER_NAME} | rev | cut -d"-" -f1 | rev | sed 's/^0*//' | sed -e 's/^/\/dev\/loop/')
+ fi
+ bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" USE_FIXED_LOOP_DEVICE="$USE_FIXED_LOOP_DEVICE" SHARE_LOG=yes MAKE_FOLDERS="archive" IMAGE_VERSION=${{ needs.matrix_prep.outputs.version }} ${{env.EXTRA_PARAMS_IMAGE}} ${{env.EXTRA_PARAMS_ALL_BUILDS}}
+
+ - name: "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
+ if: always()
+ run: |
+ echo "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
+
+ - name: Install SSH key
+ uses: shimataro/ssh-key-action@v2
+ with:
+ key: ${{ secrets.KEY_UPLOAD }}
+ known_hosts: ${{ secrets.KNOWN_HOSTS_ARMBIAN_UPLOAD }}
+ if_key_exists: replace
+
+ - name: Check API rate limits
+ run: |
+
+ # install dependencies
+ if ! command -v "gh" > /dev/null 2>&1; then
+ sudo apt-get -y -qq install gh
+ fi
+
+ while true
+ do
+ API_CALLS_TOTAL=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.limit')
+ API_CALLS_LEFT=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.remaining')
+ PERCENT=$(( API_CALLS_LEFT * 100 / API_CALLS_TOTAL ))
+ if (( $PERCENT > 20 )); then
+ echo "API rate in good shape $PERCENT % free"
+ exit 0
+ fi
+ echo "API rate lower then 20%, sleping 10m"
+ sleep 10m
+ done
+
+ # show current api rate
+ curl -s -H "Accept: application/vnd.github.v3+json" -H "Authorization: token ${{ secrets.ACCESS_TOKEN }}" https://api.github.com/rate_limit
+
+ - name: Import GPG key
+ env:
+ GPG_KEY1: ${{ secrets.GPG_KEY1 }}
+ if: env.GPG_KEY1 != null
+ uses: crazy-max/ghaction-import-gpg@v6
+ with:
+ gpg_private_key: ${{ secrets.GPG_KEY1 }}
+ passphrase: ${{ secrets.GPG_PASSPHRASE1 }}
+
+ - name: Sign
+ env:
+ GPG_PASSPHRASE1: ${{ secrets.GPG_PASSPHRASE1 }}
+ if: env.GPG_PASSPHRASE1 != null
+ run: |
+
+ echo ${{ secrets.GPG_PASSPHRASE1 }} | gpg --passphrase-fd 0 --armor --detach-sign --pinentry-mode loopback --batch --yes output/images/*/archive/*.img*.xz
+
+ - name: Generate torrent
+ timeout-minutes: 3
+ run: |
+
+ TRACKERS=$(cat trackerslist.repo/best.txt | sed '/^\s*$/d' | sort -R | while read line; do printf " --announce=""${line}"; done)
+ BOARD=$(ls -1 output/images/ | head -1)
+ FILE=$(ls -1 output/images/*/archive/*.img*.xz | head -1 | rev | cut -d"/" -f1 | rev)
+ WEBSEEDS="https://github.com/armbian/os/releases/download/${{ needs.matrix_prep.outputs.version }}/${FILE}"
+ # nightly images are GH only
+ rm -f servers.txt
+ if [ "${{ github.event.inputs.nightlybuild || 'yes' }}" == "no" ]; then
+ # add seeds from different regions
+ #WEBSEEDS=$(curl -sq http://redirect.armbian.com/mirrors | jq -Mr '.' | grep http | tr -d \"," " | sort | uniq | sed "s|$|${BOARD}\/archive\/${FILE},|")
+
+ SERVERS=$(curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&device_role=Mirror&tag=images&status=active" \
+ | jq '.results[] | .name' | grep -v null | sed "s/\"//g")
+ WEBSEEDS=$(for server in $SERVERS; do
+ JSON=$(curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&name=${server}" | jq)
+ SERVER_PATH=$(echo "${JSON}" | jq '.results[] | .custom_fields["download_path_images"]' | sed "s/\"//g")
+ if [ "${SERVER_PATH}" == "null" ]; then SERVER_PATH="dl"; fi
+ echo "http://$server/$SERVER_PATH/" | sed "s|$|${BOARD}\/archive\/${FILE},|"
+ done | sed 's/ \+/\n/g')
+ fi
+ cd output/images/*/archive/
+ mktorrent --comment="Armbian torrent for ${FILE}" --verbose ${TRACKERS} --web-seed="${WEBSEEDS}" ${FILE}
+
+ #- name: Choose random user for upload
+ # run: |
+
+ #arr[0]="${{ secrets.ACCESS_TOKEN }}"
+ #arr[1]="${{ secrets.ACCESS_TOKEN_ARMBIANWORKER }}"
+ #rand=$[ $RANDOM % 2 ]
+ #echo "upload_user=${arr[$rand]}" >> $GITHUB_ENV
+
+
+ - name: "Upload artefacts"
+ timeout-minutes: 25
+ #if: ${{ github.repository_owner == 'Armbian' && github.event.inputs.release != 'stable' }}
+ #if: ${{ inputs.release != 'stable' && github.event.inputs.uploadtoserver != 'yes' }}
+ #if: (${{ github.event.inputs.uploadtoserver || 'github' }} == 'github') || (${{ github.event.inputs.uploadtoserver || 'github' }} == 'both')
+ if: ${{ ( github.event.inputs.nightlybuild || 'yes' ) == 'yes' || env.RELEASE_REPOSITORY == 'community' || env.RELEASE_REPOSITORY == 'distribution' }}
+ uses: ncipollo/release-action@v1
+ with:
+ repo: "${{ env.RELEASE_REPOSITORY }}"
+ artifacts: "output/images/*/*/Armbian_*.*"
+ tag: "${{ needs.matrix_prep.outputs.version }}"
+ omitBody: true
+ replacesArtifacts: true
+ omitName: true
+ makeLatest: false
+ omitPrereleaseDuringUpdate: true
+ allowUpdates: true
+ artifactErrorsFailBuild: true
+ token: "${{ env.GH_TOKEN }}"
+ #token: ${{ secrets.ACCESS_TOKEN }}
+ #token: ${{ env.upload_user }}
+
+ - name: Deploy to servers
+ timeout-minutes: 240
+ if: ${{ github.event.inputs.uploadtoserver == 'armbian' || github.event.inputs.uploadtoserver == 'both' }}
+ run: |
+
+ # generate control file which checks mirrors
+ sudo date +%s > output/images/control
+
+ # read servers data from NetBox database
+ curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&tag=push&tag=images&status=active" \
+ | jq '.results[] | .name,.custom_fields["path"],.custom_fields["port"],.custom_fields["username"]' | sed "s|null|beta|" | sed "s/\"//g" | xargs -n4 -d'\n' \
+ | sed "s/ /,/g" | jq -cnR '[inputs | select(length>0)]' | jq -c '.[]' | sed "s/\"//g" | sort -R | \
+ while read i; do
+ SERVER_URL=$(echo $i | cut -d "," -f1)
+ SERVER_PORT=$(echo $i | cut -d "," -f3)
+ SERVER_PATH=$(echo $i | cut -d "," -f2)
+ SERVER_USERNAME=$(echo $i | cut -d "," -f4)
+ # clean
+ ssh-keygen -f "${HOME}/.ssh/known_hosts" -R "${SERVER_URL}"
+ # upload
+ rsync --progress -e \
+ "ssh -p ${SERVER_PORT} -o StrictHostKeyChecking=accept-new" \
+ -rvP output/images/ "${SERVER_USERNAME}@${SERVER_URL}:${SERVER_PATH}/incoming/"
+ done
+
+ # cleaning self hosted runners
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ if: always()
+ uses: armbian/actions/runner-clean@main
+ "build-images-chunk-13": # templated "build-images-chunk-13"
+ needs: [ "matrix_prep", "all-artifacts-ready" ]
+ timeout-minutes: 180
+ if: ${{ !failure() && !cancelled() && ( github.repository_owner == 'armbian' ) && ( needs.matrix_prep.outputs.images-chunk-not-empty-13 == 'yes' ) }} # <-- HERE: Chunk number.
+ strategy:
+ fail-fast: false # let other jobs try to complete if one fails
+ matrix: ${{ fromJSON(needs.matrix_prep.outputs.images-chunk-json-13) }} # <-- HERE: Chunk number.
+ name: ${{ matrix.desc || 'Empty I13' }} # <-- HERE: Chunk number.
+ runs-on: ${{ matrix.runs_on }}
+ steps:
+
+ - name: Install dependencies
+ run: |
+ if [ ! -e /usr/bin/mktorrent ]; then
+ sudo apt-get update
+ sudo apt-get install -y mktorrent
+ fi
+
+ # cleaning self hosted runners
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ uses: armbian/actions/runner-clean@main
+
+ # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
+ - name: Cleanup userpatches repo
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: rm -rf userpatches.repo
+
+ - name: Checkout build repo
+ uses: actions/checkout@v4
+ with:
+ repository: ${{ env.BUILD_REPOSITORY }}
+ ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
+
+ # clone the torrent lists
+ - name: Checkout torrent lists
+ uses: actions/checkout@v4
+ with:
+ repository: XIU2/TrackersListCollection
+ clean: false
+ ref: master
+ path: trackerslist.repo
+ fetch-depth: ${{ matrix.fdepth }}
+
+ # clone the userpatches repo (`armbian/os`)
+ - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
+ uses: actions/checkout@v4
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ with:
+ repository: ${{ env.USERPATCHES_REPOSITORY }}
+ ref: ${{ env.USERPATCHES_REF }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default.
+ path: userpatches.repo
+
+ - name: "Put userpatches in place, and remove userpatches repo"
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: |
+ mkdir -pv userpatches
+ rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
+ rm -rf userpatches.repo
+
+ - name: Cleanup leftover output
+ run: |
+ rm -f userpatches/VERSION
+
+ - name: ${{matrix.desc}}
+ id: build-one-image
+ timeout-minutes: 120
+ run: |
+ # calculate loop from runner name
+ if [ -z "${ImageOS}" ]; then
+ USE_FIXED_LOOP_DEVICE=$(echo ${RUNNER_NAME} | rev | cut -d"-" -f1 | rev | sed 's/^0*//' | sed -e 's/^/\/dev\/loop/')
+ fi
+ bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" USE_FIXED_LOOP_DEVICE="$USE_FIXED_LOOP_DEVICE" SHARE_LOG=yes MAKE_FOLDERS="archive" IMAGE_VERSION=${{ needs.matrix_prep.outputs.version }} ${{env.EXTRA_PARAMS_IMAGE}} ${{env.EXTRA_PARAMS_ALL_BUILDS}}
+
+ - name: "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
+ if: always()
+ run: |
+ echo "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
+
+ - name: Install SSH key
+ uses: shimataro/ssh-key-action@v2
+ with:
+ key: ${{ secrets.KEY_UPLOAD }}
+ known_hosts: ${{ secrets.KNOWN_HOSTS_ARMBIAN_UPLOAD }}
+ if_key_exists: replace
+
+ - name: Check API rate limits
+ run: |
+
+ # install dependencies
+ if ! command -v "gh" > /dev/null 2>&1; then
+ sudo apt-get -y -qq install gh
+ fi
+
+ while true
+ do
+ API_CALLS_TOTAL=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.limit')
+ API_CALLS_LEFT=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.remaining')
+ PERCENT=$(( API_CALLS_LEFT * 100 / API_CALLS_TOTAL ))
+ if (( $PERCENT > 20 )); then
+ echo "API rate in good shape $PERCENT % free"
+ exit 0
+ fi
+ echo "API rate lower then 20%, sleping 10m"
+ sleep 10m
+ done
+
+ # show current api rate
+ curl -s -H "Accept: application/vnd.github.v3+json" -H "Authorization: token ${{ secrets.ACCESS_TOKEN }}" https://api.github.com/rate_limit
+
+ - name: Import GPG key
+ env:
+ GPG_KEY1: ${{ secrets.GPG_KEY1 }}
+ if: env.GPG_KEY1 != null
+ uses: crazy-max/ghaction-import-gpg@v6
+ with:
+ gpg_private_key: ${{ secrets.GPG_KEY1 }}
+ passphrase: ${{ secrets.GPG_PASSPHRASE1 }}
+
+ - name: Sign
+ env:
+ GPG_PASSPHRASE1: ${{ secrets.GPG_PASSPHRASE1 }}
+ if: env.GPG_PASSPHRASE1 != null
+ run: |
+
+ echo ${{ secrets.GPG_PASSPHRASE1 }} | gpg --passphrase-fd 0 --armor --detach-sign --pinentry-mode loopback --batch --yes output/images/*/archive/*.img*.xz
+
+ - name: Generate torrent
+ timeout-minutes: 3
+ run: |
+
+ TRACKERS=$(cat trackerslist.repo/best.txt | sed '/^\s*$/d' | sort -R | while read line; do printf " --announce=""${line}"; done)
+ BOARD=$(ls -1 output/images/ | head -1)
+ FILE=$(ls -1 output/images/*/archive/*.img*.xz | head -1 | rev | cut -d"/" -f1 | rev)
+ WEBSEEDS="https://github.com/armbian/os/releases/download/${{ needs.matrix_prep.outputs.version }}/${FILE}"
+ # nightly images are GH only
+ rm -f servers.txt
+ if [ "${{ github.event.inputs.nightlybuild || 'yes' }}" == "no" ]; then
+ # add seeds from different regions
+ #WEBSEEDS=$(curl -sq http://redirect.armbian.com/mirrors | jq -Mr '.' | grep http | tr -d \"," " | sort | uniq | sed "s|$|${BOARD}\/archive\/${FILE},|")
+
+ SERVERS=$(curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&device_role=Mirror&tag=images&status=active" \
+ | jq '.results[] | .name' | grep -v null | sed "s/\"//g")
+ WEBSEEDS=$(for server in $SERVERS; do
+ JSON=$(curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&name=${server}" | jq)
+ SERVER_PATH=$(echo "${JSON}" | jq '.results[] | .custom_fields["download_path_images"]' | sed "s/\"//g")
+ if [ "${SERVER_PATH}" == "null" ]; then SERVER_PATH="dl"; fi
+ echo "http://$server/$SERVER_PATH/" | sed "s|$|${BOARD}\/archive\/${FILE},|"
+ done | sed 's/ \+/\n/g')
+ fi
+ cd output/images/*/archive/
+ mktorrent --comment="Armbian torrent for ${FILE}" --verbose ${TRACKERS} --web-seed="${WEBSEEDS}" ${FILE}
+
+ #- name: Choose random user for upload
+ # run: |
+
+ #arr[0]="${{ secrets.ACCESS_TOKEN }}"
+ #arr[1]="${{ secrets.ACCESS_TOKEN_ARMBIANWORKER }}"
+ #rand=$[ $RANDOM % 2 ]
+ #echo "upload_user=${arr[$rand]}" >> $GITHUB_ENV
+
+
+ - name: "Upload artefacts"
+ timeout-minutes: 25
+ #if: ${{ github.repository_owner == 'Armbian' && github.event.inputs.release != 'stable' }}
+ #if: ${{ inputs.release != 'stable' && github.event.inputs.uploadtoserver != 'yes' }}
+ #if: (${{ github.event.inputs.uploadtoserver || 'github' }} == 'github') || (${{ github.event.inputs.uploadtoserver || 'github' }} == 'both')
+ if: ${{ ( github.event.inputs.nightlybuild || 'yes' ) == 'yes' || env.RELEASE_REPOSITORY == 'community' || env.RELEASE_REPOSITORY == 'distribution' }}
+ uses: ncipollo/release-action@v1
+ with:
+ repo: "${{ env.RELEASE_REPOSITORY }}"
+ artifacts: "output/images/*/*/Armbian_*.*"
+ tag: "${{ needs.matrix_prep.outputs.version }}"
+ omitBody: true
+ replacesArtifacts: true
+ omitName: true
+ makeLatest: false
+ omitPrereleaseDuringUpdate: true
+ allowUpdates: true
+ artifactErrorsFailBuild: true
+ token: "${{ env.GH_TOKEN }}"
+ #token: ${{ secrets.ACCESS_TOKEN }}
+ #token: ${{ env.upload_user }}
+
+ - name: Deploy to servers
+ timeout-minutes: 240
+ if: ${{ github.event.inputs.uploadtoserver == 'armbian' || github.event.inputs.uploadtoserver == 'both' }}
+ run: |
+
+ # generate control file which checks mirrors
+ sudo date +%s > output/images/control
+
+ # read servers data from NetBox database
+ curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&tag=push&tag=images&status=active" \
+ | jq '.results[] | .name,.custom_fields["path"],.custom_fields["port"],.custom_fields["username"]' | sed "s|null|beta|" | sed "s/\"//g" | xargs -n4 -d'\n' \
+ | sed "s/ /,/g" | jq -cnR '[inputs | select(length>0)]' | jq -c '.[]' | sed "s/\"//g" | sort -R | \
+ while read i; do
+ SERVER_URL=$(echo $i | cut -d "," -f1)
+ SERVER_PORT=$(echo $i | cut -d "," -f3)
+ SERVER_PATH=$(echo $i | cut -d "," -f2)
+ SERVER_USERNAME=$(echo $i | cut -d "," -f4)
+ # clean
+ ssh-keygen -f "${HOME}/.ssh/known_hosts" -R "${SERVER_URL}"
+ # upload
+ rsync --progress -e \
+ "ssh -p ${SERVER_PORT} -o StrictHostKeyChecking=accept-new" \
+ -rvP output/images/ "${SERVER_USERNAME}@${SERVER_URL}:${SERVER_PATH}/incoming/"
+ done
+
+ # cleaning self hosted runners
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ if: always()
+ uses: armbian/actions/runner-clean@main
+ "build-images-chunk-14": # templated "build-images-chunk-14"
+ needs: [ "matrix_prep", "all-artifacts-ready" ]
+ timeout-minutes: 180
+ if: ${{ !failure() && !cancelled() && ( github.repository_owner == 'armbian' ) && ( needs.matrix_prep.outputs.images-chunk-not-empty-14 == 'yes' ) }} # <-- HERE: Chunk number.
+ strategy:
+ fail-fast: false # let other jobs try to complete if one fails
+ matrix: ${{ fromJSON(needs.matrix_prep.outputs.images-chunk-json-14) }} # <-- HERE: Chunk number.
+ name: ${{ matrix.desc || 'Empty I14' }} # <-- HERE: Chunk number.
+ runs-on: ${{ matrix.runs_on }}
+ steps:
+
+ - name: Install dependencies
+ run: |
+ if [ ! -e /usr/bin/mktorrent ]; then
+ sudo apt-get update
+ sudo apt-get install -y mktorrent
+ fi
+
+ # cleaning self hosted runners
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ uses: armbian/actions/runner-clean@main
+
+ # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
+ - name: Cleanup userpatches repo
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: rm -rf userpatches.repo
+
+ - name: Checkout build repo
+ uses: actions/checkout@v4
+ with:
+ repository: ${{ env.BUILD_REPOSITORY }}
+ ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
+
+ # clone the torrent lists
+ - name: Checkout torrent lists
+ uses: actions/checkout@v4
+ with:
+ repository: XIU2/TrackersListCollection
+ clean: false
+ ref: master
+ path: trackerslist.repo
+ fetch-depth: ${{ matrix.fdepth }}
+
+ # clone the userpatches repo (`armbian/os`)
+ - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
+ uses: actions/checkout@v4
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ with:
+ repository: ${{ env.USERPATCHES_REPOSITORY }}
+ ref: ${{ env.USERPATCHES_REF }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default.
+ path: userpatches.repo
+
+ - name: "Put userpatches in place, and remove userpatches repo"
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: |
+ mkdir -pv userpatches
+ rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
+ rm -rf userpatches.repo
+
+ - name: Cleanup leftover output
+ run: |
+ rm -f userpatches/VERSION
+
+ - name: ${{matrix.desc}}
+ id: build-one-image
+ timeout-minutes: 120
+ run: |
+ # calculate loop from runner name
+ if [ -z "${ImageOS}" ]; then
+ USE_FIXED_LOOP_DEVICE=$(echo ${RUNNER_NAME} | rev | cut -d"-" -f1 | rev | sed 's/^0*//' | sed -e 's/^/\/dev\/loop/')
+ fi
+ bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" USE_FIXED_LOOP_DEVICE="$USE_FIXED_LOOP_DEVICE" SHARE_LOG=yes MAKE_FOLDERS="archive" IMAGE_VERSION=${{ needs.matrix_prep.outputs.version }} ${{env.EXTRA_PARAMS_IMAGE}} ${{env.EXTRA_PARAMS_ALL_BUILDS}}
+
+ - name: "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
+ if: always()
+ run: |
+ echo "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
+
+ - name: Install SSH key
+ uses: shimataro/ssh-key-action@v2
+ with:
+ key: ${{ secrets.KEY_UPLOAD }}
+ known_hosts: ${{ secrets.KNOWN_HOSTS_ARMBIAN_UPLOAD }}
+ if_key_exists: replace
+
+ - name: Check API rate limits
+ run: |
+
+ # install dependencies
+ if ! command -v "gh" > /dev/null 2>&1; then
+ sudo apt-get -y -qq install gh
+ fi
+
+ while true
+ do
+ API_CALLS_TOTAL=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.limit')
+ API_CALLS_LEFT=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.remaining')
+ PERCENT=$(( API_CALLS_LEFT * 100 / API_CALLS_TOTAL ))
+ if (( $PERCENT > 20 )); then
+ echo "API rate in good shape $PERCENT % free"
+ exit 0
+ fi
+ echo "API rate lower then 20%, sleping 10m"
+ sleep 10m
+ done
+
+ # show current api rate
+ curl -s -H "Accept: application/vnd.github.v3+json" -H "Authorization: token ${{ secrets.ACCESS_TOKEN }}" https://api.github.com/rate_limit
+
+ - name: Import GPG key
+ env:
+ GPG_KEY1: ${{ secrets.GPG_KEY1 }}
+ if: env.GPG_KEY1 != null
+ uses: crazy-max/ghaction-import-gpg@v6
+ with:
+ gpg_private_key: ${{ secrets.GPG_KEY1 }}
+ passphrase: ${{ secrets.GPG_PASSPHRASE1 }}
+
+ - name: Sign
+ env:
+ GPG_PASSPHRASE1: ${{ secrets.GPG_PASSPHRASE1 }}
+ if: env.GPG_PASSPHRASE1 != null
+ run: |
+
+ echo ${{ secrets.GPG_PASSPHRASE1 }} | gpg --passphrase-fd 0 --armor --detach-sign --pinentry-mode loopback --batch --yes output/images/*/archive/*.img*.xz
+
+ - name: Generate torrent
+ timeout-minutes: 3
+ run: |
+
+ TRACKERS=$(cat trackerslist.repo/best.txt | sed '/^\s*$/d' | sort -R | while read line; do printf " --announce=""${line}"; done)
+ BOARD=$(ls -1 output/images/ | head -1)
+ FILE=$(ls -1 output/images/*/archive/*.img*.xz | head -1 | rev | cut -d"/" -f1 | rev)
+ WEBSEEDS="https://github.com/armbian/os/releases/download/${{ needs.matrix_prep.outputs.version }}/${FILE}"
+ # nightly images are GH only
+ rm -f servers.txt
+ if [ "${{ github.event.inputs.nightlybuild || 'yes' }}" == "no" ]; then
+ # add seeds from different regions
+ #WEBSEEDS=$(curl -sq http://redirect.armbian.com/mirrors | jq -Mr '.' | grep http | tr -d \"," " | sort | uniq | sed "s|$|${BOARD}\/archive\/${FILE},|")
+
+ SERVERS=$(curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&device_role=Mirror&tag=images&status=active" \
+ | jq '.results[] | .name' | grep -v null | sed "s/\"//g")
+ WEBSEEDS=$(for server in $SERVERS; do
+ JSON=$(curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&name=${server}" | jq)
+ SERVER_PATH=$(echo "${JSON}" | jq '.results[] | .custom_fields["download_path_images"]' | sed "s/\"//g")
+ if [ "${SERVER_PATH}" == "null" ]; then SERVER_PATH="dl"; fi
+ echo "http://$server/$SERVER_PATH/" | sed "s|$|${BOARD}\/archive\/${FILE},|"
+ done | sed 's/ \+/\n/g')
+ fi
+ cd output/images/*/archive/
+ mktorrent --comment="Armbian torrent for ${FILE}" --verbose ${TRACKERS} --web-seed="${WEBSEEDS}" ${FILE}
+
+ #- name: Choose random user for upload
+ # run: |
+
+ #arr[0]="${{ secrets.ACCESS_TOKEN }}"
+ #arr[1]="${{ secrets.ACCESS_TOKEN_ARMBIANWORKER }}"
+ #rand=$[ $RANDOM % 2 ]
+ #echo "upload_user=${arr[$rand]}" >> $GITHUB_ENV
+
+
+ - name: "Upload artefacts"
+ timeout-minutes: 25
+ #if: ${{ github.repository_owner == 'Armbian' && github.event.inputs.release != 'stable' }}
+ #if: ${{ inputs.release != 'stable' && github.event.inputs.uploadtoserver != 'yes' }}
+ #if: (${{ github.event.inputs.uploadtoserver || 'github' }} == 'github') || (${{ github.event.inputs.uploadtoserver || 'github' }} == 'both')
+ if: ${{ ( github.event.inputs.nightlybuild || 'yes' ) == 'yes' || env.RELEASE_REPOSITORY == 'community' || env.RELEASE_REPOSITORY == 'distribution' }}
+ uses: ncipollo/release-action@v1
+ with:
+ repo: "${{ env.RELEASE_REPOSITORY }}"
+ artifacts: "output/images/*/*/Armbian_*.*"
+ tag: "${{ needs.matrix_prep.outputs.version }}"
+ omitBody: true
+ replacesArtifacts: true
+ omitName: true
+ makeLatest: false
+ omitPrereleaseDuringUpdate: true
+ allowUpdates: true
+ artifactErrorsFailBuild: true
+ token: "${{ env.GH_TOKEN }}"
+ #token: ${{ secrets.ACCESS_TOKEN }}
+ #token: ${{ env.upload_user }}
+
+ - name: Deploy to servers
+ timeout-minutes: 240
+ if: ${{ github.event.inputs.uploadtoserver == 'armbian' || github.event.inputs.uploadtoserver == 'both' }}
+ run: |
+
+ # generate control file which checks mirrors
+ sudo date +%s > output/images/control
+
+ # read servers data from NetBox database
+ curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&tag=push&tag=images&status=active" \
+ | jq '.results[] | .name,.custom_fields["path"],.custom_fields["port"],.custom_fields["username"]' | sed "s|null|beta|" | sed "s/\"//g" | xargs -n4 -d'\n' \
+ | sed "s/ /,/g" | jq -cnR '[inputs | select(length>0)]' | jq -c '.[]' | sed "s/\"//g" | sort -R | \
+ while read i; do
+ SERVER_URL=$(echo $i | cut -d "," -f1)
+ SERVER_PORT=$(echo $i | cut -d "," -f3)
+ SERVER_PATH=$(echo $i | cut -d "," -f2)
+ SERVER_USERNAME=$(echo $i | cut -d "," -f4)
+ # clean
+ ssh-keygen -f "${HOME}/.ssh/known_hosts" -R "${SERVER_URL}"
+ # upload
+ rsync --progress -e \
+ "ssh -p ${SERVER_PORT} -o StrictHostKeyChecking=accept-new" \
+ -rvP output/images/ "${SERVER_USERNAME}@${SERVER_URL}:${SERVER_PATH}/incoming/"
+ done
+
+ # cleaning self hosted runners
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ if: always()
+ uses: armbian/actions/runner-clean@main
+ "build-images-chunk-15": # templated "build-images-chunk-15"
+ needs: [ "matrix_prep", "all-artifacts-ready" ]
+ timeout-minutes: 180
+ if: ${{ !failure() && !cancelled() && ( github.repository_owner == 'armbian' ) && ( needs.matrix_prep.outputs.images-chunk-not-empty-15 == 'yes' ) }} # <-- HERE: Chunk number.
+ strategy:
+ fail-fast: false # let other jobs try to complete if one fails
+ matrix: ${{ fromJSON(needs.matrix_prep.outputs.images-chunk-json-15) }} # <-- HERE: Chunk number.
+ name: ${{ matrix.desc || 'Empty I15' }} # <-- HERE: Chunk number.
+ runs-on: ${{ matrix.runs_on }}
+ steps:
+
+ - name: Install dependencies
+ run: |
+ if [ ! -e /usr/bin/mktorrent ]; then
+ sudo apt-get update
+ sudo apt-get install -y mktorrent
+ fi
+
+ # cleaning self hosted runners
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ uses: armbian/actions/runner-clean@main
+
+ # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
+ - name: Cleanup userpatches repo
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: rm -rf userpatches.repo
+
+ - name: Checkout build repo
+ uses: actions/checkout@v4
+ with:
+ repository: ${{ env.BUILD_REPOSITORY }}
+ ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
+
+ # clone the torrent lists
+ - name: Checkout torrent lists
+ uses: actions/checkout@v4
+ with:
+ repository: XIU2/TrackersListCollection
+ clean: false
+ ref: master
+ path: trackerslist.repo
+ fetch-depth: ${{ matrix.fdepth }}
+
+ # clone the userpatches repo (`armbian/os`)
+ - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
+ uses: actions/checkout@v4
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ with:
+ repository: ${{ env.USERPATCHES_REPOSITORY }}
+ ref: ${{ env.USERPATCHES_REF }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default.
+ path: userpatches.repo
+
+ - name: "Put userpatches in place, and remove userpatches repo"
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: |
+ mkdir -pv userpatches
+ rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
+ rm -rf userpatches.repo
+
+ - name: Cleanup leftover output
+ run: |
+ rm -f userpatches/VERSION
+
+ - name: ${{matrix.desc}}
+ id: build-one-image
+ timeout-minutes: 120
+ run: |
+ # calculate loop from runner name
+ if [ -z "${ImageOS}" ]; then
+ USE_FIXED_LOOP_DEVICE=$(echo ${RUNNER_NAME} | rev | cut -d"-" -f1 | rev | sed 's/^0*//' | sed -e 's/^/\/dev\/loop/')
+ fi
+ bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" USE_FIXED_LOOP_DEVICE="$USE_FIXED_LOOP_DEVICE" SHARE_LOG=yes MAKE_FOLDERS="archive" IMAGE_VERSION=${{ needs.matrix_prep.outputs.version }} ${{env.EXTRA_PARAMS_IMAGE}} ${{env.EXTRA_PARAMS_ALL_BUILDS}}
+
+ - name: "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
+ if: always()
+ run: |
+ echo "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
+
+ - name: Install SSH key
+ uses: shimataro/ssh-key-action@v2
+ with:
+ key: ${{ secrets.KEY_UPLOAD }}
+ known_hosts: ${{ secrets.KNOWN_HOSTS_ARMBIAN_UPLOAD }}
+ if_key_exists: replace
+
+ - name: Check API rate limits
+ run: |
+
+ # install dependencies
+ if ! command -v "gh" > /dev/null 2>&1; then
+ sudo apt-get -y -qq install gh
+ fi
+
+ while true
+ do
+ API_CALLS_TOTAL=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.limit')
+ API_CALLS_LEFT=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.remaining')
+ PERCENT=$(( API_CALLS_LEFT * 100 / API_CALLS_TOTAL ))
+ if (( $PERCENT > 20 )); then
+ echo "API rate in good shape $PERCENT % free"
+ exit 0
+ fi
+          echo "API rate lower than 20%, sleeping 10m"
+ sleep 10m
+ done
+
+ # show current api rate
+ curl -s -H "Accept: application/vnd.github.v3+json" -H "Authorization: token ${{ secrets.ACCESS_TOKEN }}" https://api.github.com/rate_limit
+
+ - name: Import GPG key
+ env:
+ GPG_KEY1: ${{ secrets.GPG_KEY1 }}
+ if: env.GPG_KEY1 != null
+ uses: crazy-max/ghaction-import-gpg@v6
+ with:
+ gpg_private_key: ${{ secrets.GPG_KEY1 }}
+ passphrase: ${{ secrets.GPG_PASSPHRASE1 }}
+
+ - name: Sign
+ env:
+ GPG_PASSPHRASE1: ${{ secrets.GPG_PASSPHRASE1 }}
+ if: env.GPG_PASSPHRASE1 != null
+ run: |
+
+ echo ${{ secrets.GPG_PASSPHRASE1 }} | gpg --passphrase-fd 0 --armor --detach-sign --pinentry-mode loopback --batch --yes output/images/*/archive/*.img*.xz
+
+ - name: Generate torrent
+ timeout-minutes: 3
+ run: |
+
+ TRACKERS=$(cat trackerslist.repo/best.txt | sed '/^\s*$/d' | sort -R | while read line; do printf " --announce=""${line}"; done)
+ BOARD=$(ls -1 output/images/ | head -1)
+ FILE=$(ls -1 output/images/*/archive/*.img*.xz | head -1 | rev | cut -d"/" -f1 | rev)
+ WEBSEEDS="https://github.com/armbian/os/releases/download/${{ needs.matrix_prep.outputs.version }}/${FILE}"
+        # nightly images are GH only
+ rm -f servers.txt
+ if [ "${{ github.event.inputs.nightlybuild || 'yes' }}" == "no" ]; then
+ # add seeds from different regions
+ #WEBSEEDS=$(curl -sq http://redirect.armbian.com/mirrors | jq -Mr '.' | grep http | tr -d \"," " | sort | uniq | sed "s|$|${BOARD}\/archive\/${FILE},|")
+
+ SERVERS=$(curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&device_role=Mirror&tag=images&status=active" \
+ | jq '.results[] | .name' | grep -v null | sed "s/\"//g")
+ WEBSEEDS=$(for server in $SERVERS; do
+ JSON=$(curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&name=${server}" | jq)
+ SERVER_PATH=$(echo "${JSON}" | jq '.results[] | .custom_fields["download_path_images"]' | sed "s/\"//g")
+ if [ "${SERVER_PATH}" == "null" ]; then SERVER_PATH="dl"; fi
+ echo "http://$server/$SERVER_PATH/" | sed "s|$|${BOARD}\/archive\/${FILE},|"
+ done | sed 's/ \+/\n/g')
+ fi
+ cd output/images/*/archive/
+ mktorrent --comment="Armbian torrent for ${FILE}" --verbose ${TRACKERS} --web-seed="${WEBSEEDS}" ${FILE}
+
+ #- name: Choose random user for upload
+ # run: |
+
+ #arr[0]="${{ secrets.ACCESS_TOKEN }}"
+ #arr[1]="${{ secrets.ACCESS_TOKEN_ARMBIANWORKER }}"
+ #rand=$[ $RANDOM % 2 ]
+ #echo "upload_user=${arr[$rand]}" >> $GITHUB_ENV
+
+
+ - name: "Upload artefacts"
+ timeout-minutes: 25
+ #if: ${{ github.repository_owner == 'Armbian' && github.event.inputs.release != 'stable' }}
+ #if: ${{ inputs.release != 'stable' && github.event.inputs.uploadtoserver != 'yes' }}
+ #if: (${{ github.event.inputs.uploadtoserver || 'github' }} == 'github') || (${{ github.event.inputs.uploadtoserver || 'github' }} == 'both')
+ if: ${{ ( github.event.inputs.nightlybuild || 'yes' ) == 'yes' || env.RELEASE_REPOSITORY == 'community' || env.RELEASE_REPOSITORY == 'distribution' }}
+ uses: ncipollo/release-action@v1
+ with:
+ repo: "${{ env.RELEASE_REPOSITORY }}"
+ artifacts: "output/images/*/*/Armbian_*.*"
+ tag: "${{ needs.matrix_prep.outputs.version }}"
+ omitBody: true
+ replacesArtifacts: true
+ omitName: true
+ makeLatest: false
+ omitPrereleaseDuringUpdate: true
+ allowUpdates: true
+ artifactErrorsFailBuild: true
+ token: "${{ env.GH_TOKEN }}"
+ #token: ${{ secrets.ACCESS_TOKEN }}
+ #token: ${{ env.upload_user }}
+
+ - name: Deploy to servers
+ timeout-minutes: 240
+ if: ${{ github.event.inputs.uploadtoserver == 'armbian' || github.event.inputs.uploadtoserver == 'both' }}
+ run: |
+
+ # generate control file which checks mirrors
+ sudo date +%s > output/images/control
+
+ # read servers data from NetBox database
+ curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&tag=push&tag=images&status=active" \
+ | jq '.results[] | .name,.custom_fields["path"],.custom_fields["port"],.custom_fields["username"]' | sed "s|null|beta|" | sed "s/\"//g" | xargs -n4 -d'\n' \
+ | sed "s/ /,/g" | jq -cnR '[inputs | select(length>0)]' | jq -c '.[]' | sed "s/\"//g" | sort -R | \
+ while read i; do
+ SERVER_URL=$(echo $i | cut -d "," -f1)
+ SERVER_PORT=$(echo $i | cut -d "," -f3)
+ SERVER_PATH=$(echo $i | cut -d "," -f2)
+ SERVER_USERNAME=$(echo $i | cut -d "," -f4)
+ # clean
+ ssh-keygen -f "${HOME}/.ssh/known_hosts" -R "${SERVER_URL}"
+ # upload
+ rsync --progress -e \
+ "ssh -p ${SERVER_PORT} -o StrictHostKeyChecking=accept-new" \
+ -rvP output/images/ "${SERVER_USERNAME}@${SERVER_URL}:${SERVER_PATH}/incoming/"
+ done
+
+ # cleaning self hosted runners
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ if: always()
+ uses: armbian/actions/runner-clean@main
+ "build-images-chunk-16": # templated "build-images-chunk-16"
+ needs: [ "matrix_prep", "all-artifacts-ready" ]
+ timeout-minutes: 180
+ if: ${{ !failure() && !cancelled() && ( github.repository_owner == 'armbian' ) && ( needs.matrix_prep.outputs.images-chunk-not-empty-16 == 'yes' ) }} # <-- HERE: Chunk number.
+ strategy:
+ fail-fast: false # let other jobs try to complete if one fails
+ matrix: ${{ fromJSON(needs.matrix_prep.outputs.images-chunk-json-16) }} # <-- HERE: Chunk number.
+ name: ${{ matrix.desc || 'Empty I16' }} # <-- HERE: Chunk number.
+ runs-on: ${{ matrix.runs_on }}
+ steps:
+
+ - name: Install dependencies
+ run: |
+ if [ ! -e /usr/bin/mktorrent ]; then
+ sudo apt-get update
+ sudo apt-get install -y mktorrent
+ fi
+
+ # cleaning self hosted runners
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ uses: armbian/actions/runner-clean@main
+
+ # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
+ - name: Cleanup userpatches repo
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: rm -rf userpatches.repo
+
+ - name: Checkout build repo
+ uses: actions/checkout@v4
+ with:
+ repository: ${{ env.BUILD_REPOSITORY }}
+ ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
+
+ # clone the torrent lists
+ - name: Checkout torrent lists
+ uses: actions/checkout@v4
+ with:
+ repository: XIU2/TrackersListCollection
+ clean: false
+ ref: master
+ path: trackerslist.repo
+ fetch-depth: ${{ matrix.fdepth }}
+
+ # clone the userpatches repo (`armbian/os`)
+ - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
+ uses: actions/checkout@v4
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ with:
+ repository: ${{ env.USERPATCHES_REPOSITORY }}
+ ref: ${{ env.USERPATCHES_REF }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default.
+ path: userpatches.repo
+
+ - name: "Put userpatches in place, and remove userpatches repo"
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: |
+ mkdir -pv userpatches
+ rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
+ rm -rf userpatches.repo
+
+ - name: Cleanup leftover output
+ run: |
+ rm -f userpatches/VERSION
+
+ - name: ${{matrix.desc}}
+ id: build-one-image
+ timeout-minutes: 120
+ run: |
+ # calculate loop from runner name
+ if [ -z "${ImageOS}" ]; then
+ USE_FIXED_LOOP_DEVICE=$(echo ${RUNNER_NAME} | rev | cut -d"-" -f1 | rev | sed 's/^0*//' | sed -e 's/^/\/dev\/loop/')
+ fi
+ bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" USE_FIXED_LOOP_DEVICE="$USE_FIXED_LOOP_DEVICE" SHARE_LOG=yes MAKE_FOLDERS="archive" IMAGE_VERSION=${{ needs.matrix_prep.outputs.version }} ${{env.EXTRA_PARAMS_IMAGE}} ${{env.EXTRA_PARAMS_ALL_BUILDS}}
+
+ - name: "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
+ if: always()
+ run: |
+ echo "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
+
+ - name: Install SSH key
+ uses: shimataro/ssh-key-action@v2
+ with:
+ key: ${{ secrets.KEY_UPLOAD }}
+ known_hosts: ${{ secrets.KNOWN_HOSTS_ARMBIAN_UPLOAD }}
+ if_key_exists: replace
+
+ - name: Check API rate limits
+ run: |
+
+ # install dependencies
+ if ! command -v "gh" > /dev/null 2>&1; then
+ sudo apt-get -y -qq install gh
+ fi
+
+ while true
+ do
+ API_CALLS_TOTAL=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.limit')
+ API_CALLS_LEFT=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.remaining')
+ PERCENT=$(( API_CALLS_LEFT * 100 / API_CALLS_TOTAL ))
+ if (( $PERCENT > 20 )); then
+ echo "API rate in good shape $PERCENT % free"
+ exit 0
+ fi
+          echo "API rate lower than 20%, sleeping 10m"
+ sleep 10m
+ done
+
+ # show current api rate
+ curl -s -H "Accept: application/vnd.github.v3+json" -H "Authorization: token ${{ secrets.ACCESS_TOKEN }}" https://api.github.com/rate_limit
+
+ - name: Import GPG key
+ env:
+ GPG_KEY1: ${{ secrets.GPG_KEY1 }}
+ if: env.GPG_KEY1 != null
+ uses: crazy-max/ghaction-import-gpg@v6
+ with:
+ gpg_private_key: ${{ secrets.GPG_KEY1 }}
+ passphrase: ${{ secrets.GPG_PASSPHRASE1 }}
+
+ - name: Sign
+ env:
+ GPG_PASSPHRASE1: ${{ secrets.GPG_PASSPHRASE1 }}
+ if: env.GPG_PASSPHRASE1 != null
+ run: |
+
+ echo ${{ secrets.GPG_PASSPHRASE1 }} | gpg --passphrase-fd 0 --armor --detach-sign --pinentry-mode loopback --batch --yes output/images/*/archive/*.img*.xz
+
+ - name: Generate torrent
+ timeout-minutes: 3
+ run: |
+
+ TRACKERS=$(cat trackerslist.repo/best.txt | sed '/^\s*$/d' | sort -R | while read line; do printf " --announce=""${line}"; done)
+ BOARD=$(ls -1 output/images/ | head -1)
+ FILE=$(ls -1 output/images/*/archive/*.img*.xz | head -1 | rev | cut -d"/" -f1 | rev)
+ WEBSEEDS="https://github.com/armbian/os/releases/download/${{ needs.matrix_prep.outputs.version }}/${FILE}"
+        # nightly images are GH only
+ rm -f servers.txt
+ if [ "${{ github.event.inputs.nightlybuild || 'yes' }}" == "no" ]; then
+ # add seeds from different regions
+ #WEBSEEDS=$(curl -sq http://redirect.armbian.com/mirrors | jq -Mr '.' | grep http | tr -d \"," " | sort | uniq | sed "s|$|${BOARD}\/archive\/${FILE},|")
+
+ SERVERS=$(curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&device_role=Mirror&tag=images&status=active" \
+ | jq '.results[] | .name' | grep -v null | sed "s/\"//g")
+ WEBSEEDS=$(for server in $SERVERS; do
+ JSON=$(curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&name=${server}" | jq)
+ SERVER_PATH=$(echo "${JSON}" | jq '.results[] | .custom_fields["download_path_images"]' | sed "s/\"//g")
+ if [ "${SERVER_PATH}" == "null" ]; then SERVER_PATH="dl"; fi
+ echo "http://$server/$SERVER_PATH/" | sed "s|$|${BOARD}\/archive\/${FILE},|"
+ done | sed 's/ \+/\n/g')
+ fi
+ cd output/images/*/archive/
+ mktorrent --comment="Armbian torrent for ${FILE}" --verbose ${TRACKERS} --web-seed="${WEBSEEDS}" ${FILE}
+
+ #- name: Choose random user for upload
+ # run: |
+
+ #arr[0]="${{ secrets.ACCESS_TOKEN }}"
+ #arr[1]="${{ secrets.ACCESS_TOKEN_ARMBIANWORKER }}"
+ #rand=$[ $RANDOM % 2 ]
+ #echo "upload_user=${arr[$rand]}" >> $GITHUB_ENV
+
+
+ - name: "Upload artefacts"
+ timeout-minutes: 25
+ #if: ${{ github.repository_owner == 'Armbian' && github.event.inputs.release != 'stable' }}
+ #if: ${{ inputs.release != 'stable' && github.event.inputs.uploadtoserver != 'yes' }}
+ #if: (${{ github.event.inputs.uploadtoserver || 'github' }} == 'github') || (${{ github.event.inputs.uploadtoserver || 'github' }} == 'both')
+ if: ${{ ( github.event.inputs.nightlybuild || 'yes' ) == 'yes' || env.RELEASE_REPOSITORY == 'community' || env.RELEASE_REPOSITORY == 'distribution' }}
+ uses: ncipollo/release-action@v1
+ with:
+ repo: "${{ env.RELEASE_REPOSITORY }}"
+ artifacts: "output/images/*/*/Armbian_*.*"
+ tag: "${{ needs.matrix_prep.outputs.version }}"
+ omitBody: true
+ replacesArtifacts: true
+ omitName: true
+ makeLatest: false
+ omitPrereleaseDuringUpdate: true
+ allowUpdates: true
+ artifactErrorsFailBuild: true
+ token: "${{ env.GH_TOKEN }}"
+ #token: ${{ secrets.ACCESS_TOKEN }}
+ #token: ${{ env.upload_user }}
+
+ - name: Deploy to servers
+ timeout-minutes: 240
+ if: ${{ github.event.inputs.uploadtoserver == 'armbian' || github.event.inputs.uploadtoserver == 'both' }}
+ run: |
+
+ # generate control file which checks mirrors
+ sudo date +%s > output/images/control
+
+ # read servers data from NetBox database
+ curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&tag=push&tag=images&status=active" \
+ | jq '.results[] | .name,.custom_fields["path"],.custom_fields["port"],.custom_fields["username"]' | sed "s|null|beta|" | sed "s/\"//g" | xargs -n4 -d'\n' \
+ | sed "s/ /,/g" | jq -cnR '[inputs | select(length>0)]' | jq -c '.[]' | sed "s/\"//g" | sort -R | \
+ while read i; do
+ SERVER_URL=$(echo $i | cut -d "," -f1)
+ SERVER_PORT=$(echo $i | cut -d "," -f3)
+ SERVER_PATH=$(echo $i | cut -d "," -f2)
+ SERVER_USERNAME=$(echo $i | cut -d "," -f4)
+ # clean
+ ssh-keygen -f "${HOME}/.ssh/known_hosts" -R "${SERVER_URL}"
+ # upload
+ rsync --progress -e \
+ "ssh -p ${SERVER_PORT} -o StrictHostKeyChecking=accept-new" \
+ -rvP output/images/ "${SERVER_USERNAME}@${SERVER_URL}:${SERVER_PATH}/incoming/"
+ done
+
+ # cleaning self hosted runners
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ if: always()
+ uses: armbian/actions/runner-clean@main
+
+# template file: 750.single_repo.yaml
+
+ # ------ publish packages to repository -------
+
+ publish-debs-to-repo:
+ name: "Download artifacts from ORAS cache"
+ runs-on: [ repository ]
+ if: ${{ !failure() && !cancelled() && github.event.inputs.targetsFilterInclude == '' && inputs.ref == '' }} # eg: run if dependencies worked. See https://github.com/orgs/community/discussions/45058#discussioncomment-4817378
+ needs: [ "matrix_prep", "all-artifacts-ready" ]
+ steps:
+
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ uses: armbian/actions/runner-clean@main
+
+ # Prepare dependencies.
+ # If no /usr/bin/gpg, install gnupg2
+ # If no /usr/bin/reprepro, install reprepro
+ # If no /usr/bin/lftp, install lftp
+ - name: Install dependencies
+ run: |
+ if [ ! -e /usr/bin/gpg ]; then
+ sudo apt-get update
+ sudo apt-get install -y gnupg2
+ fi
+ if [ ! -e /usr/bin/reprepro ]; then
+ sudo apt-get update
+ sudo apt-get install -y reprepro
+ fi
+ if [ ! -e /usr/bin/lftp ]; then
+ sudo apt-get update
+ sudo apt-get install -y lftp
+ fi
+
+
+ # Login to ghcr.io, for later uploading rootfs to ghcr.io
+ - name: Docker Login to GitHub Container Registry
+ uses: docker/login-action@v3
+ with:
+ registry: ghcr.io
+ username: "${{ github.repository_owner }}" # GitHub username or org
+ password: "${{ secrets.GITHUB_TOKEN }}" # GitHub actions builtin token. repo has to have pkg access.
+
+ # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
+ - name: Cleanup userpatches repo
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: rm -rf userpatches.repo
+
+ - name: Checkout build repo
+ uses: actions/checkout@v4 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners.
+ with:
+ repository: ${{ env.BUILD_REPOSITORY }}
+ ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
+ fetch-depth: 0
+ clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
+
+ # clone the userpatches repo (`armbian/os`)
+ - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
+ uses: actions/checkout@v4
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ with:
+ repository: ${{ env.USERPATCHES_REPOSITORY }}
+ ref: ${{ env.USERPATCHES_REF }}
+ fetch-depth: 0
+ clean: false # true is default.
+ path: userpatches.repo
+
+ - name: "Put userpatches in place, and remove userpatches repo"
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: |
+ mkdir -pv userpatches
+ rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
+ #rm -rf userpatches.repo
+
+ # Clean off output/info, if any
+ # Clean off debs and debs-beta
+ - name: Cleanup output/info
+ run: |
+ rm -rfv output/info output/debs output/debs-beta
+ mkdir -pv output
+
+ # Download the artifacts (output/info) produced by the prepare-matrix job.
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: build-info-json
+ path: output/info
+
+ # List the artifacts we downloaded
+ - name: List artifacts
+ run: |
+ ls -laht output/info
+
+ - name: Download the debs
+ id: download-debs
+ run: |
+ bash ./compile.sh debs-to-repo-download REVISION="${{ needs.matrix_prep.outputs.version }}" BETA=${{ github.event.inputs.nightlybuild || 'yes' }} SHARE_LOG=yes ${{env.EXTRA_PARAMS_ALL_BUILDS}}
+
+ - name: Import GPG key
+ uses: crazy-max/ghaction-import-gpg@v6
+ with:
+ gpg_private_key: ${{ secrets.GPG_KEY1 }}
+ passphrase: ${{ secrets.GPG_PASSPHRASE1 }}
+
+ - name: Install SSH key
+ uses: shimataro/ssh-key-action@v2
+ with:
+ key: ${{ secrets.KEY_UPLOAD }}
+ known_hosts: ${{ secrets.KNOWN_HOSTS_ARMBIAN_UPLOAD }}
+ if_key_exists: replace
+
+ - name: Sync parts when making single images / maintainer
+# if: ${{ (github.event.inputs.skipImages || 'yes') == 'no' }}
+ run: |
+
+ #
+ TARGET=""
+ if [ "${{ github.event.inputs.skipImages}}" == "no" ] || [ "'yes'" == "no" ]; then
+ TARGET="partial/"
+ else
+ # drop nightly repository
+ sudo rm -rf /outgoing/repository-beta/*
+ # sync to stable from the list
+ rsync -e "ssh -p ${{ secrets.HOST_UPLOAD_PORT }}" --include-from=userpatches.repo/stable-repo.sync \
+ --exclude='*' --progress -va output/debs-beta/. \
+ ${{ secrets.HOST_UPLOAD_USER }}@${{ secrets.HOST_UPLOAD }}:/incoming/${TARGET}
+ fi
+
+ echo "sync all parts"
+ rsync -e "ssh -p ${{ secrets.HOST_UPLOAD_PORT }}" -arvc \
+ --include='debs***' \
+ --exclude='*' \
+ --remove-source-files \
+ --delete \
+ output/ ${{ secrets.HOST_UPLOAD_USER }}@${{ secrets.HOST_UPLOAD }}:/incoming/${TARGET}
+ # clean
+ find output/. -type d -empty -delete
+
+ - name: "Run repository update action"
+ if: ${{ (github.event.inputs.skipImages || 'yes') == 'yes' }}
+ uses: peter-evans/repository-dispatch@v3
+ with:
+ token: ${{ secrets.DISPATCH }}
+ repository: armbian/os
+ event-type: "Repository update"
+
+ - name: "Logs debs-to-repo-download: ${{ steps.download-debs.outputs.logs_url }}"
+ run: |
+ echo "Logs debs-to-repo-download: ${{ steps.download-debs.outputs.logs_url }}"
+
+ outputs:
+
+ # not related to matrix
+ version: ${{ needs.matrix_prep.outputs.version }}
+
+# template file: 950.single_footer.yaml
+
+ # ------ aggregate all artifact chunks into a single dependency -------
+
+ closing:
+ name: "Footer"
runs-on: ubuntu-latest
+ if: ${{ !failure() && !cancelled() && inputs.ref == '' && (github.event.inputs.nightlybuild || 'yes') == 'yes' }}
+ needs: [ "matrix_prep", "all-artifacts-ready", "all-images-ready" ]
steps:
- - name: Repository Dispatch
+
+ - name: "Run webindex update action"
+ if: ${{ (github.event.inputs.skipImages || 'yes') == 'no' }}
uses: peter-evans/repository-dispatch@v3
with:
- token: ${{ secrets.ACCESS_TOKEN }}
+ token: ${{ secrets.DISPATCH }}
repository: armbian/os
- event-type: "Refresh board list"
+ event-type: "Webindex update"
+
+ - name: "Download all workflow run artifacts"
+ if: ${{ (github.event.inputs.skipImages || 'yes') != 'yes' }}
+ uses: actions/download-artifact@v4
+ with:
+ name: assets-for-download-all
+ path: downloads
+
+ - name: "Read version"
+ run: |
+
+ echo "version=$(cat downloads/version 2>/dev/null || true)" >> $GITHUB_ENV
+
+ # Delete artifact
+ - uses: geekyeggo/delete-artifact@v2
+ with:
+ name: assets-for-download-all
+ failOnError: false
+
+ # Cleaning releases
+ #
+      # To do: we need to differentiate between pre and releases and optimise the cleaning process. The following action doesn't know how to handle this best
+ #- uses: dev-drprasad/delete-older-releases@v0.3.2
+ # with:
+ # repo: armbian/os
+ # keep_latest: 16
+ # env:
+ # GITHUB_TOKEN: "${{ env.GH_TOKEN }}"
+
+ # Cleaning logs
+ - name: "Keep only 14 days of workflow logs"
+ uses: igorjs/gh-actions-clean-workflow@v4
+ with:
+ token: "${{ env.GH_TOKEN }}"
+ days_old: 7
+
+ # Switch pre-release to release
+ - uses: ncipollo/release-action@v1
+ if: ${{ (github.event.inputs.skipImages || 'yes') != 'yes' && (github.event.inputs.nightlybuild || 'yes') == 'yes' }}
+ with:
+ repo: "${{ env.RELEASE_REPOSITORY }}"
+ tag: "${{ env.version }}"
+ omitBody: true
+ omitName: true
+ allowUpdates: true
+ makeLatest: true
+ token: "${{ env.GH_TOKEN }}"
From 0ee20ced318637e5a5b4948761f9281a6e1e7022 Mon Sep 17 00:00:00 2001
From: yuweiyuan8 <871282669@qq.com>
Date: Tue, 6 Feb 2024 09:42:09 +0800
Subject: [PATCH 05/10] add redmi k30 pro
---
...031-add-lmi-csot-panel.patch => 0033-add-lmi-csot-panel.patch} | 0
...hscreen-driver.patch => 0034-add-lmi-touchscreen-driver.patch} | 0
2 files changed, 0 insertions(+), 0 deletions(-)
rename patch/kernel/archive/sm8250-6.7/{0031-add-lmi-csot-panel.patch => 0033-add-lmi-csot-panel.patch} (100%)
rename patch/kernel/archive/sm8250-6.7/{0032-add-lmi-touchscreen-driver.patch => 0034-add-lmi-touchscreen-driver.patch} (100%)
diff --git a/patch/kernel/archive/sm8250-6.7/0031-add-lmi-csot-panel.patch b/patch/kernel/archive/sm8250-6.7/0033-add-lmi-csot-panel.patch
similarity index 100%
rename from patch/kernel/archive/sm8250-6.7/0031-add-lmi-csot-panel.patch
rename to patch/kernel/archive/sm8250-6.7/0033-add-lmi-csot-panel.patch
diff --git a/patch/kernel/archive/sm8250-6.7/0032-add-lmi-touchscreen-driver.patch b/patch/kernel/archive/sm8250-6.7/0034-add-lmi-touchscreen-driver.patch
similarity index 100%
rename from patch/kernel/archive/sm8250-6.7/0032-add-lmi-touchscreen-driver.patch
rename to patch/kernel/archive/sm8250-6.7/0034-add-lmi-touchscreen-driver.patch
From 05da5b8488dd954cf50b3675ec0cd0f1e77a3a9a Mon Sep 17 00:00:00 2001
From: Yu weiyuan <32944372+yuweiyuan8@users.noreply.github.com>
Date: Tue, 6 Feb 2024 18:01:42 +0800
Subject: [PATCH 06/10] Delete
patch/kernel/archive/sm8250-6.7/0033-add-lmi-csot-panel.patch
---
.../sm8250-6.7/0033-add-lmi-csot-panel.patch | 467 ------------------
1 file changed, 467 deletions(-)
delete mode 100644 patch/kernel/archive/sm8250-6.7/0033-add-lmi-csot-panel.patch
diff --git a/patch/kernel/archive/sm8250-6.7/0033-add-lmi-csot-panel.patch b/patch/kernel/archive/sm8250-6.7/0033-add-lmi-csot-panel.patch
deleted file mode 100644
index 571373652a1d..000000000000
--- a/patch/kernel/archive/sm8250-6.7/0033-add-lmi-csot-panel.patch
+++ /dev/null
@@ -1,467 +0,0 @@
-From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
-From: FantasyGmm <16450052+FantasyGmm@users.noreply.github.com>
-Date: Mon, 29 Jan 2024 19:05:12 +0800
-Subject: Patching kernel arm64 files drivers/gpu/drm/panel/Kconfig
- drivers/gpu/drm/panel/Makefile drivers/gpu/drm/panel/panel-xiaomi-lmi-csot.c
-
-Signed-off-by: FantasyGmm <16450052+FantasyGmm@users.noreply.github.com>
----
- drivers/gpu/drm/panel/Kconfig | 9 +
- drivers/gpu/drm/panel/Makefile | 1 +
- drivers/gpu/drm/panel/panel-xiaomi-lmi-csot.c | 416 ++++++++++
- 3 files changed, 426 insertions(+)
-
-diff --git a/drivers/gpu/drm/panel/Kconfig b/drivers/gpu/drm/panel/Kconfig
-index 99e14dc212ec..2ab846e5cfaf 100644
---- a/drivers/gpu/drm/panel/Kconfig
-+++ b/drivers/gpu/drm/panel/Kconfig
-@@ -8,6 +8,15 @@ config DRM_PANEL
- menu "Display Panels"
- depends on DRM && DRM_PANEL
-
-+config DRM_PANEL_XIAOMI_LMI_CSOT
-+ tristate "Redmi k30 pro (lmi) 1080x2340 CSOT AMOLED panel"
-+ depends on OF
-+ depends on DRM_MIPI_DSI
-+ depends on BACKLIGHT_CLASS_DEVICE
-+ help
-+ Say Y here if you want to enable support for the CSOT 1080x2340
-+ dsc cmd mode panel as found in Xiaomi Mi 10 devices.
-+
- config DRM_PANEL_ABT_Y030XX067A
- tristate "ABT Y030XX067A 320x480 LCD panel"
- depends on OF && SPI
-diff --git a/drivers/gpu/drm/panel/Makefile b/drivers/gpu/drm/panel/Makefile
-index d10c3de51c6d..687a239df8b1 100644
---- a/drivers/gpu/drm/panel/Makefile
-+++ b/drivers/gpu/drm/panel/Makefile
-@@ -88,3 +88,4 @@ obj-$(CONFIG_DRM_PANEL_VISIONOX_VTDR6130) += panel-visionox-vtdr6130.o
- obj-$(CONFIG_DRM_PANEL_VISIONOX_R66451) += panel-visionox-r66451.o
- obj-$(CONFIG_DRM_PANEL_WIDECHIPS_WS2401) += panel-widechips-ws2401.o
- obj-$(CONFIG_DRM_PANEL_XINPENG_XPP055C272) += panel-xinpeng-xpp055c272.o
-+obj-$(CONFIG_DRM_PANEL_XIAOMI_LMI_CSOT) += panel-xiaomi-lmi-csot.o
-diff --git a/drivers/gpu/drm/panel/panel-xiaomi-lmi-csot.c b/drivers/gpu/drm/panel/panel-xiaomi-lmi-csot.c
-new file mode 100644
-index 000000000000..5e1d3ec9c08d
---- /dev/null
-+++ b/drivers/gpu/drm/panel/panel-xiaomi-lmi-csot.c
-@@ -0,0 +1,416 @@
-+// SPDX-License-Identifier: GPL-2.0-only
-+// Copyright (c) 2024 FIXME
-+// Generated with linux-mdss-dsi-panel-driver-generator from vendor device tree:
-+// Copyright (c) 2013, The Linux Foundation. All rights reserved. (FIXME)
-+
-+#include
-+#include
-+#include
-+#include
-+#include
-+
-+#include