diff --git a/.github/workflows/docker-build-push.yml b/.github/workflows/docker-build-push.yml
index 9c8d611..a26dfb2 100644
--- a/.github/workflows/docker-build-push.yml
+++ b/.github/workflows/docker-build-push.yml
@@ -6,57 +6,12 @@ on:
       - "main"
 
 jobs:
-  generate-matrix:
-    runs-on: ubuntu-latest
-    outputs:
-      matrix: ${{ steps.set-matrix.outputs.matrix }}
-      run_docker_jobs: ${{ steps.set-run-docker-jobs.outputs.run_docker_jobs }}
-    steps:
-      - name: Checkout code
-        uses: actions/checkout@v3
-        with:
-          fetch-depth: ${{ github.event_name == 'pull_request' && 2 || 0 }}
-      - name: Set Matrix
-        id: set-matrix
-        run: |
-          echo "matrix=$(python3 bin/generate_matrix.py submittyrpi ${{ github.event.before }} ${{ github.event.after }})" >> $GITHUB_OUTPUT
-      - name: List Matrix
-        run: |
-          echo ${{ steps.set-matrix.outputs.matrix }}
-      - name: Set Run Condition
-        id: set-run-docker-jobs
-        run: |
-          num_to_build=$(echo '${{ steps.set-matrix.outputs.matrix }}' | jq '.include | length')
-          if [[ "$num_to_build" -eq 0 ]]; then
-            echo "run_docker_jobs=false" >> $GITHUB_OUTPUT
-          else
-            echo "run_docker_jobs=true" >> $GITHUB_OUTPUT
-          fi
-  docker:
-    needs:
-      - generate-matrix
-    if: needs.generate-matrix.outputs.run_docker_jobs == 'true'
-    runs-on: ubuntu-latest
-    strategy:
-      fail-fast: true
-      matrix: ${{ fromJson(needs.generate-matrix.outputs.matrix) }}
-    steps:
-      - name: Check out repo
-        uses: actions/checkout@v3
-      - name: Set up QEMU
-        uses: docker/setup-qemu-action@v3
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v3
-      - name: Docker Hub login
-        uses: docker/login-action@releases/v1
-        with:
-          username: ${{ secrets.DOCKER_USERNAME_SUBMITTYRPI }}
-          password: ${{ secrets.DOCKER_PASSWORD_SUBMITTYRPI }}
-      - name: Build and push docker
-        uses: docker/build-push-action@v4
-        with:
-          context: ${{ matrix.context }}
-          push: true
-          tags: ${{ matrix.tags }}
-          platforms: linux/amd64,linux/arm64
-
+  call-docker-build-push:
+    uses: submitty/action-docker-build/.github/workflows/docker-build-push.yml@v24.06.00
+    with:
+      push: true
+      docker_username: ${{ vars.docker_username }}
+      base_commit: ${{ github.event.before }}
+      head_commit: ${{ github.event.after }}
+    secrets:
+      docker_password: ${{ secrets.docker_password }}
diff --git a/bin/generate_matrix.py b/bin/generate_matrix.py
deleted file mode 100644
index e148b27..0000000
--- a/bin/generate_matrix.py
+++ /dev/null
@@ -1,125 +0,0 @@
-import json
-import os
-import subprocess
-import sys
-from pathlib import Path
-
-if len(sys.argv) > 4:
-    print("Too many arguments!", file=sys.stderr)
-    exit(1)
-elif len(sys.argv) < 2:
-    print("Not enough arguments!", file=sys.stderr)
-    exit(1)
-
-if not os.path.isdir("dockerfiles"):
-    print("dockerfiles missing!", file=sys.stderr)
-    exit(1)
-
-username = sys.argv[1]
-
-to_build = []
-
-hash_before = sys.argv[2]
-hash_after = sys.argv[3]
-
-# Get list of all changed files between 2 commits
-output = subprocess.check_output(["git", "--no-pager", "diff", "--name-only", "--diff-filter=d", hash_before, hash_after])
-paths_updated = output.decode("utf-8").splitlines()
-
-build_all = "UPDATE_ALL" in paths_updated
-
-if not build_all:
-    image_set = set()
-    image_tag_set = set()
-
-    for path in paths_updated:
-        parts = Path(path).parts
-        # Only rebuild if modified files were in dockerfiles
-        if parts[0] != "dockerfiles":
-            continue
-
-        if len(parts) < 3:
-            continue
-
-        if not os.path.isdir(Path(parts[0], parts[1], parts[2])):
-            continue
-
-        if f"{parts[1]}:{parts[2]}" in image_tag_set:
-            continue
-
-        metadata = json.loads(open(Path(parts[0]) / parts[1] / "metadata.json").read())
-
-        push_latest = False
-
-        if metadata["pushLatest"]:
-            if metadata["latestTag"] == parts[2]:
-                push_latest = True
-
-        tags = f"{username}/{parts[1]}:{parts[2]}"
-        if push_latest:
-            tags += f",{username}/{parts[1]}:latest"
-        to_build.append(
-            {
-                "tags": tags,
-                "context": str(os.path.dirname(path))
-            }
-        )
-        image_set.add(parts[1])
-        image_tag_set.add(f"{parts[1]}:{parts[2]}")
-
-    # search for any metadata.json edits
-    for path in paths_updated:
-        parts = Path(path).parts
-        if parts[0] != "dockerfiles":
-            continue
-        if len(parts) < 3:
-            continue
-        if parts[2] != "metadata.json":
-            continue
-        metadata = json.loads(open(Path(parts[0]) / parts[1] / "metadata.json").read())
-        if not metadata["pushLatest"]:
-            continue  # there is no latest so nothing to rebuild
-        tag = metadata["latestTag"]
-        if parts[1] in image_set:
-            continue  # already being rebuilt
-        tags = f"{username}/{parts[1]}:{tag},{username}/{parts[1]}:latest"
-        to_build.append(
-            {
-                "tags": tags,
-                "context": str(Path(parts[0]) / parts[1] / tag)
-            }
-        )
-
-else:
-    images = os.listdir("dockerfiles")
-    for image in images:
-        path = Path("dockerfiles") / image
-        if not path.is_dir():
-            continue
-
-        metadata = json.loads(open(path / "metadata.json").read())
-
-        tags = os.listdir(path)
-        for tag in tags:
-            newpath = path / tag
-            if not newpath.is_dir():
-                continue
-
-            push_latest = False
-            if metadata["pushLatest"]:
-                if metadata["latestTag"] == tag:
-                    push_latest = True
-
-            tags = f"{username}/{image}:{tag}"
-            if push_latest:
-                tags += f",{username}/{image}:latest"
-            to_build.append(
-                {
-                    "tags": tags,
-                    "context": str(newpath)
-                }
-            )
-
-finobj = {"include": to_build}
-
-print(json.dumps(finobj))
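For context, the removed generate-matrix job populated `strategy.matrix` from the single line of JSON that `bin/generate_matrix.py` printed on stdout. Below is a minimal sketch of that output shape for one changed Dockerfile, assuming the `submittyrpi` username the old workflow passed in; the `ubuntu` image name and `22.04` tag are hypothetical examples, and the metadata.json "latest" logic is reduced to a comment. Whether the reusable submitty/action-docker-build workflow reproduces this exact shape internally is not shown by this diff.

```python
import json

# Hypothetical inputs: the Docker Hub username the old workflow hard-coded,
# plus one changed file under dockerfiles/<image>/<tag>/ (image/tag are made up).
username = "submittyrpi"
image, tag = "ubuntu", "22.04"  # parts[1] and parts[2] of the changed path

# The script appended ",<username>/<image>:latest" only when the image's
# metadata.json had pushLatest set and latestTag equal to this tag.
tags = f"{username}/{image}:{tag},{username}/{image}:latest"

matrix = {
    "include": [
        {
            "tags": tags,
            "context": f"dockerfiles/{image}/{tag}",
        }
    ]
}

# This JSON is what the "Set Matrix" step wrote to $GITHUB_OUTPUT and the
# docker job expanded via fromJson().
print(json.dumps(matrix))
```

Each `include` entry became one run of the old docker job, with `matrix.context` and `matrix.tags` handed to docker/build-push-action, and an empty `include` list (checked with jq in "Set Run Condition") skipped the docker job entirely.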