From 7ee0df83a1a624955c716192363a847cf0a175df Mon Sep 17 00:00:00 2001 From: "Jose R. Gonzalez" Date: Tue, 19 Dec 2023 09:51:45 -0600 Subject: [PATCH] testing --- .github/workflows/base.yml | 100 +-- .github/workflows/check-chart-locks.yml | 84 +-- .github/workflows/checks-content.yml | 46 ++ .github/workflows/generate_package_locks.yml | 226 +++--- .gitignore | 37 + OWNERS | 2 + .../packagemapping/__init__.py | 0 .../packagemapping/packagemapping.py | 14 +- scripts/Makefile | 56 ++ scripts/README.md | 47 ++ scripts/config/base/kustomization.yaml | 2 + scripts/config/base/service-account.yaml | 4 + .../overlays/prod/cluster-role-binding.yaml | 12 + .../config/overlays/prod/cluster-role.yaml | 47 ++ .../config/overlays/prod/kustomization.yaml | 7 + scripts/config/overlays/prod/namespace.yaml | 4 + .../overlays/test/cluster-role-binding.yaml | 12 + .../config/overlays/test/cluster-role.yaml | 47 ++ .../config/overlays/test/kustomization.yaml | 7 + scripts/config/overlays/test/namespace.yaml | 4 + scripts/get-secrets | 103 +++ scripts/pyproject.toml | 3 + scripts/requirements.txt | 37 + scripts/ruff.toml | 4 + scripts/setup.cfg | 50 ++ scripts/setup.py | 3 + scripts/src/chartprreview/__init__.py | 0 scripts/src/chartprreview/chartprreview.py | 583 +++++++++++++++ .../src/chartprreview/chartprreview_test.py | 88 +++ scripts/src/chartrepomanager/__init__.py | 0 .../src/chartrepomanager/chartrepomanager.py | 518 ++++++++++++++ .../src/chartrepomanager/indexannotations.py | 50 ++ scripts/src/checkautomerge/__init__.py | 0 scripts/src/checkautomerge/checkautomerge.py | 45 ++ scripts/src/checkprcontent/__init__.py | 0 scripts/src/checkprcontent/checkpr.py | 297 ++++++++ scripts/src/indexfile/__init__.py | 0 scripts/src/indexfile/index.py | 111 +++ scripts/src/metrics/__init__.py | 0 scripts/src/metrics/metrics.py | 674 ++++++++++++++++++ scripts/src/metrics/pushowners.py | 204 ++++++ scripts/src/owners/__init__.py | 0 scripts/src/owners/checkuser.py | 95 +++ scripts/src/owners/owners_file.py | 69 ++ scripts/src/owners/user_is_repo_owner.py | 70 ++ scripts/src/pullrequest/__init__.py | 0 scripts/src/pullrequest/prartifact.py | 158 ++++ scripts/src/pullrequest/prepare_pr_comment.py | 294 ++++++++ scripts/src/release/__init__.py | 0 scripts/src/release/release_info.py | 114 +++ scripts/src/release/releasechecker.py | 313 ++++++++ scripts/src/release/releaser.py | 284 ++++++++ scripts/src/reporegex/__init__.py | 0 scripts/src/reporegex/matchers.py | 41 ++ scripts/src/report/__init__.py | 0 scripts/src/report/get_verify_params.py | 114 +++ scripts/src/report/report_info.py | 197 +++++ scripts/src/report/verifier_report.py | 303 ++++++++ .../saforcertadmin/cluster_role_binding.yaml | 12 + scripts/src/saforcertadmin/create_sa.sh | 10 + scripts/src/saforcertadmin/push_secrets.py | 171 +++++ scripts/src/saforcharttesting/__init__.py | 0 .../saforcharttesting/saforcharttesting.py | 358 ++++++++++ scripts/src/signedchart/__init__.py | 0 scripts/src/signedchart/signedchart.py | 182 +++++ scripts/src/tools/__init__.py | 0 scripts/src/tools/gitutils.py | 175 +++++ scripts/src/updateindex/__init__.py | 0 scripts/src/updateindex/updateindex.py | 229 ++++++ scripts/src/workflowtesting/__init__.py | 0 scripts/src/workflowtesting/checkprforci.py | 129 ++++ 71 files changed, 6635 insertions(+), 211 deletions(-) create mode 100644 .github/workflows/checks-content.yml create mode 100644 .gitignore create mode 100644 OWNERS rename {scripts => old_scripts}/packagemapping/__init__.py (100%) rename {scripts => 
old_scripts}/packagemapping/packagemapping.py (98%) create mode 100644 scripts/Makefile create mode 100644 scripts/README.md create mode 100644 scripts/config/base/kustomization.yaml create mode 100644 scripts/config/base/service-account.yaml create mode 100644 scripts/config/overlays/prod/cluster-role-binding.yaml create mode 100644 scripts/config/overlays/prod/cluster-role.yaml create mode 100644 scripts/config/overlays/prod/kustomization.yaml create mode 100644 scripts/config/overlays/prod/namespace.yaml create mode 100644 scripts/config/overlays/test/cluster-role-binding.yaml create mode 100644 scripts/config/overlays/test/cluster-role.yaml create mode 100644 scripts/config/overlays/test/kustomization.yaml create mode 100644 scripts/config/overlays/test/namespace.yaml create mode 100755 scripts/get-secrets create mode 100644 scripts/pyproject.toml create mode 100644 scripts/requirements.txt create mode 100644 scripts/ruff.toml create mode 100644 scripts/setup.cfg create mode 100644 scripts/setup.py create mode 100644 scripts/src/chartprreview/__init__.py create mode 100644 scripts/src/chartprreview/chartprreview.py create mode 100644 scripts/src/chartprreview/chartprreview_test.py create mode 100644 scripts/src/chartrepomanager/__init__.py create mode 100644 scripts/src/chartrepomanager/chartrepomanager.py create mode 100644 scripts/src/chartrepomanager/indexannotations.py create mode 100644 scripts/src/checkautomerge/__init__.py create mode 100644 scripts/src/checkautomerge/checkautomerge.py create mode 100644 scripts/src/checkprcontent/__init__.py create mode 100644 scripts/src/checkprcontent/checkpr.py create mode 100644 scripts/src/indexfile/__init__.py create mode 100644 scripts/src/indexfile/index.py create mode 100644 scripts/src/metrics/__init__.py create mode 100644 scripts/src/metrics/metrics.py create mode 100644 scripts/src/metrics/pushowners.py create mode 100644 scripts/src/owners/__init__.py create mode 100644 scripts/src/owners/checkuser.py create mode 100644 scripts/src/owners/owners_file.py create mode 100644 scripts/src/owners/user_is_repo_owner.py create mode 100644 scripts/src/pullrequest/__init__.py create mode 100644 scripts/src/pullrequest/prartifact.py create mode 100644 scripts/src/pullrequest/prepare_pr_comment.py create mode 100644 scripts/src/release/__init__.py create mode 100644 scripts/src/release/release_info.py create mode 100644 scripts/src/release/releasechecker.py create mode 100644 scripts/src/release/releaser.py create mode 100644 scripts/src/reporegex/__init__.py create mode 100644 scripts/src/reporegex/matchers.py create mode 100644 scripts/src/report/__init__.py create mode 100644 scripts/src/report/get_verify_params.py create mode 100644 scripts/src/report/report_info.py create mode 100644 scripts/src/report/verifier_report.py create mode 100644 scripts/src/saforcertadmin/cluster_role_binding.yaml create mode 100755 scripts/src/saforcertadmin/create_sa.sh create mode 100644 scripts/src/saforcertadmin/push_secrets.py create mode 100644 scripts/src/saforcharttesting/__init__.py create mode 100644 scripts/src/saforcharttesting/saforcharttesting.py create mode 100644 scripts/src/signedchart/__init__.py create mode 100644 scripts/src/signedchart/signedchart.py create mode 100644 scripts/src/tools/__init__.py create mode 100644 scripts/src/tools/gitutils.py create mode 100644 scripts/src/updateindex/__init__.py create mode 100644 scripts/src/updateindex/updateindex.py create mode 100644 scripts/src/workflowtesting/__init__.py create mode 100644 
scripts/src/workflowtesting/checkprforci.py diff --git a/.github/workflows/base.yml b/.github/workflows/base.yml index 8dc7598..9679ff2 100644 --- a/.github/workflows/base.yml +++ b/.github/workflows/base.yml @@ -1,51 +1,51 @@ -# This is a basic workflow to help you get started with Actions - -name: CI - -# Controls when the workflow will run -on: - # Triggers the workflow on push or pull request events but only for the "main" branch - push: - branches: [ "main" ] - pull_request: - branches: [ "main", "gh-pages" ] - release: - types: [published] - - # Allows you to run this workflow manually from the Actions tab - workflow_dispatch: - -# A workflow run is made up of one or more jobs that can run sequentially or in parallel -jobs: - # This workflow contains a single job called "build" - build: - # The type of runner that the job will run on - runs-on: ubuntu-latest - - # Steps represent a sequence of tasks that will be executed as part of the job - steps: - # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it - - uses: actions/checkout@v3 - - # Runs a single command using the runners shell - - name: Run a one-line script - run: echo Hello, world! - - # Runs a set of commands using the runners shell - - name: Run a multi-line script - run: | - echo Add other actions to build, - echo test, and deploy your project. - - - name: Display the dispatching event - env: - e: ${{ toJson(github.event) }} - run: echo "Event Date - $e" - ensure-valid-submitter: - uses: ./.github/workflows/check-chart-locks.yml - with: - # Real-world use case would introspect this data and then pass it to this workflow. - category: community - organization: examplecom - chartname: wildfly +# # This is a basic workflow to help you get started with Actions + +# name: CI + +# # Controls when the workflow will run +# on: +# # Triggers the workflow on push or pull request events but only for the "main" branch +# push: +# branches: [ "main" ] +# pull_request: +# branches: [ "main", "gh-pages" ] +# release: +# types: [published] + +# # Allows you to run this workflow manually from the Actions tab +# workflow_dispatch: + +# # A workflow run is made up of one or more jobs that can run sequentially or in parallel +# jobs: +# # This workflow contains a single job called "build" +# build: +# # The type of runner that the job will run on +# runs-on: ubuntu-latest + +# # Steps represent a sequence of tasks that will be executed as part of the job +# steps: +# # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it +# - uses: actions/checkout@v3 + +# # Runs a single command using the runners shell +# - name: Run a one-line script +# run: echo Hello, world! + +# # Runs a set of commands using the runners shell +# - name: Run a multi-line script +# run: | +# echo Add other actions to build, +# echo test, and deploy your project. + +# - name: Display the dispatching event +# env: +# e: ${{ toJson(github.event) }} +# run: echo "Event Date - $e" +# ensure-valid-submitter: +# uses: ./.github/workflows/check-chart-locks.yml +# with: +# # Real-world use case would introspect this data and then pass it to this workflow. 
+# category: community +# organization: examplecom +# chartname: wildfly diff --git a/.github/workflows/check-chart-locks.yml b/.github/workflows/check-chart-locks.yml index 6e7671b..958b5fc 100644 --- a/.github/workflows/check-chart-locks.yml +++ b/.github/workflows/check-chart-locks.yml @@ -1,47 +1,47 @@ -# Check Chart Locks review the existing chart locks and fails if -# the submission does not existing in the right directory structure. -name: Ensure Submitter is Valid +# # Check Chart Locks reviews the existing chart locks and fails if +# # the submission does not exist in the right directory structure. +# name: Ensure Submitter is Valid -on: - workflow_call: - inputs: - category: - required: true - type: string - description: The category of the submission. (e.g. "partner", choose from [redhat, partner, community]) - organization: - required: true - type: string - description: The submitting organization (e.g. "hashicorp") - chartname: - required: true - type: string - description: The name of the chart (e.g. "vault") +# on: +# workflow_call: +# inputs: +# category: +# required: true +# type: string +# description: The category of the submission. (e.g. "partners", choose from [redhat, partners, community]) +# organization: +# required: true +# type: string +# description: The submitting organization (e.g. "hashicorp") +# chartname: +# required: true +# type: string +# description: The name of the chart (e.g. "vault") -env: - LOCKFILE_URL: https://komish.github.io/actions-workflow-call-test/lock.json +# env: +# LOCKFILE_URL: https://komish.github.io/actions-workflow-call-test/lock.json -jobs: - assert-chart-not-locked-or-locked-but-valid: - runs-on: ubuntu-latest - steps: - - name: Assemble directory path - id: assemble-path - run: | - set -o pipefail - echo "dirpath=${{ inputs.category }}/${{ inputs.organization }}/${{ inputs.chartname }}" | tee -a $GITHUB_OUTPUT - - name: Read entry from current Lockfile - run: | - wget "${{ env.LOCKFILE_URL }}" -O lock.json - md5sum lock.json - - name: Compare lockpaths - run: | - lockpath=$(jq -r .packages.${{ inputs.chartname }} lock.json) - test "${lockpath}" == "null" \ - && { echo "No lock found for chart ${{ inputs.chartname }}. We're clear to merge this in."; exit 0 ;} - test "${lockpath}" = "${{ steps.assemble-path.outputs.dirpath }}" \ - && { echo "Lock found for chart ${{ inputs.chartname }} and submission is coming from the correct path ${{ steps.assemble-path.outputs.dirpath }}."; exit 0 ;} +# jobs: +# assert-chart-not-locked-or-locked-but-valid: +# runs-on: ubuntu-latest +# steps: +# - name: Assemble directory path +# id: assemble-path +# run: | +# set -o pipefail +# echo "dirpath=${{ inputs.category }}/${{ inputs.organization }}/${{ inputs.chartname }}" | tee -a $GITHUB_OUTPUT +# - name: Read entry from current Lockfile +# run: | +# wget "${{ env.LOCKFILE_URL }}" -O lock.json +# md5sum lock.json +# - name: Compare lockpaths +# run: | +# lockpath=$(jq -r .packages.${{ inputs.chartname }} lock.json) +# test "${lockpath}" == "null" \ +# && { echo "No lock found for chart ${{ inputs.chartname }}. We're clear to merge this in."; exit 0 ;} +# test "${lockpath}" = "${{ steps.assemble-path.outputs.dirpath }}" \ +# && { echo "Lock found for chart ${{ inputs.chartname }} and submission is coming from the correct path ${{ steps.assemble-path.outputs.dirpath }}."; exit 0 ;} - echo "::error::Submission is invalid.
The chart name '${{ inputs.chartname }}' is locked to submissions from path '${lockpath}' and this submission appears to come from '${{ steps.assemble-path.outputs.dirpath }}'" - exit 1 +# echo "::error::Submission is invalid. The chart name '${{ inputs.chartname }}' is locked to submissions from path '${lockpath}' and this submission appears to come from '${{ steps.assemble-path.outputs.dirpath }}'" +# exit 1 \ No newline at end of file diff --git a/.github/workflows/checks-content.yml b/.github/workflows/checks-content.yml new file mode 100644 index 0000000..787e30f --- /dev/null +++ b/.github/workflows/checks-content.yml @@ -0,0 +1,46 @@ +name: Test Check Content + +on: + pull_request_target: + types: [opened, synchronize, reopened, edited, ready_for_review, labeled] + +jobs: + check-content: + name: Check Content + runs-on: ubuntu-20.04 + if: | + github.event.pull_request.draft == false + steps: + - name: Checkout Repository Base + uses: actions/checkout@v3 + + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: "3.10" + + - name: Install CI Scripts + run: | + # set up python scripts + echo "set up python script in $PWD" + python3 -m venv ve1 + cd scripts + ../ve1/bin/pip3 install -r requirements.txt + ../ve1/bin/pip3 install . + cd .. + + - name: Check contributor + run: | + ./ve1/bin/user-is-repo-owner ${{ github.event.pull_request.user.login }} + echo $? + + run-after: + name: Runs After + needs: check-content + runs-on: ubuntu-20.04 + if: | + github.event.pull_request.draft == false && + needs.check-content.result == 'success' + steps: + - run: | + echo "Woo hoo!" \ No newline at end of file diff --git a/.github/workflows/generate_package_locks.yml b/.github/workflows/generate_package_locks.yml index 376e408..d2a1e29 100644 --- a/.github/workflows/generate_package_locks.yml +++ b/.github/workflows/generate_package_locks.yml @@ -1,124 +1,124 @@ -# This workflow generates the JSON representation -name: Generate Package Locks +# # This workflow generates the JSON representation +# name: Generate Package Locks -on: - push: - branches: ["main"] - workflow_dispatch: +# on: +# push: +# branches: ["main"] +# workflow_dispatch: -env: - PR_DEST_BRANCH_NAME: gh-pages +# env: +# PR_DEST_BRANCH_NAME: gh-pages -concurrency: - # Prevent parallel executions of this and related tasks. - group: updating-chart-locks - cancel-in-progress: false +# concurrency: +# # Prevent parallel executions of this and related tasks.
+# group: updating-chart-locks +# cancel-in-progress: false -jobs: - generate-package-locks: - outputs: - package_locks_b64: ${{ steps.generate-package-locks.outputs.package_locks_b64 }} - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - uses: actions/setup-python@v4 - with: - python-version: "3.10" - - run: | - pip install PyYAML - - name: Generate lock file JSON from existing charts - id: generate-package-locks - run: | - set -o pipefail - python scripts/packagemapping/packagemapping.py | tee /tmp/packagelocks.json - base64 -w 0 /tmp/packagelocks.json | tee /tmp/packagelocks.json.b64 - echo "package_locks_b64=$(cat /tmp/packagelocks.json.b64)" >> $GITHUB_OUTPUT - - name: Decode and display lockfile JSON (Sanity Check) - run: | - set -o pipefail - test -n "${{ steps.generate-package-locks.outputs.package_locks_b64 }}" \ - || { echo "::error::output package_locks_b64 did not contain base64 content generated from the previous step"; exit 2 ;} - echo ${{ steps.generate-package-locks.outputs.package_locks_b64 }} | base64 -d | jq \ - || { echo "::error::output package_locks_b64 did not contain valid JSON once decoded" ; exit 3 ;} +# jobs: +# generate-package-locks: +# outputs: +# package_locks_b64: ${{ steps.generate-package-locks.outputs.package_locks_b64 }} +# runs-on: ubuntu-latest +# steps: +# - uses: actions/checkout@v3 +# - uses: actions/setup-python@v4 +# with: +# python-version: "3.10" +# - run: | +# pip install PyYAML +# - name: Generate lock file JSON from existing charts +# id: generate-package-locks +# run: | +# set -o pipefail +# python scripts/packagemapping/packagemapping.py | tee /tmp/packagelocks.json +# base64 -w 0 /tmp/packagelocks.json | tee /tmp/packagelocks.json.b64 +# echo "package_locks_b64=$(cat /tmp/packagelocks.json.b64)" >> $GITHUB_OUTPUT +# - name: Decode and display lockfile JSON (Sanity Check) +# run: | +# set -o pipefail +# test -n "${{ steps.generate-package-locks.outputs.package_locks_b64 }}" \ +# || { echo "::error::output package_locks_b64 did not contain base64 content generated from the previous step"; exit 2 ;} +# echo ${{ steps.generate-package-locks.outputs.package_locks_b64 }} | base64 -d | jq \ +# || { echo "::error::output package_locks_b64 did not contain valid JSON once decoded" ; exit 3 ;} - compare-package-lock-manifests: - needs: generate-package-locks - outputs: - needs-updating: ${{ steps.compare-package-locks.outputs.needs-updating }} - runs-on: ubuntu-latest - steps: - - name: Determine if package lock entries need updating - id: compare-package-locks - run: | - needsupdating=false - wget https://komish.github.io/actions-workflow-call-test/lock.json -O current-locks.json - set -o pipefail - test -n "${{ needs.generate-package-locks.outputs.package_locks_b64 }}" \ - || { echo "::error::output package_locks_b64 did not contain base64 content generated from the previous step"; exit 2 ;} - echo ${{ needs.generate-package-locks.outputs.package_locks_b64 }} | base64 -d | jq > generated-locks.json \ - || { echo "::error::output package_locks_b64 did not contain valid JSON once decoded" ; exit 3 ;} - jq .packages current-locks.json > current-packages.json - jq .packages generated-locks.json > generated-packages.json - diff current-packages.json generated-packages.json || needsupdating=true - echo needs-updating=${needsupdating} | tee -a $GITHUB_OUTPUT +# compare-package-lock-manifests: +# needs: generate-package-locks +# outputs: +# needs-updating: ${{ steps.compare-package-locks.outputs.needs-updating }} +# runs-on: ubuntu-latest 
+# steps: +# - name: Determine if package lock entries need updating +# id: compare-package-locks +# run: | +# needsupdating=false +# wget https://komish.github.io/actions-workflow-call-test/lock.json -O current-locks.json +# set -o pipefail +# test -n "${{ needs.generate-package-locks.outputs.package_locks_b64 }}" \ +# || { echo "::error::output package_locks_b64 did not contain base64 content generated from the previous step"; exit 2 ;} +# echo ${{ needs.generate-package-locks.outputs.package_locks_b64 }} | base64 -d | jq > generated-locks.json \ +# || { echo "::error::output package_locks_b64 did not contain valid JSON once decoded" ; exit 3 ;} +# jq .packages current-locks.json > current-packages.json +# jq .packages generated-locks.json > generated-packages.json +# diff current-packages.json generated-packages.json || needsupdating=true +# echo needs-updating=${needsupdating} | tee -a $GITHUB_OUTPUT - craft-pr-to-project: - # TODO: This needs a token for a user who isn't the CI bot (e.g. GITHUB_TOKEN) but has commit permissions. - needs: - - compare-package-lock-manifests - - generate-package-locks - if: needs.compare-package-lock-manifests.outputs.needs-updating == true - runs-on: ubuntu-latest - steps: - - name: Clone ${{ env.PR_DEST_BRANCH_NAME }} branch - uses: actions/checkout@v3 - with: - ref: ${{ env.PR_DEST_BRANCH_NAME }} - # token: tbd - - name: Set Git Config - # TODO update the user name and email - run: | - git config user.name "CI Bot Name" - git config user.email cibot@example.com +# craft-pr-to-project: +# # TODO: This needs a token for a user who isn't the CI bot (e.g. GITHUB_TOKEN) but has commit permissions. +# needs: +# - compare-package-lock-manifests +# - generate-package-locks +# if: needs.compare-package-lock-manifests.outputs.needs-updating == true +# runs-on: ubuntu-latest +# steps: +# - name: Clone ${{ env.PR_DEST_BRANCH_NAME }} branch +# uses: actions/checkout@v3 +# with: +# ref: ${{ env.PR_DEST_BRANCH_NAME }} +# # token: tbd +# - name: Set Git Config +# # TODO update the user name and email +# run: | +# git config user.name "CI Bot Name" +# git config user.email cibot@example.com - - name: Create new branch - id: create-branch - run: | - branchuuid=$(uuidgen) - branchname=update-locks-${branchuuid} - git checkout -b $branchname - echo branchname=$branchname >> $GITHUB_OUTPUT +# - name: Create new branch +# id: create-branch +# run: | +# branchuuid=$(uuidgen) +# branchname=update-locks-${branchuuid} +# git checkout -b $branchname +# echo branchname=$branchname >> $GITHUB_OUTPUT - - name: Overwrite existing lockfile, Commit, and Push - id: commit-and-push - run: | - set -o pipefail - cd docs - test -n "${{ needs.generate-package-locks.outputs.package_locks_b64 }}" \ - || { echo "::error::output package_locks_b64 did not contain base64 content generated from the previous step"; exit 2 ;} - echo ${{ needs.generate-package-locks.outputs.package_locks_b64 }} | base64 -d | jq > lock.json \ - || { echo "::error::output package_locks_b64 did not contain valid JSON once decoded" ; exit 3 ;} - md5sum=$(md5sum lock.json | awk '{ print $1 }' ) - echo new_lockfile_md5sum=$md5sum >> $GITHUB_OUTPUT - git add lock.json - git commit -m "updating package lock file" - git push origin ${{ steps.create-branch.outputs.branchname }} +# - name: Overwrite existing lockfile, Commit, and Push +# id: commit-and-push +# run: | +# set -o pipefail +# cd docs +# test -n "${{ needs.generate-package-locks.outputs.package_locks_b64 }}" \ +# || { echo "::error::output package_locks_b64 did not 
contain base64 content generated from the previous step"; exit 2 ;} +# echo ${{ needs.generate-package-locks.outputs.package_locks_b64 }} | base64 -d | jq > lock.json \ +# || { echo "::error::output package_locks_b64 did not contain valid JSON once decoded" ; exit 3 ;} +# md5sum=$(md5sum lock.json | awk '{ print $1 }' ) +# echo new_lockfile_md5sum=$md5sum >> $GITHUB_OUTPUT +# git add lock.json +# git commit -m "updating package lock file" +# git push origin ${{ steps.create-branch.outputs.branchname }} - - name: Create Pull Request - # If using the GitHub Actions Token, make sure your repository allows - # for write permissions as well as Creating and Merging Pull Requests in - # Settings. - run: | - body=$(echo -e "${{ env.PR_BODY }}\n\nThe generated lockfile's md5sum is: **${{ steps.commit-and-push.outputs.new_lockfile_md5sum }}**\n") - gh pr create -B ${{ env.PR_DEST_BRANCH_NAME }} -H ${{ steps.create-branch.outputs.branchname }} --title "${{ env.PR_TITLE }} - ${{ steps.commit-and-push.outputs.new_lockfile_md5sum }}" --body "${body}" - env: - GITHUB_TOKEN: ${{ secrets.PROJECT_CI_BOT_TOKEN }} - PR_TITLE: Updating Chart Locks - PR_BODY: | - _This PR was generated by GitHub Actions_ +# - name: Create Pull Request +# # If using the GitHub Actions Token, make sure your repository allows +# # for write permissions as well as Creating and Merging Pull Requests in +# # Settings. +# run: | +# body=$(echo -e "${{ env.PR_BODY }}\n\nThe generated lockfile's md5sum is: **${{ steps.commit-and-push.outputs.new_lockfile_md5sum }}**\n") +# gh pr create -B ${{ env.PR_DEST_BRANCH_NAME }} -H ${{ steps.create-branch.outputs.branchname }} --title "${{ env.PR_TITLE }} - ${{ steps.commit-and-push.outputs.new_lockfile_md5sum }}" --body "${body}" +# env: +# GITHUB_TOKEN: ${{ secrets.PROJECT_CI_BOT_TOKEN }} +# PR_TITLE: Updating Chart Locks +# PR_BODY: | +# _This PR was generated by GitHub Actions_ - This PR is updating the Chart Locks. The content of this PR was - generated based on the current state of the charts directory. +# This PR is updating the Chart Locks. The content of this PR was +# generated based on the current state of the charts directory. - This PR should be automatically merged by GitHub Actions if there are - no merge conflicts. \ No newline at end of file +# This PR should be automatically merged by GitHub Actions if there are +# no merge conflicts. 
\ No newline at end of file diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..666a69a --- /dev/null +++ b/.gitignore @@ -0,0 +1,37 @@ +.DS_Store +__pycache__ +*.egg-info +sanity-check.py +.cr-release-packages/*.tgz +oc + +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# Virtual Envs +venv.*/ \ No newline at end of file diff --git a/OWNERS b/OWNERS new file mode 100644 index 0000000..dcbd066 --- /dev/null +++ b/OWNERS @@ -0,0 +1,2 @@ +approvers: +- foo diff --git a/scripts/packagemapping/__init__.py b/old_scripts/packagemapping/__init__.py similarity index 100% rename from scripts/packagemapping/__init__.py rename to old_scripts/packagemapping/__init__.py diff --git a/scripts/packagemapping/packagemapping.py b/old_scripts/packagemapping/packagemapping.py similarity index 98% rename from scripts/packagemapping/packagemapping.py rename to old_scripts/packagemapping/packagemapping.py index 1a90899..af98554 100644 --- a/scripts/packagemapping/packagemapping.py +++ b/old_scripts/packagemapping/packagemapping.py @@ -31,6 +31,7 @@ def logInfo(msg, file=sys.stderr): """logError just prints the msg with an INFO caption to stderr unless otherwise defined""" print(f"[INFO] {msg}", file=file) + def logWarn(msg, file=sys.stderr): """logWarn just prints the msg with an INFO caption to stderr unless otherwise defined""" print(f"[WARN] {msg}", file=file) @@ -51,7 +52,7 @@ def main(): ) continue sys.exit(1) - + category, organization, chart = matched.groups() if category == None or organization == None or chart == None: logError( @@ -80,25 +81,26 @@ def main(): logError( f"Duplicate chart name detected. Unable to build unique package list. trying to add: {new_entry}, current_value: {packages[chart]}" ) - # sys.exit(2) + sys.exit(2) packages[chart] = new_entry - if len(packages.keys()) == 0: logError("the package map contained no items!") sys.exit(3) - + now = datetime.now(timezone.utc).astimezone().isoformat() - + print( to_json( { "generated": now, "packages": packages, }, - sort_keys=True) + sort_keys=True, + ) ) + if __name__ == "__main__": main() diff --git a/scripts/Makefile b/scripts/Makefile new file mode 100644 index 0000000..3c47530 --- /dev/null +++ b/scripts/Makefile @@ -0,0 +1,56 @@ +PY_BIN ?= python3 + +# The virtualenv containing code style tools. +VENV_CODESTYLE = venv.codestyle +VENV_CODESTYLE_BIN = $(VENV_CODESTYLE)/bin + +# The virtualenv containing our CI scripts +VENV_TOOLS = venv.tools +VENV_TOOLS_BIN = $(VENV_TOOLS)/bin + +# This is what we pass to git ls-files. +LS_FILES_INPUT_STR ?= 'src/*.py' + +.PHONY: default +default: format lint + +# The same as format, but will throw a non-zero exit code +# if the formatter had to make changes. +.PHONY: ci.format +ci.format: format + git diff --exit-code + +venv.codestyle: + $(MAKE) venv.codestyle.always-reinstall + +# This target will always install the codestyle venv. +# Useful for development cases. 
+.PHONY: venv.codestyle.always-reinstall +venv.codestyle.always-reinstall: + $(PY_BIN) -m venv $(VENV_CODESTYLE) + ./$(VENV_CODESTYLE_BIN)/pip install --upgrade \ + black \ + ruff + +.PHONY: format +format: venv.codestyle + ./$(VENV_CODESTYLE_BIN)/black \ + --verbose \ + $$(git ls-files $(LS_FILES_INPUT_STR)) + +.PHONY: lint +lint: venv.codestyle + ./$(VENV_CODESTYLE_BIN)/ruff \ + check \ + $$(git ls-files $(LS_FILES_INPUT_STR)) + +venv.tools: + $(MAKE) venv.tools.always-reinstall + +# This target will always install the tools into the venv. +# Useful for development cases. +.PHONY: venv.tools.always-reinstall +venv.tools.always-reinstall: + $(PY_BIN) -m venv $(VENV_TOOLS) + ./$(VENV_TOOLS_BIN)/pip install -r requirements.txt + ./$(VENV_TOOLS_BIN)/pip install . diff --git a/scripts/README.md b/scripts/README.md new file mode 100644 index 0000000..cba1003 --- /dev/null +++ b/scripts/README.md @@ -0,0 +1,47 @@ +## Generating New Token for Cluster + +To generate a new token, follow these instructions. + +Login using your OpenShift token: + +``` +oc login --token=<token> --server=<server> +``` + +Delete existing service account, roles, and role bindings: + +``` +oc delete -k scripts/config/overlays/prod/ +``` + +Create new service account, roles, and role bindings: + +``` +oc apply -k scripts/config/overlays/prod/ +``` + +There are two secrets associated with the service account; note down the +secret with the word `token` in it. To see the secrets run this command: + +``` +oc get sa chart-verifier-admin -n prod-chart-verifier-infra -o yaml +``` + +The output should end with something like this: +``` +... +secrets: +- name: chart-verifier-admin-token-t9sjg +- name: chart-verifier-admin-dockercfg-zkhzm +``` + +Now you can extract the token with this command: + +``` +oc get secret chart-verifier-admin-token-t9sjg -n prod-chart-verifier-infra -o yaml | yq e '.data.token' - | base64 -d - +``` + +You can store the returned value in the GitHub secrets under the key +`CLUSTER_TOKEN`. + +Alternatively, run `scripts/get-secrets --token <token> --server <server>` to get `CLUSTER_TOKEN` and `API_SERVER`.
diff --git a/scripts/config/base/kustomization.yaml b/scripts/config/base/kustomization.yaml new file mode 100644 index 0000000..828a5b9 --- /dev/null +++ b/scripts/config/base/kustomization.yaml @@ -0,0 +1,2 @@ +resources: + - service-account.yaml diff --git a/scripts/config/base/service-account.yaml b/scripts/config/base/service-account.yaml new file mode 100644 index 0000000..559289f --- /dev/null +++ b/scripts/config/base/service-account.yaml @@ -0,0 +1,4 @@ +apiVersion: v1 +kind: ServiceAccount +metadata: + name: chart-verifier-admin diff --git a/scripts/config/overlays/prod/cluster-role-binding.yaml b/scripts/config/overlays/prod/cluster-role-binding.yaml new file mode 100644 index 0000000..67ab9e7 --- /dev/null +++ b/scripts/config/overlays/prod/cluster-role-binding.yaml @@ -0,0 +1,12 @@ +kind: ClusterRoleBinding +apiVersion: rbac.authorization.k8s.io/v1 +metadata: + name: prod-chart-verifier-admin +roleRef: + apiGroup: rbac.authorization.k8s.io + kind: ClusterRole + name: prod-chart-verifier-admin +subjects: + - kind: ServiceAccount + name: chart-verifier-admin + namespace: prod-chart-verifier-infra diff --git a/scripts/config/overlays/prod/cluster-role.yaml b/scripts/config/overlays/prod/cluster-role.yaml new file mode 100644 index 0000000..3a5f821 --- /dev/null +++ b/scripts/config/overlays/prod/cluster-role.yaml @@ -0,0 +1,47 @@ +kind: ClusterRole +apiVersion: rbac.authorization.k8s.io/v1 +metadata: + name: prod-chart-verifier-admin +rules: + - apiGroups: + - "" + resources: + - 'namespaces' + verbs: + - '*' + - apiGroups: + - "" + resources: + - 'serviceaccounts' + verbs: + - '*' + - apiGroups: + - "rbac.authorization.k8s.io" + resources: + - 'clusterroles' + verbs: + - '*' + - apiGroups: + - "rbac.authorization.k8s.io" + resources: + - 'clusterrolebindings' + verbs: + - '*' + - apiGroups: + - "*" + resources: + - 'roles' + verbs: + - '*' + - apiGroups: + - "*" + resources: + - 'rolebindings' + verbs: + - '*' + - apiGroups: + - "" + resources: + - 'secrets' + verbs: + - '*' diff --git a/scripts/config/overlays/prod/kustomization.yaml b/scripts/config/overlays/prod/kustomization.yaml new file mode 100644 index 0000000..a13b5d5 --- /dev/null +++ b/scripts/config/overlays/prod/kustomization.yaml @@ -0,0 +1,7 @@ +namespace: prod-chart-verifier-infra +resources: + - namespace.yaml + - cluster-role.yaml + - cluster-role-binding.yaml +bases: + - ../../base diff --git a/scripts/config/overlays/prod/namespace.yaml b/scripts/config/overlays/prod/namespace.yaml new file mode 100644 index 0000000..10dae18 --- /dev/null +++ b/scripts/config/overlays/prod/namespace.yaml @@ -0,0 +1,4 @@ +apiVersion: v1 +kind: Namespace +metadata: + name: prod-chart-verifier-infra diff --git a/scripts/config/overlays/test/cluster-role-binding.yaml b/scripts/config/overlays/test/cluster-role-binding.yaml new file mode 100644 index 0000000..9ad2e51 --- /dev/null +++ b/scripts/config/overlays/test/cluster-role-binding.yaml @@ -0,0 +1,12 @@ +kind: ClusterRoleBinding +apiVersion: rbac.authorization.k8s.io/v1 +metadata: + name: test-chart-verifier-admin +roleRef: + apiGroup: rbac.authorization.k8s.io + kind: ClusterRole + name: test-chart-verifier-admin +subjects: + - kind: ServiceAccount + name: chart-verifier-admin + namespace: test-chart-verifier-infra diff --git a/scripts/config/overlays/test/cluster-role.yaml b/scripts/config/overlays/test/cluster-role.yaml new file mode 100644 index 0000000..c64c377 --- /dev/null +++ b/scripts/config/overlays/test/cluster-role.yaml @@ -0,0 +1,47 @@ +kind: ClusterRole 
+apiVersion: rbac.authorization.k8s.io/v1 +metadata: + name: test-chart-verifier-admin +rules: + - apiGroups: + - "" + resources: + - 'namespaces' + verbs: + - '*' + - apiGroups: + - "" + resources: + - 'serviceaccounts' + verbs: + - '*' + - apiGroups: + - "rbac.authorization.k8s.io" + resources: + - 'clusterroles' + verbs: + - '*' + - apiGroups: + - "rbac.authorization.k8s.io" + resources: + - 'clusterrolebindings' + verbs: + - '*' + - apiGroups: + - "rbac.authorization.k8s.io" + resources: + - 'roles' + verbs: + - '*' + - apiGroups: + - "rbac.authorization.k8s.io" + resources: + - 'rolebindings' + verbs: + - '*' + - apiGroups: + - "" + resources: + - 'secrets' + verbs: + - '*' diff --git a/scripts/config/overlays/test/kustomization.yaml b/scripts/config/overlays/test/kustomization.yaml new file mode 100644 index 0000000..0eedff1 --- /dev/null +++ b/scripts/config/overlays/test/kustomization.yaml @@ -0,0 +1,7 @@ +namespace: test-chart-verifier-infra +resources: + - namespace.yaml + - cluster-role.yaml + - cluster-role-binding.yaml +bases: + - ../../base diff --git a/scripts/config/overlays/test/namespace.yaml b/scripts/config/overlays/test/namespace.yaml new file mode 100644 index 0000000..5d0d70c --- /dev/null +++ b/scripts/config/overlays/test/namespace.yaml @@ -0,0 +1,4 @@ +apiVersion: v1 +kind: Namespace +metadata: + name: test-chart-verifier-infra diff --git a/scripts/get-secrets b/scripts/get-secrets new file mode 100755 index 0000000..65e16f3 --- /dev/null +++ b/scripts/get-secrets @@ -0,0 +1,103 @@ +#!/usr/bin/env python3 + +"""GitHub Actions Secrets Generator +This script generates CLUSTER_TOKEN and API_SERVER for GitHub Actions. +Takes two arguments: --token and --server, which align with the `oc login` command. +""" + +import argparse +import subprocess +import sys +import os +import re +import urllib.parse + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument("-t", "--token", dest="token", type=str, required=True, + help="oc login token") + parser.add_argument("-s", "--server", dest="server", type=str, required=True, + help="OpenShift server URL") + args = parser.parse_args() + + out = subprocess.run(["oc", "login", "--insecure-skip-tls-verify", f"--token={args.token}", f"--server={args.server}"], capture_output=True) + err = out.stderr.decode("utf-8") + if err.strip(): + msg = f"[ERROR] Error logging in with `oc login`: {err}" + print(msg, file=sys.stderr) + sys.exit(1) + else: + print(out.stdout.decode("utf-8"), file=sys.stderr) + + cwd = os.path.dirname(__file__) + out = subprocess.run(["oc", "delete", "-k", cwd+"/config/overlays/prod/"], capture_output=True) + err = out.stderr.decode("utf-8") + if err.strip(): + msg = f"[ERROR] Error deleting existing service account, roles, and role bindings: {err}" + print(msg, file=sys.stderr) + else: + print(out.stdout.decode("utf-8"), file=sys.stderr) + + out = subprocess.run(["oc", "apply", "-k", cwd+"/config/overlays/prod/"], capture_output=True) + err = out.stderr.decode("utf-8") + if err.strip(): + msg = f"[ERROR] Error creating new service account, roles, and role bindings: {err}" + print(msg, file=sys.stderr) + sys.exit(1) + else: + print(out.stdout.decode("utf-8"), file=sys.stderr) + + out = subprocess.run(["oc", "get", "sa", "chart-verifier-admin", "-n", "prod-chart-verifier-infra", "-o", "yaml"], capture_output=True) + err = out.stderr.decode("utf-8") + if err.strip(): + msg = f"[ERROR] Error getting token secret from service account: {err}" + print(msg, file=sys.stderr) + sys.exit(1) + else: + print(out.stdout.decode("utf-8"), file=sys.stderr) + + pattern = re.compile(r"chart\-verifier\-admin\-token\-[\w]+") + secret = pattern.search(out.stdout.decode("utf-8")) + if not secret: + msg = "[ERROR] Error finding token secret under service account" + print(msg, file=sys.stderr) + sys.exit(1) + + out = subprocess.run(["oc", "get", "secret", secret.group(), "-n", "prod-chart-verifier-infra", "-o", "yaml"], capture_output=True) + err = out.stderr.decode("utf-8") + if err.strip(): + msg = f"[ERROR] Error getting cluster token secret: {err}" + print(msg, file=sys.stderr) + sys.exit(1) + + print(urllib.parse.unquote(out.stdout.decode("utf-8"))) + out = subprocess.run(["yq", "e", ".data.token", "-"], input=out.stdout, capture_output=True) + + out = subprocess.run(["base64", "-d", "-"], input=out.stdout, capture_output=True) + err = out.stderr.decode("utf-8") + if err.strip(): + msg = f"[ERROR] Error base64 decoding cluster token: {err}" + print(msg, file=sys.stderr) + sys.exit(1) + cluster_token = out.stdout.decode("utf-8") + + out = subprocess.run(["echo", "-n", args.server], capture_output=True) + err = out.stderr.decode("utf-8") + if err.strip(): + msg = f"[ERROR] Error base64 encoding api server URL: {err}" + print(msg, file=sys.stderr) + sys.exit(1) + + out = subprocess.run(["base64", "-w", "0"], input=out.stdout, capture_output=True) + err = out.stderr.decode("utf-8") + if err.strip(): + msg = f"[ERROR] Error base64 encoding api server URL: {err}" + print(msg, file=sys.stderr) + sys.exit(1) + encoded_api_server = out.stdout.decode("utf-8") + + print(f"CLUSTER_TOKEN: {cluster_token}\n") + print(f"API_SERVER: {encoded_api_server}") + +if __name__ == "__main__": + main() diff --git a/scripts/pyproject.toml b/scripts/pyproject.toml new file mode 100644 index 0000000..fed528d --- /dev/null +++ b/scripts/pyproject.toml @@ -0,0 +1,3 @@ +[build-system] +requires = ["setuptools"] +build-backend = "setuptools.build_meta" diff --git a/scripts/requirements.txt b/scripts/requirements.txt new file mode 100644 index 0000000..98e38b4 --- /dev/null +++ b/scripts/requirements.txt @@ -0,0 +1,37 @@ +attrs==21.2.0 +certifi==2020.12.5 +chardet==4.0.0 +docker==6.1.3 +environs==9.5.0 +execnet==1.9.0 +gitdb==4.0.7 +GitPython==3.1.18 +glob2==0.7 +idna==2.10 +iniconfig==1.1.1 +mako==1.2.3 +MarkupSafe==2.0.1 +packaging==21.0 +parse==1.19.0 +parse-type==0.5.2 +pluggy==0.13.1 +psutil==5.8.0 +py==1.10.0 +PyGithub==1.55 +pyparsing==2.4.7 +pytest==6.2.4 +pytest-bdd==4.1.0 +pytest-forked==1.3.0 +pytest-xdist==2.4.0 +PyYAML==6.0.1 +requests==2.26.0 +retrying==1.3.3 +semantic-version==2.8.5 +semver==2.13.0 +six==1.16.0 +smmap==4.0.0 +toml==0.10.2 +urllib3==1.26.5 +websocket-client==1.2.1 +analytics-python==1.4.0 +behave==1.2.6 diff --git a/scripts/ruff.toml b/scripts/ruff.toml new file mode 100644 index 0000000..af87f9d --- /dev/null +++ b/scripts/ruff.toml @@ -0,0 +1,4 @@ +ignore = [ + "E402", # import ordering (komish): import ordering isn't handled by Black so we need to handle this manually. + "E501", # line length (komish): line length is not enforced by Black so we need to handle these manually.
+] \ No newline at end of file diff --git a/scripts/setup.cfg b/scripts/setup.cfg new file mode 100644 index 0000000..ebc5ae9 --- /dev/null +++ b/scripts/setup.cfg @@ -0,0 +1,50 @@ +[metadata] +name = rh-chart-repo-manager +version = 0.1.0 +author = Baiju Muthukadan +author_email = baiju.m.mail@gmail.com +description = Chart Repo Manager +long_description = file: README.md +long_description_content_type = text/markdown +url = https://github.com/openshift-helm-charts/repo +project_urls = + Bug Tracker = https://github.com/openshift-helm-charts/repo/issues +classifiers = + Programming Language :: Python :: 3 + License :: OSI Approved :: Apache Software License + Operating System :: OS Independent + +[options] +package_dir = + = src +packages = find: +python_requires = >=3.10 +install_requires = + PyYAML + requests + semver + pytest + pytest-bdd + +[options.packages.find] +where = src + +[options.entry_points] +console_scripts = + chart-repo-manager = chartrepomanager.chartrepomanager:main + chart-pr-review = chartprreview.chartprreview:main + check-pr-content = checkprcontent.checkpr:main + pr-artifact = pullrequest.prartifact:main + pr-comment = pullrequest.prepare_pr_comment:main + sa-for-chart-testing = saforcharttesting.saforcharttesting:main + check-auto-merge = checkautomerge.checkautomerge:main + check-pr-for-ci = workflowtesting.checkprforci:main + release-checker = release.releasechecker:main + releaser = release.releaser:main + check-user = owners.checkuser:main + metrics = metrics.metrics:main + get-verify-params = report.get_verify_params:main + pushowners=metrics.pushowners:main + update-index=updateindex.updateindex:main + user-is-repo-owner=owners.user_is_repo_owner:main + diff --git a/scripts/setup.py b/scripts/setup.py new file mode 100644 index 0000000..b908cbe --- /dev/null +++ b/scripts/setup.py @@ -0,0 +1,3 @@ +import setuptools + +setuptools.setup() diff --git a/scripts/src/chartprreview/__init__.py b/scripts/src/chartprreview/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/scripts/src/chartprreview/chartprreview.py b/scripts/src/chartprreview/chartprreview.py new file mode 100644 index 0000000..4d0f7d5 --- /dev/null +++ b/scripts/src/chartprreview/chartprreview.py @@ -0,0 +1,583 @@ +import re +import os +import os.path +import sys +import argparse +import subprocess +import hashlib + +from environs import Env + +import semver +import semantic_version +import requests +import yaml + +try: + from yaml import CLoader as Loader +except ImportError: + from yaml import Loader + +sys.path.append("../") +from report import report_info +from report import verifier_report +from signedchart import signedchart +from pullrequest import prartifact +from reporegex import matchers +from tools import gitutils + + +def write_error_log(directory, *msg): + os.makedirs(directory, exist_ok=True) + with open(os.path.join(directory, "errors"), "w") as fd: + for line in msg: + print(line) + fd.write(line) + fd.write("\n") + + +def get_vendor_type(directory): + vendor_type = os.environ.get("VENDOR_TYPE") + if not vendor_type or vendor_type not in {"partner", "redhat", "community"}: + msg = "[ERROR] Chart files need to be under one of charts/partners, charts/redhat, or charts/community" + write_error_log(directory, msg) + sys.exit(1) + return vendor_type + + +def get_modified_charts(directory, api_url): + """Get the category, organization, chart name, and new version corresponding to + the chart being added. 
+ + Args: + directory (str): Local directory in which to write the error logs + api_url (str): URL of the GitHub PR + + Returns: + (str, str, str, str): category, organization, chart, and version (e.g. partner, + hashicorp, vault, 1.4.0) + """ + print("[INFO] Get modified charts. %s" % directory) + files = prartifact.get_modified_files(api_url) + pattern = re.compile( + matchers.submission_path_matcher(strict_categories=False) + r"/.*" + ) + for file_path in files: + m = pattern.match(file_path) + if m: + category, organization, chart, version = m.groups() + return category, organization, chart, version + + msg = "[ERROR] One or more files included in the pull request are not part of the chart" + write_error_log(directory, msg) + sys.exit(1) + + +def verify_user(directory, username, category, organization, chart): + """Check that the user that submitted the PR is in the OWNERS file for this chart. + + Args: + directory (str): Local directory in which to write the error logs + username (str): Github username that submitted the PR. + category (str): Type of profile (community, partners, or redhat) + organization (str): Name of the organization (ex: hashicorp) + chart (str): Name of the chart (ex: vault) + """ + print( + "[INFO] Verify user. %s, %s, %s, %s" % (username, category, organization, chart) + ) + owners_path = os.path.join("charts", category, organization, chart, "OWNERS") + if not os.path.exists(owners_path): + msg = f"[ERROR] {owners_path} file does not exist." + write_error_log(directory, msg) + sys.exit(1) + + data = open(owners_path).read() + out = yaml.load(data, Loader=Loader) + if username not in [x["githubUsername"] for x in out["users"]]: + msg = f"[ERROR] {username} is not allowed to submit the chart on behalf of {organization}" + write_error_log(directory, msg) + sys.exit(1) + + +def check_owners_file_against_directory_structure( + directory, category, organization, chart +): + """Check that the content of the OWNERS file correspond to the directory structure + the chart is under. + + Following assertion must be true: + - the chart.name key must correspond to the name of the chart directory + - the vendor.label key must correspond to the organization directory + + Args: + directory (str): Local directory in which to write the error logs + category (str): Type of profile (community, partners, or redhat) + organization (str): Name of the organization (ex: hashicorp) + chart (str): Name of the chart (ex: vault) + """ + print( + "[INFO] Check owners file against directory structure. 
%s, %s, %s" + % (category, organization, chart) + ) + data = open(os.path.join("charts", category, organization, chart, "OWNERS")).read() + out = yaml.load(data, Loader=Loader) + vendor_label = out["vendor"]["label"] + chart_name = out["chart"]["name"] + error_exit = False + msgs = [] + if organization != vendor_label: + error_exit = True + msgs.append( + f"[ERROR] vendor/label in OWNERS file ({vendor_label}) doesn't match the directory structure (charts/{category}/{organization}/{chart})" + ) + if chart != chart_name: + msgs.append( + f"[ERROR] chart/name in OWNERS file ({chart_name}) doesn't match the directory structure (charts/{category}/{organization}/{chart})" + ) + error_exit = True + if error_exit: + write_error_log(directory, *msgs) + sys.exit(1) + + +def verify_signature(category, organization, chart, version): + """Verify that the PGP signature (report.yaml.asc) can decrypt report.yaml + + Args: + category (str): Type of profile (community, partners, or redhat) + organization (str): Name of the organization (ex: hashicorp) + chart (str): Name of the chart (ex: vault) + version (str): The version of the chart (ex: 1.4.0) + """ + print("[INFO] Verify signature. %s, %s, %s" % (organization, chart, version)) + sign = os.path.join( + "charts", category, organization, chart, version, "report.yaml.asc" + ) + if os.path.exists(sign): + data = open( + os.path.join("charts", category, organization, chart, "OWNERS") + ).read() + out = yaml.load(data, Loader=Loader) + publickey = out.get("publicPgpKey") + if not publickey: + return + with open("public.key", "w") as fd: + fd.write(publickey) + out = subprocess.run(["gpg", "--import", "public.key"], capture_output=True) + print("[INFO]", out.stdout.decode("utf-8")) + print("[WARNING]", out.stderr.decode("utf-8")) + report = os.path.join( + "charts", category, organization, chart, version, "report.yaml" + ) + out = subprocess.run(["gpg", "--verify", sign, report], capture_output=True) + print("[INFO]", out.stdout.decode("utf-8")) + print("[WARNING]", out.stderr.decode("utf-8")) + else: + print(f"[INFO] Signed report not found: {sign}.") + + +def match_checksum( + directory, generated_report_info_path, category, organization, chart, version +): + """Check that the provided report and the generated report have the same chart + digest + + Args: + directory (str): Local directory in which to write the error logs + generated_report_info_path (str): Path to the processed JSON report generated + in the pipeline + category (str): Type of profile (community, partners, or redhat) + organization (str): Name of the organization (ex: hashicorp) + chart (str): Name of the chart (ex: vault) + version (str): The version of the chart (ex: 1.4.0) + """ + print("[INFO] Check digests match. 
%s, %s, %s" % (organization, chart, version)) + submitted_report_path = os.path.join( + "charts", category, organization, chart, version, "report.yaml" + ) + submitted_digests = report_info.get_report_digests( + report_path=submitted_report_path + ) + submitted_digest = submitted_digests["chart"] + + generated_digests = report_info.get_report_digests( + report_info_path=generated_report_info_path + ) + generated_digest = generated_digests["chart"] + + if submitted_digest != generated_digest: + msg = f"[ERROR] Digest is not matching: {submitted_digest}, {generated_digest}" + write_error_log(directory, msg) + sys.exit(1) + + +def check_url(directory, report_path): + """Check that the chart URL provided in report.yaml is valid and that the chart + digest matches the one provided in report.yaml + + Args: + directory (str): Local directory in which to write the error logs + report_path (str): Path to report.yaml + """ + print("[INFO] Check chart_url is a valid url. %s" % report_path) + chart_url = report_info.get_report_chart_url(report_path=report_path) + + try: + r = requests.head(chart_url) + except requests.exceptions.InvalidSchema as err: + msgs = [] + msgs.append(f"Invalid schema: {chart_url}") + msgs.append(str(err)) + write_error_log(directory, *msgs) + sys.exit(1) + except requests.exceptions.InvalidURL as err: + msgs = [] + msgs.append(f"Invalid URL: {chart_url}") + msgs.append(str(err)) + write_error_log(directory, *msgs) + sys.exit(1) + except requests.exceptions.MissingSchema as err: + msgs = [] + msgs.append(f"Missing schema in URL: {chart_url}") + msgs.append(str(err)) + write_error_log(directory, *msgs) + sys.exit(1) + + try: + r.raise_for_status() + except requests.exceptions.HTTPError as err: + msgs = [] + msgs.append("[WARNING] URL is not accessible: {chart_url} ") + msgs.append(str(err)) + write_error_log(directory, *msgs) + + verify_package_digest(chart_url, report_path) + + +def match_name_and_version( + directory, category, organization, chart, version, generated_report_path +): + """Check that the chart name and version in the provided report.yaml and in the + report generated in the pipeline match the underlying directory structure. + + Args: + directory (str): Local directory in which to write the error logs + category (str): Type of profile (community, partners, or redhat) + organization (str): Name of the organization (ex: hashicorp) + chart (str): Name of the chart (ex: vault) + version (str): The version of the chart (ex: 1.4.0) + generated_report_path (str): Path to the report generated in the pipeline + """ + print( + "[INFO] Check chart has same name and version as directory structure. 
%s, %s, %s" + % (organization, chart, version) + ) + submitted_report_path = os.path.join( + "charts", category, organization, chart, version, "report.yaml" + ) + if os.path.exists(submitted_report_path): + submitted_report_chart = report_info.get_report_chart( + report_path=submitted_report_path + ) + submitted_report_chart_name = submitted_report_chart["name"] + submitted_report_chart_version = submitted_report_chart["version"] + + if submitted_report_chart_name != chart: + msg = f"[ERROR] Chart name ({submitted_report_chart_name}) doesn't match the directory structure (charts/{category}/{organization}/{chart}/{version})" + write_error_log(directory, msg) + sys.exit(1) + + if submitted_report_chart_version != version: + msg = f"[ERROR] Chart version ({submitted_report_chart_version}) doesn't match the directory structure (charts/{category}/{organization}/{chart}/{version})" + write_error_log(directory, msg) + sys.exit(1) + + if os.path.exists(generated_report_path): + report_chart = report_info.get_report_chart( + report_path=generated_report_path + ) + report_chart_name = report_chart["name"] + report_chart_version = report_chart["version"] + + if submitted_report_chart_name != report_chart_name: + msg = f"[ERROR] Chart name in the chart is not matching against the value in the report: {submitted_report_chart_name} vs {report_chart_name}" + write_error_log(directory, msg) + sys.exit(1) + + if submitted_report_chart_version != report_chart_version: + msg = f"[ERROR] Chart version in the chart is not matching against the value in the report: {submitted_report_chart_version} vs. {report_chart_version}" + write_error_log(directory, msg) + sys.exit(1) + else: + print(f"[INFO] No report submitted, get data from : {generated_report_path}") + report_chart = report_info.get_report_chart(report_path=generated_report_path) + report_chart_name = report_chart["name"] + report_chart_version = report_chart["version"] + + if report_chart_name != chart: + msg = f"[ERROR] Chart name ({report_chart_name}) doesn't match the directory structure (charts/{category}/{organization}/{chart}/{version})" + write_error_log(directory, msg) + sys.exit(1) + + if report_chart_version != version: + msg = f"[ERROR] Chart version ({report_chart_version}) doesn't match the directory structure (charts/{category}/{organization}/{chart}/{version})" + write_error_log(directory, msg) + sys.exit(1) + + +def check_report_success(directory, api_url, report_path, report_info_path, version): + """Check the content of report.yaml + + * Check that the version in the report matches with the directory structure. + * Check that the vendor type in the report matches with the directory structure. + * Check the presence of the required annotations. + * Check that the report doesn't contains failed checks. + * Check that the testedOpenShiftVersion and certifiedOpenShiftVersions labels + contain SemVer compatible versions. + + Also adds the content of report.yaml to the GITHUB_OUTPUT. + + Args: + directory (str): Local directory in which to write the error logs + api_url (str): URL of the GitHub PR + report_path (str): Path to report.yaml + report_info_path (str): Path to processed JSON report + version (str): The version of the chart (ex: 1.4.0) + """ + print("[INFO] Check report success. 
%s" % report_path) + data = open(report_path).read() + print("[INFO] Full report: ") + print(data) + quoted_data = data.replace("%", "%25").replace("\n", "%0A").replace("\r", "%0D") + gitutils.add_output("report_content", quoted_data) + + chart = report_info.get_report_chart( + report_path=report_path, report_info_path=report_info_path + ) + report_version = chart["version"] + if report_version != version: + msg = f"[ERROR] Chart Version '{report_version}' doesn't match the version in the directory path: '{version}'" + write_error_log(directory, msg) + sys.exit(1) + + report_metadata = report_info.get_report_metadata( + report_path=report_path, report_info_path=report_info_path + ) + profile_version = report_metadata["profileVersion"] + vendor_type = get_vendor_type(directory) + report_vendor_type = report_metadata["vendorType"] + + if report_vendor_type != vendor_type: + msg = f"[ERROR] Report profile type '{report_vendor_type}' doesn't match the vendor type in the directory path: '{vendor_type}'" + write_error_log(directory, msg) + sys.exit(1) + + print(f"[INFO] Profile version: {profile_version}") + annotations = report_info.get_report_annotations( + report_path=report_path, report_info_path=report_info_path + ) + + required_annotations = { + "charts.openshift.io/lastCertifiedTimestamp", + "charts.openshift.io/testedOpenShiftVersion", + "charts.openshift.io/supportedOpenShiftVersions", + "charts.openshift.io/digest", + } + + if profile_version == "v1.0": + required_annotations = { + "charts.openshift.io/lastCertifiedTimestamp", + "charts.openshift.io/certifiedOpenShiftVersions", + "charts.openshift.io/digest", + } + + available_annotations = set(annotations.keys()) + + missing_annotations = required_annotations - available_annotations + for annotation in missing_annotations: + msg = f"[ERROR] Missing annotation in chart/report: {annotation}" + write_error_log(directory, msg) + sys.exit(1) + + report = report_info.get_report_results( + report_path=report_path, + report_info_path=report_info_path, + profile_type=vendor_type, + ) + + label_names = prartifact.get_labels(api_url) + + failed = report["failed"] + passed = report["passed"] + failures_in_report = failed > 0 + if failures_in_report: + msgs = [] + msgs.append("[ERROR] Chart verifier report includes failures:") + msgs.append(f"- Number of checks passed: {passed}") + msgs.append(f"- Number of checks failed: {failed}") + msgs.append("- Error message(s):") + for m in report["message"]: + msgs.append(f" - {m}") + write_error_log(directory, *msgs) + if vendor_type == "redhat": + gitutils.add_output("redhat_to_community", "True") + if vendor_type != "redhat" and "force-publish" not in label_names: + if vendor_type == "community": + # requires manual review and approval + gitutils.add_output("community_manual_review_required", "True") + sys.exit(1) + + if vendor_type == "community" and "force-publish" not in label_names: + # requires manual review and approval + print("[INFO] Community submission requires manual approval.") + gitutils.add_output("community_manual_review_required", "True") + sys.exit(1) + + if failures_in_report or vendor_type == "community": + return + + if "charts.openshift.io/testedOpenShiftVersion" in annotations: + full_version = annotations["charts.openshift.io/testedOpenShiftVersion"] + try: + semantic_version.Version.coerce(full_version) + except ValueError: + msg = f"[ERROR] tested OpenShift version not conforming to SemVer spec: {full_version}" + write_error_log(directory, msg) + sys.exit(1) + + if 
"charts.openshift.io/certifiedOpenShiftVersions" in annotations: + full_version = annotations["charts.openshift.io/certifiedOpenShiftVersions"] + if not semver.VersionInfo.isvalid(full_version): + msg = f"[ERROR] certified OpenShift version not conforming to SemVer spec: {full_version}" + write_error_log(directory, msg) + sys.exit(1) + + +def verify_package_digest(url, report): + print("[INFO] check package digest.") + + response = requests.get(url, allow_redirects=True) + if response.status_code == 200: + target_digest = hashlib.sha256(response.content).hexdigest() + + found, report_data = verifier_report.get_report_data(report) + if found: + pkg_digest = verifier_report.get_package_digest(report_data) + + if target_digest: + if pkg_digest and pkg_digest != target_digest: + # Digest was passed and computed but differ + raise Exception( + "Found an integrity issue. SHA256 digest passed does not match SHA256 digest computed." + ) + elif not pkg_digest: + # Digest was not passed and could not be computed + raise Exception( + "Was unable to compute SHA256 digest, please ensure chart url points to a chart package." + ) + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument( + "-d", + "--directory", + dest="directory", + type=str, + required=True, + help="artifact directory for archival", + ) + parser.add_argument( + "-n", + "--verify-user", + dest="username", + type=str, + required=True, + help="check if the user can update the chart", + ) + parser.add_argument( + "-u", + "--api-url", + dest="api_url", + type=str, + required=True, + help="API URL for the pull request", + ) + args = parser.parse_args() + + category, organization, chart, version = get_modified_charts( + args.directory, args.api_url + ) + verify_user(args.directory, args.username, category, organization, chart) + check_owners_file_against_directory_structure( + args.directory, category, organization, chart + ) + + report_generated = os.environ.get("REPORT_GENERATED") + generated_report_path = os.environ.get("GENERATED_REPORT_PATH") + generated_report_info_path = os.environ.get("REPORT_SUMMARY_PATH") + env = Env() + web_catalog_only = env.bool("WEB_CATALOG_ONLY", False) + + submitted_report_path = os.path.join( + "charts", category, organization, chart, version, "report.yaml" + ) + if os.path.exists(submitted_report_path): + ocp_version_range = os.environ.get("OCP_VERSION_RANGE") + report_valid, message = verifier_report.validate( + submitted_report_path, ocp_version_range + ) + if not report_valid: + msg = f"Submitted report is not valid: {message}" + print(f"[ERROR] {msg}") + write_error_log(args.directory, msg) + sys.exit(1) + + print("[INFO] Submitted report passed validity check!") + owners_file = os.path.join("charts", category, organization, chart, "OWNERS") + pgp_key_in_owners = signedchart.get_pgp_key_from_owners(owners_file) + if pgp_key_in_owners: + if signedchart.check_report_for_signed_chart(submitted_report_path): + if not signedchart.check_pgp_public_key( + pgp_key_in_owners, submitted_report_path + ): + msg = "PGP key in OWNERS file does not match with key digest in report." + print(f"[ERROR] {msg}") + write_error_log(args.directory, msg) + sys.exit(1) + else: + print( + "[INFO] PGP key in OWNERS file matches with key digest in report." 
+def main(): + parser = argparse.ArgumentParser() + parser.add_argument( + "-d", + "--directory", + dest="directory", + type=str, + required=True, + help="artifact directory for archival", + ) + parser.add_argument( + "-n", + "--verify-user", + dest="username", + type=str, + required=True, + help="check if the user can update the chart", + ) + parser.add_argument( + "-u", + "--api-url", + dest="api_url", + type=str, + required=True, + help="API URL for the pull request", + ) + args = parser.parse_args() + + category, organization, chart, version = get_modified_charts( + args.directory, args.api_url + ) + verify_user(args.directory, args.username, category, organization, chart) + check_owners_file_against_directory_structure( + args.directory, category, organization, chart + ) + + report_generated = os.environ.get("REPORT_GENERATED") + generated_report_path = os.environ.get("GENERATED_REPORT_PATH") + generated_report_info_path = os.environ.get("REPORT_SUMMARY_PATH") + env = Env() + web_catalog_only = env.bool("WEB_CATALOG_ONLY", False) + + submitted_report_path = os.path.join( + "charts", category, organization, chart, version, "report.yaml" + ) + if os.path.exists(submitted_report_path): + ocp_version_range = os.environ.get("OCP_VERSION_RANGE") + report_valid, message = verifier_report.validate( + submitted_report_path, ocp_version_range + ) + if not report_valid: + msg = f"Submitted report is not valid: {message}" + print(f"[ERROR] {msg}") + write_error_log(args.directory, msg) + sys.exit(1) + + print("[INFO] Submitted report passed validity check!") + owners_file = os.path.join("charts", category, organization, chart, "OWNERS") + pgp_key_in_owners = signedchart.get_pgp_key_from_owners(owners_file) + if pgp_key_in_owners: + if signedchart.check_report_for_signed_chart(submitted_report_path): + if not signedchart.check_pgp_public_key( + pgp_key_in_owners, submitted_report_path + ): + msg = "PGP key in OWNERS file does not match the key digest in the report." + print(f"[ERROR] {msg}") + write_error_log(args.directory, msg) + sys.exit(1) + else: + print( + "[INFO] PGP key in OWNERS file matches the key digest in the report." + ) + + print("[INFO] Report exists: ", submitted_report_path) + verify_signature(category, organization, chart, version) + report_path = submitted_report_path + report_info_path = "" + if report_generated and report_generated == "True": + match_checksum( + args.directory, + generated_report_info_path, + category, + organization, + chart, + version, + ) + elif not web_catalog_only: + check_url(args.directory, report_path) + else: + print("[INFO] Report does not exist: ", submitted_report_path) + report_path = generated_report_path + report_info_path = generated_report_info_path + + print(f"[INFO] report path: {report_path}") + print(f"[INFO] generated report path: {generated_report_path}") + print(f"[INFO] generated report info: {generated_report_info_path}") + + match_name_and_version( + args.directory, category, organization, chart, version, generated_report_path + ) + check_report_success( + args.directory, args.api_url, report_path, report_info_path, version + ) diff --git a/scripts/src/chartprreview/chartprreview_test.py b/scripts/src/chartprreview/chartprreview_test.py new file mode 100644 index 0000000..e641aa5 --- /dev/null +++ b/scripts/src/chartprreview/chartprreview_test.py @@ -0,0 +1,88 @@ +import os +import pytest +from chartprreview.chartprreview import verify_user +from chartprreview.chartprreview import check_owners_file_against_directory_structure +from chartprreview.chartprreview import write_error_log + + +def test_verify_user(tmpdir): + with pytest.raises(SystemExit): + # the first argument is the directory used by write_error_log + verify_user(tmpdir, "mbaiju", "partners", "test-org1", "test-chart") + + +owners_with_wrong_vendor_label = """\ +--- +chart: + name: test-chart + shortDescription: Lorem ipsum +publicPgpKey: | + users: + - githubUsername: baijum + - githubUsername: someuserdoesnotexist1234 +vendor: + label: test-org-wrong + name: Test Org +""" + +owners_with_wrong_chart_name = """\ +--- +chart: + name: test-chart-wrong + shortDescription: Lorem ipsum +publicPgpKey: | + users: + - githubUsername: baijum + - githubUsername: someuserdoesnotexist1234 +vendor: + label: test-org + name: Test Org +""" + +owners_with_correct_values = """\ +--- +chart: + name: test-chart + shortDescription: Lorem ipsum +publicPgpKey: | + users: + - githubUsername: baijum + - githubUsername: someuserdoesnotexist1234 +vendor: + label: test-org + name: Test Org +""" + + +def test_check_owners_file_against_directory_structure(tmpdir): + p = ( + tmpdir.mkdir("charts") + .mkdir("partners") + .mkdir("test-org") + .mkdir("test-chart") + .join("OWNERS") + ) + p.write(owners_with_wrong_vendor_label) + os.chdir(tmpdir) + new_cwd = os.getcwd() + print("new_cwd", new_cwd) + with pytest.raises(SystemExit): + check_owners_file_against_directory_structure( + tmpdir, "partners", "test-org", "test-chart" + ) + p.write(owners_with_wrong_chart_name) + with pytest.raises(SystemExit): + check_owners_file_against_directory_structure( + tmpdir, "partners", "test-org", "test-chart" + ) + p.write(owners_with_correct_values) + check_owners_file_against_directory_structure( + tmpdir, "partners", "test-org", "test-chart" + ) + + +def test_write_error_log(tmpdir): + write_error_log(tmpdir, "First message") + msg = open(os.path.join(tmpdir, "errors")).read() + assert msg == "First message\n" + + write_error_log(tmpdir, "First message", "Second message") + msg = open(os.path.join(tmpdir, "errors")).read() + assert msg == "First message\nSecond message\n" diff --git a/scripts/src/chartrepomanager/__init__.py b/scripts/src/chartrepomanager/__init__.py new file mode 100644 index 0000000..e69de29 diff --git 
a/scripts/src/chartrepomanager/chartrepomanager.py b/scripts/src/chartrepomanager/chartrepomanager.py new file mode 100644 index 0000000..02c9802 --- /dev/null +++ b/scripts/src/chartrepomanager/chartrepomanager.py @@ -0,0 +1,518 @@ +"""This file prepares the GitHub release and Index update steps. + +All artifacts that go into a GitHub release are prepared: +* The report.yaml. If not provided by the user, a report has been generated at an + earlier step and is included in the release. +* The public signing key, if provided by the user. +* The chart's sources, if provided by the user. +* The provenance file, if provided by the user. + +A GitHub release will be created in a later step, if the user hasn't selected the "Web +Catalog Only" option. + +An Index entry is prepared for this chart and version, either: +* Crafted from the Chart.yaml, if the chart's sources have been provided by the user. +* Prepared from scratch using information from the report.yaml. + +The index entry and chart URL are made available as GitHub outputs and the actual index +update occurs in a later step. + +""" + +import argparse +import base64 +import json +import shutil +import os +import sys +import re +import subprocess +import tempfile +import time +import urllib.parse +from environs import Env + +import yaml + +try: + from yaml import CLoader as Loader, CDumper as Dumper +except ImportError: + from yaml import Loader, Dumper + +sys.path.append("../") +from report import report_info +from chartrepomanager import indexannotations +from signedchart import signedchart +from pullrequest import prartifact +from reporegex import matchers +from tools import gitutils + + +def _encode_chart_entry(chart_entry): + """Encode the chart_entry to base64. This is needed to pass it as an argument to + the update index step. + + Args: + chart_entry (dict): the index entry for this chart to encode + + Returns: + str: The encoded base64 string equivalent. + + """ + chart_entry_str = json.dumps(chart_entry) + chart_entry_bytes = chart_entry_str.encode() + + # Decoding to string for the GITHUB_OUTPUT + return base64.b64encode(chart_entry_bytes).decode() + + +def get_modified_charts(api_url): + """Get the category, organization, chart name, and new version corresponding to + the chart being added or modified by this PR. + + Args: + api_url (str): URL of the GitHub PR + + Returns: + (str, str, str, str): category, organization, chart, and version (e.g. 
partner, + hashicorp, vault, 1.4.0) + """ + files = prartifact.get_modified_files(api_url) + pattern = re.compile( + matchers.submission_path_matcher(strict_categories=False) + r"/.*" + ) + for file_path in files: + m = pattern.match(file_path) + if m: + category, organization, chart, version = m.groups() + return category, organization, chart, version + + print("No modified files found.") + sys.exit(0) + + +def check_chart_source_or_tarball_exists(category, organization, chart, version): + """Check if the chart's source or chart's tarball is present + + Args: + category (str): Type of profile (community, partners, or redhat) + organization (str): Name of the organization (ex: hashicorp) + chart (str): Name of the chart (ex: vault) + version (str): The version of the chart (ex: 1.4.0) + + Returns: + (bool, bool): First boolean indicates the presence of the chart's source + Second boolean indicates the presence of the chart's tarball + """ + src = os.path.join("charts", category, organization, chart, version, "src") + if os.path.exists(src): + return True, False + + tarball = os.path.join( + "charts", category, organization, chart, version, f"{chart}-{version}.tgz" + ) + if os.path.exists(tarball): + return False, True + + return False, False + + +def check_report_exists(category, organization, chart, version): + """Check if a report was provided by the user + + Args: + category (str): Type of profile (community, partners, or redhat) + organization (str): Name of the organization (ex: hashicorp) + chart (str): Name of the chart (ex: vault) + version (str): The version of the chart (ex: 1.4.0) + + Returns: + (bool, str): a boolean set to True if the report.yaml file is present, and the + path to the report.yaml file. + """ + report_path = os.path.join( + "charts", category, organization, chart, version, "report.yaml" + ) + return os.path.exists(report_path), report_path + + +def generate_report(): + """Creates report file using the content generated by chart-pr-review + + Returns: + str: Path to the report.yaml file. + """ + cwd = os.getcwd() + report_content = urllib.parse.unquote(os.environ.get("REPORT_CONTENT")) + print("[INFO] Report content:") + print(report_content) + report_path = os.path.join(cwd, "report.yaml") + with open(report_path, "w") as fd: + fd.write(report_content) + return report_path + + +def prepare_chart_source_for_release(category, organization, chart, version): + """Create an archive file of the Chart for the GitHub release. + + When the PR contains the chart's source, we package it using "helm package" and + place the archive file in the ".cr-release-packages" directory. This directory will + contain all assets that should be uploaded as a GitHub Release. + + Args: + category (str): Type of profile (community, partners, or redhat) + organization (str): Name of the organization (ex: hashicorp) + chart (str): Name of the chart (ex: vault) + version (str): The version of the chart (ex: 1.4.0) + """ + print( + "[INFO] prepare chart source for release. 
%s, %s, %s, %s" + % (category, organization, chart, version) + ) + path = os.path.join("charts", category, organization, chart, version, "src") + out = subprocess.run(["helm", "package", path], capture_output=True) + print(out.stdout.decode("utf-8")) + print(out.stderr.decode("utf-8")) + chart_file_name = f"{chart}-{version}.tgz" + try: + os.remove(os.path.join(".cr-release-packages", chart_file_name)) + except FileNotFoundError: + pass + shutil.copy(f"{chart}-{version}.tgz", f".cr-release-packages/{chart_file_name}") + + +def prepare_chart_tarball_for_release( + category, organization, chart, version, signed_chart +): + """Move the provided tarball (and signing key if needed) to the release directory + + The tarball is moved to the ".cr-release-packages" directory. If the archive has + been signed with "helm package --sign", the provenance file is also included. + + Args: + category (str): Type of profile (community, partners, or redhat) + organization (str): Name of the organization (ex: hashicorp) + chart (str): Name of the chart (ex: vault) + version (str): The version of the chart (ex: 1.4.0) + signed_chart (bool): Set to True if the tarball chart is signed. + + Returns: + str: Path to the public key file used to sign the tarball + """ + print( + "[INFO] prepare chart tarball for release. %s, %s, %s, %s" + % (category, organization, chart, version) + ) + chart_file_name = f"{chart}-{version}.tgz" + # Path to the provided tarball + path = os.path.join( + "charts", category, organization, chart, version, chart_file_name + ) + try: + os.remove(os.path.join(".cr-release-packages", chart_file_name)) + except FileNotFoundError: + pass + shutil.copy(path, f".cr-release-packages/{chart_file_name}") + shutil.copy(path, chart_file_name) + + if signed_chart: + print("[INFO] Signed chart - include PROV file") + prov_file_name = f"{chart_file_name}.prov" + path = os.path.join( + "charts", category, organization, chart, version, prov_file_name + ) + try: + os.remove(os.path.join(".cr-release-packages", prov_file_name)) + except FileNotFoundError: + pass + shutil.copy(path, f".cr-release-packages/{prov_file_name}") + shutil.copy(path, prov_file_name) + gitutils.add_output( + "prov_file_name", + os.path.join(os.getcwd(), ".cr-release-packages", prov_file_name), + ) + return get_key_file(category, organization, chart, version) + return "" + + +def get_key_file(category, organization, chart, version): + owners_path = os.path.join("charts", category, organization, chart, "OWNERS") + key_in_owners = signedchart.get_pgp_key_from_owners(owners_path) + if key_in_owners: + key_file_name = f"{chart}-{version}.tgz.key" + print(f"[INFO] Signed chart - add public key file : {key_file_name}") + signedchart.create_public_key_file(key_in_owners, key_file_name) + return key_file_name + return "" + + +def create_index_from_chart(chart_file_name): + """Prepare the index entry for this chart + + Given that a chart tarball could be created (i.e. the user provided either the + chart's source or tarball), the content of Chart.yaml is used for the index entry. + + Args: + chart_file_name (str): Name of the chart's archive + + Returns: + dict: content of Chart.yaml, to be used as index entry. + """ + print("[INFO] create index from chart. 
%s" % (chart_file_name)) + out = subprocess.run( + [ + "helm", + "show", + "chart", + os.path.join(".cr-release-packages", chart_file_name), + ], + capture_output=True, + ) + p = out.stdout.decode("utf-8") + print(p) + print(out.stderr.decode("utf-8")) + crt = yaml.load(p, Loader=Loader) + return crt + + +def create_index_from_report(category, ocp_version_range, report_path): + """Prepare the index entry for this chart. + + In the case only a report was provided by the user, we need to craft an index entry + for this chart. + + To that end, this function performs the following actions: + * Get the list of annotations from report file + * Override / set additional annotations: + * Replaces the certifiedOpenShiftVersions annotation with the + testedOpenShiftVersion annotation. + * Adds supportedOpenShiftVersions if not already set. + * Adds (overrides) providerType. + * Use report.medatata.chart as a base for the index entry + * Merge (override) annotations into the index entry' annotations + * Add digest to index entry if known. + + Args: + category (str): Type of profile (community, partners, or redhat) + ocp_version_range (str): Range of supported OCP versions + report_path (str): Path to the report.yaml file + + Returns: + dict: Index entry for this chart + + """ + print( + "[INFO] create index from report. %s, %s, %s" + % (category, ocp_version_range, report_path) + ) + + annotations = indexannotations.getIndexAnnotations(ocp_version_range, report_path) + + print("category:", category) + redhat_to_community = bool(os.environ.get("REDHAT_TO_COMMUNITY")) + if category == "partners": + annotations["charts.openshift.io/providerType"] = "partner" + elif category == "redhat" and redhat_to_community: + annotations["charts.openshift.io/providerType"] = "community" + else: + annotations["charts.openshift.io/providerType"] = category + + chart_entry = report_info.get_report_chart(report_path) + if "annotations" in chart_entry: + annotations = chart_entry["annotations"] | annotations + + chart_entry["annotations"] = annotations + + digests = report_info.get_report_digests(report_path) + if "package" in digests: + chart_entry["digest"] = digests["package"] + + return chart_entry + + +def update_chart_annotation( + category, organization, chart_file_name, chart, ocp_version_range, report_path +): + """Untar the helm release that was placed under .cr-release-packages, update the + chart's annotations, and repackage the Helm release. + + In particular, following manipulations are performed on annotations: + * Gets the dict of annotations from the report file. + * Replaces the certifiedOpenShiftVersions annotation with the + testedOpenShiftVersion annotation. + * Adds supportedOpenShiftVersions if not already set. + * Adds (overrides) providerType. + * Adds provider if not already set. + * Merge (overrides) those annotations into the Chart's annotations. + + Args: + category (str): Type of profile (community, partners, or redhat) + organization (str): Name of the organization (ex: hashicorp) + chart_file_name (str): Name of the chart's archive + chart (str): Name of the chart (ex: vault) + ocp_version_range (str): Range of supported OCP versions + report_path (str): Path to the report.yaml file + """ + print( + "[INFO] Update chart annotation. 
%s, %s, %s, %s, %s" + % (category, organization, chart_file_name, chart, ocp_version_range) + ) + dr = tempfile.mkdtemp(prefix="annotations-") + + annotations = indexannotations.getIndexAnnotations(ocp_version_range, report_path) + + print("category:", category) + redhat_to_community = bool(os.environ.get("REDHAT_TO_COMMUNITY")) + if category == "partners": + annotations["charts.openshift.io/providerType"] = "partner" + elif category == "redhat" and redhat_to_community: + annotations["charts.openshift.io/providerType"] = "community" + else: + annotations["charts.openshift.io/providerType"] = category + + if "charts.openshift.io/provider" not in annotations: + data = open( + os.path.join("charts", category, organization, chart, "OWNERS") + ).read() + out = yaml.load(data, Loader=Loader) + vendor_name = out["vendor"]["name"] + annotations["charts.openshift.io/provider"] = vendor_name + + out = subprocess.run( + [ + "tar", + "zxvf", + os.path.join(".cr-release-packages", f"{chart_file_name}"), + "-C", + dr, + ], + capture_output=True, + ) + print(out.stdout.decode("utf-8")) + print(out.stderr.decode("utf-8")) + + fd = open(os.path.join(dr, chart, "Chart.yaml")) + data = yaml.load(fd, Loader=Loader) + + if "annotations" not in data: + data["annotations"] = annotations + else: + # merge the existing annotations with our new ones, overwriting + # values for overlapping keys with our own. + # Overwriting is important because the chart may contain values that we + # must override, such as the providerType which changes in redhat-to-community cases. + # |= syntax requires py3.9 + data["annotations"] |= annotations + out = yaml.dump(data, Dumper=Dumper) + with open(os.path.join(dr, chart, "Chart.yaml"), "w") as fd: + fd.write(out) + + out = subprocess.run( + ["helm", "package", os.path.join(dr, chart)], capture_output=True + ) + print(out.stdout.decode("utf-8")) + print(out.stderr.decode("utf-8")) + + try: + os.remove(os.path.join(".cr-release-packages", chart_file_name)) + except FileNotFoundError: + pass + + shutil.move(chart_file_name, ".cr-release-packages") + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument( + "-r", + "--repository", + dest="repository", + type=str, + required=True, + help="Git Repository", + ) + parser.add_argument( + "-u", + "--api-url", + dest="api_url", + type=str, + required=True, + help="API URL for the pull request", + ) + args = parser.parse_args() + category, organization, chart, version = get_modified_charts(args.api_url) + chart_source_exists, chart_tarball_exists = check_chart_source_or_tarball_exists( + category, organization, chart, version + ) + + env = Env() + web_catalog_only = env.bool("WEB_CATALOG_ONLY", False) + ocp_version_range = os.environ.get("OCP_VERSION_RANGE", "N/A") + + print(f"[INFO] webCatalogOnly/providerDelivery is {web_catalog_only}") + + public_key_file = "" + print("[INFO] Report Content : ", os.environ.get("REPORT_CONTENT")) + if chart_source_exists or chart_tarball_exists: + if chart_source_exists: + prepare_chart_source_for_release(category, organization, chart, version) + if chart_tarball_exists: + signed_chart = signedchart.is_chart_signed(args.api_url, "") + public_key_file = prepare_chart_tarball_for_release( + category, organization, chart, version, signed_chart + ) + + chart_file_name = f"{chart}-{version}.tgz" + tarball_path = os.path.join( + os.getcwd(), ".cr-release-packages", chart_file_name + ) + gitutils.add_output("path_to_chart_tarball", tarball_path) + + print("[INFO] Check if report exist as part of 
the commit") + report_exists, report_path = check_report_exists( + category, organization, chart, version + ) + + if report_exists: + shutil.copy(report_path, "report.yaml") + else: + print("[INFO] Generate report") + report_path = generate_report() + + print("[INFO] Updating chart annotation") + update_chart_annotation( + category, + organization, + chart_file_name, + chart, + ocp_version_range, + report_path, + ) + chart_url = f"https://github.com/{args.repository}/releases/download/{organization}-{chart}-{version}/{chart_file_name}" + print("[INFO] Creating index from chart") + chart_entry = create_index_from_chart(chart_file_name) + else: + report_path = os.path.join( + "charts", category, organization, chart, version, "report.yaml" + ) + print(f"[INFO] Report only PR: {report_path}") + shutil.copy(report_path, "report.yaml") + if signedchart.check_report_for_signed_chart(report_path): + public_key_file = get_key_file(category, organization, chart, version) + print("[INFO] Creating index from report") + chart_url = report_info.get_report_chart_url(report_path) + chart_entry = create_index_from_report(category, ocp_version_range, report_path) + + if not web_catalog_only: + current_dir = os.getcwd() + gitutils.add_output("report_file", f"{current_dir}/report.yaml") + if public_key_file: + print(f"[INFO] Add key file for release : {current_dir}/{public_key_file}") + gitutils.add_output("public_key_file", f"{current_dir}/{public_key_file}") + + gitutils.add_output("chart_entry", _encode_chart_entry(chart_entry)) + gitutils.add_output("chart_url", chart_url) + gitutils.add_output("version", version) + + print("Sleeping for 10 seconds") + time.sleep(10) diff --git a/scripts/src/chartrepomanager/indexannotations.py b/scripts/src/chartrepomanager/indexannotations.py new file mode 100644 index 0000000..9533be9 --- /dev/null +++ b/scripts/src/chartrepomanager/indexannotations.py @@ -0,0 +1,50 @@ +import sys +import semantic_version + +sys.path.append("../") +from report import report_info + + +def getIndexAnnotations(ocp_version_range, report_path): + """Get the annotations set in the report file. + + This function replaces the certifiedOpenShiftVersions annotation with the + testedOpenShiftVersion annotation. It also adds the + supportedOpenShiftVersions in the case it is not already set. + + It leaves all other annotations untouched. 
+ + Args: + ocp_version_range (str): Range of supported OCP versions + report_path (str): Path to the report.yaml file + + Returns: + dict: mapping of annotation names to their values + """ + annotations = report_info.get_report_annotations(report_path) + + set_annotations = {} + OCPSupportedSet = False + for annotation in annotations: + if annotation == "charts.openshift.io/certifiedOpenShiftVersions": + full_version = annotations[annotation] + if full_version != "N/A" and semantic_version.validate(full_version): + ver = semantic_version.Version(full_version) + set_annotations[ + "charts.openshift.io/testedOpenShiftVersion" + ] = f"{ver.major}.{ver.minor}" + else: + set_annotations[ + "charts.openshift.io/testedOpenShiftVersion" + ] = annotations[annotation] + else: + if annotation == "charts.openshift.io/supportedOpenShiftVersions": + OCPSupportedSet = True + set_annotations[annotation] = annotations[annotation] + + if not OCPSupportedSet: + set_annotations[ + "charts.openshift.io/supportedOpenShiftVersions" + ] = ocp_version_range + + return set_annotations diff --git a/scripts/src/checkautomerge/__init__.py b/scripts/src/checkautomerge/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/scripts/src/checkautomerge/checkautomerge.py b/scripts/src/checkautomerge/checkautomerge.py new file mode 100644 index 0000000..25f61b9 --- /dev/null +++ b/scripts/src/checkautomerge/checkautomerge.py @@ -0,0 +1,45 @@ +import time +import sys +import argparse +import os + +import requests + + +def ensure_pull_request_merged(api_url): + # Polls until the pull request reports as merged; exits non-zero if it never does. + # api_url https://api.github.com/repos///pulls/1 + headers = { + "Accept": "application/vnd.github.v3+json", + "Authorization": f'Bearer {os.environ.get("BOT_TOKEN")}', + } + merged = False + for _ in range(20): + r = requests.get(api_url, headers=headers) + response_content = r.json() + if "message" in response_content: + print(f'[ERROR] merge status: {response_content["message"]}') + sys.exit(1) + + if response_content["merged"]: + merged = True + break + + time.sleep(10) + + if not merged: + print("[ERROR] Pull request not merged") + sys.exit(1) + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument( + "-u", + "--api-url", + dest="api_url", + type=str, + required=True, + help="API URL for the pull request", + ) + args = parser.parse_args() + ensure_pull_request_merged(args.api_url) diff --git a/scripts/src/checkprcontent/__init__.py b/scripts/src/checkprcontent/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/scripts/src/checkprcontent/checkpr.py b/scripts/src/checkprcontent/checkpr.py new file mode 100644 index 0000000..4ecc69b --- /dev/null +++ b/scripts/src/checkprcontent/checkpr.py @@ -0,0 +1,297 @@ +import re +import os +import sys +import argparse +import json + +import requests +import semver +import yaml + +from reporegex import matchers + +try: + from yaml import CLoader as Loader +except ImportError: + from yaml import Loader + +sys.path.append("../") +from owners import owners_file +from report import verifier_report +from pullrequest import prartifact +from tools import gitutils + +ALLOW_CI_CHANGES = "allow/ci-changes" + +
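+# An editorial, condensed sketch of the webCatalogOnly decision implemented by +# check_web_catalog_only below, covering only the report-only case; this helper is +# hypothetical, is not called anywhere, and approximates the digest-missing case +# (which the real code reports as an error rather than returning False). +def _web_catalog_only_sketch(report_flag, owners_flag, has_package_digest): + if report_flag and owners_flag: + # both sides agree: allowed only when the report carries a package digest + return has_package_digest + if report_flag != owners_flag: + raise ValueError("report and OWNERS disagree on webCatalogOnly") + return False + +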
+def check_web_catalog_only(report_in_pr, num_files_in_pr, report_file_match): + print(f"[INFO] report in PR {report_in_pr}") + print(f"[INFO] num files in PR {num_files_in_pr}") + + category, organization, chart, version = report_file_match.groups() + + print(f"read owners file: {category}/{organization}/{chart}") + found_owners, owner_data = owners_file.get_owner_data(category, organization, chart) + + if found_owners: + owner_web_catalog_only = owners_file.get_web_catalog_only(owner_data) + print( + f"[INFO] webCatalogOnly/providerDelivery from OWNERS: {owner_web_catalog_only}" + ) + else: + msg = "[ERROR] OWNERS file was not found." + print(msg) + gitutils.add_output("owners-error-message", msg) + sys.exit(1) + + if report_in_pr: + report_file_path = os.path.join( + "pr-branch", "charts", category, organization, chart, version, "report.yaml" + ) + print(f"read report file: {report_file_path}") + found_report, report_data = verifier_report.get_report_data(report_file_path) + + if found_report: + report_web_catalog_only = verifier_report.get_web_catalog_only(report_data) + print( + f"[INFO] webCatalogOnly/providerDelivery from report: {report_web_catalog_only}" + ) + else: + msg = f"[ERROR] Failed to open report: {report_file_path}." + print(msg) + gitutils.add_output("pr-content-error-message", msg) + sys.exit(1) + + web_catalog_only = False + if report_in_pr and num_files_in_pr > 1: + if report_web_catalog_only or owner_web_catalog_only: + msg = "[ERROR] The web catalog distribution method requires the pull request to be report only." + print(msg) + gitutils.add_output("pr-content-error-message", msg) + sys.exit(1) + elif report_in_pr: + if report_web_catalog_only and owner_web_catalog_only: + if verifier_report.get_package_digest(report_data): + web_catalog_only = True + else: + msg = "[ERROR] The web catalog distribution method requires a package digest in the report." + print(msg) + gitutils.add_output("pr-content-error-message", msg) + sys.exit(1) + elif report_web_catalog_only: + msg = "[ERROR] Report indicates web catalog only but the distribution method set for the chart is not web catalog only." + print(msg) + gitutils.add_output("pr-content-error-message", msg) + sys.exit(1) + elif owner_web_catalog_only: + msg = "[ERROR] The web catalog distribution method is set for the chart but is not set in the report." + print(msg) + gitutils.add_output("pr-content-error-message", msg) + sys.exit(1) + + if web_catalog_only: + print("[INFO] webCatalogOnly/providerDelivery is a go") + gitutils.add_output("web_catalog_only", "True") + else: + gitutils.add_output("web_catalog_only", "False") + print("[INFO] webCatalogOnly/providerDelivery is a no-go") + +
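+# Editorial sketch: an approximation of what the submission-path patterns built in +# get_file_match_compiled_patterns below capture; the inlined regex is illustrative +# only, the real one comes from reporegex.matchers.submission_path_matcher(). +def _submission_path_sketch(): + approx = re.compile(r"charts/([\w-]+)/([\w-]+)/([\w-]+)/([\w.-]+)/.*") + m = approx.match("charts/partners/hashicorp/vault/0.20.0/report.yaml") + assert m is not None + return m.groups() # ("partners", "hashicorp", "vault", "0.20.0") + +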
+def get_file_match_compiled_patterns(): + """Return a tuple of three compiled patterns: the first matches any file in a + chart PR, the second matches a valid report file, and the third matches a chart + tarball. The patterns match based on the path of a file relative to the base of + the repository. + + All three patterns capture the chart type, chart vendor, chart name, and chart + version from the file path. + + Examples of valid file paths are: + + charts/partners/hashicorp/vault/0.20.0/ + charts/partners/hashicorp/vault/0.20.0//report.yaml + """ + + base = matchers.submission_path_matcher() + + pattern = re.compile(base + r"/.*") + reportpattern = re.compile(base + r"/report.yaml") + tarballpattern = re.compile(base + r"/(.*\.tgz$)") + return pattern, reportpattern, tarballpattern + + +def ensure_only_chart_is_modified(api_url, repository, branch): + label_names = prartifact.get_labels(api_url) + for label_name in label_names: + if label_name == ALLOW_CI_CHANGES: + return + + files = prartifact.get_modified_files(api_url) + pattern, reportpattern, tarballpattern = get_file_match_compiled_patterns() + matches_found = 0 + report_found = False + non_chart_files = {} + + for file_path in files: + match = pattern.match(file_path) + if not match: + file_name = os.path.basename(file_path) + non_chart_files[file_name] = file_path + else: + matches_found += 1 + if reportpattern.match(file_path): + print(f"[INFO] Report found: {file_path}") + gitutils.add_output("report-exists", "true") + report_found = True + else: + tar_match = tarballpattern.match(file_path) + if tar_match: + print(f"[INFO] tarball found: {file_path}") + _, _, chart_name, chart_version, tar_name = tar_match.groups() + expected_tar_name = f"{chart_name}-{chart_version}.tgz" + if tar_name != expected_tar_name: + msg = f"[ERROR] The tgz file is named incorrectly. Expected: {expected_tar_name}. Got: {tar_name}" + print(msg) + gitutils.add_output("pr-content-error-message", msg) + sys.exit(1) + + if matches_found == 1: + pattern_match = match + elif pattern_match.groups() != match.groups(): + msg = "[ERROR] A PR must contain only one chart. Current PR includes files for multiple charts." + print(msg) + gitutils.add_output("pr-content-error-message", msg) + sys.exit(1) + + if non_chart_files: + if ( + len(files) > 1 or "OWNERS" not in non_chart_files + ): # OWNERS not present, or present but not the only file + example_file = list(non_chart_files.values())[0] + msg = f"[ERROR] PR includes one or more files not related to charts, e.g., {example_file}" + print(msg) + gitutils.add_output("pr-content-error-message", msg) + + if "OWNERS" in non_chart_files: + file_path = non_chart_files["OWNERS"] + path_parts = file_path.split("/") + category = path_parts[1] # second path component, after "charts" + if category == "partners": + msg = "[ERROR] OWNERS file should never be set directly by partners. See certification docs." + print(msg) + gitutils.add_output("owners-error-message", msg) + elif ( + matches_found > 0 + ): # There is a mix of chart and non-chart files including OWNERS + msg = "[ERROR] Send OWNERS file by itself in a separate PR." + print(msg) + gitutils.add_output("owners-error-message", msg) + elif len(files) == 1: # OWNERS file is the only file in PR + msg = "[INFO] OWNERS file changes require manual review by maintainers." 
+ print(msg) + gitutils.add_output("owners-error-message", msg) + + sys.exit(1) + + check_web_catalog_only(report_found, matches_found, pattern_match) + + if matches_found > 0: + category, organization, chart, version = pattern_match.groups() + gitutils.add_output( + "category", f"{'partner' if category == 'partners' else category}" + ) + gitutils.add_output("organization", organization) + + if not semver.VersionInfo.isvalid(version): + msg = ( + f"[ERROR] Helm chart version is not a valid semantic version: {version}" + ) + print(msg) + gitutils.add_output("pr-content-error-message", msg) + sys.exit(1) + + print("Downloading index.yaml", category, organization, chart, version) + r = requests.get( + f"https://raw.githubusercontent.com/{repository}/{branch}/index.yaml" + ) + + if r.status_code == 200: + data = yaml.load(r.text, Loader=Loader) + else: + data = {"apiVersion": "v1", "entries": {}} + + entry_name = f"{organization}-{chart}" + d = data["entries"].get(entry_name, []) + gitutils.add_output("chart-entry-name", entry_name) + for v in d: + if v["version"] == version: + msg = f"[ERROR] Helm chart release already exists in the index.yaml: {version}" + print(msg) + gitutils.add_output("pr-content-error-message", msg) + sys.exit(1) + + tag_name = f"{organization}-{chart}-{version}" + gitutils.add_output("release_tag", tag_name) + tag_api = f"https://api.github.com/repos/{repository}/git/ref/tags/{tag_name}" + headers = { + "Accept": "application/vnd.github.v3+json", + "Authorization": f'Bearer {os.environ.get("BOT_TOKEN")}', + } + print(f"[INFO] checking tag: {tag_api}") + r = requests.head(tag_api, headers=headers) + if r.status_code == 200: + msg = f"[ERROR] Helm chart release already exists in the GitHub Release/Tag: {tag_name}" + print(msg) + gitutils.add_output("pr-content-error-message", msg) + sys.exit(1) + try: + if prartifact.xRateLimit in r.headers: + print( + f"[DEBUG] {prartifact.xRateLimit} : {r.headers[prartifact.xRateLimit]}" + ) + if prartifact.xRateRemain in r.headers: + print( + f"[DEBUG] {prartifact.xRateRemain} : {r.headers[prartifact.xRateRemain]}" + ) + + response_content = r.json() + if "message" in response_content: + print( + f'[ERROR] getting index file content: {response_content["message"]}' + ) + sys.exit(1) + except json.decoder.JSONDecodeError: + pass + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument( + "-b", + "--index-branch", + dest="branch", + type=str, + required=True, + help="index branch", + ) + parser.add_argument( + "-r", + "--repository", + dest="repository", + type=str, + required=True, + help="Git Repository", + ) + parser.add_argument( + "-u", + "--api-url", + dest="api_url", + type=str, + required=True, + help="API URL for the pull request", + ) + args = parser.parse_args() + branch = args.branch.split("/")[-1] + ensure_only_chart_is_modified(args.api_url, args.repository, branch) + + +if __name__ == "__main__": + main() diff --git a/scripts/src/indexfile/__init__.py b/scripts/src/indexfile/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/scripts/src/indexfile/index.py b/scripts/src/indexfile/index.py new file mode 100644 index 0000000..b45201d --- /dev/null +++ b/scripts/src/indexfile/index.py @@ -0,0 +1,111 @@ +import json +import requests +import yaml +import semantic_version +import sys + +sys.path.append("../") + +INDEX_FILE = "https://charts.openshift.io/index.yaml" + + +def _make_http_request(url, body=None, params={}, headers={}, verbose=False): + response = requests.get(url, params=params, 
headers=headers, json=body) + if verbose: + print(json.dumps(headers, indent=4, sort_keys=True)) + print(json.dumps(body, indent=4, sort_keys=True)) + print(json.dumps(params, indent=4, sort_keys=True)) + print(response.text) + return response.text + + +def _load_index_yaml(): + yaml_text = _make_http_request(INDEX_FILE) + dct = yaml.safe_load(yaml_text) + return dct + + +def get_chart_info(tar_name): + index_dct = _load_index_yaml() + for entry, charts in index_dct["entries"].items(): + if tar_name.startswith(entry): + for chart in charts: + index_tar_name = f"{entry}-{chart['version']}" + if tar_name == index_tar_name: + print(f"[INFO] match found: {tar_name}") + providerType = chart["annotations"][ + "charts.openshift.io/providerType" + ] + provider = chart["annotations"]["charts.openshift.io/provider"] + return providerType, provider, chart["name"], chart["version"] + print(f"[INFO] match not found: {tar_name}") + return "", "", "", "" + + +def get_charts_info(): + chart_info_list = [] + + index_dct = _load_index_yaml() + for entry, charts in index_dct["entries"].items(): + for chart in charts: + chart_info = {} + chart_info["name"] = chart["name"] + chart_info["version"] = chart["version"] + chart_info["providerType"] = chart["annotations"][ + "charts.openshift.io/providerType" + ] + chart_info["provider"] = entry.removesuffix(f'-{chart["name"]}') + # print(f'[INFO] found chart : {chart_info["provider"]} {chart["name"]} {chart["version"]} ') + if "charts.openshift.io/supportedOpenShiftVersions" in chart["annotations"]: + chart_info["supportedOCP"] = chart["annotations"][ + "charts.openshift.io/supportedOpenShiftVersions" + ] + else: + chart_info["supportedOCP"] = "" + if "kubeVersion" in chart: + chart_info["kubeVersion"] = chart["kubeVersion"] + else: + chart_info["kubeVersion"] = "" + chart_info_list.append(chart_info) + + return chart_info_list + + +def get_latest_charts(): + chart_list = get_charts_info() + + print(f"{len(chart_list)} charts found in Index file") + + chart_in_process = {"name": ""} + chart_latest_version = "" + latest_charts = [] + + for index, chart in enumerate(chart_list): + chart_name = chart["name"] + # print(f'[INFO] look for latest chart : {chart_name} {chart["version"]}') + if chart_name == chart_in_process["name"]: + new_version = semantic_version.Version.coerce(chart["version"]) + # print(f' [INFO] compare chart versions : {new_version}({chart["version"]}) : {chart_latest_version}') + if new_version > chart_latest_version: + # print(f' [INFO] a new latest chart version : {new_version}') + chart_latest_version = new_version + chart_in_process = chart + else: + if chart_in_process["name"] != "": + # print(f' [INFO] chart completed : {chart_in_process["name"]} {chart_in_process["version"]}') + latest_charts.append(chart_in_process) + + # print(f'[INFO] new chart found : {chart_name} {chart["version"]}') + chart_in_process = chart + chart_version = chart["version"] + if chart_version.startswith("v"): + chart_version = chart_version[1:] + chart_latest_version = semantic_version.Version.coerce(chart_version) + else: + chart_in_process = chart + + if index + 1 == len(chart_list): + # print(f' [INFO] last chart completed : {chart_in_process["name"]} {chart_in_process["version"]}') + latest_charts.append(chart_in_process) + + return latest_charts diff --git a/scripts/src/metrics/__init__.py b/scripts/src/metrics/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/scripts/src/metrics/metrics.py b/scripts/src/metrics/metrics.py new file mode 100644 
index 0000000..00e96aa --- /dev/null +++ b/scripts/src/metrics/metrics.py @@ -0,0 +1,674 @@ +import argparse +import itertools +import requests +import sys +import analytics +import os +import re +from github import Github + +sys.path.append("../") +from indexfile import index +from pullrequest import prepare_pr_comment as pr_comment +from collections import OrderedDict +from reporegex import matchers + +file_pattern = re.compile( + matchers.submission_path_matcher(strict_categories=False) + r"/.*" +) +chart_downloads_event = "Chart Downloads v1.0" +ignore_users = [ + "zonggen", + "mmulholla", + "dperaza4dustbit", + "openshift-helm-charts-bot", + "baijum", + "tisutisu", + "rhrivero", + "Kartikey-star", +] +pr_submission = "PR Submission v1.0" +pr_merged = "PR Merged v1.0" +pr_outcome = "PR Outcome v1.0" +charts = "charts" +xRateLimit = "X-RateLimit-Limit" +xRateRemain = "X-RateLimit-Remaining" + + +def parse_response(response): + result = [] + for obj in response: + if "name" in obj and "assets" in obj: + for asset in obj["assets"]: + if asset["name"].endswith(".tgz"): + release = { + "name": obj["name"], + "asset": {asset.get("name"): asset.get("download_count", 0)}, + } + result.append(release) + return result + + +def get_release_metrics(): + result = [] + for i in itertools.count(start=1): + request_headers = { + "Accept": "application/vnd.github.v3+json", + "Authorization": f'Bearer {os.environ.get("BOT_TOKEN")}', + } + response = requests.get( + f"https://api.github.com/repos/openshift-helm-charts/charts/releases?per_page=100&page={i}", + headers=request_headers, + ) + + if not 200 <= response.status_code < 300: + print( + f"[ERROR] unexpected response getting release data: {response.status_code}: {response.reason}" + ) + sys.exit(1) + + response_json = response.json() + if xRateLimit in response.headers: + print(f"[DEBUG] {xRateLimit} : {response.headers[xRateLimit]}") + if xRateRemain in response.headers: + print(f"[DEBUG] {xRateRemain} : {response.headers[xRateRemain]}") + + if "message" in response_json: + print(f'[ERROR] getting releases: {response_json["message"]}') + sys.exit(1) + + if len(response_json) == 0: + break + result.extend(response_json) + return parse_response(result) + + +def send_release_metrics(write_key, downloads, prefix): + metrics = {} + chart_downloads = [] + chart_downloads_latest = [] + for release in downloads: + _, provider, chart, _ = index.get_chart_info(release.get("name")) + if len(provider) > 0: + if provider not in metrics: + metrics[provider] = {} + if chart not in metrics[provider]: + metrics[provider][chart] = {} + + for key in release.get("asset"): + metrics[provider][chart][key] = release.get("asset")[key] + + for provider in metrics: + for chart in metrics[provider]: + ordered_download_perChart = OrderedDict( + sorted( + metrics[provider][chart].items(), key=lambda i: i[1], reverse=True + ) + ) + for key, value in ordered_download_perChart.items(): + chart_downloads_latest.append( + {"downloads": value, "name": key, "provider": provider} + ) + break + for key, value in metrics[provider][chart].items(): + chart_downloads.append( + {"downloads": value, "name": key, "provider": provider} + ) + chart_downloads.sort(key=lambda k: k["downloads"], reverse=True) + chart_downloads_latest.sort(key=lambda k: k["downloads"], reverse=True) + + for x in range(len(chart_downloads)): + send_download_metric( + write_key, + chart_downloads[x]["provider"], + chart_downloads[x]["downloads"], + chart_downloads[x]["name"], + x + 1, + prefix, + ) + + # guard against fewer than five charts being present in the index + for x in range(min(5, len(chart_downloads_latest))): + send_top_five_metric( + write_key, + chart_downloads_latest[x]["provider"], + chart_downloads_latest[x]["downloads"], + chart_downloads_latest[x]["name"], + x + 1, + prefix, + ) + +
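+# Editorial sketch (not called anywhere) of the ranking performed above: sort a +# name-to-downloads mapping by count, descending; the data here is hypothetical. +def _ranking_sketch(): + downloads = {"vault-0.19.0.tgz": 12, "vault-0.20.0.tgz": 40} + ranked = sorted(downloads.items(), key=lambda kv: kv[1], reverse=True) + assert ranked[0] == ("vault-0.20.0.tgz", 40) # rank 1 is the most downloaded + return ranked + +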
+def send_download_metric(write_key, partner, downloads, artifact_name, rank, prefix): + id = f"{prefix}-{partner}-{artifact_name}" + properties = {"downloads": downloads, "rank": rank, "name": artifact_name} + + send_metric(write_key, id, chart_downloads_event, properties) + + +def send_top_five_metric(write_key, partner, downloads, artifact_name, rank, prefix): + id = f"{prefix}-top5" + properties = {"downloads": downloads, "rank": rank, "name": artifact_name} + + send_metric(write_key, id, chart_downloads_event, properties) + + +def send_pull_request_metrics(write_key, g): + chart_submissions = 0 + partners = [] + partner_charts = [] + charts_merged = 0 + charts_abandoned = 0 + charts_in_progress = 0 + abandoned = [] + repo = g.get_repo("openshift-helm-charts/charts") + pull_requests = repo.get_pulls(state="all") + for pr in pull_requests: + pr_content, type, provider, chart, version = check_and_get_pr_content(pr, repo) + if pr_content != "not-chart": + chart_submissions += 1 + if pr.closed_at and not pr.merged_at: + charts_abandoned += 1 + print(f"[INFO] Abandoned PR: {pr.number} ") + abandoned.append(pr.number) + elif pr.merged_at: + charts_merged += 1 + if type == "partner": + if provider not in partners: + partners.append(provider) + if chart not in partner_charts: + partner_charts.append(chart) + else: + charts_in_progress += 1 + + check_rate_limit(g, False) + + print(f"[INFO] abandoned PRS: {abandoned}") + send_summary_metric( + write_key, + chart_submissions, + charts_merged, + charts_abandoned, + charts_in_progress, + len(partners), + len(partner_charts), + ) + + +def get_pr_files(pr): + files = pr.get_files() + pr_chart_submission_files = [] + for file in files: + pr_chart_submission_files.append(file.filename) + return pr_chart_submission_files + + +def process_report_fails(message_file): + fails = "0" + num_error_messages = 0 + error_messages = [] + checks_failed = [] + + fails_started = False + check_failures = False + non_check_failures = False + + with open(message_file) as file: + message_lines = [line.rstrip() for line in file] + for message_line in message_lines: + if not fails_started: + fails_started = pr_comment.get_verifier_errors_comment() in message_line + else: + if "[ERROR] Chart verifier report includes failures:" in message_line: + check_failures = True + if pr_comment.get_verifier_errors_trailer() in message_line: + break + elif "Number of checks failed" in message_line: + body_line_parts = message_line.split(":") + fails = body_line_parts[1].strip() + print(f"[INFO] Number of failures in report: {fails}") + elif fails != "0": + if "Error message(s)" in message_line: + num_error_messages = 1 + elif num_error_messages <= int(fails): + print(f"[INFO] add error message: {message_line.strip()}") + error_messages.append(message_line.strip()) + num_error_messages += 1 + elif not check_failures and len(message_line) > 0: + non_check_failures = True + print(f"[INFO] non-check message: {message_line.strip()}") + error_messages.append(message_line.strip()) + + if check_failures: + for error_message in error_messages: + if ( + "Missing required annotations" in error_message + or "Empty metadata in chart" in error_message + ): + checks_failed.append("required-annotations-present") + elif "Chart test files do not exist" in error_message: + checks_failed.append("contains-test") +
elif "API version is not V2, used in Helm 3" in error_message: + checks_failed.append("is-helm-v3") + elif "Values file does not exist" in error_message: + checks_failed.append("contains-values") + elif "Values schema file does not exist" in error_message: + checks_failed.append("contains-values-schema") + elif ( + "Kubernetes version is not specified" in error_message + or "Error converting kubeVersion to an OCP range" in error_message + ): + checks_failed.append("has-kubeversion") + elif "Helm lint has failed" in error_message: + checks_failed.append("helm_lint") + elif ( + "Failed to certify images" in error_message + or "Image is not Red Hat certified" in error_message + ): + if "images-are-certified" not in checks_failed: + checks_failed.append("images-are-certified") + elif "Chart does not have a README" in error_message: + checks_failed.append("has-readme") + elif "Missing mandatory check" in error_messages: + checks_failed.append("missing-mandatory-check") + elif "Chart contains CRDs" in error_messages: + checks_failed.append("not-contains-crds") + elif "CSI objects exist" in error_message: + checks_failed.append("not-contain-csi-objects") + else: + checks_failed.append("chart-testing") + elif non_check_failures: + fails = "1" + checks_failed.append("other-non-check-failure") + + return int(fails), checks_failed + + +def process_comments(repo, pr): + issue = repo.get_issue(number=pr.number) + comments = issue.get_comments() + num_builds = 0 + for comment in comments: + report_result = parse_message(comment.body, pr.number) + if report_result != "not-found": + num_builds += 1 + + return num_builds + + +def process_comment_file(message_file, pr_number): + with open(message_file, "r") as file: + message = file.read() + + return parse_message(message, pr_number) + + +def parse_message(message, pr_number): + report_result = "not-found" + if pr_comment.get_comment_header(pr_number) in message: + if pr_comment.get_verifier_errors_comment() in message: + report_result = "report-failure" + elif pr_comment.get_content_failure_message() in message: + report_result = "content-failure" + elif pr_comment.get_success_coment() in message: + report_result = "report-pass" + elif pr_comment.get_community_review_message() in message: + report_result = "community_review" + + print(f"[INFO] report_result : {report_result}") + return report_result + + +def get_pr_content(pr): + pr_content = "not-chart" + pr_chart_submission_files = get_pr_files(pr) + if len(pr_chart_submission_files) > 0: + match = file_pattern.match(pr_chart_submission_files[0]) + if match: + type, org, chart, version = match.groups() + if type == "partners": + type = "partner" + print( + f"[INFO] Found PR {pr.number}:{pr.user.login}: type: {type},org: {org},chart: {chart},version: {version}, #files: {len(pr_chart_submission_files)}, file match: {pr_chart_submission_files[0]}" + ) + tgz_found = False + report_found = False + src_found = False + for file in pr_chart_submission_files: + filename = os.path.basename(file) + if filename == "report.yaml": + report_found = True + elif filename.endswith(".tgz"): + tgz_found = True + elif filename == "Chart.yaml" and len(pr_chart_submission_files) > 2: + src_found = True + + if report_found: + if tgz_found: + pr_content = "report and tgz" + elif src_found: + pr_content = "src and report" + else: + pr_content = "report only" + elif tgz_found: + pr_content = "tgz only" + elif src_found: + pr_content = "src only" + + return pr_content, type, org, chart, version + + return pr_content, "", "", "", "" 
+ + +def check_and_get_pr_content(pr, repo): + repo_name = repo.full_name + if ( + (pr.user.login in ignore_users and pr.user.login not in repo_name) + or pr.draft + or pr.base.ref != "main" + ): + print( + f"[INFO] Ignore pr, user: {pr.user.login}, draft: {pr.draft}, target_branch: {pr.base.ref}" + ) + return "not-chart", "", "", "", "" + + return get_pr_content(pr) + + +def process_pr(write_key, repo, message_file, pr_number, action, prefix, pr_directory): + pr = repo.get_pull(int(pr_number)) + pr_content, type, provider, chart, version = check_and_get_pr_content(pr, repo) + if pr_content != "not-chart": + if action == "opened": + send_submission_metric( + write_key, + type, + provider, + chart, + pr_number, + pr_content, + prefix, + pr_directory, + ) + + pr_result = process_comment_file(message_file, pr_number) + num_fails = 0 + if pr_result == "report-failure": + num_fails, checks_failed = process_report_fails(message_file) + for check in checks_failed: + send_check_metric(write_key, type, provider, chart, pr_number, check) + elif pr_result == "content-failure": + num_fails = 1 + + send_outcome_metric( + write_key, type, provider, chart, pr_number, pr_result, num_fails, prefix + ) + + # if pr is merged we can collect summary stats + if pr.merged_at: + builds = process_comments(repo, pr) + print(f"[INFO] PR build cycles : {builds}") + builds_out = str(builds) + if builds > 5: + builds_out = "> 5" + + elapsed_time = pr.merged_at - pr.created_at + # round up to an hour to avoid 0 time + elapsed_hours = elapsed_time.total_seconds() // 3600 + duration = "0-1 hours" + if 24 > elapsed_hours > 1: + duration = "1-24 hours" + elif 168 > elapsed_hours > 24: + duration = "1-7 days" + elif elapsed_hours > 168: + duration = "> 7 days" + + send_merge_metric( + write_key, + type, + provider, + chart, + duration, + pr_number, + builds_out, + pr_content, + prefix, + pr_directory, + ) + + +def send_summary_metric( + write_key, + num_submissions, + num_merged, + num_abandoned, + num_in_progress, + num_partners, + num_charts, +): + properties = { + "submissions": num_submissions, + "merged": num_merged, + "abandoned": num_abandoned, + "in_progress": num_in_progress, + "partners": num_partners, + "partner_charts": num_charts, + } + id = "helm-metric-summary" + + send_metric(write_key, id, "PR Summary", properties) + + +def send_outcome_metric( + write_key, type, provider, chart, pr_number, outcome, num_fails, prefix +): + properties = { + "type": type, + "provider": provider, + "chart": chart, + "pr": pr_number, + "outcome": outcome, + "failures": num_fails, + } + id = f"{prefix}-{type}-{provider}" + + send_metric(write_key, id, pr_outcome, properties) + + +def send_check_metric(write_key, type, partner, chart, pr_number, check): + properties = { + "type": type, + "provider": partner, + "chart": chart, + "pr": pr_number, + "check": check, + } + id = f"helm-metric-{partner}" + + send_metric(write_key, id, "PR Report Fails", properties) + + +def send_merge_metric( + write_key, + type, + partner, + chart, + duration, + pr_number, + num_builds, + pr_content, + prefix, + pr_directory, +): + update = getChartUpdate(type, partner, chart, pr_directory) + id = f"{prefix}-{type}-{partner}" + properties = { + "type": type, + "provider": partner, + "chart": chart, + "pr": pr_number, + "builds": num_builds, + "duration": duration, + "content": pr_content, + "update": update, + } + + send_metric(write_key, id, pr_merged, properties) + + +def send_submission_metric( + write_key, type, partner, chart, pr_number, 
pr_content, prefix, pr_directory +): + update = getChartUpdate(type, partner, chart, pr_directory) + id = f"{prefix}-{type}-{partner}" + properties = { + "type": type, + "provider": partner, + "chart": chart, + "pr": pr_number, + "pr content": pr_content, + "update": update, + } + + send_metric(write_key, id, pr_submission, properties) + + +def on_error(error, items): + print("An error occurred creating metrics:", error) + print("error with items:", items) + sys.exit(1) + + +def send_metric(write_key, id, event, properties): + analytics.write_key = write_key + analytics.on_error = on_error + + print(f"[INFO] Add track: id: {id}, event:{event}, properties:{properties}") + + analytics.track(id, event, properties) + + +def check_rate_limit(g, force): + rate_limit = g.get_rate_limit() + if force or rate_limit.core.remaining < 10: + print(f"[INFO] rate limit info: {rate_limit.core}") + + +def getChartUpdate(type, partner, chart, cwd): + if type == "partner": + directory_type = "partners" + else: + directory_type = type + directoryPath = os.path.join(cwd, charts, directory_type, partner, chart) + # Checking if the directory contains only the OWNERS file + print(os.listdir(directoryPath)) + if len(os.listdir(directoryPath)) == 1: + return "new chart" + else: + return "new version" + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument( + "-k", + "--write-key", + dest="write_key", + type=str, + required=True, + help="segment write key", + ) + parser.add_argument( + "-t", + "--metric-type", + dest="type", + type=str, + required=True, + help="metric type, releases or pull_request", + ) + parser.add_argument( + "-m", + "--message-file", + dest="message_file", + type=str, + required=False, + help="message for metric", + ) + parser.add_argument( + "-n", + "--pr-number", + dest="pr_number", + type=str, + required=False, + help="number of the pr", + ) + parser.add_argument( + "-a", + "--pr-action", + dest="pr_action", + type=str, + required=False, + help="The event action of the pr", + ) + parser.add_argument( + "-r", + "--repository", + dest="repository", + type=str, + required=False, + help="The repository of the pr", + ) + parser.add_argument( + "-p", + "--prefix", + dest="prefix", + type=str, + required=False, + help="The prefix of the id in segment", + ) + parser.add_argument( + "-d", + "--pr_dir", + dest="pr_dir", + type=str, + required=False, + help="Directory of pull request code.", + ) + + args = parser.parse_args() + print("Input arguments:") + print(f" --write-key length : {len(args.write_key)}") + print(f" --metric-type : {args.type}") + print(f" --message-file : {args.message_file}") + print(f" --pr-number : {args.pr_number}") + print(f" --pr-action : {args.pr_action}") + print(f" --repository : {args.repository}") + print(f" --prefix : {args.prefix}") + print(f" --pr_dir : {args.pr_dir}") + + if not args.write_key: + print("Error: Segment write key not set") + sys.exit(1) + + g = Github(os.environ.get("BOT_TOKEN")) + + if args.type == "pull_request": + repo_current = g.get_repo(args.repository) + process_pr( + args.write_key, + repo_current, + args.message_file, + args.pr_number, + args.pr_action, + args.prefix, + args.pr_dir, + ) + else: + check_rate_limit(g, True) + send_release_metrics(args.write_key, get_release_metrics(), args.prefix) + check_rate_limit(g, True) + send_pull_request_metrics(args.write_key, g) + check_rate_limit(g, True) + + +if __name__ == "__main__": + main() diff --git a/scripts/src/metrics/pushowners.py b/scripts/src/metrics/pushowners.py new 
file mode 100644 index 0000000..3612ac9 --- /dev/null +++ b/scripts/src/metrics/pushowners.py @@ -0,0 +1,204 @@ +import argparse +import sys +import analytics + +sys.path.append("../") +from owners import owners_file + + +def getVendorType(changed_file): + path_as_list = changed_file.split("/") + for i in range(len(path_as_list) - 1): + if path_as_list[i] == "charts": + vendor_type = path_as_list[i + 1] + return vendor_type + + +def getFileContent(changed_file): + status, owner_data = owners_file.get_owner_data_from_file(changed_file) + if status is True: + users_included = owners_file.get_users_included(owner_data) + web_catalog_only = owners_file.get_web_catalog_only(owner_data) + if not web_catalog_only: + web_catalog_only_string = "No" + else: + web_catalog_only_string = "Yes" + vendor_name = owners_file.get_vendor(owner_data) + chart_name = owners_file.get_chart(owner_data) + vendor_type = getVendorType(changed_file) + return ( + users_included, + web_catalog_only_string, + vendor_name, + chart_name, + vendor_type, + ) + else: + print("Exception loading OWNERS file") + return "", "", "", "", "" + + +def process_pr(added_file, modified_file): + if modified_file != "": + action = "update" + update = "existing-vendor" + ( + users_included, + web_catalog_only, + vendor_name, + chart_name, + vendor_type, + ) = getFileContent(modified_file) + return ( + users_included, + web_catalog_only, + vendor_name, + chart_name, + vendor_type, + action, + update, + ) + elif added_file != "": + action = "create" + update = "new-vendor" + ( + users_included, + web_catalog_only, + vendor_name, + chart_name, + vendor_type, + ) = getFileContent(added_file) + return ( + users_included, + web_catalog_only, + vendor_name, + chart_name, + vendor_type, + action, + update, + ) + + +def send_owner_metric( + write_key, + prefix, + users_included, + web_catalog_only, + partner, + chart_name, + type, + action, + update, +): + if chart_name != "" and partner != "": + id = f"{prefix}-{type}-{chart_name}" + properties = { + "type": type, + "vendor": partner, + "chart": chart_name, + "users_included": users_included, + "provider_delivery": web_catalog_only, + "action": action, + "update": update, + } + send_metric(write_key, id, "owners v1.0", properties) + + +def on_error(error, items): + print("An error occurred creating metrics:", error) + print("error with items:", items) + sys.exit(1) + + +def send_metric(write_key, id, event, properties): + analytics.write_key = write_key + analytics.on_error = on_error + + print(f"[INFO] Add track: id: {id}, event:{event}, properties:{properties}") + + analytics.track(id, event, properties) + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument( + "-k", + "--write-key", + dest="write_key", + type=str, + required=True, + help="segment write key", + ) + parser.add_argument( + "-t", + "--metric-type", + dest="type", + type=str, + required=True, + help="metric type, releases or pull_request", + ) + parser.add_argument( + "-n", "--added", dest="added", nargs="*", required=False, help="files added" + ) + parser.add_argument( + "-a", + "--modified", + dest="modified", + nargs="*", + required=False, + help="files modified", + ) + parser.add_argument( + "-r", + "--repository", + dest="repository", + type=str, + required=False, + help="The repository of the pr", + ) + parser.add_argument( + "-p", + "--prefix", + dest="prefix", + type=str, + required=False, + help="The prefix of the id in segment", + ) + + args = parser.parse_args() + print("Input arguments:") + print(f" 
--write-key length : {len(args.write_key)}") + print(f" --metric-type : {args.type}") + print(f" --added : {args.added}") + print(f" --modified : {args.modified}") + print(f" --repository : {args.repository}") + print(f" --prefix : {args.prefix}") + + if not args.write_key: + print("Error: Segment write key not set") + sys.exit(1) + + ( + users_included, + web_catalog_only, + vendor_name, + chart_name, + vendor_type, + action, + update, + ) = process_pr(args.added[0], args.modified[0]) + send_owner_metric( + args.write_key, + args.prefix, + users_included, + web_catalog_only, + vendor_name, + chart_name, + vendor_type, + action, + update, + ) + + +if __name__ == "__main__": + main() diff --git a/scripts/src/owners/__init__.py b/scripts/src/owners/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/scripts/src/owners/checkuser.py b/scripts/src/owners/checkuser.py new file mode 100644 index 0000000..05b615b --- /dev/null +++ b/scripts/src/owners/checkuser.py @@ -0,0 +1,95 @@ +""" +Used by a github action to determine if the owner of a PR is permitted to change the files +associated with publishing a release of the development workflows. + +parameters: + --api-url : API URL for the pull request + --user : user to be checked for authority to modify release files in a PR + +results: + exit code 1 if pull request contains restricted files and user is not authorized to modify them. +""" + +import re +import argparse +import os +import sys +import yaml + +try: + from yaml import CLoader as Loader +except ImportError: + from yaml import Loader + +sys.path.append("../") +from pullrequest import prartifact + + +OWNERS_FILE = "OWNERS" +VERSION_FILE = "release/release_info.json" +THIS_FILE = "scripts/src/owners/checkuser.py" + + +def verify_user(username): + print(f"[INFO] Verify user. 
{username}")
+    if not os.path.exists(OWNERS_FILE):
+        print(f"[ERROR] {OWNERS_FILE} file does not exist.")
+        return False
+    else:
+        data = open(OWNERS_FILE).read()
+        out = yaml.load(data, Loader=Loader)
+        if username in out["approvers"]:
+            print(f"[INFO] {username} authorized")
+            return True
+        else:
+            print(f"[ERROR] {username} not authorized")
+    return False
+
+
+def check_for_restricted_file(api_url):
+    files = prartifact.get_modified_files(api_url)
+    pattern_owners = re.compile(OWNERS_FILE)
+    pattern_versionfile = re.compile(VERSION_FILE)
+    pattern_thisfile = re.compile(THIS_FILE)
+
+    for filename in files:
+        if (
+            pattern_versionfile.match(filename)
+            or pattern_owners.match(filename)
+            or pattern_thisfile.match(filename)
+        ):
+            print(f"[INFO] restricted file found: {filename}")
+            return True
+
+    return False
+
+
+def main():
+    parser = argparse.ArgumentParser()
+    parser.add_argument(
+        "-a",
+        "--api-url",
+        dest="api_url",
+        type=str,
+        required=True,
+        help="API URL for the pull request",
+    )
+    parser.add_argument(
+        "-u",
+        "--user",
+        dest="username",
+        type=str,
+        required=True,
+        help="user to be checked for authority to modify release files in a PR",
+    )
+    args = parser.parse_args()
+
+    if check_for_restricted_file(args.api_url):
+        if verify_user(args.username):
+            print(f"[INFO] {args.username} is authorized to modify all files in the PR")
+        else:
+            print(
+                f"[INFO] {args.username} is not authorized to modify all files in the PR"
+            )
+            sys.exit(1)
+    else:
+        print("[INFO] no restricted files found in the PR")
diff --git a/scripts/src/owners/owners_file.py b/scripts/src/owners/owners_file.py
new file mode 100644
index 0000000..7cbeac1
--- /dev/null
+++ b/scripts/src/owners/owners_file.py
@@ -0,0 +1,69 @@
+import os
+
+import yaml
+
+try:
+    from yaml import CLoader as Loader
+except ImportError:
+    from yaml import Loader
+
+
+def get_owner_data(category, organization, chart):
+    path = os.path.join("charts", category, organization, chart, "OWNERS")
+    status, owner_content = get_owner_data_from_file(path)
+    return status, owner_content
+
+
+def get_owner_data_from_file(owner_path):
+    try:
+        with open(owner_path) as owner_data:
+            owner_content = yaml.load(owner_data, Loader=Loader)
+            return True, owner_content
+    except Exception as err:
+        print(f"Exception loading OWNERS file: {err}")
+        return False, ""
+
+
+def get_vendor(owner_data):
+    vendor = ""
+    try:
+        vendor = owner_data["vendor"]["name"]
+    except Exception:
+        pass
+    return vendor
+
+
+def get_chart(owner_data):
+    chart = ""
+    try:
+        chart = owner_data["chart"]["name"]
+    except Exception:
+        pass
+    return chart
+
+
+def get_web_catalog_only(owner_data):
+    web_catalog_only = False
+    try:
+        if "webCatalogOnly" in owner_data:
+            web_catalog_only = owner_data["webCatalogOnly"]
+        elif "providerDelivery" in owner_data:
+            web_catalog_only = owner_data["providerDelivery"]
+    except Exception:
+        pass
+    return web_catalog_only
+
+
+def get_users_included(owner_data):
+    users_included = "No"
+    try:
+        users = owner_data["users"]
+        if len(users) != 0:
+            return "Yes"
+    except Exception:
+        pass
+    return users_included
+
+
+def get_pgp_public_key(owner_data):
+    return owner_data.get("publicPgpKey", "")
diff --git a/scripts/src/owners/user_is_repo_owner.py b/scripts/src/owners/user_is_repo_owner.py
new file mode 100644
index 0000000..390ab82
--- /dev/null
+++ b/scripts/src/owners/user_is_repo_owner.py
@@ -0,0 +1,70 @@
+#!/usr/bin/env python3
+
+"""A quick way to check if a given user is an approver in the repository's OWNERS file.
+
+Accepts only a single value (the username)
+
+Returns 0 if the user is found in the OWNERS file in the approver section.
+Returns 1 if the user is NOT found in the OWNERS file.
+Any other non-zero is considered a failed execution (contextually, something broke)
+"""
+
+import os
+import sys
+import yaml
+try:
+    from yaml import CLoader as Loader
+except ImportError:
+    from yaml import Loader
+
+OWNERS_FILE = "OWNERS"
+
+def is_approver(username: str):
+    """Returns true if username is in the OWNERS file
+
+    Raises an Exception in cases where the content from the OWNERS file
+    does not match our expectations.
+    """
+
+    with open(OWNERS_FILE, 'r') as f:
+        data = f.read()
+        out = yaml.load(data, Loader=Loader)
+
+        if "approvers" not in out:
+            raise Exception('OWNERS file did not have the "approvers" key and it is required')
+
+        approvers = out.get('approvers')
+        if type(approvers) is not list:
+            raise Exception('The "approvers" key was not a list, and a list is expected')
+
+        if username in approvers:
+            return True
+
+    return False
+
+
+def main():
+    if len(sys.argv) != 2:
+        print('[Error] This script accepts only a single string as an argument, representing the user to check.')
+        return 10
+
+    user = sys.argv[1]
+
+    print(f'[Info] Checking OWNERS file at path {os.path.abspath(OWNERS_FILE)}')
+    if not os.path.exists(OWNERS_FILE):
+        print(f'{OWNERS_FILE} file does not exist.')
+        return 20
+
+    try:
+        if is_approver(user):
+            print(f'[Info] User "{user}" is an approver.')
+            return 0
+    except Exception as e:
+        print(f'[Error] Could not extract expected values from OWNERS file: {e}.')
+        return 30
+
+    print(f'[Info] User "{user}" is NOT an approver.')
+    return 1
+
+if __name__ == "__main__":
+    sys.exit(main())
\ No newline at end of file
diff --git a/scripts/src/pullrequest/__init__.py b/scripts/src/pullrequest/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/scripts/src/pullrequest/prartifact.py b/scripts/src/pullrequest/prartifact.py
new file mode 100644
index 0000000..a0f74f7
--- /dev/null
+++ b/scripts/src/pullrequest/prartifact.py
@@ -0,0 +1,158 @@
+import os
+import sys
+import argparse
+import shutil
+import pathlib
+
+import requests
+
+sys.path.append("../")
+from checkprcontent import checkpr
+from tools import gitutils
+
+pr_files = []
+pr_labels = []
+xRateLimit = "X-RateLimit-Limit"
+xRateRemain = "X-RateLimit-Remaining"
+
+
+# TODO(baijum): Move this code under chartsubmission.chart module
+def get_modified_charts(api_url):
+    files = get_modified_files(api_url)
+    pattern, _, _ = checkpr.get_file_match_compiled_patterns()
+    for file in files:
+        match = pattern.match(file)
+        if match:
+            category, organization, chart, version = match.groups()
+            return category, organization, chart, version
+
+    return "", "", "", ""
+
+
+def get_modified_files(api_url):
+    """Populates and returns the list of files modified by the PR
+
+    Args:
+        api_url (str): URL of the GitHub PR
+
+    Returns:
+        list[str]: List of modified files
+    """
+    if not pr_files:
+        page_number = 1
+        max_page_size, page_size = 100, 100
+        headers = {
+            "Accept": "application/vnd.github.v3+json",
+            "Authorization": f'Bearer {os.environ.get("BOT_TOKEN")}',
+        }
+        files_api_url = f"{api_url}/files"
+
+        while page_size == max_page_size:
+            files_api_query = f"{files_api_url}?per_page={page_size}&page={page_number}"
+            print(f"[INFO] Query files : {files_api_query}")
+            r = requests.get(files_api_query, headers=headers)
+            files = r.json()
+            page_size = len(files)
+            page_number += 1
+
+            if xRateLimit in r.headers:
+                print(f"[DEBUG] {xRateLimit} : {r.headers[xRateLimit]}")
+            if xRateRemain in r.headers:
+                print(f"[DEBUG] {xRateRemain} : {r.headers[xRateRemain]}")
+
+            if "message" in files:
+                print(f'[ERROR] getting pr files: {files["message"]}')
+                sys.exit(1)
+            else:
+                for file in files:
+                    if "filename" in file:
+                        pr_files.append(file["filename"])
+
+    return pr_files
+
+
+def get_labels(api_url):
+    if not pr_labels:
+        headers = {
+            "Accept": "application/vnd.github.v3+json",
+            "Authorization": f'Bearer {os.environ.get("BOT_TOKEN")}',
+        }
+        r = requests.get(api_url, headers=headers)
+        pr_data = r.json()
+
+        if xRateLimit in r.headers:
+            print(f"[DEBUG] {xRateLimit} : {r.headers[xRateLimit]}")
+        if xRateRemain in r.headers:
+            print(f"[DEBUG] {xRateRemain} : {r.headers[xRateRemain]}")
+
+        if "message" in pr_data:
+            print(f'[ERROR] getting pr labels: {pr_data["message"]}')
+            sys.exit(1)
+        if "labels" in pr_data:
+            for label in pr_data["labels"]:
+                pr_labels.append(label["name"])
+
+    return pr_labels
+
+
+def save_metadata(directory, vendor_label, chart, number):
+    with open(os.path.join(directory, "vendor"), "w") as fd:
+        print(f"add {directory}/vendor as {vendor_label}")
+        fd.write(vendor_label)
+
+    with open(os.path.join(directory, "chart"), "w") as fd:
+        print(f"add {directory}/chart as {chart}")
+        fd.write(chart)
+
+    with open(os.path.join(directory, "NR"), "w") as fd:
+        fd.write(number)
+
+    if os.path.exists("report.yaml"):
+        shutil.copy("report.yaml", directory)
+    else:
+        pathlib.Path(os.path.join(directory, "report.yaml")).touch()
+
+
+def main():
+    parser = argparse.ArgumentParser()
+    parser.add_argument(
+        "-d",
+        "--directory",
+        dest="directory",
+        type=str,
+        required=False,
+        help="artifact directory for archival",
+    )
+    parser.add_argument(
+        "-n",
+        "--pr-number",
+        dest="number",
+        type=str,
+        required=False,
+        help="current pull request number",
+    )
+    parser.add_argument(
+        "-u",
+        "--api-url",
+        dest="api_url",
+        type=str,
+        required=True,
+        help="API URL for the pull request",
+    )
+    parser.add_argument(
+        "-f", "--get-files", dest="get_files", default=False, action="store_true"
+    )
+
+    args = parser.parse_args()
+    if args.get_files:
+        pr_files = get_modified_files(args.api_url)
+        print(f"[INFO] files in pr: {pr_files}")
+        gitutils.add_output("pr_files", pr_files)
+    else:
+        os.makedirs(args.directory, exist_ok=True)
+        category, organization, chart, version = get_modified_charts(args.api_url)
+        save_metadata(args.directory, organization, chart, args.number)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/scripts/src/pullrequest/prepare_pr_comment.py b/scripts/src/pullrequest/prepare_pr_comment.py
new file mode 100644
index 0000000..e6faf08
--- /dev/null
+++ b/scripts/src/pullrequest/prepare_pr_comment.py
@@ -0,0 +1,294 @@
+import os
+import sys
+from tools import gitutils
+
+
+def get_success_comment():
+    return (
+        "Congratulations! Your chart has been certified and will be published shortly."
+    )
+
+
+def get_content_failure_message():
+    return "One or more errors were found with the pull request:"
+
+
+def get_community_review_message():
+    return "Community charts require maintainer review and approval, a review will be conducted shortly."
+
+
+def get_failure_comment():
+    return (
+        "There were one or more errors while building and verifying your pull request."
+    )
+
+
+def get_comment_header(pr_number):
+    return f"Thank you for submitting PR #{pr_number} for Helm Chart Certification!"
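+
+
+# For orientation (illustrative only; PR number is hypothetical), a passing
+# submission yields a comment that starts roughly like:
+#
+#   Thank you for submitting PR #123 for Helm Chart Certification!
+#
+#   ### Outcome:
+#   **Passed**
+#
+#   ### Detail
+#   Congratulations! Your chart has been certified and will be published shortly.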
+
+
+def get_verifier_errors_comment():
+    return "[ERROR] The submitted chart has failed certification. Reason(s):"
+
+
+def get_verifier_errors_trailer():
+    return " ".join(
+        [
+            "Please create a valid report with the",
+            "[chart-verifier](https://github.com/redhat-certification/chart-verifier)",
+            "and ensure all mandatory checks pass.",
+        ]
+    )
+
+
+def get_look_at_job_output_comment():
+    return """To see the console output with the error messages, click the "Details" \
+link next to "CI / Chart Certification" job status towards the end of this page."""
+
+
+def prepare_failure_comment():
+    """assembles the comment for certification failures
+
+    Will attempt to read a file with error messaging from the filesystem
+    and includes that information in its content. (e.g. ./pr/errors)
+    """
+    msg = get_failure_comment()
+    msg = append_to(msg, get_look_at_job_output_comment())
+    if os.path.exists("./pr/errors"):
+        errors = open("./pr/errors").read()
+        msg = append_to(msg, get_verifier_errors_comment())
+        msg = append_to(msg, errors)
+        msg = append_to(msg, get_verifier_errors_trailer())
+        gitutils.add_output("error-message", errors)
+    else:
+        gitutils.add_output("error-message", get_failure_comment())
+    return msg
+
+
+def prepare_pr_content_failure_comment():
+    """Generate a message for PR Content Check Failures
+
+    This function reaches into the environment for known variables
+    that contain error messages.
+
+    Error messages are then passed into the GITHUB_OUTPUT.
+
+    Returns a formatted string containing error message contents.
+    """
+    msg = f"{get_content_failure_message()}"
+    pr_content_error_msg = os.environ.get("PR_CONTENT_ERROR_MESSAGE", "")
+    owners_error_msg = os.environ.get("OWNERS_ERROR_MESSAGE", "")
+    if pr_content_error_msg:
+        gitutils.add_output("error-message", pr_content_error_msg)
+        msg = append_to(msg, f"{pr_content_error_msg}")
+    if owners_error_msg:
+        gitutils.add_output("error-message", owners_error_msg)
+        msg = append_to(msg, f"{owners_error_msg}")
+    return msg
+
+
+def prepare_run_verifier_failure_comment():
+    verifier_error_msg = os.environ.get("VERIFIER_ERROR_MESSAGE", "")
+    gitutils.add_output("error-message", verifier_error_msg)
+    msg = verifier_error_msg
+    msg = append_to(msg, get_look_at_job_output_comment())
+    return msg
+
+
+def prepare_community_comment():
+    msg = f"{get_community_review_message()}"
+    if os.path.exists("./pr/errors"):
+        errors = open("./pr/errors").read()
+        msg = append_to(
+            msg,
+            "However, **please note** that one or more errors were found while building and verifying your pull request:",
+        )
+        msg = append_to(msg, f"{errors}")
+    return msg
+
+
+def prepare_generic_fail_comment():
+    msg = ""
+    if os.path.exists("./pr/errors"):
+        errors = open("./pr/errors").read()
+        msg = append_to(
+            msg,
+            "One or more errors were found while building and verifying your pull request:",
+        )
+        msg = append_to(msg, f"{errors}")
+    else:
+        msg = append_to(
+            msg,
+            "An unspecified error has occurred while building and verifying your pull request.",
+        )
+    return msg
+
+
+def prepare_oc_install_fail_comment():
+    return " ".join(
+        [
+            "Unfortunately the certification process failed to install the OpenShift Client and could not complete.",
+            "This problem will be addressed by maintainers and no further action is required from the submitter at this time.",
+        ]
+    )
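+
+
+# A quick sketch of the append_to helper defined below (values illustrative):
+#   append_to("a", "b")                              -> "a\n\nb"
+#   append_to("a", "b", use_horizontal_divider=True) -> "a\n\n---\n\nb"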
+
+
+def append_to(msg, new_content, use_horizontal_divider=False):
+    """Appends new_content to the msg.
+
+    This utility function helps simplify the building of our PR comment
+    template. msg and new_content are joined with either a newline separator, or
+    a horizontal line (if the keyword argument is provided).
+
+    It should be used in cases where the caller needs to join semi-related
+    ideas. Callers should instead use the join string method in cases where the
+    msg being constructed is a part of the same 'idea', or 'paragraph'.
+
+    Args:
+        msg: The original message to which we should append new_content.
+        new_content: the new string to add.
+        use_horizontal_divider: Whether to divide the content
+          with a horizontal line (in markdown.) Horizontal lines are surrounded
+          in newlines to ensure that it does not inadvertently cause preceding
+          content to become a Header.
+
+    Returns the msg containing the new content.
+    """
+    divider_string = ""
+    if use_horizontal_divider:
+        divider_string = "\n---\n"
+
+    return f"""
+{msg}
+{divider_string}
+{new_content}
+""".strip()  # Remove surrounding whitespace, like that which is added by putting """ on a newline here.
+
+
+def get_support_information():
+    reqs_doc_link = "https://github.com/redhat-certification/chart-verifier/blob/main/docs/helm-chart-checks.md#types-of-helm-chart-checks"
+    support_link = "https://access.redhat.com/articles/6463941"
+    return "\n".join(
+        [
+            "For information on the certification process see:",
+            f"- [Red Hat certification requirements and process for Kubernetes applications that are deployed using Helm charts.]({reqs_doc_link}).",
+            f"- For support, connect with our [Partner Acceleration Desk]({support_link}).",
+        ]
+    )
+
+
+def metadata_label(vendor_label, chart_name):
+    """Returns the metadata context that must suffix PR comments."""
+    return (
+        f'/metadata {{"vendor_label": "{vendor_label}", "chart_name": "{chart_name}"}}'
+    )
+
+
+def task_table(task_tuples):
+    """returns a markdown table containing tasks and their outcome
+
+    Args:
+        task_tuples: a list of two-length tuples where index 0 is the task
+          and index 1 is the outcome. These values should be short.
+    """
+    task_tuples = sorted(task_tuples)
+    msg = "|task|outcome|" + "\n|---|---|"
+    for task_tuple in task_tuples:
+        msg += f"\n|{task_tuple[0]}|{task_tuple[1]}|"
+    return msg
+
+
+def overall_outcome(outcome):
+    return append_to("### Outcome:", f"**{outcome}**")
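+
+
+# task_table sketch (illustrative values):
+#   task_table([("PR Content Check", "success"), ("Run Chart Verifier", "failure")])
+# renders the markdown table:
+#   |task|outcome|
+#   |---|---|
+#   |PR Content Check|success|
+#   |Run Chart Verifier|failure|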
+
+
+def main():
+    pr_content_result = sys.argv[1]
+    run_verifier_result = sys.argv[2]
+    verify_result = sys.argv[3]
+    issue_number = open("./pr/NR").read().strip()
+    vendor_label = open("./pr/vendor").read().strip()
+    chart_name = open("./pr/chart").read().strip()
+
+    community_manual_review = os.environ.get("COMMUNITY_MANUAL_REVIEW", False)
+    oc_install_result = os.environ.get("OC_INSTALL_RESULT")
+
+    msg = get_comment_header(issue_number)
+
+    # Assemble the detail separately to control order in which it is added to
+    # the overall output.
+    detail_message = "### Detail"
+
+    outcome = "Failed"
+
+    # Handle success explicitly
+    if (
+        pr_content_result == "success"
+        # run_verifier may not run if a report is not needed.
+        and run_verifier_result in ["success", "skipped"]
+        and verify_result == "success"
+        # installation of oc may not run if a cluster is not needed.
+        and oc_install_result in ["success", "skipped"]
+    ):
+        outcome = "Passed"
+        detail_message = append_to(detail_message, get_success_comment())
+        gitutils.add_output("pr_passed", "true")
+    else:  # Handle various failure scenarios.
+        if pr_content_result == "failure":
+            detail_message = append_to(
+                detail_message, prepare_pr_content_failure_comment()
+            )
+            gitutils.add_output("pr_passed", "false")
+        elif run_verifier_result == "failure":
+            detail_message = append_to(
+                detail_message, prepare_run_verifier_failure_comment()
+            )
+            gitutils.add_output("pr_passed", "false")
+        elif verify_result == "failure":
+            if community_manual_review:
+                outcome = "Pending Manual Review"
+                detail_message = append_to(detail_message, prepare_community_comment())
+                gitutils.add_output("pr_passed", "true")
+            else:
+                detail_message = append_to(detail_message, prepare_failure_comment())
+                gitutils.add_output("pr_passed", "false")
+        elif oc_install_result == "failure":
+            detail_message = append_to(
+                detail_message, prepare_oc_install_fail_comment()
+            )
+            gitutils.add_output("pr_passed", "false")
+        else:
+            detail_message = append_to(detail_message, prepare_generic_fail_comment())
+            gitutils.add_output("pr_passed", "false")
+
+    msg = append_to(msg, overall_outcome(outcome))
+    msg = append_to(msg, detail_message)
+    if outcome != "Passed":
+        table = task_table(
+            [
+                ("PR Content Check", pr_content_result),
+                ("Run Chart Verifier", run_verifier_result),
+                ("Result Verification", verify_result),
+                ("OpenShift Client Installation", oc_install_result),
+            ]
+        )
+        msg = append_to(msg, "### Task Insights")
+        msg = append_to(msg, "Here are the outcomes of tasks driving this result.")
+        msg = append_to(msg, table)
+
+    # All comments get helpful links and metadata
+    msg = append_to(msg, get_support_information(), use_horizontal_divider=True)
+    msg = append_to(msg, metadata_label(vendor_label, chart_name))
+
+    # Print to the console so it's easily visible from CI.
+    print("*" * 30)
+    print(msg)
+    print("*" * 30)
+
+    with open("./pr/comment", "w") as fd:
+        fd.write(msg)
+        gitutils.add_output("message-file", fd.name)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/scripts/src/release/__init__.py b/scripts/src/release/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/scripts/src/release/release_info.py b/scripts/src/release/release_info.py
new file mode 100644
index 0000000..073f01f
--- /dev/null
+++ b/scripts/src/release/release_info.py
@@ -0,0 +1,114 @@
+"""
+Used by github actions, specifically as part of the charts auto release process defined in
+.github/workflow/release.yml. Encapsulates the release_info.json file.
+
+Provides get functions for all data in the release_info.json file.
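+
+For illustration, release_info.json is shaped roughly like this (keys shown are
+those read by the accessors below; values are hypothetical):
+
+    {
+        "version": "1.2.3",
+        "info": ["..."],
+        "development": {
+            "charts": {"replace": [], "merge": [], "ignore": []},
+            "stage": {"replace": [], "merge": [], "ignore": []}
+        }
+    }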
+""" +import json +import os + + +RELEASE_INFO_FILE = "release/release_info.json" + +RELEASE_INFOS = {} + + +def _get_release_info(directory): + global RELEASE_INFOS + + if not directory: + directory = "./" + + root_dir = os.path.dirname(f"{os.getcwd()}/{directory}") + + if root_dir not in RELEASE_INFOS: + print(f"Open release_info file: {root_dir}/{RELEASE_INFO_FILE}") + + with open(f"{root_dir}/{RELEASE_INFO_FILE}", "r") as json_file: + RELEASE_INFOS[root_dir] = json.load(json_file) + + return RELEASE_INFOS[root_dir] + + +def get_version(directory): + info = _get_release_info(directory) + return info["version"] + + +def get_info(directory): + info = _get_release_info(directory) + return info["info"] + + +def get_replaces(from_repo, to_repo, directory): + print(f"get replaces for {from_repo} to {to_repo} ") + info = _get_release_info(directory) + if from_repo in info: + if "replace" in info[from_repo][to_repo]: + print(f"replaces found: {info[from_repo][to_repo]['replace']}") + return info[from_repo][to_repo]["replace"] + print("no replaces found") + return [] + + +def get_merges(from_repo, to_repo, directory): + print(f"get merges for {from_repo} to {to_repo}") + info = _get_release_info(directory) + if from_repo in info: + if "merge" in info[from_repo][to_repo]: + print(f"merges found: {info[from_repo][to_repo]['merge']}") + return info[from_repo][to_repo]["merge"] + print("no merges found") + return [] + + +def get_ignores(from_repo, to_repo, directory): + print(f"get ignores for {from_repo} to {to_repo}") + info = _get_release_info(directory) + if from_repo in info: + if "ignore" in info[from_repo][to_repo]: + print(f"ignores found: {info[from_repo][to_repo]['ignore']}") + return info[from_repo][to_repo]["ignore"] + print("no ignores found") + return [] + + +def main(): + print(f"[INFO] Version : {get_version('.')}") + + # from development to charts + print( + f"[INFO] Dev to charts repo merges : {get_merges('development','charts','.')}" + ) + + print( + f"[INFO] Dev to charts repo replace : {get_replaces('development','charts','.')}" + ) + + print( + f"[INFO] Dev to charts repo ignore : {get_ignores('development','charts','.')}" + ) + + # from development to stage + print(f"[INFO] Dev to stage repo merges : {get_merges('development','stage','.')}") + + print( + f"[INFO] Dev to stage repo replace : {get_replaces('development','stage','.')}" + ) + + print(f"[INFO] Dev to stage repo ignore : {get_ignores('development','stage','.')}") + + # From charts to development + print(f"[INFO] Chart to dev repo merges : {get_merges('charts','development','.')}") + + print( + f"[INFO] Chart to dev repo replace : {get_replaces('charts','development','.')}" + ) + + print( + f"[INFO] Chart to dev repo ignore : {get_ignores('charts','development','.')}" + ) + + +if __name__ == "__main__": + main() diff --git a/scripts/src/release/releasechecker.py b/scripts/src/release/releasechecker.py new file mode 100644 index 0000000..135c9b7 --- /dev/null +++ b/scripts/src/release/releasechecker.py @@ -0,0 +1,313 @@ +""" +Used by a github action +1. To determine if the contents of pull request contain only the file which contains the charts release. +2. To determine if the release has been updated. + +parameters: + --api-url : API URL for the pull request. + --version : version to compare with the current version + +results: + if --api-url is specified, output variables are set: + PR_version : The chart verifier version read from the version file from the PR. 
+    PR_release_image : The name of the image to be used for the release.
+    PR_release_info : Information about the release content.
+    PR_includes_release : Set to true if the PR contains the version file.
+    PR_release_body : Body of text to be used to describe the release.
+    if --version only is specified, output variables are set:
+        updated : set to true if the version specified is later than the version in the version file
+        from the main branch.
+    if neither parameter is specified, output variables are set:
+    PR_version : The chart verifier version read from the version file from main branch.
+    PR_release_image : The name of the image from the version file from main branch.
+"""
+
+
+import re
+import os
+import argparse
+import json
+import semver
+import sys
+from release import release_info
+from release import releaser
+from reporegex import matchers
+
+sys.path.append("../")
+from owners import checkuser
+from tools import gitutils
+from pullrequest import prartifact
+
+VERSION_FILE = "release/release_info.json"
+CHARTS_PR_BASE_REPO = gitutils.CHARTS_REPO
+CHARTS_PR_HEAD_REPO = gitutils.CHARTS_REPO
+DEV_PR_BASE_REPO = gitutils.DEVELOPMENT_REPO
+DEV_PR_HEAD_REPO = gitutils.DEVELOPMENT_REPO
+STAGE_PR_BASE_REPO = gitutils.STAGE_REPO
+STAGE_PR_HEAD_REPO = gitutils.STAGE_REPO
+DEFAULT_BOT_NAME = "openshift-helm-charts-bot"
+ERROR_IF_MATCH_NOT_FOUND = False
+ERROR_IF_MATCH_FOUND = True
+
+
+def check_file_in_pr(api_url, pattern, error_value):
+    print("[INFO] check PR for matching files")
+    files = prartifact.get_modified_files(api_url)
+
+    for file_path in files:
+        match = pattern.match(file_path)
+        if not match and not error_value:
+            print(f"[INFO] stop, non-matching file found : {file_path}")
+            return False
+        elif match and error_value:
+            print(f"[INFO] stop, matching file found : {file_path}")
+            return False
+
+    return True
+
+
+def check_if_only_charts_are_included(api_url):
+    print("[INFO] check if only chart files are included")
+    chart_pattern = re.compile(
+        matchers.submission_path_matcher(include_version_matcher=False) + r"/.*"
+    )
+    return check_file_in_pr(api_url, chart_pattern, ERROR_IF_MATCH_NOT_FOUND)
+
+
+def check_if_no_charts_are_included(api_url):
+    print("[INFO] check if no chart files are included")
+    chart_pattern = re.compile(
+        matchers.submission_path_matcher(include_version_matcher=False) + r"/.*"
+    )
+    return check_file_in_pr(api_url, chart_pattern, ERROR_IF_MATCH_FOUND)
+
+
+def check_if_only_version_file_is_modified(api_url):
+    print("[INFO] check if only version file is modified")
+    pattern_versionfile = re.compile(r"release/release_info.json")
+    return check_file_in_pr(api_url, pattern_versionfile, ERROR_IF_MATCH_NOT_FOUND)
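+
+
+# For reference, the submission pattern compiled by the two chart checks above
+# expands roughly to (see reporegex.matchers; shown for illustration):
+#   charts/(partners|redhat|community)/([\w-]+)/([\w-]+)/.*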
+
+
+def check_if_dev_release_branch(sender, pr_branch, pr_body, api_url, pr_head_repo):
+    print("[INFO] check if PR is release branch on dev")
+
+    if sender != os.environ.get("BOT_NAME") and sender != DEFAULT_BOT_NAME:
+        print(f"Sender indicates PR is not part of a release: {sender}")
+        return False
+
+    if not checkuser.verify_user(sender):
+        print(f"Sender is not authorized to create a release PR : {sender}")
+        return False
+
+    if not pr_branch.startswith(releaser.DEV_PR_BRANCH_NAME_PREFIX):
+        print(f"PR branch indicates PR is not part of a release: {pr_branch}")
+        return False
+
+    version = pr_branch.removeprefix(releaser.DEV_PR_BRANCH_NAME_PREFIX)
+    if not semver.VersionInfo.isvalid(version):
+        print(
+            f"Release part ({version}) of branch name {pr_branch} is not a valid semantic version."
+        )
+        return False
+
+    if not pr_head_repo.endswith(DEV_PR_HEAD_REPO):
+        print(
+            f"PR does not have the expected origin. Got: {pr_head_repo}, expected: {DEV_PR_HEAD_REPO}"
+        )
+        return False
+
+    if not pr_body.startswith(releaser.DEV_PR_BRANCH_BODY_PREFIX):
+        print(f"PR body indicates PR is not part of a release: {pr_body}")
+        return False
+
+    return check_if_only_charts_are_included(api_url)
+
+
+def check_if_charts_release_branch(sender, pr_branch, pr_body, api_url, pr_head_repo):
+    print("[INFO] check if PR is release branch on charts")
+
+    if sender != os.environ.get("BOT_NAME") and sender != DEFAULT_BOT_NAME:
+        print(f"Sender indicates PR is not part of a release: {sender}")
+        return False
+
+    if not checkuser.verify_user(sender):
+        print(f"Sender is not authorized to create a release PR : {sender}")
+        return False
+
+    if not pr_branch.startswith(releaser.CHARTS_PR_BRANCH_NAME_PREFIX):
+        print(f"PR branch indicates PR is not part of a release: {pr_branch}")
+        return False
+
+    version = pr_branch.removeprefix(releaser.CHARTS_PR_BRANCH_NAME_PREFIX)
+    if not semver.VersionInfo.isvalid(version):
+        print(
+            f"Release part ({version}) of branch name {pr_branch} is not a valid semantic version."
+        )
+        return False
+
+    if not pr_head_repo.endswith(CHARTS_PR_HEAD_REPO) and not pr_head_repo.endswith(
+        STAGE_PR_HEAD_REPO
+    ):
+        print(
+            f"PR does not have the expected origin. Got: {pr_head_repo}, expected: {CHARTS_PR_HEAD_REPO}"
+        )
+        return False
+
+    if not pr_body.startswith(releaser.CHARTS_PR_BRANCH_BODY_PREFIX):
+        print(f"PR body indicates PR is not part of a release: {pr_body}")
+        return False
+
+    return check_if_no_charts_are_included(api_url)
+
+
+def make_release_body(version, release_info):
+    body = f"Charts workflow version {version}\n\n"
+    body += "This version includes:\n"
+    for info in release_info:
+        body += f"- {info}\n"
+
+    print(f"[INFO] Release body: {body}")
+    gitutils.add_output("PR_release_body", body)
+
+
+def get_version_info():
+    data = {}
+    with open(VERSION_FILE) as json_file:
+        data = json.load(json_file)
+    return data
+
+
+def main():
+    parser = argparse.ArgumentParser()
+    parser.add_argument(
+        "-a",
+        "--api-url",
+        dest="api_url",
+        type=str,
+        required=False,
+        help="API URL for the pull request",
+    )
+    parser.add_argument(
+        "-v",
+        "--version",
+        dest="version",
+        type=str,
+        required=False,
+        help="Version to compare",
+    )
+    parser.add_argument(
+        "-s",
+        "--sender",
+        dest="sender",
+        type=str,
+        required=False,
+        help="sender of the PR",
+    )
+    parser.add_argument(
+        "-b",
+        "--pr_branch",
+        dest="pr_branch",
+        type=str,
+        required=False,
+        help="PR branch name",
+    )
+    parser.add_argument(
+        "-t", "--pr_body", dest="pr_body", type=str, required=False, help="PR body"
+    )
+    parser.add_argument(
+        "-r",
+        "--pr_base_repo",
+        dest="pr_base_repo",
+        type=str,
+        required=False,
+        help="PR target repo",
+    )
+    parser.add_argument(
+        "-z",
+        "--pr_head_repo",
+        dest="pr_head_repo",
+        type=str,
+        required=False,
+        help="PR source repo",
+    )
+
+    args = parser.parse_args()
+
+    print("[INFO] release checker inputs:")
+    print(f"[INFO] arg api-url : {args.api_url}")
+    print(f"[INFO] arg version : {args.version}")
+    print(f"[INFO] arg sender : {args.sender}")
+    print(f"[INFO] arg pr_branch : {args.pr_branch}")
+    print(f"[INFO] arg pr_body : {args.pr_body}")
+    print(f"[INFO] arg pr base repo : {args.pr_base_repo}")
+    print(f"[INFO] arg pr head repo : {args.pr_head_repo}")
+
+    if args.pr_branch:
+        if args.pr_base_repo.endswith(DEV_PR_BASE_REPO):
+            if check_if_dev_release_branch(
+                args.sender,
+                args.pr_branch,
+                args.pr_body,
+                args.api_url,
+                args.pr_head_repo,
+            ):
+                print("[INFO] Dev release pull request found")
+                gitutils.add_output("dev_release_branch", "true")
+                version = args.pr_branch.removeprefix(
+                    releaser.DEV_PR_BRANCH_NAME_PREFIX
+                )
+                gitutils.add_output("PR_version", version)
+                gitutils.add_output("PR_release_body", args.pr_body)
+        elif args.pr_base_repo.endswith(
+            CHARTS_PR_BASE_REPO
+        ) or args.pr_base_repo.endswith(STAGE_PR_BASE_REPO):
+            if check_if_charts_release_branch(
+                args.sender,
+                args.pr_branch,
+                args.pr_body,
+                args.api_url,
+                args.pr_head_repo,
+            ):
+                print("[INFO] Workflow release pull request found")
+                gitutils.add_output("charts_release_branch", "true")
+
+    elif args.api_url:
+        # should be on PR branch
+        if args.pr_base_repo.endswith(DEV_PR_BASE_REPO):
+            version_only = check_if_only_version_file_is_modified(args.api_url)
+            user_authorized = checkuser.verify_user(args.sender)
+            if version_only and user_authorized:
+                organization = args.pr_base_repo.removesuffix(DEV_PR_BASE_REPO)
+                gitutils.add_output(
+                    "charts_repo", f"{organization}{CHARTS_PR_BASE_REPO}"
+                )
+                gitutils.add_output("stage_repo", f"{organization}{STAGE_PR_BASE_REPO}")
+                version = release_info.get_version("./")
+                version_info = release_info.get_info("./")
+                print(f"[INFO] Release found in PR files : {version}.")
+                gitutils.add_output("PR_version", version)
+                gitutils.add_output("PR_release_info", version_info)
+                gitutils.add_output("PR_includes_release_only", "true")
+                make_release_body(version, version_info)
+            elif version_only and not user_authorized:
+                print(f"[ERROR] sender not authorized : {args.sender}.")
+                gitutils.add_output("sender_not_authorized", "true")
+            else:
+                print("[INFO] Not a release PR")
+        else:
+            print(f"[INFO] Not a release PR, target is not : {DEV_PR_BASE_REPO}.")
+    else:
+        version = release_info.get_version("./")
+        if args.version:
+            # should be on main branch
+            if semver.compare(args.version, version) > 0:
+                print(
+                    f"[INFO] Release {args.version} found in PR files is newer than: {version}."
+                )
+                gitutils.add_output("release_updated", "true")
+            else:
+                print(
+                    f"[ERROR] Release found in PR files is not new : {args.version}."
+                )
+        else:
+            print("[ERROR] no valid parameter set to release checker.")
diff --git a/scripts/src/release/releaser.py b/scripts/src/release/releaser.py
new file mode 100644
index 0000000..0f5926b
--- /dev/null
+++ b/scripts/src/release/releaser.py
@@ -0,0 +1,284 @@
+"""
+Used within github actions, specifically as part of the charts auto release process defined in
+.github/workflow/release.yml. Makes all of the changes required to both the charts and development repos.
+
+
+parameters :
+  --version : the version of the release being created.
+  --pr_dir : the directory containing the PR contents. Used to get the release_info.json file.
+  --development_dir : the directory containing the latest version of the development repository.
+  --charts_dir : the directory containing the latest version of the charts repository.
+
+Performs these actions:
+- Gets a list of updates to perform from the pr_dir releases/release_info.json file. These updates are then made
+to the charts and development repositories.
+- Adds the cron job to .github/workflows/schedule.yml and changes the verifier image used in .github/workflows/schedule.yml
+  to latest, as required. The charts repo is updated from the development repo, which necessitates these updates.
+- Creates a PR against the charts repo containing the workflow updates. This requires a manual merge.
+- Directly commits to the development main branch any new charts added to the charts repo since the last update.
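+
+Illustrative invocation (paths, version, and repository are hypothetical):
+
+    python3 releaser.py --version 1.2.3 --pr_dir ./pr --development_dir ./dev \
+        --charts_dir ./charts --stage_dir ./stage --dev_pr_body "..." \
+        --target_branch main --target_repository org/development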
+ + +""" +import os +import argparse +import sys +import shutil +from release import release_info + +sys.path.append("../") +from tools import gitutils + +VERSION_CHECK_YAML_FILE = ".github/workflows/version_check.yml" +BUILD_YAML_FILE = ".github/workflows/build.yml" +DEV_PR_BRANCH_BODY_PREFIX = "Charts workflow version" +DEV_PR_BRANCH_NAME_PREFIX = "Auto-Release-" +CHARTS_PR_BRANCH_BODY_PREFIX = "Workflow and script updates from development repository" +CHARTS_PR_BRANCH_NAME_PREFIX = "Release-" +STAGE_PR_BRANCH_BODY_PREFIX = "Workflow and script updates from development repository" +STAGE_PR_BRANCH_NAME_PREFIX = "Release-" + +SCHEDULE_INSERT = [ + " # Daily trigger to check updates", + " schedule:", + ' - cron: "0 0 * * *"', +] + + +def update_workflow(): + lines = [] + with open(VERSION_CHECK_YAML_FILE, "r") as schedule_file: + lines = schedule_file.readlines() + + for line in lines: + if line.strip() == "on:": + insert_location = lines.index(line) + 1 + if SCHEDULE_INSERT[0] not in lines[insert_location].rstrip(): + print("[INFO] add cron job to schedule.yaml") + lines.insert(insert_location, f"{SCHEDULE_INSERT[0]}\n") + lines.insert(insert_location + 1, f"{SCHEDULE_INSERT[1]}\n") + lines.insert(insert_location + 2, f"{SCHEDULE_INSERT[2]}\n") + break + + with open(VERSION_CHECK_YAML_FILE, "w") as schedule_file: + schedule_file.write("".join(lines)) + + +def make_required_changes(release_info_dir, origin, destination): + print(f"Make required changes from {origin} to {destination}") + + if "charts" in origin and "dev" in destination: + from_repository = "charts" + to_repository = "development" + elif "dev" in origin and "charts" in destination: + from_repository = "development" + to_repository = "charts" + elif "dev" in origin and "stage" in destination: + from_repository = "development" + to_repository = "stage" + else: + sys.exit("Wrong arguments while calling make_required_changes") + + replaces = release_info.get_replaces( + from_repository, to_repository, release_info_dir + ) + + for replace in replaces: + replace_this = f"{destination}/{replace}" + with_this = f"{origin}/{replace}" + if os.path.isdir(with_this) or os.path.isdir(replace_this): + print(f"Replace directory {replace_this} with {with_this}") + if os.path.isdir(replace_this): + shutil.rmtree(replace_this) + shutil.copytree(with_this, replace_this) + else: + print(f"Replace file {replace_this} with {with_this}") + shutil.copy2(with_this, replace_this) + + merges = release_info.get_merges(from_repository, to_repository, release_info_dir) + + for merge in merges: + merge_this = f"{origin}/{merge}" + into_this = f"{destination}/{merge}" + + if os.path.isdir(merge_this) or os.path.isdir(into_this): + print(f"Merge directory {merge_this} with {into_this}") + shutil.copytree(merge_this, into_this, dirs_exist_ok=True) + else: + print(f"Merge file {merge_this} with {into_this}") + shutil.copy2(merge_this, into_this) + + ignores = release_info.get_ignores(from_repository, to_repository, release_info_dir) + for ignore in ignores: + ignore_this = f"{destination}/{ignore}" + if os.path.isdir(ignore_this): + print(f"Ignore/delete directory {ignore_this}") + shutil.rmtree(ignore_this) + else: + print(f"Ignore/delete file {ignore_this}") + os.remove(ignore_this) + + +def main(): + parser = argparse.ArgumentParser() + + parser.add_argument( + "-v", + "--version", + dest="version", + type=str, + required=True, + help="Version to compare", + ) + parser.add_argument( + "-d", + "--development_dir", + dest="dev_dir", + type=str, + 
required=True, + help="Directory of development code with latest release info.", + ) + parser.add_argument( + "-c", + "--charts_dir", + dest="charts_dir", + type=str, + required=True, + help="Directory of charts code.", + ) + parser.add_argument( + "-s", + "--stage_dir", + dest="stage_dir", + type=str, + required=True, + help="Directory of stage code.", + ) + parser.add_argument( + "-p", + "--pr_dir", + dest="pr_dir", + type=str, + required=True, + help="Directory of pull request code.", + ) + parser.add_argument( + "-b", + "--dev_pr_body", + dest="dev_pr_body", + type=str, + required=True, + help="Body to use for the dev PR", + ) + parser.add_argument( + "-t", + "--target_branch", + dest="target_branch", + type=str, + required=True, + help="Target branch of the Pull Request", + ) + parser.add_argument( + "-r", + "--target_repository", + dest="target_repository", + type=str, + required=True, + help="Repository which is the target of the pull request", + ) + + args = parser.parse_args() + + print("[INFO] releaser inputs:") + print(f"[INFO] arg version : {args.version}") + print(f"[INFO] arg dev_dir : {args.dev_dir}") + print(f"[INFO] arg charts_dir : {args.charts_dir}") + print(f"[INFO] arg stage_dir : {args.stage_dir}") + print(f"[INFO] arg pr_dir : {args.pr_dir}") + print(f"[INFO] arg dev_pr_body : {args.dev_pr_body}") + print(f"[INFO] arg target_branch : {args.target_branch}") + print(f"[INFO] arg target_repository : {args.target_repository}") + + start_directory = os.getcwd() + print(f"working directory: {start_directory}") + + print("make changes to charts from development") + make_required_changes(args.pr_dir, args.dev_dir, args.charts_dir) + + print("edit files in charts") + os.chdir(args.charts_dir) + update_workflow() + + organization = args.target_repository.split("/")[0] + charts_repository = f"{organization}{gitutils.CHARTS_REPO}" + print( + f"create charts pull request, repository: {charts_repository}, branch: {args.target_branch} " + ) + branch_name = f"{CHARTS_PR_BRANCH_NAME_PREFIX}{args.version}" + message = f"{CHARTS_PR_BRANCH_BODY_PREFIX} {branch_name}" + outcome = gitutils.create_pr( + branch_name, [], charts_repository, message, args.target_branch + ) + if outcome == gitutils.PR_CREATED: + gitutils.add_output("charts_pr_created", "true") + elif outcome == gitutils.PR_NOT_NEEDED: + gitutils.add_output("charts_pr_not_needed", "true") + else: + print("[ERROR] error creating charts PR") + gitutils.add_output("charts_pr_error", "true") + os.chdir(start_directory) + return + + os.chdir(start_directory) + + print("make changes to development from charts") + make_required_changes(args.pr_dir, args.charts_dir, args.dev_dir) + + os.chdir(args.dev_dir) + print("create development pull request") + branch_name = f"{DEV_PR_BRANCH_NAME_PREFIX}{args.version}" + outcome = gitutils.create_pr( + branch_name, + [release_info.RELEASE_INFO_FILE], + args.target_repository, + args.dev_pr_body, + args.target_branch, + ) + if outcome == gitutils.PR_CREATED: + print("Dev PR successfully created.") + gitutils.add_output("dev_pr_created", "true") + elif outcome == gitutils.PR_NOT_NEEDED: + print("Dev PR not needed.") + gitutils.add_output("dev_pr_not_needed", "true") + else: + print("[ERROR] error creating development PR.") + gitutils.add_output("dev_pr_error", "true") + + os.chdir(start_directory) + + print("make changes to stage from development") + make_required_changes(args.pr_dir, args.dev_dir, args.stage_dir) + os.chdir(args.stage_dir) + stage_repository = 
f"{organization}{gitutils.STAGE_REPO}" + print( + f"create stage pull request, repository: {stage_repository}, branch: {args.target_branch} " + ) + branch_name = f"{STAGE_PR_BRANCH_NAME_PREFIX}{args.version}" + message = f"{STAGE_PR_BRANCH_BODY_PREFIX} {branch_name}" + outcome = gitutils.create_pr( + branch_name, [], stage_repository, message, args.target_branch + ) + if outcome == gitutils.PR_CREATED: + gitutils.add_output("stage_pr_created", "true") + elif outcome == gitutils.PR_NOT_NEEDED: + gitutils.add_output("stage_pr_not_needed", "true") + else: + print("[ERROR] error creating stage PR") + gitutils.add_output("stage_pr_error", "true") + os.chdir(start_directory) + return + + os.chdir(start_directory) + + +if __name__ == "__main__": + main() diff --git a/scripts/src/reporegex/__init__.py b/scripts/src/reporegex/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/scripts/src/reporegex/matchers.py b/scripts/src/reporegex/matchers.py new file mode 100644 index 0000000..2519a72 --- /dev/null +++ b/scripts/src/reporegex/matchers.py @@ -0,0 +1,41 @@ +def submission_path_matcher( + base_dir="charts", strict_categories=True, include_version_matcher=True +): + """Returns a regex string with various submission-related groupings. + + The groupings returned (in order) are: category, organization, chart name, + and optionally, version. + + Callers should append any relevant path matching to the end of the string + returned from this function. E.g. "/.*" + + Args: + base_dir: The base path of the expression statement. Should + not be empty. + strict_categories: Whether the category matcher should match only the + relevant categories, or any word at all. + include_version_matcher: Whether or not the version matcher should be + appended. In some cases, the caller of this regex doesn't care about the + versioning detail. + + Returns: + A regular expression-compatible string with the mentioned groupings. + """ + + relaxedCategoryMatcher = "\w+" + strictCategoryMatcher = "partners|redhat|community" + + categoryMatcher = ( + strictCategoryMatcher if strict_categories else relaxedCategoryMatcher + ) + organizationMatcher = "[\w-]+" + chartMatcher = "[\w-]+" + versionMatcher = "[\w\.\-+]+" + + matcher = ( + rf"{base_dir}/({categoryMatcher})/({organizationMatcher})/({chartMatcher})" + ) + if include_version_matcher: + matcher += rf"/({versionMatcher})" + + return matcher diff --git a/scripts/src/report/__init__.py b/scripts/src/report/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/scripts/src/report/get_verify_params.py b/scripts/src/report/get_verify_params.py new file mode 100644 index 0000000..58152d8 --- /dev/null +++ b/scripts/src/report/get_verify_params.py @@ -0,0 +1,114 @@ +import os +import sys +import argparse + +sys.path.append("../") +from chartprreview import chartprreview +from signedchart import signedchart +from tools import gitutils + + +def get_report_full_path(category, organization, chart, version): + return os.path.join( + os.getcwd(), get_report_relative_path(category, organization, chart, version) + ) + + +def get_report_relative_path(category, organization, chart, version): + return os.path.join("charts", category, organization, chart, version, "report.yaml") + + +def generate_verify_options(directory, category, organization, chart, version): + print("[INFO] Generate verify options. 
%s, %s, %s" % (organization, chart, version))
+    src = os.path.join(
+        os.getcwd(), "charts", category, organization, chart, version, "src"
+    )
+    report_path = get_report_full_path(category, organization, chart, version)
+    tar = os.path.join(
+        os.getcwd(),
+        "charts",
+        category,
+        organization,
+        chart,
+        version,
+        f"{chart}-{version}.tgz",
+    )
+
+    print(f"[INFO] report path exists = {os.path.exists(report_path)} : {report_path}")
+    print(f"[INFO] src path exists = {os.path.exists(src)} : {src}")
+    print(f"[INFO] tarball path exists = {os.path.exists(tar)} : {tar}")
+
+    flags = f"--set profile.vendortype={category}"
+    cluster_needed = True
+    report_provided = False
+    if os.path.exists(report_path):
+        print("[INFO] report is included")
+        flags = f"{flags} -e has-readme"
+        cluster_needed = False
+        report_provided = True
+
+    if os.path.exists(src) and not os.path.exists(tar):
+        print("[INFO] chart src included")
+        return flags, src, True, cluster_needed, report_provided
+    elif os.path.exists(tar) and not os.path.exists(src):
+        print("[INFO] tarball included")
+        if not os.path.exists(report_path):
+            owners_file = os.path.join(
+                os.getcwd(), "charts", category, organization, chart, "OWNERS"
+            )
+            signed_flags = signedchart.get_verifier_flags(tar, owners_file, directory)
+            if signed_flags:
+                print(f"[INFO] include flags for signed chart: {signed_flags}")
+                flags = f"{flags} {signed_flags}"
+        return flags, tar, True, cluster_needed, report_provided
+    elif os.path.exists(tar) and os.path.exists(src):
+        msg = "[ERROR] Both chart source directory and tarball should not exist"
+        chartprreview.write_error_log(directory, msg)
+        sys.exit(1)
+    else:
+        print("[INFO] report only")
+        return "", "", False, False, report_provided
+
+
+def main():
+    parser = argparse.ArgumentParser()
+    parser.add_argument(
+        "-u",
+        "--api-url",
+        dest="api_url",
+        type=str,
+        required=True,
+        help="API URL for the pull request",
+    )
+    parser.add_argument(
+        "-d",
+        "--directory",
+        dest="directory",
+        type=str,
+        required=True,
+        help="artifact directory for archival",
+    )
+
+    args = parser.parse_args()
+
+    category, organization, chart, version = chartprreview.get_modified_charts(
+        args.directory, args.api_url
+    )
+
+    (
+        flags,
+        chart_uri,
+        report_needed,
+        cluster_needed,
+        report_provided,
+    ) = generate_verify_options(args.directory, category, organization, chart, version)
+    gitutils.add_output("report_provided", report_provided)
+    gitutils.add_output(
+        "provided_report_relative_path",
+        get_report_relative_path(category, organization, chart, version),
+    )
+    gitutils.add_output("report_needed", report_needed)
+    gitutils.add_output("cluster_needed", cluster_needed)
+    if report_needed:
+        gitutils.add_output("verify_args", flags)
+        gitutils.add_output("verify_uri", chart_uri)
diff --git a/scripts/src/report/report_info.py b/scripts/src/report/report_info.py
new file mode 100644
index 0000000..304446c
--- /dev/null
+++ b/scripts/src/report/report_info.py
@@ -0,0 +1,197 @@
+import os
+import sys
+import docker
+import json
+import subprocess
+
+REPORT_ANNOTATIONS = "annotations"
+REPORT_RESULTS = "results"
+REPORT_DIGESTS = "digests"
+REPORT_METADATA = "metadata"
+SHA_ERROR = "Digest in report did not match report content"
+
+
+def write_error_log(*msg):
+    directory = os.environ.get("WORKFLOW_WORKING_DIRECTORY")
+    if directory:
+        os.makedirs(directory, exist_ok=True)
+        with open(os.path.join(directory, "errors"), "w") as fd:
+            for line in msg:
+                fd.write(line)
+                fd.write("\n")
+
+    for line in msg:
+        print(line)
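+
+
+# The non-docker branch of _get_report_info below shells out to chart-verifier;
+# the equivalent CLI call is roughly (illustrative path and profile values):
+#   chart-verifier report results --set profile.vendortype=partner,profile.version=v1.1 /path/to/report.yaml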
+
+
+def _get_report_info(
+    report_path, report_info_path, info_type, profile_type, profile_version
+):
+    if report_info_path and len(report_info_path) > 0:
+        print(f"[INFO] Using existing report info: {report_info_path}")
+        report_out = json.load(open(report_info_path))
+    else:
+        command = "report"
+        set_values = ""
+        if profile_type:
+            set_values = "profile.vendortype=%s" % profile_type
+        if profile_version:
+            if set_values:
+                set_values = "%s,profile.version=%s" % (set_values, profile_version)
+            else:
+                set_values = "profile.version=%s" % profile_version
+
+        if os.environ.get("VERIFIER_IMAGE"):
+            print(f"[INFO] Generate report info using docker : {report_path}")
+            docker_command = (
+                f"{command} {info_type} /charts/{os.path.basename(report_path)}"
+            )
+            if set_values:
+                docker_command = "%s --set %s" % (docker_command, set_values)
+
+            client = docker.from_env()
+            report_directory = os.path.dirname(os.path.abspath(report_path))
+            print(
+                f'Call docker using image: {os.environ.get("VERIFIER_IMAGE")}, docker command: {docker_command}, report directory: {report_directory}'
+            )
+            output = client.containers.run(
+                os.environ.get("VERIFIER_IMAGE"),
+                docker_command,
+                stdin_open=True,
+                tty=True,
+                stdout=True,
+                volumes={report_directory: {"bind": "/charts/", "mode": "rw"}},
+            )
+            if isinstance(output, bytes):
+                # containers.run returns the container log as bytes; decode so
+                # the string checks below work for both branches.
+                output = output.decode("utf-8")
+        else:
+            print(
+                f"[INFO] Generate report info using chart-verifier on path : {os.path.abspath(report_path)}"
+            )
+            if set_values:
+                out = subprocess.run(
+                    [
+                        "chart-verifier",
+                        command,
+                        info_type,
+                        "--set",
+                        set_values,
+                        os.path.abspath(report_path),
+                    ],
+                    capture_output=True,
+                )
+            else:
+                out = subprocess.run(
+                    [
+                        "chart-verifier",
+                        command,
+                        info_type,
+                        os.path.abspath(report_path),
+                    ],
+                    capture_output=True,
+                )
+            output = out.stdout.decode("utf-8")
+
+        if SHA_ERROR in output:
+            msg = f"[ERROR] {SHA_ERROR}"
+            write_error_log(msg)
+            sys.exit(1)
+
+        try:
+            report_out = json.loads(output)
+        except BaseException as err:
+            msgs = []
+            msgs.append(f"[ERROR] loading report output: \n{output}")
+            msgs.append(f"[ERROR] exception was: {err=}, {type(err)=}")
+            write_error_log(*msgs)
+            sys.exit(1)
+
+    if info_type not in report_out:
+        msg = f"Error extracting {info_type} from the report: {report_out}"
+        write_error_log(msg)
+        sys.exit(1)
+
+    if info_type == REPORT_ANNOTATIONS:
+        annotations = {}
+        for report_annotation in report_out[REPORT_ANNOTATIONS]:
+            annotations[report_annotation["name"]] = report_annotation["value"]
+
+        return annotations
+
+    return report_out[info_type]
+
+
+def get_report_annotations(report_path=None, report_info_path=None):
+    annotations = _get_report_info(
+        report_path, report_info_path, REPORT_ANNOTATIONS, "", ""
+    )
+    print("[INFO] report annotations : %s" % annotations)
+    return annotations
+
+
+def get_report_results(
+    report_path=None, profile_type=None, profile_version=None, report_info_path=None
+):
+    results = _get_report_info(
+        report_path, report_info_path, REPORT_RESULTS, profile_type, profile_version
+    )
+    print("[INFO] report results : %s" % results)
+    results["failed"] = int(results["failed"])
+    results["passed"] = int(results["passed"])
+    return results
+
+
+def get_report_digests(report_path=None, report_info_path=None):
+    digests = _get_report_info(report_path, report_info_path, REPORT_DIGESTS, "", "")
+    print("[INFO] report digests : %s" % digests)
+    return digests
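+
+
+# For reference, the results section parsed above contains at least "passed"
+# and "failed" counts, which get_report_results coerces to int, roughly:
+#   {"passed": 11, "failed": 1, ...}   (illustrative values)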
+    return metadata
+
+
+def get_report_chart_url(report_path=None, report_info_path=None):
+    metadata = _get_report_info(report_path, report_info_path, REPORT_METADATA, "", "")
+    print("[INFO] report chart-uri : %s" % metadata["chart-uri"])
+    return metadata["chart-uri"]
+
+
+def get_report_chart(report_path=None, report_info_path=None):
+    metadata = _get_report_info(report_path, report_info_path, REPORT_METADATA, "", "")
+    print("[INFO] report chart : %s" % metadata["chart"])
+    return metadata["chart"]
+
+
+def main():
+    print("\n\n\n\nDocker image results:\n")
+    os.environ["VERIFIER_IMAGE"] = "quay.io/redhat-certification/chart-verifier:main"
+    get_report_results("./report.yaml", "", "")
+    get_report_results("./report.yaml", "community", "v1.1")
+    get_report_digests("./report.yaml")
+    get_report_metadata("./report.yaml")
+    get_report_annotations("./report.yaml")
+    get_report_chart_url("./report.yaml")
+    get_report_chart("./report.yaml")
+
+    print("\n\n\n\nverifier command results:\n")
+    os.environ["VERIFIER_IMAGE"] = ""
+    get_report_results("./report.yaml", "", "")
+    get_report_results("./report.yaml", "community", "v1.1")
+    get_report_digests("./report.yaml")
+    get_report_metadata("./report.yaml")
+    get_report_annotations("./report.yaml")
+    get_report_chart_url("./report.yaml")
+    get_report_chart("./report.yaml")
+
+    print("\n\n\n\nexisting report results:\n")
+    get_report_results(report_info_path="./report_info.json")
+    get_report_digests(report_info_path="./report_info.json")
+    get_report_metadata(report_info_path="./report_info.json")
+    get_report_annotations(report_info_path="./report_info.json")
+    get_report_chart_url(report_info_path="./report_info.json")
+    get_report_chart(report_info_path="./report_info.json")
+
+
+if __name__ == "__main__":
+    main()
diff --git a/scripts/src/report/verifier_report.py b/scripts/src/report/verifier_report.py
new file mode 100644
index 0000000..e168f2a
--- /dev/null
+++ b/scripts/src/report/verifier_report.py
@@ -0,0 +1,303 @@
+"""
+Used by github actions, specifically as part of the charts auto release process defined in
+.github/workflows/release.yml.
+
+Used to loosely determine if a submitted report is valid and has not been tampered with.
+
+An invalid report:
+- does not load as a yaml file.
+- does not include a "kind" attribute set to "verify-report".
+- does not include sections: "tool.metadata", "tool.chart", "results".
+
+A tampered report is only determined if the chart-testing check has passed:
+- certifiedOpenShiftVersions or testedOpenShiftVersion contain valid semantic versions.
+- certifiedOpenShiftVersions or testedOpenShiftVersion specify an OCP version with helm support (>=4.1.0)
+- if the has-kubeversion check has also passed
+  - v1.0 profile:
+    - if a valid kubeVersion is specified in the chart it must include the certifiedOpenShiftVersions
+  - v1.1 profile and later
+    - a valid kubeVersion is specified in the chart
+    - supportedOpenShiftVersions is consistent with the kubeVersion specified in the chart
+    - testedOpenShiftVersion is within the supportedOpenShiftVersions
+
+These are not comprehensive lists - other certification checks will perform further checks
+"""
+
+import sys
+import semantic_version
+
+import yaml
+
+try:
+    from yaml import CLoader as Loader
+except ImportError:
+    from yaml import Loader
+
+sys.path.append("../")
+from report import report_info
+
+MIN_SUPPORTED_OPENSHIFT_VERSION = semantic_version.SimpleSpec(">=4.1.0")
+TESTED_VERSION_ANNOTATION = "charts.openshift.io/testedOpenShiftVersion"
+CERTIFIED_VERSION_ANNOTATION = "charts.openshift.io/certifiedOpenShiftVersions"
+SUPPORTED_VERSIONS_ANNOTATION = "charts.openshift.io/supportedOpenShiftVersions"
+KUBE_VERSION_ATTRIBUTE = "kubeVersion"
+
+
+def get_report_data(report_path):
+    """Load and return the report data contained in report.yaml
+
+    Args:
+        report_path (str): Path to the report.yaml file.
+
+    Returns:
+        (bool, dict): A boolean indicating if the loading was successful and the
+                      content of the report.yaml file.
+    """
+    try:
+        with open(report_path) as report_data:
+            report_content = yaml.load(report_data, Loader=Loader)
+        return True, report_content
+    except Exception as err:
+        print(f"Exception loading file: {err}")
+        return False, ""
+
+
+def get_result(report_data, check_name):
+    """Parse the report.yaml content for the result of a given check.
+
+    Args:
+        report_data (dict): The content of the report.yaml file.
+        check_name (str): The name of the check to get the result for.
+
+    Returns:
+        (bool, str): A boolean set to True if the check passed, False otherwise,
+                     and the corresponding "reason" field.
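+
+    Example (illustrative; the reason string is hypothetical):
+        >>> get_result(report_data, "/chart-testing")
+        (True, 'Chart tests have passed')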
+ """ + outcome = False + reason = "Not Found" + for result in report_data["results"]: + if result["check"].endswith(check_name): + reason = result["reason"] + if result["outcome"] == "PASS": + outcome = True + break + return outcome, reason + + +def get_chart_testing_result(report_data): + return get_result(report_data, "/chart-testing") + + +def get_has_kubeversion_result(report_data): + return get_result(report_data, "/has-kubeversion") + + +def get_signature_is_valid_result(report_data): + return get_result(report_data, "/signature-is-valid") + + +def get_profile_version(report_data): + profile_version = "1.1" + try: + profile_version = report_data["metadata"]["tool"]["profile"]["version"][1:] + except Exception: + pass + return profile_version + + +def get_web_catalog_only(report_data): + web_catalog_only = False + try: + if "webCatalogOnly" in report_data["metadata"]["tool"]: + web_catalog_only = report_data["metadata"]["tool"]["webCatalogOnly"] + if "providerControlledDelivery" in report_data["metadata"]["tool"]: + web_catalog_only = report_data["metadata"]["tool"][ + "providerControlledDelivery" + ] + except Exception as err: + print( + f"Exception getting webCatalogOnly/providerControlledDelivery {err=}, {type(err)=}" + ) + pass + return web_catalog_only + + +def get_package_digest(report_data): + package_digest = None + try: + digests = report_data["metadata"]["tool"]["digests"] + if "package" in digests: + package_digest = digests["package"] + except Exception as err: + print(f"Exception getting package digest {err=}, {type(err)=}") + pass + return package_digest + + +def get_public_key_digest(report_data): + """Get the public key digest from report.yaml + + Args: + report_data (dict): the report.yaml content + + Returns: + str: The public key digest from report.yaml. Set to None if not found. + """ + public_key_digest = None + try: + digests = report_data["metadata"]["tool"]["digests"] + if "publicKey" in digests: + public_key_digest = digests["publicKey"] + except Exception as err: + print(f"Exception getting publicKey digest {err=}, {type(err)=}") + pass + return public_key_digest + + +def report_is_valid(report_data): + """Check that the report.yaml contains the expected YAML structure + + Args: + dict: The content of report.yaml + + Returns: + bool: set to True if the report contains the correct structure, False otherwise. + """ + outcome = True + + if "kind" not in report_data or report_data["kind"] != "verify-report": + print("[ERROR] kind attribute invalid or missing from report") + return False + + if "results" not in report_data: + print("No results section in report") + outcome = False + if "metadata" not in report_data: + print("No metadata section in report") + outcome = False + else: + if "tool" not in report_data["metadata"]: + print("No tool metadata section in report") + outcome = False + if "chart" not in report_data["metadata"]: + print("No tool chart section in report") + outcome = False + + return outcome + + +def validate(report_path, ocp_version_range): + """Validate report.yaml by running a serie of checks. + + * Checks that the report.yaml contains valid YAML. + * Checks that the report.yaml contains the correct structure. + * Checks that the Chart has been successully tested (result of /chart-testing). + * Checks that the profile version used is valid SemVer. + * Checks that the expected annotation is present. + * Checks that the reported version of OCP and Kubernetes are valid and are coherent. 
+
+    Args:
+        report_path (str): Path to the report.yaml file
+        ocp_version_range (str): Range of supported OCP versions
+
+    Returns:
+        (bool, str): If the checks all passed, this returns a bool set to True and an
+                     empty str. Otherwise, this returns a bool set to False and the
+                     corresponding error message.
+    """
+    is_valid_yaml, report_data = get_report_data(report_path)
+
+    if not is_valid_yaml:
+        return False, f"Report is not valid yaml: {report_path}"
+
+    if not report_is_valid(report_data):
+        return False, f"Report is incomplete and cannot be processed: {report_path}"
+
+    # No value in checking if chart testing failed
+    chart_testing_outcome, _ = get_chart_testing_result(report_data)
+    if chart_testing_outcome:
+        profile_version_string = get_profile_version(report_data)
+
+        try:
+            profile_version = semantic_version.Version.coerce(profile_version_string)
+            v1_0_profile = False
+            if profile_version.major == 1 and profile_version.minor == 0:
+                v1_0_profile = True
+        except ValueError:
+            message = f"Invalid profile version in report : {profile_version_string}"
+            print(message)
+            return False, message
+
+        annotations = report_info.get_report_annotations(report_path)
+
+        if v1_0_profile:
+            tested_version_annotation = CERTIFIED_VERSION_ANNOTATION
+        else:
+            tested_version_annotation = TESTED_VERSION_ANNOTATION
+
+        if tested_version_annotation in annotations:
+            tested_version_string = annotations[tested_version_annotation]
+        else:
+            return False, f"No annotation provided for {tested_version_annotation}"
+
+        try:
+            tested_version = semantic_version.Version.coerce(tested_version_string)
+            if tested_version not in MIN_SUPPORTED_OPENSHIFT_VERSION:
+                return (
+                    False,
+                    f"{tested_version_annotation} {tested_version_string} is not a supported OpenShift version.",
+                )
+        except ValueError:
+            return (
+                False,
+                f"{tested_version_annotation} {tested_version_string} is not a valid semantic version.",
+            )
+
+        has_kubeversion_outcome, _ = get_has_kubeversion_result(report_data)
+        if has_kubeversion_outcome:
+            if not v1_0_profile:
+                chart = report_info.get_report_chart(report_path)
+                kube_supported_versions = semantic_version.NpmSpec(ocp_version_range)
+
+                if tested_version not in kube_supported_versions:
+                    return (
+                        False,
+                        f"Tested OpenShift version {str(tested_version)} not within specified kube-versions : {ocp_version_range}",
+                    )
+
+                if SUPPORTED_VERSIONS_ANNOTATION in annotations:
+                    supported_versions_string = annotations[
+                        SUPPORTED_VERSIONS_ANNOTATION
+                    ]
+                    try:
+                        supported_versions = semantic_version.NpmSpec(
+                            supported_versions_string
+                        )
+                    except ValueError:
+                        return (
+                            False,
+                            f"{SUPPORTED_VERSIONS_ANNOTATION}: {supported_versions_string} is not a valid semantic version.",
+                        )
+                else:
+                    return (
+                        False,
+                        f"Missing annotation in report: {SUPPORTED_VERSIONS_ANNOTATION}",
+                    )
+
+                if tested_version not in supported_versions:
+                    return (
+                        False,
+                        f"Tested OpenShift version {str(tested_version)} not within supported versions : {supported_versions_string}",
+                    )
+
+                if supported_versions_string and supported_versions_string != str(
+                    kube_supported_versions
+                ):
+                    return (
+                        False,
+                        f"Kube Version {chart[KUBE_VERSION_ATTRIBUTE]} -> {str(kube_supported_versions)} does not match supportedOpenShiftVersions: {supported_versions_string}",
+                    )
+    else:
+        print("[INFO] Chart testing failed so skip report checking")
+
+    return True, ""
diff --git a/scripts/src/saforcertadmin/cluster_role_binding.yaml b/scripts/src/saforcertadmin/cluster_role_binding.yaml
new file mode 100644
index 0000000..f297bb7
--- /dev/null
+++ b/scripts/src/saforcertadmin/cluster_role_binding.yaml
@@ -0,0 +1,12 @@
+kind: ClusterRoleBinding
+apiVersion: rbac.authorization.k8s.io/v1
+metadata:
+  name: rh-cert-user-cluster-role-binding
+roleRef:
+  apiGroup: rbac.authorization.k8s.io
+  kind: ClusterRole
+  name: cluster-admin
+subjects:
+  - kind: ServiceAccount
+    name: rh-cert-user
+    namespace: default
diff --git a/scripts/src/saforcertadmin/create_sa.sh b/scripts/src/saforcertadmin/create_sa.sh
new file mode 100755
index 0000000..dc59c7f
--- /dev/null
+++ b/scripts/src/saforcertadmin/create_sa.sh
@@ -0,0 +1,10 @@
+#!/usr/bin/env bash
+
+user_name='rh-cert-user'
+oc create sa $user_name
+token_secret=$(oc get secrets --field-selector=type=kubernetes.io/service-account-token -o=jsonpath="{.items[?(@.metadata.annotations.kubernetes\.io/service-account\.name=='"$user_name"')].metadata.name}")
+token=$(oc get secret $token_secret -o json | jq -r .data.token | base64 -d)
+oc apply -f cluster_role_binding.yaml
+
+echo "Service Account Token:"
+echo $token
diff --git a/scripts/src/saforcertadmin/push_secrets.py b/scripts/src/saforcertadmin/push_secrets.py
new file mode 100644
index 0000000..9d1ecf4
--- /dev/null
+++ b/scripts/src/saforcertadmin/push_secrets.py
@@ -0,0 +1,171 @@
+"""
+This script will help to list, create or update secrets of a repository
+
+Prerequisites:
+
+1. Before running this script, you have to set the BOT_TOKEN environment variable as below:
+export BOT_TOKEN=
+
+Note: the token you are using needs to have the correct authorization to list/create/update the secrets
+
+2. Install the "pynacl" module using : pip install pynacl==1.5.0
+
+Example Usage:
+
+1. To list the secret names of openshift-helm-charts/sandbox repository
+   python push_secrets.py -r openshift-helm-charts/sandbox -l
+
+2. To create or update the CLUSTER_TOKEN of openshift-helm-charts/sandbox repository
+   python push_secrets.py -r openshift-helm-charts/sandbox -s CLUSTER_TOKEN -v 
+
+"""
+from base64 import b64encode
+from nacl import encoding, public
+import logging
+import os
+import sys
+import json
+import requests
+import argparse
+
+sys.path.append("../")
+from pullrequest import prartifact
+
+token = os.environ.get("BOT_TOKEN")
+headers = {
+    "Accept": "application/vnd.github.v3+json",
+    "Authorization": f"token {token}",
+}
+
+logging.basicConfig(level=logging.INFO)
+
+
+def encrypt(public_key: str, secret_value: str) -> str:
+    """Encrypt a Unicode string using the public key."""
+    public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder())
+    sealed_box = public.SealedBox(public_key)
+    encrypted = sealed_box.encrypt(secret_value.encode("utf-8"))
+    return b64encode(encrypted).decode("utf-8")
+
+
+def get_repo_public_key(repo):
+    """Get the public key id and key of a github repository"""
+    response = requests.get(
+        f"https://api.github.com/repos/{repo}/actions/secrets/public-key",
+        headers=headers,
+    )
+    if response.status_code != 200:
+        logging.error(
+            f"unexpected response getting repo public key : {response.status_code} : {response.reason}"
+        )
+        sys.exit(1)
+    response_json = response.json()
+
+    if prartifact.xRateLimit in response.headers:
+        print(
+            f"[DEBUG] {prartifact.xRateLimit} : {response.headers[prartifact.xRateLimit]}"
+        )
+    if prartifact.xRateRemain in response.headers:
+        print(
+            f"[DEBUG] {prartifact.xRateRemain} : {response.headers[prartifact.xRateRemain]}"
+        )
+
+    if "message" in response_json:
+        print(f'[ERROR] getting public key: {response_json["message"]}')
+        sys.exit(1)
+
+    return response_json["key_id"], response_json["key"]
+
+
+def get_repo_secrets(repo):
+    """Get the list of secret names of a github repository"""
+    secret_names = []
+    response = requests.get(
+        f"https://api.github.com/repos/{repo}/actions/secrets", headers=headers
+    )
+    if response.status_code != 200:
+        logging.error(
+            f"[ERROR] unexpected response getting repo secrets : {response.status_code} : {response.reason}"
+        )
+        sys.exit(1)
+    response_json = response.json()
+    if "message" in response_json:
+        print(f'[ERROR] getting repo secrets: {response_json["message"]}')
+        sys.exit(1)
+    for i in range(response_json["total_count"]):
+        secret_names.append(response_json["secrets"][i]["name"])
+    return secret_names
+
+
+def create_or_update_repo_secrets(repo, secret_name, key_id, encrypted_value):
+    """Create or update a github repository secret"""
+    response = requests.put(
+        f"https://api.github.com/repos/{repo}/actions/secrets/{secret_name}",
+        json={"key_id": key_id, "encrypted_value": encrypted_value},
+        headers=headers,
+    )
+    if response.status_code != 201 and response.status_code != 204:
+        logging.error(
+            f"unexpected response during put request : {response.status_code} : {response.reason}"
+        )
+        sys.exit(1)
+    try:
+        response_json = response.json()
+        if "message" in response_json:
+            print(f'[ERROR] updating repo secret: {response_json["message"]}')
+            sys.exit(1)
+    except json.decoder.JSONDecodeError:
+        pass
+
+    logging.info(f"Secret {secret_name} create or update successful")
+
+
+def main():
+    parser = argparse.ArgumentParser(
+        description="Script to list, create or update secrets of a repository"
+    )
+    parser.add_argument(
+        "-r",
+        "--repo",
+        dest="repo",
+        type=str,
+        required=True,
+        help="Github repo name in {org}/{repo_name} format",
+    )
+    parser.add_argument(
+        "-l",
"--list", + dest="list", + action="store_true", + required=False, + help="List the secret names", + ) + parser.add_argument( + "-s", "--secret", dest="secret", type=str, required=False, help="Secret name" + ) + parser.add_argument( + "-v", + "--value", + dest="value", + type=str, + required=False, + help="Secret value to set", + ) + args = parser.parse_args() + + if args.list: + secrets = get_repo_secrets(args.repo) + logging.info(f"Github Secret Names: {secrets}") + elif args.secret and args.value: + secret_name = args.secret + secret_value = args.value + logging.info(f"Setting SECRET: {secret_name}") + key_id, public_key = get_repo_public_key(args.repo) + encrypted_value = encrypt(public_key, secret_value) + create_or_update_repo_secrets(args.repo, secret_name, key_id, encrypted_value) + else: + logging.error("Wrong argument combination") + + +if __name__ == "__main__": + main() diff --git a/scripts/src/saforcharttesting/__init__.py b/scripts/src/saforcharttesting/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/scripts/src/saforcharttesting/saforcharttesting.py b/scripts/src/saforcharttesting/saforcharttesting.py new file mode 100644 index 0000000..ba2ef7b --- /dev/null +++ b/scripts/src/saforcharttesting/saforcharttesting.py @@ -0,0 +1,358 @@ +import sys +import time +import os +import base64 +import json +import argparse +import subprocess +import tempfile +import re +from string import Template + +namespace_template = """\ +apiVersion: v1 +kind: Namespace +metadata: + name: ${name} +""" + +serviceaccount_template = """\ +apiVersion: v1 +kind: ServiceAccount +metadata: + name: ${name} + namespace: ${name} +""" + +role_template = """\ +kind: Role +apiVersion: rbac.authorization.k8s.io/v1 +metadata: + name: ${name} + namespace: ${name} +rules: + - apiGroups: + - "*" + resources: + - '*' + verbs: + - '*' +""" + +rolebinding_template = """\ +apiVersion: rbac.authorization.k8s.io/v1 +kind: RoleBinding +metadata: + name: ${name} + namespace: ${name} +subjects: +- kind: ServiceAccount + name: ${name} + namespace: ${name} +roleRef: + kind: Role + name: ${name} +""" + +clusterrole_template = """\ +kind: ClusterRole +apiVersion: rbac.authorization.k8s.io/v1 +metadata: + name: ${name} +rules: + - apiGroups: + - "config.openshift.io" + resources: + - 'clusteroperators' + verbs: + - 'get' + - apiGroups: + - "rbac.authorization.k8s.io" + resources: + - 'clusterrolebindings' + - 'clusterroles' + verbs: + - 'get' + - 'create' + - 'delete' + - apiGroups: + - "admissionregistration.k8s.io" + resources: + - 'mutatingwebhookconfigurations' + verbs: + - 'get' + - 'create' + - 'list' + - 'watch' + - 'patch' + - 'delete' + - apiGroups: + - "authentication.k8s.io" + resources: + - 'tokenreviews' + verbs: + - 'create' + - apiGroups: + - "authorization.k8s.io" + resources: + - 'subjectaccessreviews' + verbs: + - 'create' +""" + +clusterrolebinding_template = """\ +kind: ClusterRoleBinding +apiVersion: rbac.authorization.k8s.io/v1 +metadata: + name: ${name} +roleRef: + apiGroup: rbac.authorization.k8s.io + kind: ClusterRole + name: ${name} +subjects: + - kind: ServiceAccount + name: ${name} + namespace: ${name} +""" + + +def apply_config(tmpl, **values): + with tempfile.TemporaryDirectory(prefix="sa-for-chart-testing-") as tmpdir: + content = Template(tmpl).substitute(values) + config_path = os.path.join(tmpdir, "config.yaml") + with open(config_path, "w") as fd: + fd.write(content) + out = subprocess.run(["oc", "apply", "-f", config_path], capture_output=True) + stdout = 
out.stdout.decode("utf-8") + if out.returncode != 0: + stderr = out.stderr.decode("utf-8") + else: + stderr = "" + + return stdout, stderr + + +def delete_config(tmpl, **values): + with tempfile.TemporaryDirectory(prefix="sa-for-chart-testing-") as tmpdir: + content = Template(tmpl).substitute(values) + config_path = os.path.join(tmpdir, "config.yaml") + with open(config_path, "w") as fd: + fd.write(content) + out = subprocess.run(["oc", "delete", "-f", config_path], capture_output=True) + stdout = out.stdout.decode("utf-8") + if out.returncode != 0: + stderr = out.stderr.decode("utf-8") + else: + stderr = "" + + return stdout, stderr + + +def create_namespace(namespace): + print("creating Namespace:", namespace) + stdout, stderr = apply_config(namespace_template, name=namespace) + print("stdout:\n", stdout, sep="") + if stderr.strip(): + print("[ERROR] creating Namespace:", stderr) + + +def create_serviceaccount(namespace): + print("creating ServiceAccount:", namespace) + stdout, stderr = apply_config(serviceaccount_template, name=namespace) + print("stdout:\n", stdout, sep="") + if stderr.strip(): + print("[ERROR] creating ServiceAccount:", stderr) + + +def create_role(namespace): + print("creating Role:", namespace) + stdout, stderr = apply_config(role_template, name=namespace) + print("stdout:\n", stdout, sep="") + if stderr.strip(): + print("[ERROR] creating Role:", stderr) + + +def create_rolebinding(namespace): + print("creating RoleBinding:", namespace) + stdout, stderr = apply_config(rolebinding_template, name=namespace) + print("stdout:\n", stdout, sep="") + if stderr.strip(): + print("[ERROR] creating RoleBinding:", stderr) + + +def create_clusterrole(namespace): + print("creating ClusterRole:", namespace) + stdout, stderr = apply_config(clusterrole_template, name=namespace) + print("stdout:\n", stdout, sep="") + if stderr.strip(): + print("[ERROR] creating ClusterRole:", stderr) + + +def create_clusterrolebinding(namespace): + print("creating ClusterRoleBinding:", namespace) + stdout, stderr = apply_config(clusterrolebinding_template, name=namespace) + print("stdout:\n", stdout, sep="") + if stderr.strip(): + print("[ERROR] creating ClusterRoleBinding:", stderr) + + +def delete_namespace(namespace): + print("deleting Namespace:", namespace) + stdout, stderr = delete_config(namespace_template, name=namespace) + print("stdout:\n", stdout, sep="") + if stderr.strip(): + print("[ERROR] deleting Namespace:", namespace, stderr) + sys.exit(1) + + +def delete_clusterrole(name): + print("deleting ClusterRole:", name) + stdout, stderr = delete_config(clusterrole_template, name=name) + print("stdout:\n", stdout, sep="") + if stderr.strip(): + print("[ERROR] deleting ClusterRole:", name, stderr) + sys.exit(1) + + +def delete_clusterrolebinding(name): + print("deleting ClusterRoleBinding:", name) + stdout, stderr = delete_config(clusterrolebinding_template, name=name) + print("stdout:\n", stdout, sep="") + if stderr.strip(): + print("[ERROR] deleting ClusterRoleBinding:", name, stderr) + sys.exit(1) + + +def write_sa_token(namespace, token): + secret_found = False + secrets = [] + for i in range(7): + out = subprocess.run( + ["oc", "get", "serviceaccount", namespace, "-n", namespace, "-o", "json"], + capture_output=True, + ) + stdout = out.stdout.decode("utf-8") + if out.returncode != 0: + stderr = out.stderr.decode("utf-8") + if stderr.strip(): + print("[ERROR] retrieving ServiceAccount:", namespace, stderr) + time.sleep(10) + else: + sa = json.loads(stdout) + if len(sa["secrets"]) >= 2: + 
secrets = sa["secrets"] + secret_found = True + break + else: + pattern = r"Tokens:\s+([A-Za-z0-9-]+)" + dout = subprocess.run( + ["oc", "describe", "serviceaccount", namespace, "-n", namespace], + capture_output=True, + ) + dstdout = dout.stdout.decode("utf-8") + match = re.search(pattern, dstdout) + if match: + token_name = match.group(1) + else: + print("[ERROR] Token not found, Exiting") + sys.exit(1) + secrets.append({"name": token_name}) + secret_found = True + break + time.sleep(10) + + if not secret_found: + print("[ERROR] retrieving ServiceAccount:", namespace, stderr) + sys.exit(1) + + for secret in secrets: + out = subprocess.run( + ["oc", "get", "secret", secret["name"], "-n", namespace, "-o", "json"], + capture_output=True, + ) + stdout = out.stdout.decode("utf-8") + if out.returncode != 0: + stderr = out.stderr.decode("utf-8") + if stderr.strip(): + print("[ERROR] retrieving secret:", secret["name"], stderr) + continue + else: + sec = json.loads(stdout) + if sec["type"] == "kubernetes.io/service-account-token": + content = sec["data"]["token"] + with open(token, "w") as fd: + fd.write(base64.b64decode(content).decode("utf-8")) + + +def switch_project_context(namespace, token, api_server): + tkn = open(token).read() + for i in range(7): + out = subprocess.run( + ["oc", "login", "--token", tkn, "--server", api_server], capture_output=True + ) + stdout = out.stdout.decode("utf-8") + print(stdout) + out = subprocess.run(["oc", "project", namespace], capture_output=True) + stdout = out.stdout.decode("utf-8") + print(stdout) + out = subprocess.run(["oc", "config", "current-context"], capture_output=True) + stdout = out.stdout.decode("utf-8").strip() + print(stdout) + if stdout.endswith(":".join((namespace, namespace))): + print("current-context:", stdout) + return + time.sleep(10) + + # This exit will happen if there is an infra failure + print( + """[ERROR] There is an error creating the namespace and service account. It happens due to some infrastructure failure. It is not directly related to the changes in the pull request. You can wait for some time and try to re-run the job. 
To re-run the job, change the PR into a draft and then remove the draft state."""
+    )
+    sys.exit(1)
+
+
+def main():
+    parser = argparse.ArgumentParser()
+    parser.add_argument(
+        "-c",
+        "--create",
+        dest="create",
+        type=str,
+        required=False,
+        help="create service account and namespace for chart testing",
+    )
+    parser.add_argument(
+        "-t",
+        "--token",
+        dest="token",
+        type=str,
+        required=False,
+        help="service account token for chart testing",
+    )
+    parser.add_argument(
+        "-d",
+        "--delete",
+        dest="delete",
+        type=str,
+        required=False,
+        help="delete service account and namespace used for chart testing",
+    )
+    parser.add_argument(
+        "-s", "--server", dest="server", type=str, required=False, help="API server URL"
+    )
+    args = parser.parse_args()
+
+    if args.create:
+        create_namespace(args.create)
+        create_serviceaccount(args.create)
+        create_role(args.create)
+        create_rolebinding(args.create)
+        create_clusterrole(args.create)
+        create_clusterrolebinding(args.create)
+        write_sa_token(args.create, args.token)
+        switch_project_context(args.create, args.token, args.server)
+    elif args.delete:
+        delete_clusterrolebinding(args.delete)
+        delete_clusterrole(args.delete)
+        delete_namespace(args.delete)
+    else:
+        parser.print_help()
diff --git a/scripts/src/signedchart/__init__.py b/scripts/src/signedchart/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/scripts/src/signedchart/signedchart.py b/scripts/src/signedchart/signedchart.py
new file mode 100644
index 0000000..df84c2c
--- /dev/null
+++ b/scripts/src/signedchart/signedchart.py
@@ -0,0 +1,182 @@
+import sys
+import subprocess
+import base64
+import filecmp
+import os
+import re
+
+sys.path.append("../")
+from report import verifier_report
+from owners import owners_file
+from pullrequest import prartifact
+from reporegex import matchers
+
+
+def check_and_prepare_signed_chart(api_url, report_path, owner_path, key_file_path):
+    signed_chart = is_chart_signed(api_url, report_path)
+    key_in_owners = False
+    keys_match = False
+    if signed_chart:
+        owners_pgp_key = get_pgp_key_from_owners(owner_path)
+        if owners_pgp_key:
+            key_in_owners = True
+            if report_path:
+                keys_match = check_pgp_public_key(owners_pgp_key, report_path)
+            elif key_file_path:
+                create_public_key_file(owners_pgp_key, key_file_path)
+
+    return signed_chart, key_in_owners, keys_match
+
+
+def get_verifier_flags(tar_file, owners_file, temp_dir):
+    prov_file = f"{tar_file}.prov"
+    if os.path.exists(prov_file):
+        gpg_key = get_pgp_key_from_owners(owners_file)
+        if gpg_key:
+            key_file = os.path.join(temp_dir, "pgp", f"{tar_file}.key")
+            create_public_key_file(gpg_key, key_file)
+            return f"--pgp-public-key {key_file}"
+    return ""
+
+
+def is_chart_signed(api_url, report_path):
+    if api_url:
+        files = prartifact.get_modified_files(api_url)
+        tgz_pattern = re.compile(
+            matchers.submission_path_matcher(strict_categories=False) + r".*.tgz"
+        )
+        tgz_found = False
+        prov_pattern = re.compile(
+            matchers.submission_path_matcher(strict_categories=False) + r".*.tgz.prov"
+        )
+        prov_found = False
+
+        for file_path in files:
+            if tgz_pattern.match(file_path):
+                tgz_found = True
+            if prov_pattern.match(file_path):
+                prov_found = True
+
+        if tgz_found and prov_found:
+            return True
+    elif report_path:
+        return check_report_for_signed_chart(report_path)
+
+    return False
+
+
+def key_in_owners_match_report(owner_path, report_path):
+    owner_key = get_pgp_key_from_owners(owner_path)
+    if not owner_key:
+        return True
+    return check_pgp_public_key(owner_key, report_path)
+
+
+def get_pgp_key_from_owners(owner_path):
+    found, owner_data = owners_file.get_owner_data_from_file(owner_path)
+    if found:
+        pgp_key = owners_file.get_pgp_public_key(owner_data)
+        return pgp_key
+    return ""
+
+
+def check_report_for_signed_chart(report_path):
+    """Check that the report has passed the "signature-is-valid" test
+
+    Args:
+        report_path (str): Path to the report.yaml file
+
+    Returns:
+        bool: Set to True if the report has passed the "signature-is-valid" test,
+              False otherwise
+    """
+    found, report_data = verifier_report.get_report_data(report_path)
+    if found:
+        _, reason = verifier_report.get_signature_is_valid_result(report_data)
+        if "Chart is signed" in reason:
+            return True
+    return False
+
+
+def check_pgp_public_key(owner_pgp_key, report_path):
+    """Check if the PGP key in the OWNERS file matches the one from report.yaml
+
+    This check passes if one of the following conditions is met:
+    - The PGP keys match.
+    - The report is not for a signed chart.
+    - The report is not found.
+
+    Consequently, the check fails if the report is found and one of the following is true:
+    - The PGP keys do not match.
+    - The report is for a signed chart but no PGP key is provided in report.yaml.
+
+    Args:
+        owner_pgp_key (str): The PGP key present in the OWNERS file.
+        report_path (str): Path to the report.yaml file.
+
+    Returns:
+        bool: Set to True if the check passes, to False otherwise.
+    """
+    found, report_data = verifier_report.get_report_data(report_path)
+    if found:
+        pgp_public_key_digest_owners = subprocess.getoutput(
+            f"echo {owner_pgp_key} | sha256sum"
+        ).split(" ")[0]
+        print(f"[INFO] digest of PGP key from OWNERS :{pgp_public_key_digest_owners}:")
+        pgp_public_digest_report = verifier_report.get_public_key_digest(report_data)
+        print(f"[INFO] PGP key digest in report :{pgp_public_digest_report}:")
+        if pgp_public_digest_report:
+            return pgp_public_key_digest_owners == pgp_public_digest_report
+        else:
+            return not check_report_for_signed_chart(report_path)
+    return True
+
+
+def create_public_key_file(pgp_public_key_from_owners, key_file_path):
+    key_content = base64.b64decode(pgp_public_key_from_owners)
+
+    key_file = open(key_file_path, "w")
+    key_file.write(key_content.decode("utf-8"))
+    key_file.close()
+
+
+def main():
+    if not is_chart_signed("", "./partner-report.yaml"):
+        print("ERROR chart is not signed")
+    else:
+        print("PASS chart is signed")
+
+    if not check_report_for_signed_chart("./partner-report.yaml"):
+        print("ERROR report does not indicate chart is signed")
+    else:
+        print("PASS report indicates chart is signed")
+
+    encoded_key_in_owners = get_pgp_key_from_owners("./OWNERS")
+    if not check_pgp_public_key(encoded_key_in_owners, "./partner-report.yaml"):
+        print("ERROR key digests do not match")
+    else:
+        print("PASS key digests match")
+
+    signed, key_in_owners, keys_match = check_and_prepare_signed_chart(
+        "", "./partner-report.yaml", "./OWNERS", "./pgp.key"
+    )
+    if signed and key_in_owners and keys_match:
+        print("PASS all is good")
+    else:
+        print(
+            f"ERROR, all true expected: signed = {signed}, key_in_owners = {key_in_owners},
keys_match = {keys_match}" + ) + + create_public_key_file(encoded_key_in_owners, "./pgp.key") + if os.path.exists("./pgp.key"): + if not filecmp.cmp("./psql-service-0.1.11.tgz.key", "./pgp.key"): + print("ERROR public key files file do not match") + else: + print("PASS public key files do match") + os.remove("./pgp.key") + else: + print("ERROR pgp key file was not created") + + +if __name__ == "__main__": + main() diff --git a/scripts/src/tools/__init__.py b/scripts/src/tools/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/scripts/src/tools/gitutils.py b/scripts/src/tools/gitutils.py new file mode 100644 index 0000000..52debd7 --- /dev/null +++ b/scripts/src/tools/gitutils.py @@ -0,0 +1,175 @@ +""" +Used within github actions to perform github tasks, specifically as part of the charts auto release process +defined in .github/workflow/release.yml + +Requires the environment to contain: +BOT_TOKEN : valid Oauth token for a bot with permission to update development and chart repositories +BOT_NAME : valid owner of the BOT_TOKEN + +main functions : +- create_charts_pr - creates a PR to the charts repo based on changes made in the repository +- commit_development_change - directly commits changes to the main branch of the devlopment repository +""" + + +import os +import sys +import json +import requests +from git import Repo + +GITHUB_BASE_URL = "https://api.github.com" +CHARTS_REPO = "/charts" +DEVELOPMENT_REPO = "/development" +STAGE_REPO = "/stage" + +PR_CREATED = "PR_CREATED" +PR_NOT_NEEDED = "PR_NOT_NEEDED" +PR_FAILED = "PR_FAILED" + +# GitHub actions bot email for git email +GITHUB_ACTIONS_BOT_EMAIL = "41898282+github-actions[bot]@users.noreply.github.com" + + +def set_git_username_email(repo, username, email): + """ + Parameters: + repo (git.Repo): git.Repo instance of the local directory + username (str): git username to set + email (str): git email to set + """ + repo.config_writer().set_value("user", "name", username).release() + repo.config_writer().set_value("user", "email", email).release() + + +def github_api_post(endpoint, headers, json): + r = requests.post(f"{GITHUB_BASE_URL}/{endpoint}", headers=headers, json=json) + + try: + response_json = r.json() + + if "message" in response_json: + print(f'[ERROR] from post request: {response_json["message"]}') + sys.exit(1) + except json.JSONDecodeError: + pass + + return r + + +def github_api_get(endpoint, headers): + r = requests.get(f"{GITHUB_BASE_URL}/{endpoint}", headers=headers) + response_json = r.json() + if "message" in response_json: + print(f'[ERROR] get request: {response_json["message"]}') + sys.exit(1) + + return r + + +def github_api(method, endpoint, bot_token, json={}): + headers = { + "Accept": "application/vnd.github.v3+json", + "Authorization": f"Bearer {bot_token}", + } + if method == "get": + return github_api_get(endpoint, headers) + elif method == "post": + return github_api_post(endpoint, headers, json) + else: + raise ValueError( + f"Github API method {method} not implemented in helper function" + ) + + +def get_bot_name_and_token(): + bot_name = os.environ.get("BOT_NAME") + bot_token = os.environ.get("BOT_TOKEN") + if not bot_name and not bot_token: + raise Exception("BOT_TOKEN environment variable not defined") + elif not bot_name: + raise Exception("BOT_TOKEN set but BOT_NAME not specified") + elif not bot_token: + raise Exception("BOT_NAME set but BOT_TOKEN not specified") + else: + print(f"found bot name ({bot_name}) and token.") + return bot_name, bot_token + + +def 
+def create_pr(branch_name, skip_files, repository, message, target_branch):
+    repo = Repo(os.getcwd())
+
+    bot_name, bot_token = get_bot_name_and_token()
+    set_git_username_email(repo, bot_name, GITHUB_ACTIONS_BOT_EMAIL)
+
+    repo.create_head(branch_name)
+    print(f"checkout branch {branch_name}")
+    repo.git.checkout(branch_name)
+
+    if add_changes(repo, skip_files):
+        print(f"commit changes with message: {branch_name}")
+        repo.index.commit(branch_name)
+
+        print(f"push the branch {branch_name} to {repository}")
+        repo.git.push(
+            f"https://x-access-token:{bot_token}@github.com/{repository}",
+            f"HEAD:refs/heads/{branch_name}",
+            "-f",
+        )
+
+        print(f"make the pull request to {target_branch}")
+        data = {
+            "head": branch_name,
+            "base": f"{target_branch}",
+            "title": branch_name,
+            "body": f"{message}",
+        }
+
+        r = github_api("post", f"repos/{repository}/pulls", bot_token, json=data)
+
+        j = json.loads(r.text)
+        if "number" in j:
+            print(f"pull request info: {j['number']}")
+            return PR_CREATED
+        else:
+            print(
+                f"Unexpected response from PR. status code: {r.status_code}, text: {j}"
+            )
+            return PR_FAILED
+
+    else:
+        print(f"no changes required for {repository}")
+        return PR_NOT_NEEDED
+
+
+def add_changes(repo, skip_files):
+    if len(skip_files) == 0:
+        changed = [item.a_path for item in repo.index.diff(None)]
+        for change in changed:
+            print(f"Changed file: {change}")
+        for add in repo.untracked_files:
+            print(f"Added file: {add}")
+        print("Add all changes")
+        repo.git.add(all=True)
+    else:
+        changed = [item.a_path for item in repo.index.diff(None)]
+        for change in changed:
+            if change in skip_files:
+                print(f"Skip changed file: {change}")
+            else:
+                print(f"Add changed file: {change}")
+                repo.git.add(change)
+
+        for add in repo.untracked_files:
+            if add in skip_files:
+                print(f"Skip added file: {add}")
+            else:
+                print(f"Add added file: {add}")
+                repo.git.add(add)
+
+    return len(repo.index.diff("HEAD")) > 0
+
+
+def add_output(name, value):
+    with open(os.environ["GITHUB_OUTPUT"], "a") as fh:
+        print(f"{name}={value}", file=fh)
diff --git a/scripts/src/updateindex/__init__.py b/scripts/src/updateindex/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/scripts/src/updateindex/updateindex.py b/scripts/src/updateindex/updateindex.py
new file mode 100644
index 0000000..62c688f
--- /dev/null
+++ b/scripts/src/updateindex/updateindex.py
@@ -0,0 +1,229 @@
+"""This file downloads and updates the Helm repository index data"""
+
+import argparse
+import base64
+import hashlib
+import json
+import os
+import requests
+import sys
+import yaml
+
+from datetime import datetime, timezone
+from environs import Env
+
+try:
+    from yaml import CLoader as Loader, CDumper as Dumper
+except ImportError:
+    from yaml import Loader, Dumper
+
+
+def _decode_chart_entry(chart_entry_encoded):
+    """Decode the base64 encoded index entry to add.
+
+    Args:
+        chart_entry_encoded (str): base64 encoded index entry for this chart
+
+    Returns:
+        dict: Decoded index entry
+
+    """
+    chart_entry_bytes = base64.b64decode(chart_entry_encoded)
+    chart_entry_str = chart_entry_bytes.decode()
+
+    return json.loads(chart_entry_str)
+
+
+def download_index(index_file, repository, branch):
+    """Download the index file and return its content.
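+
+    If the index does not yet exist on the given branch (the request returns a
+    non-200 status), a minimal skeleton is returned instead, of the shape
+    (illustrative): {"apiVersion": "v1", "generated": "<timestamp>", "entries": {}}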
+
+    Args:
+        index_file (str): Path to the index file to update
+        repository (str): Name of the git Repository
+        branch (str): Git branch that hosts the Helm repository index
+
+    Returns:
+        dict: The current content of the index
+    """
+    print(f"Downloading {index_file}")
+    r = requests.get(
+        f"https://raw.githubusercontent.com/{repository}/{branch}/{index_file}"
+    )
+    now = datetime.now(timezone.utc).astimezone().isoformat()
+
+    if r.status_code == 200:
+        data = yaml.load(r.text, Loader=Loader)
+        data["generated"] = now
+    else:
+        data = {"apiVersion": "v1", "generated": now, "entries": {}}
+
+    return data
+
+
+def update_index(
+    index_data,
+    version,
+    chart_url,
+    chart_entry,
+    web_catalog_only,
+):
+    """Update the Helm repository index file
+
+    Args:
+        index_data (dict): Content of the Helm repo index
+        version (str): The version of the chart (ex: 1.4.0)
+        chart_url (str): URL of the Chart
+        chart_entry (dict): Index entry to add
+        web_catalog_only (bool): Set to True if the provider has chosen the Web Catalog
+                                 Only option.
+
+    """
+    now = datetime.now(timezone.utc).astimezone().isoformat()
+
+    print("[INFO] Updating the chart entry with new version")
+    crtentries = []
+    entry_name = os.environ.get("CHART_ENTRY_NAME")
+    if not entry_name:
+        print("[ERROR] Internal error: missing chart entry name")
+        sys.exit(1)
+    d = index_data["entries"].get(entry_name, [])
+    for v in d:
+        if v["version"] == version:
+            continue
+        crtentries.append(v)
+
+    chart_entry["urls"] = [chart_url]
+    if not web_catalog_only:
+        set_package_digest(chart_entry, chart_url)
+    chart_entry["annotations"]["charts.openshift.io/submissionTimestamp"] = now
+    crtentries.append(chart_entry)
+    index_data["entries"][entry_name] = crtentries
+
+
+def set_package_digest(chart_entry, chart_url):
+    """Check that the digest of the provided chart matches the digest of the chart that
+    has been uploaded in the GitHub release.
+
+    Note that this is the reason why the GitHub release must have been created before
+    updating the index.
+
+    Args:
+        chart_entry (dict): Index entry to add
+        chart_url (str): URL of the Chart
+
+    """
+    print("[INFO] set package digests.")
+
+    head = requests.head(chart_url, allow_redirects=True)
+    print(f"[DEBUG]: tgz url : {chart_url}")
+    print(f"[DEBUG]: response code from head request: {head.status_code}")
+
+    target_digest = ""
+    if head.status_code == 200:
+        response = requests.get(chart_url, allow_redirects=True)
+        print(f"[DEBUG]: response code from get request: {response.status_code}")
+        target_digest = hashlib.sha256(response.content).hexdigest()
+        print(f"[DEBUG]: calculated digest : {target_digest}")
+
+    pkg_digest = ""
+    if "digest" in chart_entry:
+        pkg_digest = chart_entry["digest"]
+        print(f"[DEBUG]: digest in report : {pkg_digest}")
+
+    if target_digest:
+        if not pkg_digest:
+            # Digest was computed but not passed
+            chart_entry["digest"] = target_digest
+        elif pkg_digest != target_digest:
+            # Digest was passed and computed but they differ
+            raise Exception(
+                "Found an integrity issue. SHA256 digest passed does not match SHA256 digest computed."
+            )
+    elif not pkg_digest:
+        # Digest was not passed and could not be computed
+        raise Exception(
+            "Was unable to compute SHA256 digest, please ensure the chart URL points to a chart package."
+        )
+
+
+def write_index_file(index_data, index_file):
+    """Write the new content of the index to file
+
+    Args:
+        index_data (dict): Content of the Helm repo index
+        index_file (str): Path to the index file to update
+
+    """
+    out = yaml.dump(index_data, Dumper=Dumper)
+    print(f"{index_file} content:\n", out)
+    with open(index_file, "w") as fd:
+        fd.write(out)
+
+
+def main():
+    parser = argparse.ArgumentParser()
+    parser.add_argument(
+        "-b",
+        "--index-branch",
+        dest="index_branch",
+        type=str,
+        required=True,
+        help="Git branch that hosts the Helm repository index",
+    )
+    parser.add_argument(
+        "-f",
+        "--index-file",
+        dest="index_file",
+        type=str,
+        required=True,
+        help="index file to update",
+    )
+    parser.add_argument(
+        "-r",
+        "--repository",
+        dest="repository",
+        type=str,
+        required=True,
+        help="Name of the git Repository",
+    )
+    parser.add_argument(
+        "-u",
+        "--chart-url",
+        dest="chart_url",
+        type=str,
+        required=True,
+        help="URL where the Chart is available",
+    )
+    parser.add_argument(
+        "-e",
+        "--chart-entry",
+        dest="chart_entry_encoded",
+        type=str,
+        required=True,
+        help="Index entry to add",
+    )
+    parser.add_argument(
+        "-v",
+        "--version",
+        dest="version",
+        type=str,
+        required=True,
+        help="Version of the chart being added",
+    )
+    args = parser.parse_args()
+
+    chart_entry = _decode_chart_entry(args.chart_entry_encoded)
+
+    env = Env()
+    web_catalog_only = env.bool("WEB_CATALOG_ONLY", False)
+
+    index_data = download_index(args.index_file, args.repository, args.index_branch)
+    update_index(
+        index_data,
+        args.version,
+        args.chart_url,
+        chart_entry,
+        web_catalog_only,
+    )
+    write_index_file(index_data, args.index_file)
diff --git a/scripts/src/workflowtesting/__init__.py b/scripts/src/workflowtesting/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/scripts/src/workflowtesting/checkprforci.py b/scripts/src/workflowtesting/checkprforci.py
new file mode 100644
index 0000000..7e86204
--- /dev/null
+++ b/scripts/src/workflowtesting/checkprforci.py
@@ -0,0 +1,129 @@
+import re
+import argparse
+import os
+import yaml
+import sys
+
+try:
+    from yaml import CLoader as Loader
+except ImportError:
+    from yaml import Loader
+from tools import gitutils
+
+sys.path.append("../")
+from pullrequest import prartifact
+
+
+def check_if_ci_only_is_modified(api_url):
+    # api_url https://api.github.com/repos///pulls/1
+
+    files = prartifact.get_modified_files(api_url)
+    workflow_files = [
+        re.compile(r".github/(workflows|actions)/.*"),
+        re.compile(r"scripts/.*"),
+        re.compile(r"tests/.*"),
+    ]
+    test_files = [
+        re.compile(r"tests/functional/step_defs/.*_test_.*"),
+        re.compile(r"tests/functional/behave_features/.*.feature"),
+    ]
+    skip_build_files = [
+        re.compile(r"release/release_info.json"),
+        re.compile(r"README.md"),
+        re.compile(r"docs/([\w-]+)\.md"),
+    ]
+
+    print(f"[INFO] The following files were modified in this PR: {files}")
+
+    workflow_found = False
+    others_found = False
+    tests_included = False
+
+    for filename in files:
+        if any([pattern.match(filename) for pattern in workflow_files]):
+            print(f"[DEBUG] Modified file {filename} is a workflow file.")
+            workflow_found = True
+            # Tests are considered workflow files AND test files to inform other actions
+            # so we detect both separately.
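+            # For example (hypothetical path): "tests/functional/behave_features/foo.feature"
+            # matches both the tests/.* workflow pattern and a test_files pattern.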
+            if any([pattern.match(filename) for pattern in test_files]):
+                print(f"[DEBUG] Modified file {filename} is also a test file.")
+                tests_included = True
+        elif any([pattern.match(filename) for pattern in skip_build_files]):
+            print(f"[DEBUG] Modified file {filename} is a skippable file.")
+            others_found = True
+        else:
+            print(
+                f"[DEBUG] Modified file {filename} did not match any file paths of interest. Ignoring."
+            )
+            continue
+
+    if others_found and not workflow_found:
+        gitutils.add_output("do-not-build", "true")
+    elif tests_included:
+        print("[INFO] set full_tests_in_pr to true")
+        gitutils.add_output("full_tests_in_pr", "true")
+
+    return workflow_found
+
+
+def verify_user(username):
+    print(f"[INFO] Verify user. {username}")
+    owners_path = "OWNERS"
+    if not os.path.exists(owners_path):
+        print(f"[ERROR] {owners_path} file does not exist.")
+        return False
+    else:
+        data = open(owners_path).read()
+        out = yaml.load(data, Loader=Loader)
+        if username in out["approvers"]:
+            print(f"[INFO] {username} authorized")
+            return True
+        else:
+            print(f"[ERROR] {username} cannot run tests")
+            return False
+
+
+def main():
+    parser = argparse.ArgumentParser()
+    parser.add_argument(
+        "-u",
+        "--api-url",
+        dest="api_url",
+        type=str,
+        required=False,
+        help="API URL for the pull request",
+    )
+    parser.add_argument(
+        "-n",
+        "--verify-user",
+        dest="username",
+        type=str,
+        required=True,
+        help="check if the user can run tests",
+    )
+    args = parser.parse_args()
+    if not args.api_url:
+        if verify_user(args.username):
+            print("[INFO] User authorized for manual invocation - run tests.")
+            gitutils.add_output("run-tests", "true")
+        else:
+            print(
+                "[INFO] User not authorized for manual invocation - do not run tests."
+            )
+            gitutils.add_output("workflow-only-but-not-authorized", "true")
+    elif check_if_ci_only_is_modified(args.api_url):
+        if verify_user(args.username):
+            print(
+                "[INFO] PR is workflow changes only and user is authorized - run tests."
+            )
+            gitutils.add_output("run-tests", "true")
+        else:
+            print(
+                "[INFO] PR is workflow changes only but user is not authorized - do not run tests."
+            )
+            gitutils.add_output("workflow-only-but-not-authorized", "true")
+    else:
+        print("[INFO] Non workflow changes were found - do not run tests")
+
+
+if __name__ == "__main__":
+    main()