diff --git a/.github/actions/workflow-build/action.yml b/.github/actions/workflow-build/action.yml
index 9f264e78e7c..e33d598fc56 100644
--- a/.github/actions/workflow-build/action.yml
+++ b/.github/actions/workflow-build/action.yml
@@ -38,9 +38,6 @@ runs:
id: get-pr-info
uses: nv-gha-runners/get-pr-info@main
- - run: mkdir workflow
- shell: bash --noprofile --norc -euo pipefail {0}
-
- name: Inspect changes
if: ${{ inputs.inspect-changes_script != '' }}
id: inspect-changes
@@ -51,13 +48,14 @@ runs:
echo "Running inspect-changes script..."
${{ inputs.inspect-changes_script }} ${base_sha} ${GITHUB_SHA}
echo "Exporting summary..."
+ mkdir workflow
cp ${GITHUB_STEP_SUMMARY} workflow/changes.md
- name: Parse matrix file into a workflow
id: build-workflow
shell: bash --noprofile --norc -euo pipefail {0}
env:
- skip_tests: ${{ inputs.skip_tests && '--skip-tests' || ''}}
+ skip_tests: ${{ inputs.skip_tests == 'true' && '--skip-tests' || ''}}
dirty_projects_flag: ${{ inputs.inspect-changes_script != '' && '--dirty-projects' || ''}}
dirty_projects: ${{ steps.inspect-changes.outputs.dirty_projects }}
matrix_parser: ${{ inputs.matrix_parser && inputs.matrix_parser || '${GITHUB_ACTION_PATH}/build-workflow.py' }}
@@ -69,13 +67,26 @@ runs:
${{ env.skip_tests }} \
${{ env.dirty_projects_flag }} ${{ env.dirty_projects }}
- echo "Exporting workflow artifacts..."
- grep -E '^WORKFLOW=' ${GITHUB_OUTPUT} | sed -e 's/^WORKFLOW=//' | jq . > workflow/workflow.json
- grep -E '^WORKFLOW_KEYS=' ${GITHUB_OUTPUT} | sed -e 's/^WORKFLOW_KEYS=//' | jq . > workflow/keys.json
- grep -E '^WORKFLOW_JOB_IDS=' ${GITHUB_OUTPUT} | sed -e 's/^WORKFLOW_JOB_IDS=//' | jq . > workflow/job_ids.json
+ echo "::group::Workflow"
+ cat workflow/workflow.json
+ echo "::endgroup::"
- echo "Exporting summary..."
- cp ${GITHUB_STEP_SUMMARY} workflow/runners.md
+ echo "::group::Runners"
+ cat workflow/runner_summary.json | jq -r '"# \(.heading)\n\n\(.body)"' | tee -a "${GITHUB_STEP_SUMMARY}"
+ echo "::endgroup::"
+
+ echo "::group::Job List"
+ cat workflow/job_list.txt
+ echo "::endgroup::"
+
+ echo "Setting outputs..."
+ echo "::group::GHA Output: WORKFLOW"
+ printf "WORKFLOW=%s\n" "$(cat workflow/workflow.json | jq -c '.')" | tee -a "${GITHUB_OUTPUT}"
+ echo "::endgroup::"
+
+ echo "::group::GHA Output: WORKFLOW_KEYS"
+ printf "WORKFLOW_KEYS=%s\n" "$(cat workflow/workflow_keys.json | jq -c '.')" | tee -a "${GITHUB_OUTPUT}"
+ echo "::endgroup::"
- name: Upload artifacts
uses: actions/upload-artifact@v3
diff --git a/.github/actions/workflow-build/build-workflow.py b/.github/actions/workflow-build/build-workflow.py
index ff1ff027a1d..6a8c3611ab4 100755
--- a/.github/actions/workflow-build/build-workflow.py
+++ b/.github/actions/workflow-build/build-workflow.py
@@ -71,30 +71,13 @@
matrix_yaml = None
-def write_output(key, value, outfile=sys.stderr):
- # Escape any newlines in the value:
- value = value.replace('\n', '\\n')
-
- print(f"::group::GHA Output: {key}", file=outfile)
- print(f"{key}={value}", file=outfile)
- print("::endgroup::", file=outfile)
-
- output_file = os.environ.get('GITHUB_OUTPUT')
- if output_file:
- with open(output_file, 'a') as f:
- print(f"{key}={value}", file=f)
-
-
-def write_step_summary(name, text, outfile=sys.stderr):
- print(f"::group::{name}", file=outfile)
- print(text, file=outfile)
- print(f"::endgroup::", file=outfile)
-
- output_file = os.environ.get('GITHUB_STEP_SUMMARY')
- if output_file:
- with open(output_file, 'a') as f:
- print(text, file=f)
+def write_json_file(filename, json_object):
+ with open(filename, 'w') as f:
+ json.dump(json_object, f, indent=2)
+def write_text_file(filename, text):
+ with open(filename, 'w') as f:
+ print(text, file=f)
def error_message_with_matrix_job(matrix_job, message):
return f"{matrix_job['origin']['workflow_location']}: {message}\n Input: {matrix_job['origin']['original_matrix_job']}"
@@ -494,65 +477,6 @@ def natural_sort_key(key):
return workflow_dispatch_groups
-def get_id_to_full_job_name_map(final_workflow):
- id_to_full_job_name = {}
- for group_name, group_json in final_workflow.items():
- if 'standalone' in group_json:
- for job_json in group_json['standalone']:
- id_to_full_job_name[job_json['id']] = f"{group_name} {job_json['name']}"
- if 'two_stage' in group_json:
- for two_stage_json in group_json['two_stage']:
- for job_json in two_stage_json['producers'] + two_stage_json['consumers']:
- id_to_full_job_name[job_json['id']] = f"{group_name} {job_json['name']}"
- return id_to_full_job_name
-
-
-def pretty_print_workflow(final_workflow, outfile):
- print(f"::group::Job list", file=outfile)
-
- total_jobs = 0
- runner_counts = {}
-
- def print_job_array(key, group_json):
- nonlocal total_jobs
- nonlocal runner_counts
-
- job_array = group_json[key] if key in group_json else []
- for job_json in job_array:
- total_jobs += 1
- print(f"{total_jobs:4} {key:13} {job_json['name']}", file=outfile)
- runner = job_json['runner']
- runner_counts[runner] = runner_counts.get(runner, 0) + 1
-
- for group_name, group_json in final_workflow.items():
- print(f"{'':4} {group_name}:", file=outfile)
- print_job_array('standalone', group_json)
- if 'two_stage' in group_json:
- for two_stage_json in group_json['two_stage']:
- print_job_array('producers', two_stage_json)
- print_job_array('consumers', two_stage_json)
- print(f"::endgroup::", file=outfile)
-
- # Sort by descending counts:
- runner_counts = {k: v for k, v in sorted(runner_counts.items(), key=lambda item: item[1], reverse=True)}
-
-    runner_counts_text = f"<summary>🏃‍♂️ Runner counts (total jobs: {total_jobs})</summary>\n\n"
-
- runner_counts_text += f"| {'num':^4} | Runner\n"
- runner_counts_text += "|------|------\n"
- for runner, count in runner_counts.items():
- runner_counts_text += f"| {count:4} | `{runner}`\n"
-
- runner_counts_text += " "
-
- write_output("RUNNER_COUNTS", runner_counts_text, outfile=outfile)
- write_step_summary("Runner Counts", runner_counts_text, outfile=outfile)
-
- print("::group::Final Workflow JSON", file=outfile)
- print(json.dumps(final_workflow, indent=2), file=outfile)
- print("::endgroup::", file=outfile)
-
-
def find_workflow_line_number(workflow_name):
regex = re.compile(f"^( )*{workflow_name}:", re.IGNORECASE)
line_number = 0
@@ -727,29 +651,60 @@ def parse_workflow_dispatch_groups(args, workflow_name):
return finalize_workflow_dispatch_groups(workflow_dispatch_groups)
+def write_outputs(final_workflow):
+ job_list = []
+ runner_counts = {}
+ id_to_full_job_name = {}
+
+ total_jobs = 0
+ def process_job_array(group_name, array_name, parent_json):
+ nonlocal job_list
+ nonlocal runner_counts
+ nonlocal total_jobs
+
+ job_array = parent_json[array_name] if array_name in parent_json else []
+ for job_json in job_array:
+ total_jobs += 1
+ job_list.append(f"{total_jobs:4} id: {job_json['id']:<4} {array_name:13} {job_json['name']}")
+ id_to_full_job_name[job_json['id']] = f"{group_name} {job_json['name']}"
+ runner = job_json['runner']
+ runner_counts[runner] = runner_counts.get(runner, 0) + 1
+
+ for group_name, group_json in final_workflow.items():
+ job_list.append(f"{'':4} {group_name}:")
+ process_job_array(group_name, 'standalone', group_json)
+ if 'two_stage' in group_json:
+ for two_stage_json in group_json['two_stage']:
+ process_job_array(group_name, 'producers', two_stage_json)
+ process_job_array(group_name, 'consumers', two_stage_json)
+
+ # Sort by descending counts:
+ runner_counts = {k: v for k, v in sorted(runner_counts.items(), key=lambda item: item[1], reverse=True)}
+
+    runner_heading = f"🏃‍♂️ Runner counts (total jobs: {total_jobs})"
+
+ runner_counts_table = f"| {'#':^4} | Runner\n"
+ runner_counts_table += "|------|------\n"
+ for runner, count in runner_counts.items():
+ runner_counts_table += f"| {count:4} | `{runner}`\n"
+
+ runner_json = { "heading": runner_heading, "body": runner_counts_table }
+
+ os.makedirs("workflow", exist_ok=True)
+ write_json_file("workflow/workflow.json", final_workflow)
+ write_json_file("workflow/workflow_keys.json", list(final_workflow.keys()))
+ write_json_file("workflow/job_ids.json", id_to_full_job_name)
+ write_text_file("workflow/job_list.txt", "\n".join(job_list))
+ write_json_file("workflow/runner_summary.json", runner_json)
+
+
def print_gha_workflow(args):
final_workflow = {}
for workflow_name in args.workflows:
workflow_dispatch_groups = parse_workflow_dispatch_groups(args, workflow_name)
merge_dispatch_groups(final_workflow, workflow_dispatch_groups)
- id_to_full_job_name = get_id_to_full_job_name_map(final_workflow)
-
- pretty_print_workflow(final_workflow, sys.stderr)
-
- print(f"::group::Job ID -> Name Map", file=sys.stderr)
- print(json.dumps(id_to_full_job_name, indent=2), file=sys.stderr)
- print(f"::endgroup::", file=sys.stderr)
-
- write_output("WORKFLOW",
- json.dumps(final_workflow, indent=None, separators=(',', ':')),
- outfile=sys.stdout)
- write_output("WORKFLOW_KEYS",
- json.dumps(list(final_workflow.keys()), indent=None, separators=(',', ':')),
- outfile=sys.stdout)
- write_output("WORKFLOW_JOB_IDS",
- json.dumps(id_to_full_job_name, indent=None, separators=(',', ':')),
- outfile=sys.stdout)
+ write_outputs(final_workflow)
def print_devcontainer_info(args):
diff --git a/.github/actions/workflow-results/action.yml b/.github/actions/workflow-results/action.yml
index 371560f0679..6696024fa34 100644
--- a/.github/actions/workflow-results/action.yml
+++ b/.github/actions/workflow-results/action.yml
@@ -35,31 +35,35 @@ runs:
name: dispatch-job-success
path: dispatch-job-success/
- - name: Print job summaries
+ - name: Prepare execution summary
id: job-summary
continue-on-error: true
shell: bash --noprofile --norc -euo pipefail {0}
run: |
echo "Generating job summary..."
- python3 "${GITHUB_ACTION_PATH}/print-execution-summary.py" workflow/workflow.json > workflow/execution.md
+ python3 "${GITHUB_ACTION_PATH}/prepare-execution-summary.py" workflow/workflow.json
- name: Prepare final summary
id: final-summary
continue-on-error: true
shell: bash --noprofile --norc -euo pipefail {0}
run: |
- printf "%s\n" \
-            "🔽CI Summary⬇️" \
- "$(cat workflow/execution.md)" \
- "$(cat workflow/runners.md)" \
- "$(cat workflow/changes.md)" \
- " " > workflow/final_summary.md
+ echo "::group::Final Summary"
+ python3 "${GITHUB_ACTION_PATH}/final-summary.py" | tee final_summary.md
+ echo "::endgroup::"
- cp workflow/final_summary.md ${GITHUB_STEP_SUMMARY}
- printf "SUMMARY=%q\n" "$(cat workflow/final_summary.md)" | tee -a "${GITHUB_OUTPUT}"
+ # This allows multiline strings and special characters to be passed through the GHA outputs:
+ url_encode_string() {
+ python3 -c "import sys; from urllib.parse import quote; print(quote(sys.stdin.read()))"
+ }
+
+ echo "::group::GHA Output: SUMMARY"
+ printf "SUMMARY=%s\n" "$(cat final_summary.md | url_encode_string)" | tee -a "${GITHUB_OUTPUT}"
+ echo "::endgroup::"
+
+ cp final_summary.md ${GITHUB_STEP_SUMMARY}
- name: Comment on PR
- if: ${{ ! cancelled() }}
continue-on-error: true
env:
PR_NUMBER: ${{ fromJSON(steps.get-pr-info.outputs.pr-info).number }}
@@ -71,7 +75,11 @@ runs:
const pr_number = process.env.PR_NUMBER;
const owner = 'NVIDIA';
const repo = 'cccl';
- const commentBody = process.env.COMMENT_BODY;
+ // Decode URL-encoded string for proper display in comments
+ const commentBody = decodeURIComponent(process.env.COMMENT_BODY);
+            console.log('::group::Commenting on PR #' + pr_number + ' with the following message:')
+ console.log(commentBody);
+ console.log('::endgroup::');
github.issues.createComment({
owner: owner,
repo: repo,
diff --git a/.github/actions/workflow-results/final-summary.py b/.github/actions/workflow-results/final-summary.py
new file mode 100755
index 00000000000..3057724d815
--- /dev/null
+++ b/.github/actions/workflow-results/final-summary.py
@@ -0,0 +1,50 @@
+#!/usr/bin/env python3
+
+import json
+import os
+import re
+import sys
+
+
+def read_file(filepath):
+ with open(filepath, 'r') as f:
+ return f.read().rstrip("\n ")
+
+def print_file_if_present(filepath):
+ if os.path.exists(filepath):
+ print(read_file(filepath) + "\n\n")
+
+
+def print_summary_file(filepath, heading_level):
+ summary_json = json.load(open(filepath))
+    print(f"<details><summary><h{heading_level}>{summary_json['heading']}</h{heading_level}></summary>\n")
+    print(summary_json["body"] + "\n")
+    print("</details>\n")
+
+
+def main():
+ # List of all projects detected in 'execution/projects/{project}_summary.json':
+ projects = []
+ project_file_regex="(.*)_summary.json"
+ for filename in os.listdir("execution/projects"):
+ match = re.match(project_file_regex, filename)
+ if match:
+ projects.append(match.group(1))
+
+    print(f"<h2>{read_file('execution/heading.txt')}</h2>\n")
+
+    print("<ul>")
+    for project in projects:
+        print("<li>")
+        print_summary_file(f"execution/projects/{project}_summary.json", 3)
+        print("</li>\n")
+
+    print_summary_file("workflow/runner_summary.json", 2)
+    print_file_if_present('workflow/changes.md')
+
+    print("</ul>")
+
+
+
+if __name__ == '__main__':
+ main()
diff --git a/.github/actions/workflow-results/print-execution-summary.py b/.github/actions/workflow-results/prepare-execution-summary.py
similarity index 75%
rename from .github/actions/workflow-results/print-execution-summary.py
rename to .github/actions/workflow-results/prepare-execution-summary.py
index 6549b7e2a49..26b8e823639 100755
--- a/.github/actions/workflow-results/print-execution-summary.py
+++ b/.github/actions/workflow-results/prepare-execution-summary.py
@@ -10,7 +10,7 @@
def job_succeeded(job):
# The job was successful if the artifact file 'dispatch-job-success/dispatch-job-success-' exists:
- return os.path.exists(f'dispatch-job-success/dispatch-job-success-{job["id"]}')
+ return os.path.exists(f'dispatch-job-success/{job["id"]}')
def natural_sort_key(key):
@@ -18,6 +18,18 @@ def natural_sort_key(key):
return [(int(text) if text.isdigit() else text.lower()) for text in re.split('(\d+)', key)]
+# Print the prepared text summary to the file at the given path
+def write_text(filepath, summary):
+ with open(filepath, 'w') as f:
+ print(summary, file=f)
+
+
+# Print the prepared JSON object to the file at the given path
+def write_json(filepath, json_object):
+ with open(filepath, 'w') as f:
+ json.dump(json_object, f, indent=4)
+
+
def extract_jobs(workflow):
jobs = []
for group_name, group in workflow.items():
@@ -101,7 +113,7 @@ def rank_tag(tag_summary):
return summary
-def print_summary_open(summary):
+def get_summary_heading(summary):
passed = summary['passed']
failed = summary['failed']
total = passed + failed
@@ -113,18 +125,10 @@ def print_summary_open(summary):
else:
         flag = '🟩'
- summary_header = f'{flag} CI Results [ Failed: {failed} | Passed: {passed} | Total: {total} ]'
-    print(f'<details><summary><h2>{summary_header}</h2></summary>')
- print('')
- print()
+ return f'{flag} CI Results [ Failed: {failed} | Passed: {passed} | Total: {total} ]'
-def print_summary_close():
-    print('</ul>')
-    print('</details>')
-
-
-def print_project_open(project, project_summary):
+def get_project_heading(project, project_summary):
if project_summary['passed'] == 0:
         flag = '🟥'
elif project_summary['failed'] > 0:
@@ -136,21 +140,10 @@ def print_project_open(project, project_summary):
failed = project_summary['failed']
total = project_summary['failed'] + project_summary['passed']
- project_summary = f'{flag} Project {project} [ Failed: {failed} | Passed: {passed} | Total: {total} ]'
-    print(f'<details><summary><h3>{project_summary}</h3></summary>')
- print()
- print('```')
-
-
-def print_project_close():
- print('```')
- print()
- print(' ')
- print('')
- print()
+ return f'{flag} Project {project} [ Failed: {failed} | Passed: {passed} | Total: {total} ]'
-def print_tag_line(tag, tag_summary):
+def get_tag_line(tag, tag_summary):
passed = tag_summary['passed']
failed = tag_summary['failed']
values = tag_summary['values']
@@ -181,10 +174,10 @@ def print_tag_line(tag, tag_summary):
else:
         flag = '🟩'
- print(f'{flag} {tag}{note}')
+ return f'{flag} {tag}{note}'
-def print_value_line(value, value_summary, tag_summary):
+def get_value_line(value, value_summary, tag_summary):
passed = value_summary['passed']
failed = value_summary['failed']
total = passed + failed
@@ -208,23 +201,37 @@ def print_value_line(value, value_summary, tag_summary):
percent = int(100 * failed / total)
left_aligned = f"{flag} {value} ({percent}% Fail)"
- print(f' {left_aligned:<30} Failed: {failed:^3} -- Passed: {passed:^3} -- Total: {total:^3}')
+ return f' {left_aligned:<30} Failed: {failed:^3} -- Passed: {passed:^3} -- Total: {total:^3}'
-def print_workflow_summary(workflow):
+def get_project_summary_body(project, project_summary):
+ body = ['```']
+ for tag, tag_summary in project_summary['tags'].items():
+ body.append(get_tag_line(tag, tag_summary))
+ for value, value_summary in tag_summary['values'].items():
+ body.append(get_value_line(value, value_summary, tag_summary))
+ body.append('```')
+ return "\n".join(body)
+
+
+def write_project_summary(project, project_summary):
+ heading = get_project_heading(project, project_summary)
+ body = get_project_summary_body(project, project_summary)
+
+ summary = {'heading': heading, 'body': body}
+
+ write_json(f'execution/projects/{project}_summary.json', summary)
+
+
+def write_workflow_summary(workflow):
summary = build_summary(extract_jobs(workflow))
- print_summary_open(summary)
+ os.makedirs('execution/projects', exist_ok=True)
+
+ write_text('execution/heading.txt', get_summary_heading(summary))
for project, project_summary in summary['projects'].items():
- print_project_open(project, project_summary)
- for tag, tag_summary in project_summary['tags'].items():
- print_tag_line(tag, tag_summary)
- for value, value_summary in tag_summary['values'].items():
- print_value_line(value, value_summary, tag_summary)
- print()
- print_project_close()
- print_summary_close()
+ write_project_summary(project, project_summary)
def main():
@@ -233,7 +240,7 @@ def main():
args = parser.parse_args()
workflow = json.load(args.workflow)
- print_workflow_summary(workflow)
+ write_workflow_summary(workflow)
if __name__ == '__main__':
diff --git a/.github/workflows/ci-workflow-nightly.yml b/.github/workflows/ci-workflow-nightly.yml
index cb4a1cab2ae..5585c1d6b07 100644
--- a/.github/workflows/ci-workflow-nightly.yml
+++ b/.github/workflows/ci-workflow-nightly.yml
@@ -67,7 +67,7 @@ jobs:
ci:
runs-on: ubuntu-latest
name: CI
- if: ${{ always() }} # need to use always() instead of !cancelled() because skipped jobs count as success
+ if: ${{ always() && !cancelled() }}
needs:
- compute-matrix
- dispatch-groups
diff --git a/ci/inspect_changes.sh b/ci/inspect_changes.sh
index 63fea80c431..342ce224937 100755
--- a/ci/inspect_changes.sh
+++ b/ci/inspect_changes.sh
@@ -139,7 +139,7 @@ main() {
echo
-  echo "<h2>🔍 Inspect Changes</h2>" | tee_to_step_summary
+  echo "<h3>🔍 Inspect Changes</h3>" | tee_to_step_summary
echo | tee_to_step_summary
echo -e "### Modifications in project?\n" | tee_to_step_summary