Commit 606f7c1

WIP testing new ci workflows

alliepiper committed May 9, 2024
1 parent 7c0fbaf commit 606f7c1

Showing 20 changed files with 424 additions and 166 deletions.
186 changes: 145 additions & 41 deletions .github/actions/workflow-build/build-workflow.py
@@ -61,16 +61,25 @@
import argparse
import base64
import copy
import functools
import json
import os
import re
import struct
import sys
import yaml


matrix_yaml = None


# Decorators to cache static results of functions:
# static_result: function has no args, same result each invocation.
# memoize_result: result depends on args.
def static_result(func): return functools.lru_cache(maxsize=1)(func)
def memoize_result(func): return functools.lru_cache(maxsize=None)(func)
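
# Illustrative sketch (not part of this diff; names invented) of how the two
# lru_cache-based decorators above behave:
@static_result
def example_platforms():
    print("scanned once")           # body runs on the first call only
    return ["linux", "windows"]

@memoize_result
def example_double(x):
    print(f"computed for {x}")      # body runs once per distinct x
    return 2 * x

# example_platforms(); example_platforms()  -> prints a single time
# example_double(3); example_double(3)      -> computes once for x=3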


def generate_guids():
"""
Simple, compact, globally unique ID generator.
@@ -106,6 +115,7 @@ def error_message_with_matrix_job(matrix_job, message):
return f"{matrix_job['origin']['workflow_location']}: {message}\n Input: {matrix_job['origin']['original_matrix_job']}"


@static_result
def get_all_matrix_job_tags_sorted():
required_tags = set(matrix_yaml['required_tags'])
defaulted_tags = set(matrix_yaml['defaulted_tags'])
@@ -148,6 +158,13 @@ def lookup_supported_stds(device_compiler=None, host_compiler=None):
return sorted(list(stds))


@memoize_result
def lookup_job_invoke_spec(job_type):
if job_type in matrix_yaml['job_invoke']:
return matrix_yaml['job_invoke'][job_type]
return {'prefix': job_type}
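
# Illustrative behavior, assuming a hypothetical matrix.yaml entry
#   job_invoke:
#     test_cpu: {prefix: "test", args: "-cpu-only"}
#
#   lookup_job_invoke_spec('test_cpu')  # -> {'prefix': 'test', 'args': '-cpu-only'}
#   lookup_job_invoke_spec('build')     # -> {'prefix': 'build'} (fallback)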


def get_formatted_project_name(project_name):
if project_name in matrix_yaml['formatted_project_names']:
return matrix_yaml['formatted_project_names'][project_name]
@@ -236,20 +253,25 @@ def generate_dispatch_job_image(matrix_job, job_type):
def generate_dispatch_job_command(matrix_job, job_type):
script_path = "ci/windows" if is_windows(matrix_job) else "ci"
script_ext = ".ps1" if is_windows(matrix_job) else ".sh"
script_job_type = job_type
script_project = matrix_job['project']
script_name = f"{script_path}/{script_job_type}_{script_project}{script_ext}"

job_invoke_spec = lookup_job_invoke_spec(job_type)
job_prefix = job_invoke_spec['prefix']
job_args = job_invoke_spec['args'] if 'args' in job_invoke_spec else ""

project = matrix_job['project']
script_name = f"{script_path}/{job_prefix}_{project}{script_ext}"

std_str = str(matrix_job['std']) if 'std' in matrix_job else ''

host_compiler_exe = matrix_job['cxx']['exe']
device_compiler_name = matrix_job['cudacxx']['name']
device_compiler_exe = matrix_job['cudacxx']['exe']

cuda_compile_arch = matrix_job['sm'] if 'sm' in matrix_job else ''
cmake_options = matrix_job['cmake_options'] if 'cmake_options' in matrix_job else ''

command = f"\"{script_name}\""
if job_args:
command += f" {job_args}"
if std_str:
command += f" -std \"{std_str}\""
if cuda_compile_arch:
@@ -319,20 +341,21 @@ def generate_dispatch_group_jobs(matrix_job):
"two_stage": []
}

# The jobs tag is left unexploded to optimize scheduling here.
job_types = set(matrix_job['jobs'])

# Identify jobs that require a build job to run first:
build_required = set(matrix_yaml['build_required_jobs']) & job_types
has_build_and_test = len(build_required) > 0
job_types -= build_required

has_standalone_build = 'build' in job_types and not has_build_and_test
job_types -= {'build'}
if build_required and not 'build' in job_types:
raise Exception(error_message_with_matrix_job(
matrix_job, f"Internal error: Missing 'build' job type required by other jobs ({build_required})."))

if has_standalone_build:
dispatch_group_jobs['standalone'].append(generate_dispatch_job_json(matrix_job, "build"))
elif has_build_and_test:
if build_required:
dispatch_group_jobs['two_stage'].append(
generate_dispatch_build_and_test_json(matrix_job, "build", build_required))
generate_dispatch_build_and_test_json(matrix_job, "build", list(build_required)))
job_types -= {'build'}
job_types -= build_required

# Remaining jobs are assumed to be standalone (e.g. nvrtc):
for job_type in job_types:
@@ -524,24 +547,80 @@ def remove_skip_test_jobs(matrix_jobs):
jobs = matrix_job['jobs']
new_jobs = set()
for job in jobs:
if job in matrix_yaml['skip_test_jobs']:
# If a skipped test job is a build_required_job, replace it with the 'build' job.
if job in matrix_yaml['build_required_jobs']:
# Replace with the prerequisite build job:
new_jobs.add('build')
# If a skipped test job is not a build_required_job, ignore it.
else:
pass # Ignore the job
else:
if not job in matrix_yaml['skip_test_jobs']:
new_jobs.add(job)
# If no jobs remain, skip this matrix job.
if new_jobs:
new_matrix_job = copy.deepcopy(matrix_job)
new_matrix_job['jobs'] = list(new_jobs)
new_matrix_jobs.append(new_matrix_job)
return new_matrix_jobs
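
# Illustrative behavior (job names invented), assuming a matrix.yaml with
# skip_test_jobs: [test_cpu, test_gpu]:
#
#   {'jobs': ['build', 'test_gpu']}  -> {'jobs': ['build']}
#   {'jobs': ['test_cpu']}           -> dropped entirely (no jobs remain)
#   {'jobs': ['build', 'nvrtc']}     -> unchanged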


@static_result
def get_excluded_matrix_jobs():
return parse_workflow_matrix_jobs(None, 'exclude')


def apply_matrix_job_exclusion(matrix_job, exclusion):
# Excluded tags to remove from unexploded tag categories: { tag: [excluded_value1, excluded_value2] }
update_dict = {}

for tag, excluded_values in exclusion.items():
# Not excluded if a specified tag isn't even present:
if not tag in matrix_job:
return matrix_job

# print(f"tag: {tag}, excluded_values: {excluded_values}")

# Some tags are left unexploded (e.g. 'jobs') to optimize scheduling,
# so the values can be either a list or a single value.
# Standardize to a list for comparison:
if type(excluded_values) != list:
excluded_values = [excluded_values]
matrix_values = matrix_job[tag]
if type(matrix_values) != list:
matrix_values = [matrix_values]

# Identify excluded values that are present in the matrix job for this tag:
matched_tag_values = [value for value in matrix_values if value in excluded_values]
# Not excluded if no values match for a tag:
if not matched_tag_values:
return matrix_job

# If there is only a partial match to the matrix values, record the matches in the update_dict.
# If the match is complete, do nothing.
if len(matched_tag_values) < len(matrix_values):
update_dict[tag] = matched_tag_values

# If we get here, the matrix job matches and should be updated or excluded entirely.
# If all tag matches are complete, then update_dict will be empty and the job should be excluded entirely.
if not update_dict:
return None

# If update_dict is populated, remove the matched values from the matrix job and return it.
new_matrix_job = copy.deepcopy(matrix_job)
for tag, values in update_dict.items():
for value in values:
new_matrix_job[tag].remove(value)

return new_matrix_job
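
# Worked example (tag values invented) of the three possible outcomes for
#   exclusion = {'cxx': 'gcc12', 'jobs': ['test']}:
#
#   {'cxx': 'gcc12', 'jobs': ['build', 'test']}  # partial match on 'jobs':
#     -> {'cxx': 'gcc12', 'jobs': ['build']}     #   matched value removed
#   {'cxx': 'gcc12', 'jobs': ['test']}           # every tag fully matched:
#     -> None                                    #   job excluded entirely
#   {'cxx': 'clang16', 'jobs': ['test']}         # 'cxx' doesn't match:
#     -> returned unchanged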


def remove_excluded_jobs(matrix_jobs):
'''Remove jobs that match all tags in any of the exclusion matrix jobs.'''
excluded = get_excluded_matrix_jobs()
filtered_matrix_jobs = []
for matrix_job_orig in matrix_jobs:
matrix_job = copy.deepcopy(matrix_job_orig)
for exclusion in excluded:
matrix_job = apply_matrix_job_exclusion(matrix_job, exclusion)
if not matrix_job:
break
if matrix_job:
filtered_matrix_jobs.append(matrix_job)
return filtered_matrix_jobs


def validate_required_tags(matrix_job):
for tag in matrix_yaml['required_tags']:
if tag not in matrix_job:
@@ -590,6 +669,10 @@ def set_derived_tags(matrix_job):
matrix_job['jobs'].remove('test')
matrix_job['jobs'] += matrix_yaml['project_expanded_tests'][matrix_job['project']]

if (not 'build' in matrix_job['jobs'] and
any([job in matrix_job['jobs'] for job in matrix_yaml['build_required_jobs']])):
matrix_job['jobs'].append('build')
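# e.g. (illustrative): with build_required_jobs: [test_gpu], a job declared
# as jobs: [test_gpu] becomes jobs: [test_gpu, build] so the build stage
# is scheduled before the tests.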


def next_explode_tag(matrix_job):
for tag in matrix_job:
@@ -614,20 +697,29 @@ def explode_tags(matrix_job, explode_tag=None):
return result
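
# Illustrative sketch (tag values invented): explode_tags expands each
# list-valued tag into a cross product of single-valued jobs, so
#
#   {'project': ['cub', 'thrust'], 'std': [17, 20]}
#
# explodes into four jobs:
#
#   {'project': 'cub',    'std': 17}
#   {'project': 'cub',    'std': 20}
#   {'project': 'thrust', 'std': 17}
#   {'project': 'thrust', 'std': 20}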


def preprocess_matrix_jobs(matrix_jobs):
def preprocess_matrix_jobs(matrix_jobs, explode_only=False):
result = []
for matrix_job in matrix_jobs:
validate_required_tags(matrix_job)
set_default_tags(matrix_job)
for job in explode_tags(matrix_job):
set_derived_tags(job)
# The derived tags may need to be exploded again:
result.extend(explode_tags(job))
if explode_only:
for matrix_job in matrix_jobs:
result.extend(explode_tags(matrix_job))
else:
for matrix_job in matrix_jobs:
validate_required_tags(matrix_job)
set_default_tags(matrix_job)
for job in explode_tags(matrix_job):
set_derived_tags(job)
# The derived tags may need to be exploded again:
result.extend(explode_tags(job))
return result


def parse_workflow_matrix_jobs(args, workflow_name):
# Special handling for the exclusion matrix: don't validate, add defaults, etc. Only explode.
is_exclusion_matrix = (workflow_name == 'exclude')

if not workflow_name in matrix_yaml['workflows']:
if (is_exclusion_matrix): # Valid, no exclusions if not defined
return []
raise Exception(f"Workflow '{workflow_name}' not found in matrix file '{matrix_yaml['filename']}'")

matrix_jobs = matrix_yaml['workflows'][workflow_name]
@@ -638,17 +730,23 @@ def parse_workflow_matrix_jobs(args, workflow_name):

# Tag with the original matrix info, location, etc. for error messages and post-processing.
# Do this first so the original tags / order / idx match the input object exactly.
for idx, matrix_job in enumerate(matrix_jobs):
workflow_location = f"{matrix_yaml['filename']}:{workflow_line_number} (job {idx + 1})"
matrix_job['origin'] = get_matrix_job_origin(matrix_job, workflow_name, workflow_location)
if not is_exclusion_matrix:
for idx, matrix_job in enumerate(matrix_jobs):
workflow_location = f"{matrix_yaml['filename']}:{workflow_line_number} (job {idx + 1})"
matrix_job['origin'] = get_matrix_job_origin(matrix_job, workflow_name, workflow_location)

# Fill in default values, explode lists.
matrix_jobs = preprocess_matrix_jobs(matrix_jobs)
matrix_jobs = preprocess_matrix_jobs(matrix_jobs, explode_only=is_exclusion_matrix)

if args:
if args.skip_tests:
matrix_jobs = remove_skip_test_jobs(matrix_jobs)
if args.dirty_projects:
matrix_jobs = [job for job in matrix_jobs if job['project'] in args.dirty_projects]

if args.skip_tests:
matrix_jobs = remove_skip_test_jobs(matrix_jobs)
if args.dirty_projects:
matrix_jobs = [job for job in matrix_jobs if job['project'] in args.dirty_projects]
# Don't remove excluded jobs if we're currently parsing them:
if not is_exclusion_matrix:
matrix_jobs = remove_excluded_jobs(matrix_jobs)

# Sort the tags by, *ahem*, "importance":
sorted_tags = get_all_matrix_job_tags_sorted()
@@ -671,7 +769,7 @@ def parse_workflow_dispatch_groups(args, workflow_name):
matrix_job_dispatch_group = matrix_job_to_dispatch_group(matrix_job, group_prefix)
merge_dispatch_groups(workflow_dispatch_groups, matrix_job_dispatch_group)

return finalize_workflow_dispatch_groups(workflow_dispatch_groups)
return workflow_dispatch_groups


def write_outputs(final_workflow):
@@ -741,15 +839,21 @@ def print_gha_workflow(args):
workflow_dispatch_groups = parse_workflow_dispatch_groups(args, workflow_name)
merge_dispatch_groups(final_workflow, workflow_dispatch_groups)

final_workflow = finalize_workflow_dispatch_groups(final_workflow)

write_outputs(final_workflow)


def print_devcontainer_info(args):
devcontainer_version = matrix_yaml['devcontainer_version']

matrix_jobs = []
for workflow in matrix_yaml['workflows']:
matrix_jobs.extend(parse_workflow_matrix_jobs(args, workflow))

# Remove the `exclude` and `override` entries:
ignored_matrix_keys = ['exclude', 'override']
workflow_names = [key for key in matrix_yaml['workflows'].keys() if key not in ignored_matrix_keys]
for workflow_name in workflow_names:
matrix_jobs.extend(parse_workflow_matrix_jobs(args, workflow_name))

# Remove all but the following keys from the matrix jobs:
keep_keys = ['ctk', 'cxx', 'os']
18 changes: 9 additions & 9 deletions CMakePresets.json
@@ -558,7 +558,7 @@
"inherits": "base"
},
{
"name": "cub-gpu-base",
"name": "cub-nolid-base",
"hidden": true,
"inherits": "cub-base",
"filter": {
@@ -598,24 +598,24 @@
}
},
{
"name": "cub-gpu-cpp11",
"name": "cub-nolid-cpp11",
"configurePreset": "cub-cpp11",
"inherits": "cub-gpu-base"
"inherits": "cub-nolid-base"
},
{
"name": "cub-gpu-cpp14",
"name": "cub-nolid-cpp14",
"configurePreset": "cub-cpp14",
"inherits": "cub-gpu-base"
"inherits": "cub-nolid-base"
},
{
"name": "cub-gpu-cpp17",
"name": "cub-nolid-cpp17",
"configurePreset": "cub-cpp17",
"inherits": "cub-gpu-base"
"inherits": "cub-nolid-base"
},
{
"name": "cub-gpu-cpp20",
"name": "cub-nolid-cpp20",
"configurePreset": "cub-cpp20",
"inherits": "cub-gpu-base"
"inherits": "cub-nolid-base"
},
{
"name": "cub-lid0-cpp11",
16 changes: 15 additions & 1 deletion ci/build_common.sh
@@ -6,6 +6,7 @@ set -eo pipefail
cd "$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )";

# Script defaults
VERBOSE=${VERBOSE:-}
HOST_COMPILER=${CXX:-g++} # $CXX if set, otherwise `g++`
CXX_STANDARD=17
CUDA_COMPILER=${CUDACXX:-nvcc} # $CUDACXX if set, otherwise `nvcc`
@@ -122,7 +123,8 @@ print_environment_details() {
CMAKE_BUILD_PARALLEL_LEVEL \
CTEST_PARALLEL_LEVEL \
CCCL_BUILD_INFIX \
GLOBAL_CMAKE_OPTIONS
GLOBAL_CMAKE_OPTIONS \
TBB_ROOT

echo "Current commit is:"
git log -1 || echo "Not a repository"
@@ -133,6 +135,18 @@
echo "nvidia-smi not found"
fi

if command -v cmake &> /dev/null; then
cmake --version
else
echo "cmake not found"
fi

if command -v ctest &> /dev/null; then
ctest --version
else
echo "ctest not found"
fi

end_group "⚙️ Environment Details"
}
