
Commit

Merge branch 'master' into edm4hep-schema
lgray authored Feb 21, 2025
2 parents 6817b98 + 6b83ca1 commit b26afd1
Showing 7 changed files with 34 additions and 9 deletions.
24 changes: 15 additions & 9 deletions .github/workflows/ci.yml
@@ -43,8 +43,9 @@ jobs:
java-version: [17]
java-distribution: ["corretto"]
python-version: ["3.9", "3.13"]
dask-client: ["with", "without"]

name: test coffea (${{ matrix.os }}) - python ${{ matrix.python-version }}, JDK${{ matrix.java-version }}
name: Test (${{ matrix.os }}) - py ${{ matrix.python-version }}, JDK${{ matrix.java-version }}, ${{ matrix.dask-client }} dask

steps:
- uses: actions/checkout@v4
@@ -70,10 +71,10 @@ jobs:
- name: Install dependencies (Linux)
if: matrix.os == 'ubuntu-latest'
run: |
uv pip install --system --upgrade pip setuptools wheel
uv pip install --system --upgrade pip setuptools wheel pytest-xdist
# mltool installs
# c.f. https://github.com/astral-sh/uv/issues/3437
python -m pip install --pre torch --index-url https://download.pytorch.org/whl/nightly/cpu # torchvision torchaudio
python -m pip install torch
uv pip install --system xgboost
uv pip install --system 'tritonclient[grpc,http]!=2.41.0'
# install checked out coffea
@@ -83,10 +84,10 @@ jobs:
- name: Install dependencies (MacOS)
if: matrix.os == 'macOS-latest'
run: |
uv pip install --system --upgrade pip setuptools wheel
uv pip install --system --upgrade pip setuptools wheel pytest-xdist
# mltool installs
# c.f. https://github.com/astral-sh/uv/issues/3437
python -m pip install --pre torch --index-url https://download.pytorch.org/whl/nightly/cpu # torchvision torchaudio
python -m pip install torch
uv pip install --system xgboost
# install checked out coffea
uv pip install --system -q '.[dev,dask,spark]' --upgrade
@@ -95,10 +96,10 @@ jobs:
- name: Install dependencies (Windows)
if: matrix.os == 'windows-latest'
run: |
uv pip install --system --upgrade pip setuptools wheel
uv pip install --system --upgrade pip setuptools wheel pytest-xdist
# mltool installs
# c.f. https://github.com/astral-sh/uv/issues/3437
python -m pip install --pre torch --index-url https://download.pytorch.org/whl/nightly/cpu # torchvision torchaudio
python -m pip install torch
uv pip install --system xgboost
# install checked out coffea
uv pip install --system -q '.[dev,dask]' --upgrade
@@ -110,9 +111,14 @@ jobs:
run: |
docker run -d --rm -p 8000:8000 -p 8001:8001 -p 8002:8002 -v ${{ github.workspace }}/tests/samples/triton_models_test:/models nvcr.io/nvidia/tritonserver:23.04-pyt-python-py3 tritonserver --model-repository=/models
- name: Test with pytest
- name: Test with pytest (${{ matrix.dask-client }} dask Client - run in parallel)
if: matrix.dask-client == 'without'
run: |
python -m pytest --cov-report=xml --cov=coffea --deselect=test_taskvine -m "not dask_client" -n 4
- name: Test with pytest (${{ matrix.dask-client }} dask Client)
if: matrix.dask-client == 'with'
run: |
python -m pytest --cov-report=xml --cov=coffea --deselect=test_taskvine
python -m pytest --cov-report=xml --cov=coffea --deselect=test_taskvine -m "dask_client"
- name: Upload codecov
if: matrix.os == 'ubuntu-latest' && matrix.python-version == 3.13
uses: codecov/codecov-action@v5
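A note on the workflow change above: the matrix now runs every job twice, once excluding tests marked dask_client (in parallel via pytest-xdist's -n 4, which is why pytest-xdist joins each install step) and once running only the marked tests. Custom markers should be registered with pytest so that -m selection does not trigger unknown-marker warnings; the sketch below shows one way to do that in a conftest.py and is an assumption, since this diff does not show where the project registers the marker (it may live in pyproject.toml or setup.cfg instead).

# conftest.py - minimal sketch, not taken from this repository
def pytest_configure(config):
    # Register the custom marker so `pytest -m "dask_client"` selects it cleanly.
    config.addinivalue_line(
        "markers",
        "dask_client: tests that start a distributed Client; run without xdist in CI",
    )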
7 changes: 7 additions & 0 deletions tests/test_dataset_tools.py
@@ -282,6 +282,7 @@ def test_tuple_data_manipulation_output(allow_read_errors_with_report):
assert isinstance(j, dask_awkward.Array)


@pytest.mark.dask_client
@pytest.mark.parametrize(
"proc_and_schema",
[(NanoTestProcessor, BaseSchema), (NanoEventsProcessor, NanoAODSchema)],
@@ -315,6 +316,7 @@ def test_apply_to_fileset(proc_and_schema):
assert out["Data"]["cutflow"]["Data_mass"] == 14


@pytest.mark.dask_client
def test_apply_to_fileset_hinted_form():
with Client() as _:
dataset_runnable, dataset_updated = preprocess(
@@ -339,6 +341,7 @@ def test_apply_to_fileset_hinted_form():
assert out["Data"]["cutflow"]["Data_mass"] == 66


@pytest.mark.dask_client
@pytest.mark.parametrize(
"the_fileset", [_starting_fileset_list, _starting_fileset_dict, _starting_fileset]
)
@@ -356,6 +359,7 @@ def test_preprocess(the_fileset):
assert dataset_updated == _updated_result


@pytest.mark.dask_client
def test_preprocess_calculate_form():
with Client() as _:
starting_fileset = _starting_fileset
@@ -380,6 +384,7 @@ def test_preprocess_calculate_form():
assert decompress_form(dataset_runnable["Data"]["form"]) == raw_form_data


@pytest.mark.dask_client
def test_preprocess_failed_file():
with Client() as _, pytest.raises(FileNotFoundError):
starting_fileset = _starting_fileset
@@ -393,6 +398,7 @@ def test_preprocess_failed_file():
)


@pytest.mark.dask_client
def test_preprocess_with_file_exceptions():
fileset = {
"Data": {
@@ -582,6 +588,7 @@ def test_slice_chunks():
}


@pytest.mark.dask_client
def test_recover_failed_chunks():
with Client() as _:
to_compute = apply_to_fileset(
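Every test above that opens a Client picks up the new marker, so the suite can be partitioned with pytest's -m option exactly as the workflow does. A minimal sketch of the pattern, with a hypothetical test name and assuming distributed is installed:

import pytest
from distributed import Client


@pytest.mark.dask_client  # selected by -m "dask_client", skipped by -m "not dask_client"
def test_example_with_local_cluster():  # hypothetical name, for illustration only
    with Client() as _:  # starts a local scheduler and workers for this test
        ...  # dask-backed work and assertions would go here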
1 change: 1 addition & 0 deletions tests/test_jetmet_tools.py
@@ -715,6 +715,7 @@ def test_jet_resolution_sf_2d(optimization_enabled):
)


@pytest.mark.dask_client
@pytest.mark.parametrize("optimization_enabled", [True, False])
def test_corrected_jets_factory(optimization_enabled):
import os
3 changes: 3 additions & 0 deletions tests/test_lumi_tools.py
@@ -61,6 +61,7 @@ def test_lumidata():
assert len(results["index"][lumidata]) == len(results["index"][lumidata_pickle])


@pytest.mark.dask_client
@pytest.mark.parametrize(
"jsonfile",
[
@@ -164,6 +165,7 @@ def test_lumilist_dask():
assert abs(lumi3 - (lumi1 + lumi2)) < 1e-4


@pytest.mark.dask_client
def test_lumilist_client_fromfile():
with Client() as _:
events = NanoEventsFactory.from_root(
@@ -177,6 +179,7 @@ def test_lumilist_client_fromfile():
assert result.to_list() == [[1, 13889]]


@pytest.mark.dask_client
def test_1259_avoid_pickle_numba_dict():

runs_eager = ak.Array([368229, 368229, 368229, 368229])
4 changes: 4 additions & 0 deletions tests/test_ml_tools.py
@@ -76,6 +76,7 @@ def my_pad(arr):
}


@pytest.mark.dask_client
def test_triton():
_ = pytest.importorskip("tritonclient")

@@ -128,6 +129,7 @@ def prepare_awkward(self, output_list, jets):
client.close()


@pytest.mark.dask_client
def test_torch():
_ = pytest.importorskip("torch")

@@ -173,6 +175,7 @@ def prepare_awkward(self, jets):
client.close()


@pytest.mark.dask_client
def test_tensorflow():
_ = pytest.importorskip("tensorflow")

@@ -248,6 +251,7 @@ def prepare_awkward(self, arr):
client.close()


@pytest.mark.dask_client
def test_xgboost():
_ = pytest.importorskip("xgboost")

1 change: 1 addition & 0 deletions tests/test_nanoevents.py
@@ -181,6 +181,7 @@ def test_missing_eventIds_warning(tests_directory):
factory.events()


@pytest.mark.dask_client
def test_missing_eventIds_warning_dask(tests_directory):
path = f"{tests_directory}/samples/missing_luminosityBlock.root:Events"
NanoAODSchema.error_missing_event_ids = False
3 changes: 3 additions & 0 deletions tests/test_skyhook_job.py
@@ -1,13 +1,16 @@
import os

import awkward as ak
import pytest
import toml
import uproot

from coffea import processor
from coffea.nanoevents import schemas
from coffea.processor.test_items import NanoEventsProcessor

pytestmark = pytest.mark.dask_client

if __name__ == "__main__":
config_dict = {
"skyhook": {
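Unlike the files above, which decorate individual tests, test_skyhook_job.py uses a module-level pytestmark, which applies the marker to every test pytest collects from the file. A short illustrative sketch of that behaviour (test names are hypothetical):

import pytest

pytestmark = pytest.mark.dask_client  # applied to every test in this module


def test_first_thing():  # carries the dask_client marker
    ...


def test_second_thing():  # carries the dask_client marker as well
    ...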
