From fc4d8a1d49325cad28e7633cf2fe0b53ff8319b3 Mon Sep 17 00:00:00 2001 From: Zhen-Qi Liu Date: Fri, 24 Nov 2023 18:46:33 -0500 Subject: [PATCH 1/6] [ENH] Add tests for nulls.py --- neuromaps/nulls/tests/test_nulls.py | 135 +++++++++++++++++++++++++++- 1 file changed, 134 insertions(+), 1 deletion(-) diff --git a/neuromaps/nulls/tests/test_nulls.py b/neuromaps/nulls/tests/test_nulls.py index fa634883..792f191f 100644 --- a/neuromaps/nulls/tests/test_nulls.py +++ b/neuromaps/nulls/tests/test_nulls.py @@ -2,25 +2,158 @@ """For testing neuromaps.nulls.nulls functionality.""" import pytest +from neuromaps.datasets import fetch_annotation, available_annotations +from neuromaps.parcellate import Parcellater +from neuromaps.images import annot_to_gifti, dlabel_to_gifti +from netneurotools.datasets import fetch_schaefer2018, fetch_cammoun2012 + +sample_surface_maps = [ + ('abagen', 'genepc1', 'fsaverage', '10k'), + ('hcps1200', 'myelinmap', 'fsLR', '32k'), +] +sample_volume_maps = [ + ('neurosynth', 'cogpc1', 'MNI152', '2mm'), + ('dukart2018', 'flumazenil', 'MNI152', '3mm'), +] + +sample_surface_parcellations = [ + ("schaefer100x7", fetch_schaefer2018, '100Parcels7Networks'), + ("schaefer200x7", fetch_schaefer2018, '200Parcels7Networks') +] + +sample_volume_parcellations = [ + ("lausanne033", fetch_cammoun2012, 'scale033'), + ("lausanne060", fetch_cammoun2012, 'scale060') +] + +@pytest.fixture( + scope="module", + params=sample_surface_maps, + ids=["_".join(_) for _ in sample_surface_maps] +) +def sample_surface(request): + source, desc, space, den = request.param + annot = fetch_annotation( + source=source, desc=desc, space=space, den=den + ) + return request.param, annot + +@pytest.fixture( + scope="module", + params=sample_volume_maps, + ids=["_".join(_) for _ in sample_volume_maps] +) +def sample_volume(request): + source, desc, space, res = request.param + annot = fetch_annotation( + source=source, desc=desc, space=space, res=res + ) + return request.param, annot + +@pytest.fixture( + scope="module", + params=sample_surface_parcellations, + ids=[_[0] for _ in sample_surface_parcellations] +) +def sample_surface_parcellated(sample_surface, request): + surf_tuple, annot = sample_surface + source, desc, space, den = surf_tuple + + if request.param[0].startswith("schaefer"): + parc_name, parc_fetcher, parc_label = request.param + if space == "fsaverage": + atlas = annot_to_gifti(parc_fetcher(version="fsaverage")[parc_label]) + elif space == "fsLR": + atlas = dlabel_to_gifti(parc_fetcher(version="fslr32k")[parc_label]) + else: + raise NotImplementedError(f"Invalid surface space: {space}") + parc = Parcellater(atlas, space) + else: + raise NotImplementedError(f"Invalid parcellation: {request.param[0]}") + + annot_parc = parc.fit_transform(annot, space) + + return surf_tuple, parc_name, annot_parc + + +@pytest.fixture( + scope="module", + params=sample_volume_parcellations, + ids=[_[0] for _ in sample_volume_parcellations] +) +def sample_volume_parcellated(sample_volume, request): + vol_tuple, annot = sample_volume + source, desc, space, res = vol_tuple + + if request.param[0].startswith("lausanne"): + parc_name, parc_fetcher, parc_label = request.param + atlas = parc_fetcher(version="MNI152NLin2009aSym")[parc_label] + parc = Parcellater(atlas, space) + else: + raise NotImplementedError(f"Invalid parcellation: {request.param[0]}") + + annot_parc = parc.fit_transform(annot, space) + + return vol_tuple, parc_name, annot_parc + + +def test_fixture_surface_smoke(sample_surface): + # 
print(sample_surface[0]) + pass + +def test_fixture_volume_smoke(sample_volume): + # print(sample_volume[0]) + pass + +# def test_fixture_surface_parcellated_smoke(sample_surface, sample_parcellation): +# print(sample_surface[0], sample_parcellation) + +# def test_fixture_volume_parcellated_smoke(sample_volume, sample_parcellation): +# print(sample_volume[0], sample_parcellation) + + +def test_fixture_surface_parcellated_smoke(sample_surface_parcellated): + surf_tuple, parc_name, annot_parc = sample_surface_parcellated + # print(surf_tuple, parc_name, annot_parc.shape[0]) + pass + +def test_fixture_volume_parcellated_smoke(sample_volume_parcellated): + vol_tuple, parc_name, annot_parc = sample_volume_parcellated + # print(vol_tuple, parc_name, annot_parc.shape[0]) + pass @pytest.mark.xfail -def test_alexander_bloch(): +def test_alexander_bloch(sample_surface): """Test alexander-bloch null model.""" assert False +@pytest.mark.xfail +def test_alexander_bloch_parcellated(sample_surface, sample_parcellation): + """Test alexander-bloch null model for parcellated maps.""" + assert False + @pytest.mark.xfail def test_vasa(): """Test vasa null model.""" assert False +@pytest.mark.xfail +def test_vasa_parcellated(): + """Test vasa null model for parcellated maps.""" + assert False + @pytest.mark.xfail def test_hungarian(): """Test hungarian null model.""" assert False +@pytest.mark.xfail +def test_hungarian_parcellated(): + """Test hungarian null model for parcellated maps.""" + assert False @pytest.mark.xfail def test_baum(): From 77116b9ff51f410d18604fd7b4496adaac16fc79 Mon Sep 17 00:00:00 2001 From: Zhen-Qi Liu Date: Fri, 24 Nov 2023 21:39:59 -0500 Subject: [PATCH 2/6] [FIX] Fix deprecations --- neuromaps/datasets/_osf.py | 16 ++++++++++++---- neuromaps/datasets/tests/test__osf.py | 13 ++++++++++--- neuromaps/datasets/utils.py | 17 ++++++++++++++--- neuromaps/nulls/tests/test_nulls.py | 1 + neuromaps/parcellate.py | 7 +++++++ 5 files changed, 44 insertions(+), 10 deletions(-) diff --git a/neuromaps/datasets/_osf.py b/neuromaps/datasets/_osf.py index 15a2fc63..3b63a397 100644 --- a/neuromaps/datasets/_osf.py +++ b/neuromaps/datasets/_osf.py @@ -2,7 +2,12 @@ """Functions for working with data/osf.json file.""" import os -from pkg_resources import resource_filename +try: + import importlib.resources + _importlib_avail = True +except ImportError: + from pkg_resources import resource_filename + _importlib_avail = False import json from nilearn.datasets.utils import _md5_sum_file @@ -23,9 +28,12 @@ INFO_KEYS = ['source', 'refs', 'comments', 'demographics'] # distribution JSON -OSFJSON = resource_filename( - 'neuromaps', os.path.join('datasets', 'data', 'osf.json') -) +if _importlib_avail: + OSFJSON = importlib.resources.files("neuromaps") / "datasets/data/osf.json" +else: + OSFJSON = resource_filename( + 'neuromaps', os.path.join('datasets', 'data', 'osf.json') + ) def parse_filename(fname, return_ext=True, verbose=False): diff --git a/neuromaps/datasets/tests/test__osf.py b/neuromaps/datasets/tests/test__osf.py index 0a410c44..7d3cf479 100644 --- a/neuromaps/datasets/tests/test__osf.py +++ b/neuromaps/datasets/tests/test__osf.py @@ -1,7 +1,12 @@ # -*- coding: utf-8 -*- """For testing neuromaps.datasets._osf functionality.""" -from pkg_resources import resource_filename +try: + import importlib.resources + _importlib_avail = True +except ImportError: + from pkg_resources import resource_filename + _importlib_avail = False import pytest @@ -21,8 +26,10 @@ def test_parse_fname_list(): def 
test_parse_json(): - """Test parsing a JSON file.""" - osf = resource_filename('neuromaps', 'datasets/data/osf.json') + if _importlib_avail: + osf = importlib.resources.files("neuromaps") / "datasets/data/osf.json" + else: + osf = resource_filename('neuromaps', 'datasets/data/osf.json') out = _osf.parse_json(osf) assert isinstance(out, list) and all(isinstance(i, dict) for i in out) diff --git a/neuromaps/datasets/utils.py b/neuromaps/datasets/utils.py index 39debb60..d25d6cc9 100644 --- a/neuromaps/datasets/utils.py +++ b/neuromaps/datasets/utils.py @@ -3,7 +3,12 @@ import json import os -from pkg_resources import resource_filename +try: + import importlib.resources + _importlib_avail = True +except ImportError: + from pkg_resources import resource_filename + _importlib_avail = False import requests @@ -70,8 +75,14 @@ def get_dataset_info(name, return_restricted=True): dataset : dict or list-of-dict Information on requested data """ - fn = resource_filename('neuromaps', - os.path.join('datasets', 'data', 'osf.json')) + if _importlib_avail: + fn = importlib.resources.files("neuromaps") / "datasets/data/osf.json" + else: + fn = resource_filename( + 'neuromaps', + os.path.join('datasets', 'data', 'osf.json') + ) + with open(fn) as src: osf_resources = _osfify_urls(json.load(src), return_restricted) diff --git a/neuromaps/nulls/tests/test_nulls.py b/neuromaps/nulls/tests/test_nulls.py index 792f191f..732d1b63 100644 --- a/neuromaps/nulls/tests/test_nulls.py +++ b/neuromaps/nulls/tests/test_nulls.py @@ -117,6 +117,7 @@ def test_fixture_surface_parcellated_smoke(sample_surface_parcellated): # print(surf_tuple, parc_name, annot_parc.shape[0]) pass +@pytest.mark.filterwarnings("ignore::DeprecationWarning") # nilearn/nilearn/pull/3722 def test_fixture_volume_parcellated_smoke(sample_volume_parcellated): vol_tuple, parc_name, annot_parc = sample_volume_parcellated # print(vol_tuple, parc_name, annot_parc.shape[0]) diff --git a/neuromaps/parcellate.py b/neuromaps/parcellate.py index 2dd2f34a..67089057 100644 --- a/neuromaps/parcellate.py +++ b/neuromaps/parcellate.py @@ -2,7 +2,14 @@ """Functionality for parcellating data.""" import nibabel as nib +<<<<<<< HEAD from nilearn.maskers import NiftiLabelsMasker +======= +try: + from nilearn.maskers import NiftiLabelsMasker +except: + from nilearn.input_data import NiftiLabelsMasker +>>>>>>> 276cdaf ([FIX] Fix deprecations) from nilearn.image import new_img_like from nilearn.masking import compute_background_mask import numpy as np From 59edf5ccdf76c8407e37bed7928d24ebcc3694ea Mon Sep 17 00:00:00 2001 From: Zhen-Qi Liu Date: Thu, 30 Nov 2023 16:49:14 -0500 Subject: [PATCH 3/6] [FIX] Styles --- neuromaps/datasets/tests/test__osf.py | 1 + neuromaps/nulls/tests/test_nulls.py | 17 +++++++++-------- neuromaps/parcellate.py | 6 +----- 3 files changed, 11 insertions(+), 13 deletions(-) diff --git a/neuromaps/datasets/tests/test__osf.py b/neuromaps/datasets/tests/test__osf.py index 7d3cf479..9de0ec48 100644 --- a/neuromaps/datasets/tests/test__osf.py +++ b/neuromaps/datasets/tests/test__osf.py @@ -26,6 +26,7 @@ def test_parse_fname_list(): def test_parse_json(): + """Test parsing a JSON file.""" if _importlib_avail: osf = importlib.resources.files("neuromaps") / "datasets/data/osf.json" else: diff --git a/neuromaps/nulls/tests/test_nulls.py b/neuromaps/nulls/tests/test_nulls.py index 732d1b63..1e17983d 100644 --- a/neuromaps/nulls/tests/test_nulls.py +++ b/neuromaps/nulls/tests/test_nulls.py @@ -2,7 +2,7 @@ """For testing neuromaps.nulls.nulls functionality.""" 
import pytest -from neuromaps.datasets import fetch_annotation, available_annotations +from neuromaps.datasets import fetch_annotation from neuromaps.parcellate import Parcellater from neuromaps.images import annot_to_gifti, dlabel_to_gifti from netneurotools.datasets import fetch_schaefer2018, fetch_cammoun2012 @@ -32,6 +32,7 @@ ids=["_".join(_) for _ in sample_surface_maps] ) def sample_surface(request): + """Fixture for surface annotation.""" source, desc, space, den = request.param annot = fetch_annotation( source=source, desc=desc, space=space, den=den @@ -44,6 +45,7 @@ def sample_surface(request): ids=["_".join(_) for _ in sample_volume_maps] ) def sample_volume(request): + """Fixture for volume annotation.""" source, desc, space, res = request.param annot = fetch_annotation( source=source, desc=desc, space=space, res=res @@ -56,6 +58,7 @@ def sample_volume(request): ids=[_[0] for _ in sample_surface_parcellations] ) def sample_surface_parcellated(sample_surface, request): + """Fixture for parcellated surface annotation.""" surf_tuple, annot = sample_surface source, desc, space, den = surf_tuple @@ -82,6 +85,7 @@ def sample_surface_parcellated(sample_surface, request): ids=[_[0] for _ in sample_volume_parcellations] ) def sample_volume_parcellated(sample_volume, request): + """Fixture for parcellated volume annotation.""" vol_tuple, annot = sample_volume source, desc, space, res = vol_tuple @@ -98,27 +102,24 @@ def sample_volume_parcellated(sample_volume, request): def test_fixture_surface_smoke(sample_surface): + """Test fetching surface annotation.""" # print(sample_surface[0]) pass def test_fixture_volume_smoke(sample_volume): + """Test fetching volume annotation.""" # print(sample_volume[0]) pass -# def test_fixture_surface_parcellated_smoke(sample_surface, sample_parcellation): -# print(sample_surface[0], sample_parcellation) - -# def test_fixture_volume_parcellated_smoke(sample_volume, sample_parcellation): -# print(sample_volume[0], sample_parcellation) - - def test_fixture_surface_parcellated_smoke(sample_surface_parcellated): + """Test fetching parcellated surface annotation.""" surf_tuple, parc_name, annot_parc = sample_surface_parcellated # print(surf_tuple, parc_name, annot_parc.shape[0]) pass @pytest.mark.filterwarnings("ignore::DeprecationWarning") # nilearn/nilearn/pull/3722 def test_fixture_volume_parcellated_smoke(sample_volume_parcellated): + """Test fetching parcellated volume annotation.""" vol_tuple, parc_name, annot_parc = sample_volume_parcellated # print(vol_tuple, parc_name, annot_parc.shape[0]) pass diff --git a/neuromaps/parcellate.py b/neuromaps/parcellate.py index 67089057..bc37cb37 100644 --- a/neuromaps/parcellate.py +++ b/neuromaps/parcellate.py @@ -2,14 +2,10 @@ """Functionality for parcellating data.""" import nibabel as nib -<<<<<<< HEAD -from nilearn.maskers import NiftiLabelsMasker -======= try: from nilearn.maskers import NiftiLabelsMasker -except: +except ImportError: from nilearn.input_data import NiftiLabelsMasker ->>>>>>> 276cdaf ([FIX] Fix deprecations) from nilearn.image import new_img_like from nilearn.masking import compute_background_mask import numpy as np From 101ab024dcd69d7dc1f583663418560a7cf43720 Mon Sep 17 00:00:00 2001 From: Zhen-Qi Liu Date: Thu, 30 Nov 2023 21:30:12 -0500 Subject: [PATCH 4/6] [ENH] Add tests --- neuromaps/datasets/_osf.py | 15 +- neuromaps/datasets/tests/test__osf.py | 13 +- neuromaps/datasets/utils.py | 10 +- neuromaps/nulls/tests/test_nulls.py | 315 ++++++++++++++++++++------ neuromaps/tests/conftest.py | 5 + 
5 files changed, 272 insertions(+), 86 deletions(-) diff --git a/neuromaps/datasets/_osf.py b/neuromaps/datasets/_osf.py index 3b63a397..aab37b23 100644 --- a/neuromaps/datasets/_osf.py +++ b/neuromaps/datasets/_osf.py @@ -2,18 +2,17 @@ """Functions for working with data/osf.json file.""" import os -try: - import importlib.resources - _importlib_avail = True -except ImportError: - from pkg_resources import resource_filename - _importlib_avail = False import json - +import importlib.resources from nilearn.datasets.utils import _md5_sum_file - from neuromaps.datasets.utils import _get_session +if getattr(importlib.resources, 'files', None) is not None: + _importlib_avail = True +else: + from pkg_resources import resource_filename + _importlib_avail = False + # uniquely identify each item ('hemi' can be None) FNAME_KEYS = ['source', 'desc', 'space', 'den', 'res', 'hemi'] # auto-generated (checksum can be None if file does not exist) diff --git a/neuromaps/datasets/tests/test__osf.py b/neuromaps/datasets/tests/test__osf.py index 9de0ec48..4485f622 100644 --- a/neuromaps/datasets/tests/test__osf.py +++ b/neuromaps/datasets/tests/test__osf.py @@ -1,17 +1,16 @@ # -*- coding: utf-8 -*- """For testing neuromaps.datasets._osf functionality.""" -try: - import importlib.resources +import pytest +import importlib.resources +from neuromaps.datasets import _osf + +if getattr(importlib.resources, 'files', None) is not None: _importlib_avail = True -except ImportError: +else: from pkg_resources import resource_filename _importlib_avail = False -import pytest - -from neuromaps.datasets import _osf - @pytest.mark.xfail def test_parse_filename(): diff --git a/neuromaps/datasets/utils.py b/neuromaps/datasets/utils.py index d25d6cc9..f44b911e 100644 --- a/neuromaps/datasets/utils.py +++ b/neuromaps/datasets/utils.py @@ -3,15 +3,15 @@ import json import os -try: - import importlib.resources +import importlib.resources +import requests + +if getattr(importlib.resources, 'files', None) is not None: _importlib_avail = True -except ImportError: +else: from pkg_resources import resource_filename _importlib_avail = False -import requests - RESTRICTED = ["grh4d"] diff --git a/neuromaps/nulls/tests/test_nulls.py b/neuromaps/nulls/tests/test_nulls.py index 1e17983d..483012e7 100644 --- a/neuromaps/nulls/tests/test_nulls.py +++ b/neuromaps/nulls/tests/test_nulls.py @@ -2,6 +2,7 @@ """For testing neuromaps.nulls.nulls functionality.""" import pytest +from neuromaps.nulls.nulls import * from neuromaps.datasets import fetch_annotation from neuromaps.parcellate import Parcellater from neuromaps.images import annot_to_gifti, dlabel_to_gifti @@ -65,7 +66,16 @@ def sample_surface_parcellated(sample_surface, request): if request.param[0].startswith("schaefer"): parc_name, parc_fetcher, parc_label = request.param if space == "fsaverage": - atlas = annot_to_gifti(parc_fetcher(version="fsaverage")[parc_label]) + if den == "164k": + atlas = annot_to_gifti(parc_fetcher(version="fsaverage")[parc_label]) + elif den == "41k": + atlas = annot_to_gifti(parc_fetcher(version="fsaverage6")[parc_label]) + elif den == "10k": + atlas = annot_to_gifti(parc_fetcher(version="fsaverage5")[parc_label]) + else: + raise NotImplementedError( + f"Invalid surface density: {den} for fsaverage space" + ) elif space == "fsLR": atlas = dlabel_to_gifti(parc_fetcher(version="fslr32k")[parc_label]) else: @@ -76,7 +86,7 @@ def sample_surface_parcellated(sample_surface, request): annot_parc = parc.fit_transform(annot, space) - return surf_tuple, parc_name, 
annot_parc + return surf_tuple, parc_name, atlas, annot_parc @pytest.fixture( @@ -98,75 +108,244 @@ def sample_volume_parcellated(sample_volume, request): annot_parc = parc.fit_transform(annot, space) - return vol_tuple, parc_name, annot_parc + return vol_tuple, parc_name, atlas, annot_parc +class TestFixturesSmoke: -def test_fixture_surface_smoke(sample_surface): - """Test fetching surface annotation.""" - # print(sample_surface[0]) - pass + def test_fixture_surface_smoke(self, sample_surface): + """Test fetching surface annotation.""" + print(sample_surface[0]) + assert True -def test_fixture_volume_smoke(sample_volume): - """Test fetching volume annotation.""" - # print(sample_volume[0]) - pass -def test_fixture_surface_parcellated_smoke(sample_surface_parcellated): - """Test fetching parcellated surface annotation.""" - surf_tuple, parc_name, annot_parc = sample_surface_parcellated - # print(surf_tuple, parc_name, annot_parc.shape[0]) - pass + def test_fixture_volume_smoke(self, sample_volume): + """Test fetching volume annotation.""" + print(sample_volume[0]) + assert True -@pytest.mark.filterwarnings("ignore::DeprecationWarning") # nilearn/nilearn/pull/3722 -def test_fixture_volume_parcellated_smoke(sample_volume_parcellated): - """Test fetching parcellated volume annotation.""" - vol_tuple, parc_name, annot_parc = sample_volume_parcellated - # print(vol_tuple, parc_name, annot_parc.shape[0]) - pass + def test_fixture_surface_parcellated_smoke(self, sample_surface_parcellated): + """Test fetching parcellated surface annotation.""" + surf_tuple, parc_name, atlas, annot_parc = sample_surface_parcellated + print(surf_tuple, parc_name, atlas, annot_parc.shape[0]) + assert True -@pytest.mark.xfail -def test_alexander_bloch(sample_surface): - """Test alexander-bloch null model.""" - assert False -@pytest.mark.xfail -def test_alexander_bloch_parcellated(sample_surface, sample_parcellation): - """Test alexander-bloch null model for parcellated maps.""" - assert False + @pytest.mark.filterwarnings("ignore::DeprecationWarning") # nilearn/nilearn/pull/3722 + def test_fixture_volume_parcellated_smoke(self, sample_volume_parcellated): + """Test fetching parcellated volume annotation.""" + vol_tuple, parc_name, atlas, annot_parc = sample_volume_parcellated + print(vol_tuple, parc_name, atlas, annot_parc.shape[0]) + assert True +class TestAlexanderBloch: -@pytest.mark.xfail -def test_vasa(): - """Test vasa null model.""" - assert False + def test_alexander_bloch_surface(self, sample_surface): + """Test alexander-bloch null model for surface.""" + surf_tuple, annot = sample_surface + _, _, space, den = surf_tuple + alexander_bloch(annot, atlas=space, density=den, n_perm=3) -@pytest.mark.xfail -def test_vasa_parcellated(): - """Test vasa null model for parcellated maps.""" - assert False + @pytest.mark.xfail + def test_alexander_bloch_volume(self, sample_volume): + """Test alexander-bloch null model for volume.""" + vol_tuple, annot = sample_volume + _, _, space, res = vol_tuple + alexander_bloch(annot, atlas=space, density=res, n_perm=3) -@pytest.mark.xfail -def test_hungarian(): - """Test hungarian null model.""" - assert False -@pytest.mark.xfail -def test_hungarian_parcellated(): - """Test hungarian null model for parcellated maps.""" - assert False + @pytest.mark.filterwarnings("ignore::DeprecationWarning") # neuromaps.images.load_data() + def test_alexander_bloch_surface_parcellated(self, sample_surface_parcellated): + """Test alexander-bloch null model for parcellated surface.""" + surf_tuple, 
parc_name, atlas, annot_parc = sample_surface_parcellated + _, _, space, den = surf_tuple + print(surf_tuple, parc_name, atlas, annot_parc.shape) -@pytest.mark.xfail -def test_baum(): - """Test baum null model.""" - assert False + alexander_bloch( + annot_parc, atlas=space, density=den, parcellation=atlas, n_perm=3 + ) -@pytest.mark.xfail -def test_cornblath(): - """Test cornblath null model.""" - assert False + @pytest.mark.xfail + @pytest.mark.filterwarnings("ignore::DeprecationWarning") # neuromaps.images.load_data() + def test_alexander_bloch_volume_parcellated(self, sample_volume_parcellated): + """Test alexander-bloch null model for parcellated volume.""" + vol_tuple, parc_name, atlas, annot_parc = sample_volume_parcellated + _, _, space, res = vol_tuple + print(vol_tuple, parc_name, atlas, annot_parc.shape) + + alexander_bloch( + annot_parc, atlas=space, density=res, parcellation=atlas, n_perm=3 + ) + +class TestVasa: + + @pytest.mark.xfail + def test_vasa_surface(self, sample_surface): + """Test vasa null model for surface.""" + surf_tuple, annot = sample_surface + _, _, space, den = surf_tuple + vasa(annot, atlas=space, density=den, n_perm=3) + + + @pytest.mark.xfail + def test_vasa_volume(self, sample_volume): + """Test vasa null model for volume.""" + vol_tuple, annot = sample_volume + _, _, space, res = vol_tuple + vasa(annot, atlas=space, density=res, n_perm=3) + + + @pytest.mark.filterwarnings("ignore::DeprecationWarning") # neuromaps.images.load_data() + def test_vasa_surface_parcellated(self, sample_surface_parcellated): + """Test vasa null model for parcellated surface.""" + surf_tuple, parc_name, atlas, annot_parc = sample_surface_parcellated + _, _, space, den = surf_tuple + print(surf_tuple, parc_name, atlas, annot_parc.shape) + + vasa( + annot_parc, atlas=space, density=den, parcellation=atlas, n_perm=3 + ) + + + @pytest.mark.xfail + @pytest.mark.filterwarnings("ignore::DeprecationWarning") # neuromaps.images.load_data() + def test_vasa_volume_parcellated(self, sample_volume_parcellated): + """Test vasa null model for parcellated volume.""" + vol_tuple, parc_name, atlas, annot_parc = sample_volume_parcellated + _, _, space, res = vol_tuple + print(vol_tuple, parc_name, atlas, annot_parc.shape) + + vasa( + annot_parc, atlas=space, density=res, parcellation=atlas, n_perm=3 + ) + +class TestHungarian: + + @pytest.mark.xfail + def test_hungarian_surface(self, sample_surface): + """Test hungarian null model for surface.""" + surf_tuple, annot = sample_surface + _, _, space, den = surf_tuple + hungarian(annot, atlas=space, density=den, n_perm=3) + + + @pytest.mark.xfail + def test_hungarian_volume(self, sample_volume): + """Test hungarian null model for volume.""" + vol_tuple, annot = sample_volume + _, _, space, res = vol_tuple + hungarian(annot, atlas=space, density=res, n_perm=3) + + + @pytest.mark.filterwarnings("ignore::DeprecationWarning") # neuromaps.images.load_data() + def test_hungarian_surface_parcellated(self, sample_surface_parcellated): + """Test hungarian null model for parcellated surface.""" + surf_tuple, parc_name, atlas, annot_parc = sample_surface_parcellated + _, _, space, den = surf_tuple + print(surf_tuple, parc_name, atlas, annot_parc.shape) + + hungarian( + annot_parc, atlas=space, density=den, parcellation=atlas, n_perm=3 + ) + + + @pytest.mark.xfail + @pytest.mark.filterwarnings("ignore::DeprecationWarning") # neuromaps.images.load_data() + def test_hungarian_volume_parcellated(self, sample_volume_parcellated): + """Test hungarian null model for 
parcellated volume.""" + vol_tuple, parc_name, atlas, annot_parc = sample_volume_parcellated + _, _, space, res = vol_tuple + print(vol_tuple, parc_name, atlas, annot_parc.shape) + + hungarian( + annot_parc, atlas=space, density=res, parcellation=atlas, n_perm=3 + ) + +class TestBaum: + + @pytest.mark.xfail + def test_baum_surface(self, sample_surface): + """Test baum null model for surface.""" + surf_tuple, annot = sample_surface + _, _, space, den = surf_tuple + baum(annot, atlas=space, density=den, n_perm=3) + + + @pytest.mark.xfail + def test_baum_volume(self, sample_volume): + """Test baum null model for volume.""" + vol_tuple, annot = sample_volume + _, _, space, res = vol_tuple + baum(annot, atlas=space, density=res, n_perm=3) + + + @pytest.mark.filterwarnings("ignore::DeprecationWarning") # neuromaps.images.load_data() + def test_baum_surface_parcellated(self, sample_surface_parcellated): + """Test baum null model for parcellated surface.""" + surf_tuple, parc_name, atlas, annot_parc = sample_surface_parcellated + _, _, space, den = surf_tuple + print(surf_tuple, parc_name, atlas, annot_parc.shape) + + baum( + annot_parc, atlas=space, density=den, parcellation=atlas, n_perm=3 + ) + + + @pytest.mark.xfail + @pytest.mark.filterwarnings("ignore::DeprecationWarning") # neuromaps.images.load_data() + def test_baum_volume_parcellated(self, sample_volume_parcellated): + """Test baum null model for parcellated volume.""" + vol_tuple, parc_name, atlas, annot_parc = sample_volume_parcellated + _, _, space, res = vol_tuple + print(vol_tuple, parc_name, atlas, annot_parc.shape) + + baum( + annot_parc, atlas=space, density=res, parcellation=atlas, n_perm=3 + ) + +class TestCornblath: + + @pytest.mark.xfail + def test_cornblath_surface(self, sample_surface): + """Test cornblath null model for surface.""" + surf_tuple, annot = sample_surface + _, _, space, den = surf_tuple + cornblath(annot, atlas=space, density=den, n_perm=3) + + + @pytest.mark.xfail + def test_cornblath_volume(self, sample_volume): + """Test cornblath null model for volume.""" + vol_tuple, annot = sample_volume + _, _, space, res = vol_tuple + cornblath(annot, atlas=space, density=res, n_perm=3) + + + @pytest.mark.filterwarnings("ignore::DeprecationWarning") # neuromaps.images.load_data() + def test_cornblathsurface_parcellated(self, sample_surface_parcellated): + """Test cornblath null model for parcellated surface.""" + surf_tuple, parc_name, atlas, annot_parc = sample_surface_parcellated + _, _, space, den = surf_tuple + print(surf_tuple, parc_name, atlas, annot_parc.shape) + + cornblath( + annot_parc, atlas=space, density=den, parcellation=atlas, n_perm=3 + ) + + + @pytest.mark.xfail + @pytest.mark.filterwarnings("ignore::DeprecationWarning") # neuromaps.images.load_data() + def test_cornblath_volume_parcellated(self, sample_volume_parcellated): + """Test cornblath null model for parcellated volume.""" + vol_tuple, parc_name, atlas, annot_parc = sample_volume_parcellated + _, _, space, res = vol_tuple + print(vol_tuple, parc_name, atlas, annot_parc.shape) + + cornblath( + annot_parc, atlas=space, density=res, parcellation=atlas, n_perm=3 + ) @pytest.mark.xfail @@ -180,20 +359,24 @@ def test__make_surrogates(): """Test making surrogates.""" assert False +class TestBurt2018: -@pytest.mark.xfail -def test_burt2018(): - """Test burt2018 null model.""" - assert False + @pytest.mark.xfail + def test_burt2018(self): + """Test burt2018 null model.""" + assert False +class TestBurt2020: -@pytest.mark.xfail -def test_burt2020(): - 
"""Test burt2020 null model.""" - assert False + @pytest.mark.xfail + def test_burt2020(self): + """Test burt2020 null model.""" + assert False -@pytest.mark.xfail -def test_moran(): - """Test moran null model.""" - assert False +class TestMoran: + + @pytest.mark.xfail + def test_moran(self): + """Test moran null model.""" + assert False diff --git a/neuromaps/tests/conftest.py b/neuromaps/tests/conftest.py index 0963681f..91fb5989 100644 --- a/neuromaps/tests/conftest.py +++ b/neuromaps/tests/conftest.py @@ -5,6 +5,11 @@ import pytest +def pytest_configure(config): + """Add markers for tests.""" + config.addinivalue_line( + "markers", "workbench: mark test to run with Connectome Workbench" + ) def pytest_runtest_setup(item): """Skip tests that require workbench if it's not installed.""" From 1e90a00e7a32fefc41cc2972f2d8442eb5331c84 Mon Sep 17 00:00:00 2001 From: Zhen-Qi Liu Date: Thu, 30 Nov 2023 21:36:05 -0500 Subject: [PATCH 5/6] [FIX] Style --- neuromaps/nulls/tests/test_nulls.py | 59 ++++++++++++++++++++++------- 1 file changed, 46 insertions(+), 13 deletions(-) diff --git a/neuromaps/nulls/tests/test_nulls.py b/neuromaps/nulls/tests/test_nulls.py index 483012e7..dc2efec5 100644 --- a/neuromaps/nulls/tests/test_nulls.py +++ b/neuromaps/nulls/tests/test_nulls.py @@ -2,7 +2,9 @@ """For testing neuromaps.nulls.nulls functionality.""" import pytest -from neuromaps.nulls.nulls import * +from neuromaps.nulls.nulls import ( + alexander_bloch, vasa, hungarian, baum, cornblath +) from neuromaps.datasets import fetch_annotation from neuromaps.parcellate import Parcellater from neuromaps.images import annot_to_gifti, dlabel_to_gifti @@ -111,6 +113,7 @@ def sample_volume_parcellated(sample_volume, request): return vol_tuple, parc_name, atlas, annot_parc class TestFixturesSmoke: + """Test fixtures for null models.""" def test_fixture_surface_smoke(self, sample_surface): """Test fetching surface annotation.""" @@ -130,8 +133,10 @@ def test_fixture_surface_parcellated_smoke(self, sample_surface_parcellated): print(surf_tuple, parc_name, atlas, annot_parc.shape[0]) assert True - - @pytest.mark.filterwarnings("ignore::DeprecationWarning") # nilearn/nilearn/pull/3722 + + @pytest.mark.filterwarnings( + "ignore::DeprecationWarning" # nilearn/nilearn/pull/3722 + ) def test_fixture_volume_parcellated_smoke(self, sample_volume_parcellated): """Test fetching parcellated volume annotation.""" vol_tuple, parc_name, atlas, annot_parc = sample_volume_parcellated @@ -139,6 +144,7 @@ def test_fixture_volume_parcellated_smoke(self, sample_volume_parcellated): assert True class TestAlexanderBloch: + """Test alexander-bloch null model.""" def test_alexander_bloch_surface(self, sample_surface): """Test alexander-bloch null model for surface.""" @@ -155,7 +161,9 @@ def test_alexander_bloch_volume(self, sample_volume): alexander_bloch(annot, atlas=space, density=res, n_perm=3) - @pytest.mark.filterwarnings("ignore::DeprecationWarning") # neuromaps.images.load_data() + @pytest.mark.filterwarnings( + "ignore::DeprecationWarning" # nilearn/nilearn/pull/3722 + ) def test_alexander_bloch_surface_parcellated(self, sample_surface_parcellated): """Test alexander-bloch null model for parcellated surface.""" surf_tuple, parc_name, atlas, annot_parc = sample_surface_parcellated @@ -168,7 +176,9 @@ def test_alexander_bloch_surface_parcellated(self, sample_surface_parcellated): @pytest.mark.xfail - @pytest.mark.filterwarnings("ignore::DeprecationWarning") # neuromaps.images.load_data() + @pytest.mark.filterwarnings( + 
"ignore::DeprecationWarning" # neuromaps.images.load_data() + ) def test_alexander_bloch_volume_parcellated(self, sample_volume_parcellated): """Test alexander-bloch null model for parcellated volume.""" vol_tuple, parc_name, atlas, annot_parc = sample_volume_parcellated @@ -180,6 +190,7 @@ def test_alexander_bloch_volume_parcellated(self, sample_volume_parcellated): ) class TestVasa: + """Test vasa null model.""" @pytest.mark.xfail def test_vasa_surface(self, sample_surface): @@ -197,7 +208,9 @@ def test_vasa_volume(self, sample_volume): vasa(annot, atlas=space, density=res, n_perm=3) - @pytest.mark.filterwarnings("ignore::DeprecationWarning") # neuromaps.images.load_data() + @pytest.mark.filterwarnings( + "ignore::DeprecationWarning" # nilearn/nilearn/pull/3722 + ) def test_vasa_surface_parcellated(self, sample_surface_parcellated): """Test vasa null model for parcellated surface.""" surf_tuple, parc_name, atlas, annot_parc = sample_surface_parcellated @@ -210,7 +223,9 @@ def test_vasa_surface_parcellated(self, sample_surface_parcellated): @pytest.mark.xfail - @pytest.mark.filterwarnings("ignore::DeprecationWarning") # neuromaps.images.load_data() + @pytest.mark.filterwarnings( + "ignore::DeprecationWarning" # neuromaps.images.load_data() + ) def test_vasa_volume_parcellated(self, sample_volume_parcellated): """Test vasa null model for parcellated volume.""" vol_tuple, parc_name, atlas, annot_parc = sample_volume_parcellated @@ -222,6 +237,7 @@ def test_vasa_volume_parcellated(self, sample_volume_parcellated): ) class TestHungarian: + """Test hungarian null model.""" @pytest.mark.xfail def test_hungarian_surface(self, sample_surface): @@ -239,7 +255,9 @@ def test_hungarian_volume(self, sample_volume): hungarian(annot, atlas=space, density=res, n_perm=3) - @pytest.mark.filterwarnings("ignore::DeprecationWarning") # neuromaps.images.load_data() + @pytest.mark.filterwarnings( + "ignore::DeprecationWarning" # nilearn/nilearn/pull/3722 + ) def test_hungarian_surface_parcellated(self, sample_surface_parcellated): """Test hungarian null model for parcellated surface.""" surf_tuple, parc_name, atlas, annot_parc = sample_surface_parcellated @@ -252,7 +270,9 @@ def test_hungarian_surface_parcellated(self, sample_surface_parcellated): @pytest.mark.xfail - @pytest.mark.filterwarnings("ignore::DeprecationWarning") # neuromaps.images.load_data() + @pytest.mark.filterwarnings( + "ignore::DeprecationWarning" # neuromaps.images.load_data() + ) def test_hungarian_volume_parcellated(self, sample_volume_parcellated): """Test hungarian null model for parcellated volume.""" vol_tuple, parc_name, atlas, annot_parc = sample_volume_parcellated @@ -264,6 +284,7 @@ def test_hungarian_volume_parcellated(self, sample_volume_parcellated): ) class TestBaum: + """Test baum null model.""" @pytest.mark.xfail def test_baum_surface(self, sample_surface): @@ -281,7 +302,9 @@ def test_baum_volume(self, sample_volume): baum(annot, atlas=space, density=res, n_perm=3) - @pytest.mark.filterwarnings("ignore::DeprecationWarning") # neuromaps.images.load_data() + @pytest.mark.filterwarnings( + "ignore::DeprecationWarning" # nilearn/nilearn/pull/3722 + ) def test_baum_surface_parcellated(self, sample_surface_parcellated): """Test baum null model for parcellated surface.""" surf_tuple, parc_name, atlas, annot_parc = sample_surface_parcellated @@ -294,7 +317,9 @@ def test_baum_surface_parcellated(self, sample_surface_parcellated): @pytest.mark.xfail - @pytest.mark.filterwarnings("ignore::DeprecationWarning") # 
neuromaps.images.load_data() + @pytest.mark.filterwarnings( + "ignore::DeprecationWarning" # neuromaps.images.load_data() + ) def test_baum_volume_parcellated(self, sample_volume_parcellated): """Test baum null model for parcellated volume.""" vol_tuple, parc_name, atlas, annot_parc = sample_volume_parcellated @@ -306,6 +331,7 @@ def test_baum_volume_parcellated(self, sample_volume_parcellated): ) class TestCornblath: + """Test cornblath null model.""" @pytest.mark.xfail def test_cornblath_surface(self, sample_surface): @@ -323,7 +349,9 @@ def test_cornblath_volume(self, sample_volume): cornblath(annot, atlas=space, density=res, n_perm=3) - @pytest.mark.filterwarnings("ignore::DeprecationWarning") # neuromaps.images.load_data() + @pytest.mark.filterwarnings( + "ignore::DeprecationWarning" # nilearn/nilearn/pull/3722 + ) def test_cornblathsurface_parcellated(self, sample_surface_parcellated): """Test cornblath null model for parcellated surface.""" surf_tuple, parc_name, atlas, annot_parc = sample_surface_parcellated @@ -336,7 +364,9 @@ def test_cornblathsurface_parcellated(self, sample_surface_parcellated): @pytest.mark.xfail - @pytest.mark.filterwarnings("ignore::DeprecationWarning") # neuromaps.images.load_data() + @pytest.mark.filterwarnings( + "ignore::DeprecationWarning" # neuromaps.images.load_data() + ) def test_cornblath_volume_parcellated(self, sample_volume_parcellated): """Test cornblath null model for parcellated volume.""" vol_tuple, parc_name, atlas, annot_parc = sample_volume_parcellated @@ -360,6 +390,7 @@ def test__make_surrogates(): assert False class TestBurt2018: + """Test burt2018 null model.""" @pytest.mark.xfail def test_burt2018(self): @@ -367,6 +398,7 @@ def test_burt2018(self): assert False class TestBurt2020: + """Test burt2020 null model.""" @pytest.mark.xfail def test_burt2020(self): @@ -375,6 +407,7 @@ def test_burt2020(self): class TestMoran: + """Test moran null model.""" @pytest.mark.xfail def test_moran(self): From a434dc25cbb26d5f13b614dd7c5d39c5fd125993 Mon Sep 17 00:00:00 2001 From: Zhen-Qi Liu Date: Fri, 1 Dec 2023 15:49:23 -0500 Subject: [PATCH 6/6] [FIX] Continue --- neuromaps/nulls/tests/test_nulls.py | 26 ++++++++++++++++++-------- 1 file changed, 18 insertions(+), 8 deletions(-) diff --git a/neuromaps/nulls/tests/test_nulls.py b/neuromaps/nulls/tests/test_nulls.py index dc2efec5..d701b019 100644 --- a/neuromaps/nulls/tests/test_nulls.py +++ b/neuromaps/nulls/tests/test_nulls.py @@ -3,7 +3,8 @@ import pytest from neuromaps.nulls.nulls import ( - alexander_bloch, vasa, hungarian, baum, cornblath + alexander_bloch, vasa, hungarian, baum, + cornblath, burt2018, burt2020, moran ) from neuromaps.datasets import fetch_annotation from neuromaps.parcellate import Parcellater @@ -333,6 +334,7 @@ def test_baum_volume_parcellated(self, sample_volume_parcellated): class TestCornblath: """Test cornblath null model.""" + @pytest.mark.skip @pytest.mark.xfail def test_cornblath_surface(self, sample_surface): """Test cornblath null model for surface.""" @@ -340,7 +342,7 @@ def test_cornblath_surface(self, sample_surface): _, _, space, den = surf_tuple cornblath(annot, atlas=space, density=den, n_perm=3) - + @pytest.mark.skip @pytest.mark.xfail def test_cornblath_volume(self, sample_volume): """Test cornblath null model for volume.""" @@ -348,11 +350,11 @@ def test_cornblath_volume(self, sample_volume): _, _, space, res = vol_tuple cornblath(annot, atlas=space, density=res, n_perm=3) - + @pytest.mark.filterwarnings( "ignore::DeprecationWarning" # 
nilearn/nilearn/pull/3722 ) - def test_cornblathsurface_parcellated(self, sample_surface_parcellated): + def test_cornblath_surface_parcellated(self, sample_surface_parcellated): """Test cornblath null model for parcellated surface.""" surf_tuple, parc_name, atlas, annot_parc = sample_surface_parcellated _, _, space, den = surf_tuple @@ -392,10 +394,18 @@ def test__make_surrogates(): class TestBurt2018: """Test burt2018 null model.""" - @pytest.mark.xfail - def test_burt2018(self): - """Test burt2018 null model.""" - assert False + def test_burt2018_surface(self, sample_surface): + """Test burt2018 null model for surface.""" + surf_tuple, annot = sample_surface + _, _, space, den = surf_tuple + burt2018(annot, atlas=space, density=den, n_perm=3) + + + def test_burt2018_volume(self, sample_volume): + """Test burt2018 null model for volume.""" + vol_tuple, annot = sample_volume + _, _, space, res = vol_tuple + burt2018(annot, atlas=space, density=res, n_perm=3) class TestBurt2020: """Test burt2020 null model."""
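
The null-model tests added across these patches all follow one call pattern: fetch an annotation with fetch_annotation, optionally parcellate it with Parcellater, then generate a handful of nulls with n_perm=3 to keep the suite cheap. A minimal standalone sketch of the surface path exercised by TestAlexanderBloch, assuming neuromaps and its null-model dependencies are installed and the annotation can be downloaded (the shape comment is an expectation for illustration, not something the patches assert):

    # Fetch the abagen 'genepc1' map on fsaverage 10k (as in sample_surface_maps)
    # and build three spatial nulls, mirroring test_alexander_bloch_surface.
    from neuromaps.datasets import fetch_annotation
    from neuromaps.nulls.nulls import alexander_bloch

    annot = fetch_annotation(source='abagen', desc='genepc1',
                             space='fsaverage', den='10k')

    # n_perm=3 mirrors the tests; the library default is much larger.
    nulls = alexander_bloch(annot, atlas='fsaverage', density='10k', n_perm=3)
    print(nulls.shape)  # expected: one column of permuted values per null

The parcellated variants differ only in passing the atlas returned by the fixture (parcellation=atlas), so the nulls are generated at the parcel level rather than per vertex.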