diff --git a/neuromaps/datasets/_osf.py b/neuromaps/datasets/_osf.py index 15a2fc63..3b63a397 100644 --- a/neuromaps/datasets/_osf.py +++ b/neuromaps/datasets/_osf.py @@ -2,7 +2,12 @@ """Functions for working with data/osf.json file.""" import os -from pkg_resources import resource_filename +try: + from importlib.resources import files + _importlib_avail = True +except ImportError: + from pkg_resources import resource_filename + _importlib_avail = False import json from nilearn.datasets.utils import _md5_sum_file @@ -23,9 +28,12 @@ INFO_KEYS = ['source', 'refs', 'comments', 'demographics'] # distribution JSON -OSFJSON = resource_filename( - 'neuromaps', os.path.join('datasets', 'data', 'osf.json') -) +if _importlib_avail: + OSFJSON = files("neuromaps") / "datasets/data/osf.json" +else: + OSFJSON = resource_filename( + 'neuromaps', os.path.join('datasets', 'data', 'osf.json') + ) def parse_filename(fname, return_ext=True, verbose=False): diff --git a/neuromaps/datasets/tests/test__osf.py b/neuromaps/datasets/tests/test__osf.py index 0a410c44..7d3cf479 100644 --- a/neuromaps/datasets/tests/test__osf.py +++ b/neuromaps/datasets/tests/test__osf.py @@ -1,7 +1,12 @@ # -*- coding: utf-8 -*- """For testing neuromaps.datasets._osf functionality.""" -from pkg_resources import resource_filename +try: + from importlib.resources import files + _importlib_avail = True +except ImportError: + from pkg_resources import resource_filename + _importlib_avail = False import pytest @@ -21,8 +26,11 @@ def test_parse_fname_list(): def test_parse_json(): """Test parsing a JSON file.""" - osf = resource_filename('neuromaps', 'datasets/data/osf.json') + if _importlib_avail: + osf = files("neuromaps") / "datasets/data/osf.json" + else: + osf = resource_filename('neuromaps', 'datasets/data/osf.json') out = _osf.parse_json(osf) assert isinstance(out, list) and all(isinstance(i, dict) for i in out) diff --git a/neuromaps/datasets/utils.py b/neuromaps/datasets/utils.py index 
39debb60..d25d6cc9 100644 --- a/neuromaps/datasets/utils.py +++ b/neuromaps/datasets/utils.py @@ -3,7 +3,12 @@ import json import os -from pkg_resources import resource_filename +try: + from importlib.resources import files + _importlib_avail = True +except ImportError: + from pkg_resources import resource_filename + _importlib_avail = False import requests @@ -70,8 +75,14 @@ def get_dataset_info(name, return_restricted=True): dataset : dict or list-of-dict Information on requested data """ - fn = resource_filename('neuromaps', - os.path.join('datasets', 'data', 'osf.json')) + if _importlib_avail: + fn = files("neuromaps") / "datasets/data/osf.json" + else: + fn = resource_filename( + 'neuromaps', + os.path.join('datasets', 'data', 'osf.json') + ) + with open(fn) as src: osf_resources = _osfify_urls(json.load(src), return_restricted) diff --git a/neuromaps/nulls/tests/test_nulls.py b/neuromaps/nulls/tests/test_nulls.py index 792f191f..732d1b63 100644 --- a/neuromaps/nulls/tests/test_nulls.py +++ b/neuromaps/nulls/tests/test_nulls.py @@ -117,6 +117,7 @@ def test_fixture_surface_parcellated_smoke(sample_surface_parcellated): # print(surf_tuple, parc_name, annot_parc.shape[0]) pass +@pytest.mark.filterwarnings("ignore::DeprecationWarning") # nilearn/nilearn/pull/3722 def test_fixture_volume_parcellated_smoke(sample_volume_parcellated): vol_tuple, parc_name, annot_parc = sample_volume_parcellated # print(vol_tuple, parc_name, annot_parc.shape[0]) diff --git a/neuromaps/parcellate.py b/neuromaps/parcellate.py index 2dd2f34a..67089057 100644 --- a/neuromaps/parcellate.py +++ b/neuromaps/parcellate.py @@ -2,7 +2,10 @@ """Functionality for parcellating data.""" import nibabel as nib -from nilearn.maskers import NiftiLabelsMasker +try: + from nilearn.maskers import NiftiLabelsMasker +except ImportError: + from nilearn.input_data import NiftiLabelsMasker from nilearn.image import new_img_like from nilearn.masking 
import compute_background_mask import numpy as np