♻️ Import global Nipype loggers

shnizzedy committed Feb 20, 2024
1 parent 25ea215 commit 26ebd1e
Showing 53 changed files with 358 additions and 412 deletions.
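For context: every change below follows one refactor. Per-module handles on Nipype's loggers (logging.getLogger("nipype.workflow") and friends) are dropped in favor of loggers imported from CPAC.utils.monitoring. A minimal sketch of what those shared names presumably wrap; the three channel names are Nipype's standard loggers, but the exact definitions in CPAC.utils.monitoring are an assumption, not shown in this diff:

    # Hypothetical sketch of the shared loggers in CPAC/utils/monitoring;
    # the channel names are Nipype's standard ones, the definitions are assumed.
    from nipype import logging as nipype_logging

    WFLOGGER = nipype_logging.getLogger("nipype.workflow")   # workflow events
    IFLOGGER = nipype_logging.getLogger("nipype.interface")  # interface events
    FMLOGGER = nipype_logging.getLogger("nipype.filemanip")  # file-handling events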
46 changes: 23 additions & 23 deletions CPAC/_entrypoints/run.py
@@ -28,7 +28,6 @@
from warnings import simplefilter

import yaml
-from nipype import logging

from CPAC import __version__, license_notice
from CPAC.pipeline import AVAILABLE_PIPELINE_CONFIGS
@@ -48,11 +47,10 @@
upgrade_pipeline_to_1_8,
)
from CPAC.utils.docs import DOCS_URL_PREFIX
-from CPAC.utils.monitoring import failed_to_start, log_nodes_cb
+from CPAC.utils.monitoring import failed_to_start, FMLOGGER, log_nodes_cb, WFLOGGER
from CPAC.utils.utils import update_nested_dict

simplefilter(action="ignore", category=FutureWarning)
-logger = logging.getLogger("nipype.workflow")
DEFAULT_TMP_DIR = "/tmp"


@@ -477,7 +475,7 @@ def run_main():
if not args.group_file or not os.path.exists(args.group_file):
import pkg_resources as p

-logger.warning("\nNo group analysis configuration file was supplied.\n")
+WFLOGGER.warning("\nNo group analysis configuration file was supplied.\n")

args.group_file = p.resource_filename(
"CPAC",
@@ -493,11 +491,11 @@
if not os.path.exists(output_group):
shutil.copyfile(args.group_file, output_group)
except (Exception, IOError):
-logger.warning(
+FMLOGGER.warning(
"Could not create group analysis configuration file.\nPlease refer to the C-PAC documentation for group analysis setup."
)
else:
-logger.warning(
+WFLOGGER.warning(
"Please refer to the output directory for a template of the file and, after customizing to your analysis, add the flag\n\n --group_file %s\n\nto your `docker run` command\n",
output_group,
)
@@ -507,7 +505,7 @@
else:
import CPAC.pipeline.cpac_group_runner as cgr

-logger.info(
+WFLOGGER.info(
"Starting group level analysis of data in %s using %s",
bids_dir,
args.group_file,
@@ -537,14 +535,14 @@
# validate input dir (if skip_bids_validator is not set)
if not args.data_config_file:
if args.bids_validator_config:
logger.info("Running BIDS validator...")
WFLOGGER.info("Running BIDS validator...")
run(f"bids-validator --config {args.bids_validator_config} {bids_dir}")
elif args.skip_bids_validator:
logger.info("Skipping BIDS validator...")
WFLOGGER.info("Skipping BIDS validator...")
elif bids_dir_is_s3:
logger.info("Skipping BIDS validator for S3 datasets...")
WFLOGGER.info("Skipping BIDS validator for S3 datasets...")
else:
logger.info("Running BIDS validator...")
WFLOGGER.info("Running BIDS validator...")
run(f"bids-validator {bids_dir}")

if args.preconfig:
Expand All @@ -560,7 +558,7 @@ def run_main():
if "pipeline_setup" not in c:
_url = f"{DOCS_URL_PREFIX}/user/pipelines/1.7-1.8-nesting-mappings"

-logger.warning(
+WFLOGGER.warning(
"\nC-PAC changed its pipeline configuration "
"format in v1.8.0.\nSee %s for details.\n",
_url,
@@ -711,7 +709,7 @@ def run_main():
output_dir, "working"
)
else:
-logger.warning(
+FMLOGGER.warning(
"Cannot write working directory to S3 bucket. "
"Either change the output directory to something "
"local or turn off the --save_working_dir flag"
@@ -733,19 +731,21 @@
]["calculate_motion_after"] = True

if args.participant_label:
logger.info("#### Running C-PAC for %s", ", ".join(args.participant_label))
WFLOGGER.info(
"#### Running C-PAC for %s", ", ".join(args.participant_label)
)
else:
logger.info("#### Running C-PAC")
WFLOGGER.info("#### Running C-PAC")

-logger.info(
+WFLOGGER.info(
"Number of participants to run in parallel: %s",
c["pipeline_setup", "system_config", "num_participants_at_once"],
)

if not args.data_config_file:
logger.info("Input directory: %s", bids_dir)
WFLOGGER.info("Input directory: %s", bids_dir)

-logger.info(
+WFLOGGER.info(
"Output directory: %s\nWorking directory: %s\nLog directory: %s\n"
"Remove working directory: %s\nAvailable memory: %s (GB)\n"
"Available threads: %s\nNumber of threads for ANTs: %s",
@@ -805,7 +805,7 @@ def run_main():
args.participant_ndx = os.environ["AWS_BATCH_JOB_ARRAY_INDEX"]

if 0 <= participant_ndx < len(sub_list):
-logger.info(
+WFLOGGER.info(
"Processing data for participant %s (%s)",
args.participant_ndx,
sub_list[participant_ndx]["subject_id"],
@@ -884,7 +884,7 @@ def run_main():
TypeError,
ValueError,
) as e:
-logger.warning(
+WFLOGGER.warning(
"The run will continue without monitoring. Monitoring was configured to be enabled, but the monitoring server failed to start, so : %s\n",
e,
)
@@ -916,7 +916,7 @@ def run_main():
],
}

logger.info("Starting participant level processing")
WFLOGGER.info("Starting participant level processing")
exitcode = CPAC.pipeline.cpac_runner.run(
data_config_file,
pipeline_config_file,
@@ -931,7 +931,7 @@

if args.analysis_level == "test_config":
if exitcode == 0:
-logger.info(
+WFLOGGER.info(
"\nPipeline and data configuration files should"
" have been written to %s and %s respectively.\n",
pipeline_config_file,
@@ -942,7 +942,7 @@
from CPAC.utils.monitoring import LOGTAIL

for warning in LOGTAIL["warnings"]:
logger.warning("%s\n", warning.rstrip())
WFLOGGER.warning("%s\n", warning.rstrip())

sys.exit(exitcode)

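Every change in run.py above is the same mechanical swap: the module-level logger bound to "nipype.workflow" disappears, and each call site uses the imported logger matching its concern, WFLOGGER for workflow progress and FMLOGGER for file-management warnings such as the S3 working-directory case. A condensed before/after sketch of the pattern (illustrative, not lines copied from the diff):

    # Before: each module created and named its own handle.
    from nipype import logging
    logger = logging.getLogger("nipype.workflow")
    logger.info("Starting participant level processing")

    # After: purpose-named shared handles, imported once.
    from CPAC.utils.monitoring import FMLOGGER, WFLOGGER
    WFLOGGER.info("Starting participant level processing")
    FMLOGGER.warning("Cannot write working directory to S3 bucket.")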
3 changes: 0 additions & 3 deletions CPAC/anat_preproc/utils.py
@@ -463,12 +463,9 @@ def normalize_wmparc(source_file, target_file, xfm, out_file):


"""This module provides interfaces for workbench -volume-remove-islands commands"""
-from nipype import logging
from nipype.interfaces.base import CommandLineInputSpec, File, TraitedSpec
from nipype.interfaces.workbench.base import WBCommand

-iflogger = logging.getLogger("nipype.interface")


class VolumeRemoveIslandsInputSpec(CommandLineInputSpec):
in_file = File(
7 changes: 3 additions & 4 deletions CPAC/connectome/connectivity_matrix.py
@@ -22,14 +22,13 @@

import numpy as np
from nilearn.connectome import ConnectivityMeasure
-from nipype import logging
from nipype.interfaces import utility as util

from CPAC.pipeline import nipype_pipeline_engine as pe
from CPAC.utils.interfaces.function import Function
from CPAC.utils.interfaces.netcorr import NetCorr, strip_afni_output_header
+from CPAC.utils.monitoring import IFLOGGER

-logger = logging.getLogger("nipype.workflow")
connectome_methods = {
"afni": {"Pearson": "", "Partial": "-part_corr"},
"nilearn": {"Pearson": "correlation", "Partial": "partial correlation"},
@@ -85,8 +84,8 @@ def get_connectome_method(method, tool):
cm_method = connectome_methods[tool.lower()].get(method, NotImplemented)
if cm_method is NotImplemented:
warning_message = f"{method} has not yet been implemented for {tool} in C-PAC."
-if logger:
-    logger.warning(NotImplementedError(warning_message))
+if IFLOGGER:
+    IFLOGGER.warning(NotImplementedError(warning_message))
else:
warn(warning_message, category=Warning)
return cm_method
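One behavioral detail survives the rename in get_connectome_method: if the logger is unavailable (falsy), the warning falls back to the stdlib warnings machinery. A condensed sketch of that fallback under an assumed helper name (_warn_not_implemented is hypothetical; the guard itself is verbatim from the diff):

    from warnings import warn

    from CPAC.utils.monitoring import IFLOGGER

    def _warn_not_implemented(warning_message: str) -> None:
        """Prefer the shared Nipype interface logger; else stdlib warnings."""
        if IFLOGGER:
            IFLOGGER.warning(NotImplementedError(warning_message))
        else:
            warn(warning_message, category=Warning)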
4 changes: 0 additions & 4 deletions CPAC/func_preproc/func_ingress.py
@@ -14,12 +14,8 @@

# You should have received a copy of the GNU Lesser General Public
# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
-from nipype import logging

from CPAC.utils.datasource import create_func_datasource, ingress_func_metadata

-logger = logging.getLogger("nipype.workflow")


def connect_func_ingress(
workflow, strat_list, c, sub_dict, subject_id, input_creds_path, unique_id=None
3 changes: 0 additions & 3 deletions CPAC/func_preproc/func_preproc.py
@@ -16,10 +16,7 @@
# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
"""Functional preprocessing."""
# pylint: disable=ungrouped-imports,wrong-import-order,wrong-import-position
-from nipype import logging
from nipype.interfaces import afni, ants, fsl, utility as util

-logger = logging.getLogger("nipype.workflow")
from nipype.interfaces.afni import preprocess, utils as afni_utils

from CPAC.func_preproc.utils import nullify
2 changes: 1 addition & 1 deletion CPAC/image_utils/tests/test_smooth.py
@@ -25,7 +25,7 @@
import CPAC.utils.test_init as test_utils
from CPAC.utils.test_mocks import configuration_strategy_mock

logger = getLogger("nipype.workflow")
logger = getLogger("CPAC.image_utils.tests")
basicConfig(format="%(message)s", level=INFO)


6 changes: 2 additions & 4 deletions CPAC/isc/isc.py
@@ -17,11 +17,9 @@
import numpy as np

from CPAC.utils import correlation
-from CPAC.utils.monitoring.custom_logging import getLogger
+from CPAC.utils.monitoring import IFLOGGER
from .utils import p_from_null, phase_randomize

-logger = getLogger("nipype.workflow")


def isc(D, std=None, collapse_subj=True):
assert D.ndim == 3
@@ -65,7 +63,7 @@ def isc_significance(ISC, min_null, max_null, two_sided=False):


def isc_permutation(permutation, D, masked, collapse_subj=True, random_state=0):
logger.info("Permutation %s", permutation)
IFLOGGER.info("Permutation %s", permutation)
min_null = 1
max_null = -1

6 changes: 2 additions & 4 deletions CPAC/isc/isfc.py
@@ -17,11 +17,9 @@
import numpy as np

from CPAC.utils import correlation
-from CPAC.utils.monitoring.custom_logging import getLogger
+from CPAC.utils.monitoring import IFLOGGER
from .utils import p_from_null, phase_randomize

-logger = getLogger("nipype.workflow")


def isfc(D, std=None, collapse_subj=True):
assert D.ndim == 3
@@ -67,7 +65,7 @@ def isfc_significance(ISFC, min_null, max_null, two_sided=False):


def isfc_permutation(permutation, D, masked, collapse_subj=True, random_state=0):
logger.info("Permutation %s", permutation)
IFLOGGER.info("Permutation %s", permutation)
min_null = 1
max_null = -1

8 changes: 3 additions & 5 deletions CPAC/longitudinal_pipeline/longitudinal_preproc.py
@@ -26,11 +26,9 @@
import nipype.interfaces.utility as util

from CPAC.pipeline import nipype_pipeline_engine as pe
-from CPAC.utils.monitoring.custom_logging import getLogger
+from CPAC.utils.monitoring import IFLOGGER
from CPAC.utils.nifti_utils import nifti_image_input

-logger = getLogger("nipype.workflow")


def read_ants_mat(ants_mat_file):
"""Read a matrix, returning (translation) and (other transformations) matrices."""
@@ -162,7 +160,7 @@ def template_convergence(
msg = f"template_convergence: matrix type {mat_type} does not exist"
raise ValueError(msg)
distance = norm_transformations(translation, oth_transform)
logger.info("distance = %s", abs(distance))
IFLOGGER.info("distance = %s", abs(distance))

return abs(distance) <= convergence_threshold

@@ -451,7 +449,7 @@ def template_creation_flirt(
convergence_threshold = np.finfo(np.float64).eps

if len(input_brain_list) == 1 or len(input_skull_list) == 1:
-logger.warning(
+IFLOGGER.warning(
"input_brain_list or input_skull_list contains only 1 image, "
"no need to calculate template"
)
3 changes: 0 additions & 3 deletions CPAC/longitudinal_pipeline/longitudinal_workflow.py
@@ -17,7 +17,6 @@
# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
import os

-from nipype import logging
from nipype.interfaces import fsl
import nipype.interfaces.io as nio
from indi_aws import aws_utils
@@ -47,8 +46,6 @@
from CPAC.utils.strategy import Strategy
from CPAC.utils.utils import check_config_resources, check_prov_for_regtool

-logger = logging.getLogger("nipype.workflow")


@nodeblock(
name="mask_T1w_longitudinal_template",
3 changes: 0 additions & 3 deletions CPAC/network_centrality/pipeline.py
@@ -14,7 +14,6 @@

# You should have received a copy of the GNU Lesser General Public
# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
-from nipype import logging
from nipype.interfaces import fsl

from CPAC.network_centrality.network_centrality import create_centrality_wf
@@ -23,8 +22,6 @@
from CPAC.pipeline.nodeblock import nodeblock
from CPAC.pipeline.schema import valid_options

-logger = logging.getLogger("nipype.workflow")


def connect_centrality_workflow(
workflow,
6 changes: 2 additions & 4 deletions CPAC/network_centrality/utils.py
@@ -24,11 +24,9 @@
from CPAC.pipeline.schema import valid_options
from CPAC.utils.docs import docstring_parameter
from CPAC.utils.interfaces.function import Function
-from CPAC.utils.monitoring.custom_logging import getLogger
+from CPAC.utils.monitoring import IFLOGGER
from CPAC.utils.typing import ITERABLE, LIST

-logger = getLogger("nipype.workflow")


def convert_pvalue_to_r(datafile, p_value, two_tailed=False):
"""
@@ -395,7 +393,7 @@ class ThresholdError(ValueError):
def __init__(self, threshold_option, threshold):
self.threshold_option = threshold_option
self.threshold = threshold
-logger.error("%s", type(threshold))
+IFLOGGER.error("%s", type(threshold))
self.message = f"For '{threshold_option}', threshold value must be "
if threshold_option in ("Significance threshold", "Sparsity threshold"):
self.message += "a positive number greater than 0 "