Skip to content

Commit 440233c

Browse files
committed
fix for python 3.12 to display surface plots and fix whitespace issues
with outputs in triple quotes.
1 parent a2c016b commit 440233c

File tree

8 files changed

+165
-112
lines changed

8 files changed

+165
-112
lines changed

CHANGELOG.md

+36
Original file line numberDiff line numberDiff line change
@@ -41,6 +41,42 @@ noted in the changelog (i.e new functions or parameters, changes in parameter de
4141
- *.patch* : Contains no new features, simply fixes any identified bugs.
4242
- *.postN* : Consists of only metadata-related changes, such as updates to type hints or doc strings/documentation.
4343

44+
## [0.11.1] - 2024-06-23
45+
### 🐛 Fixes
46+
- Fix for Python 3.12 when using `CAP.caps2surf()`.
47+
- Changes to pathlib.py in Python 3.12 result in an error message format change. The error message now includes
48+
quotes (e.g., "not 'Nifti1Image'") instead of the previous format without quotes ("not Nifti1Image"). This issue
49+
arises when using ``neuromaps.transforms.mni152_to_fslr`` within CAP.caps2surf() as neuromaps captures the error as a
50+
string and checks if "not Nifti1Image" is in the string to determine if the input is a NIfTI image. As a patch,
51+
if the error occurs, a temporary .nii.gz file is created, the statistical image is saved to this file, and it is
52+
used as input for ``neuromaps.transforms.mni152_to_fslr``. The temporary file is deleted after use. Below is the code
53+
implementing this fix.
54+
55+
```python3
56+
# Fix for python 3.12, saving stat_map so that it is path instead of a NifTI
57+
try:
58+
gii_lh, gii_rh = mni152_to_fslr(stat_map, method=method, fslr_density=fslr_density)
59+
except TypeError:
60+
# Create temp
61+
temp_nifti = tempfile.NamedTemporaryFile(delete=False, suffix=".nii.gz")
62+
warnings.warn(textwrap.dedent(f"""
63+
Error potentially due to change in pathlib.py in python 3.12 causing the error
64+
message to output as "not 'Nifti1Image'" instead of "not Nifti1Image", which
65+
neuromaps uses to determine if the input is a Nifti1Image object.
66+
Converting stat_map into a temporary nii.gz file (which will be automatically
67+
deleted afterwards) at {temp_nifti.name}
68+
"""))
69+
# Ensure file is closed
70+
temp_nifti.close()
71+
# Save temporary nifti to temp file
72+
nib.save(stat_map, temp_nifti.name)
73+
gii_lh, gii_rh = mni152_to_fslr(temp_nifti.name, method=method, fslr_density=fslr_density)
74+
# Delete
75+
os.unlink(temp_nifti.name)
76+
```
77+
- Final patch is for strings in triple quotes. The standard textwrap module is used to remove the indentations at each
78+
new line.
79+
4480
## [0.11.0.post2] - 2024-06-22
4581
### 💻 Metadata
4682
- Very minor explanation added to `CAP.calculate_metrics()` regarding using individual dictionaries from merged

neurocaps/__init__.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -2,4 +2,4 @@
22

33
__all__=["analysis", "extraction"]
44
# Version in a single place
5-
__version__ = "0.11.0.post2"
5+
__version__ = "0.11.1"

neurocaps/_utils/_check_parcel_approach.py

+13-13
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
"""Internal function for checking the validity of parcel_approach."""
2-
import copy, os, re, warnings
2+
import copy, os, re, textwrap, warnings
33
from nilearn import datasets
44

55
def _check_parcel_approach(parcel_approach, call = "TimeseriesExtractor"):
@@ -15,22 +15,22 @@ def _check_parcel_approach(parcel_approach, call = "TimeseriesExtractor"):
1515
"rh": [5]}}}}
1616

1717
if not isinstance(parcel_approach,dict) or isinstance(parcel_approach,dict) and len(parcel_approach) > 0 and not isinstance(parcel_approach[list(parcel_approach)[0]],dict):
18-
raise ValueError(f"""
18+
raise ValueError(textwrap.dedent(f"""
1919
Please include a valid `parcel_approach` in one of the following dictionary
2020
formats for 'Schaefer' or 'AAL' {valid_parcel_dict}
21-
""")
21+
"""))
2222

2323
if len(parcel_approach) > 1:
24-
raise ValueError(f"""
24+
raise ValueError(textwrap.dedent(f"""
2525
Only one parcellation approach can be selected.
2626
Example format of `parcel_approach`: {valid_parcel_dict}
27-
""")
27+
"""))
2828

2929
if "Schaefer" not in parcel_approach and "AAL" not in parcel_approach and "Custom" not in parcel_approach:
30-
raise ValueError(f"""
30+
raise ValueError(textwrap.dedent(f"""
3131
Please include a valid `parcel_approach` in one of the following formats for
3232
'Schaefer', 'AAL', or 'Custom': {valid_parcel_dict}
33-
""")
33+
"""))
3434

3535
if "Schaefer" in parcel_approach:
3636
if "n_rois" not in parcel_approach["Schaefer"]:
@@ -72,30 +72,30 @@ def _check_parcel_approach(parcel_approach, call = "TimeseriesExtractor"):
7272

7373
if "Custom" in parcel_approach:
7474
if call == "TimeseriesExtractor" and "maps" not in parcel_approach["Custom"]:
75-
raise ValueError(f"""
75+
raise ValueError(textwrap.dedent(f"""
7676
For `Custom` parcel_approach, a nested key-value pair containing the key 'maps' with the
7777
value being a string specifying the location of the parcellation is needed.
7878
Example: {valid_parcel_dict['Custom']}
79-
""")
79+
"""))
8080
check_subkeys = ["nodes" in parcel_approach["Custom"], "regions" in parcel_approach["Custom"]]
8181
if not all(check_subkeys):
8282
missing_subkeys = [["nodes", "regions"][x] for x,y in enumerate(check_subkeys) if y is False]
8383
error_message = f"The following sub-keys haven't been detected {missing_subkeys}"
8484
if call == "TimeseriesExtractor":
85-
warnings.warn(f"""
85+
warnings.warn(textwrap.dedent(f"""
8686
{error_message}.
8787
These labels are not needed for timeseries extraction but are needed for future
88-
timeseries or CAPs plotting.""")
88+
timeseries or CAPs plotting."""))
8989
else:
9090
custom_example = {"Custom": {"nodes": ["LH_Vis1", "LH_Vis2", "LH_Hippocampus",
9191
"RH_Vis1", "RH_Vis2", "RH_Hippocampus"],
9292
"regions": {"Vis" : {"lh": [0,1],
9393
"rh": [3,4]}},
9494
"Hippocampus": {"lh": [2],"rh": [5]}}}
95-
raise ValueError(f"""
95+
raise ValueError(textwrap.dedent(f"""
9696
{error_message}.
9797
These subkeys are needed for plotting. Please reassign `parcel_approach` using
98-
`self.parcel_approach` and refer to the example structure: {custom_example}""")
98+
`self.parcel_approach` and refer to the example structure: {custom_example}"""))
9999
if call == "TimeseriesExtractor" and not os.path.isfile(parcel_approach["Custom"]["maps"]):
100100
raise ValueError("Please specify the location to the custom parcellation to be used.")
101101

neurocaps/_utils/_timeseriesextractor_internals/_check_confound_names.py

+5-5
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
"""Internal Function for checking confound names"""
22

3-
import warnings
3+
import textwrap, warnings
44
def _check_confound_names(high_pass, specified_confound_names, n_acompcor_separate):
55
if specified_confound_names is None:
66
if high_pass:
@@ -26,17 +26,17 @@ def _check_confound_names(high_pass, specified_confound_names, n_acompcor_separa
2626
if len(confound_names) > len(check_confounds):
2727
removed_confounds = [element for element in confound_names if element not in check_confounds]
2828
if specified_confound_names:
29-
warnings.warn(f"""
29+
warnings.warn(textwrap.dedent(f"""
3030
Since `n_acompcor_separate` has been specified, specified acompcor components in
3131
`confound_names` will be disregarded and replaced with the first {n_acompcor_separate}
3232
components of the white matter and cerebrospinal fluid masks for each participant.
3333
The following components will not be used {removed_confounds}
34-
""")
34+
"""))
3535
confound_names = check_confounds
3636

37-
print(f"""
37+
print(textwrap.dedent(f"""
3838
List of confound regressors that will be used during timeseries extraction if available in confound
3939
dataframe: {confound_names}
40-
""", flush=True)
40+
"""), flush=True)
4141

4242
return confound_names

neurocaps/_utils/_timeseriesextractor_internals/_extract_timeseries.py

+5-5
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
"""Internal function to extract timeseries with or without multiprocessing"""
22

3-
import copy, json, math, os, warnings
3+
import copy, json, math, os, textwrap, warnings
44
import numpy as np, pandas as pd
55
from nilearn.maskers import NiftiLabelsMasker
66
from nilearn.image import index_img, load_img
@@ -79,10 +79,10 @@ def _extract_timeseries(subj_id, nifti_files, mask_files, event_files, confound_
7979
fd_array = confound_df["framewise_displacement"].fillna(0).values
8080
else:
8181
censor = False
82-
warnings.warn(f"""For subject {subj_id}, `fd_threshold` specified but 'framewise_displacement' is
82+
warnings.warn(textwrap.dedent(f"""For subject {subj_id}, `fd_threshold` specified but 'framewise_displacement' is
8383
not a column in the confound dataframe so removal of volumes after nuisance
8484
regression will not be done.
85-
""")
85+
"""))
8686
else:
8787
censor = False
8888

@@ -149,10 +149,10 @@ def _extract_timeseries(subj_id, nifti_files, mask_files, event_files, confound_
149149
timeseries = np.delete(timeseries, censor_volumes, axis=0)
150150

151151
if timeseries.shape[0] == 0:
152-
warnings.warn(f"""
152+
warnings.warn(textwrap.dedent(f"""
153153
Subject {subj_id} timeseries is empty for {run}. Most likely due to condition not
154154
existing or TRs corresponding to the condition being removed by `dummy_scans`.
155-
""")
155+
"""))
156156
else:
157157
subject_timeseries[subj_id].update({run_id: timeseries})
158158

neurocaps/_utils/_timeseriesextractor_internals/_timeseriesextractorgetter.py

+3-3
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
"""# A class which is responsible for accessing all TimeseriesExtractorGetter and to keep track of all
22
attributes in TimeSeriesExtractor"""
3-
import copy
3+
import copy, textwrap
44
import numpy as np
55
from .._check_parcel_approach import _check_parcel_approach
66
from .._pickle_to_dict import _convert_pickle_to_dict
@@ -52,11 +52,11 @@ def subject_timeseries(self):
5252

5353
@subject_timeseries.setter
5454
def subject_timeseries(self, subject_dict):
55-
error_message = """
55+
error_message = textwrap.dedent("""
5656
A valid pickle file/be a nested dictionary where the first level is the subject id, second level
5757
is the run number in the form of 'run-#', and the final level is the timeseries as a numpy
5858
array.
59-
"""
59+
""")
6060
if isinstance(subject_dict, str) and subject_dict.endswith(".pkl"):
6161
self._subject_timeseries = _convert_pickle_to_dict(subject_dict)
6262
elif isinstance(subject_dict, dict):

0 commit comments

Comments
 (0)