From 329c74d63a1e1fb963d8e01035e36617acfaaec9 Mon Sep 17 00:00:00 2001 From: rciric Date: Sat, 19 Jan 2019 11:18:52 -0800 Subject: [PATCH 01/36] add variance-driven component selection, return component metadata --- nipype/algorithms/confounds.py | 81 ++++++++++++++++++++++++++-------- 1 file changed, 63 insertions(+), 18 deletions(-) diff --git a/nipype/algorithms/confounds.py b/nipype/algorithms/confounds.py index 79c0b96f4e..96ee500fda 100644 --- a/nipype/algorithms/confounds.py +++ b/nipype/algorithms/confounds.py @@ -1139,15 +1139,29 @@ def combine_mask_files(mask_files, mask_method=None, mask_index=None): return [img] -def compute_noise_components(imgseries, mask_images, num_components, - filter_type, degree, period_cut, repetition_time): +def compute_noise_components(imgseries, mask_images, num_components=0.5, + filter_type=False, degree=0, period_cut=128, + repetition_time=None, failure_mode='error'): """Compute the noise components from the imgseries for each mask - imgseries: a nibabel img - mask_images: a list of nibabel images - num_components: number of noise components to return - filter_type: type off filter to apply to time series before computing - noise components. + Parameters + ---------- + imgseries: nibabel NIfTI object + Time series data to be decomposed. + mask_images: list + List of nibabel images. Time series data from `img_series` is subset + according to the spatial extent of each mask, and the subset data is + then decomposed using principal component analysis. Masks should be + coextensive with either anatomical or spatial noise ROIs. + num_components: float + Number of noise components to return. If this is a decimal value + between 0 and 1, then `create_noise_components` will instead return + the smallest number of components necessary to explain the indicated + fraction of variance. If `num_components` is -1, then all + components will be returned. + filter_type: str + Type of filter to apply to time series before computing + noise components. 'polynomial' - Legendre polynomial basis 'cosine' - Discrete cosine (DCT) basis False - None (mean-removal only) @@ -1158,16 +1172,20 @@ def compute_noise_components(imgseries, mask_images, num_components, period_cut: minimum period (in sec) for DCT high-pass filter repetition_time: time (in sec) between volume acquisitions - returns: - - components: a numpy array - basis: a numpy array containing the (non-constant) filter regressors - + Outputs + ------- + components: numpy array + Numpy array containing the requested set of noise components + basis: numpy array + Numpy array containing the (non-constant) filter regressors + metadata: dict(numpy array) + Dictionary of eigenvalues, fractional explained variances, and + cumulative explained variances. """ components = None basis = np.array([]) - for img in mask_images: - mask = img.get_data().astype(np.bool) + for i, img in enumerate(mask_images): + mask = img.get_data().astype(np.bool).squeeze() if imgseries.shape[:3] != mask.shape: raise ValueError( 'Inputs for CompCor, timeseries and mask, do not have ' @@ -1201,20 +1219,47 @@ def compute_noise_components(imgseries, mask_images, num_components, # "The covariance matrix C = MMT was constructed and decomposed into its # principal components using a singular value decomposition." 
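# [Editor's aside -- an illustrative sketch, not part of the patch. For the
# demeaned, variance-normalized matrix M above (timepoints x voxels), the
# singular values s give each principal component's fraction of explained
# variance as s**2 / sum(s**2), and np.searchsorted over their cumulative
# sum yields the smallest component count reaching a variance threshold,
# which is what the hunk below implements. All names here are hypothetical.
import numpy as np

rng = np.random.RandomState(0)
M_demo = rng.randn(50, 200)                      # 50 timepoints, 200 voxels
_, s_demo, _ = np.linalg.svd(M_demo, full_matrices=False)
varexp_demo = s_demo ** 2 / np.sum(s_demo ** 2)  # fractions, sum to 1.0
n_keep_demo = np.searchsorted(np.cumsum(varexp_demo), 0.5) + 1  # >=50% var
# End editor's aside.]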
try: - u, _, _ = fallback_svd(M, full_matrices=False) + u, s, _ = fallback_svd(M, full_matrices=False) except np.linalg.LinAlgError: if self.inputs.failure_mode == 'error': raise - u = np.ones((M.shape[0], num_components), dtype=np.float32) * np.nan + if num_components >= 1: + u = np.empty((M.shape[0], num_components), + dtype=np.float32) * np.nan + else: + continue + + variance_explained = np.array([value**2/np.sum(s**2) for value in s]) + cumulative_variance_explained = np.cumsum(variance_explained) + if 0 < num_components < 1: + num_components = np.searchsorted(cumulative_variance_explained, + num_components) + 1 + elif num_components == -1: + num_components = len(s) if components is None: components = u[:, :num_components] + metadata = { + 'mask': np.array([i] * len(s)), + 'singular_values': s, + 'variance_explained': variance_explained, + 'cumulative_variance_explained': cumulative_variance_explained + } else: components = np.hstack((components, u[:, :num_components])) - if components is None and num_components > 0: + metadata['mask'] = np.hstack((metadata['mask'], [i] * len(s))) + metadata['singular_values'] = ( + np.hstack((metadata['singular_values'], s))) + metadata['variance_explained'] = ( + np.hstack((metadata['variance_explained'], + variance_explained))) + metadata['cumulative_variance_explained'] = ( + np.hstack((metadata['cumulative_variance_explained'], + cumulative_variance_explained))) + if components is None and num_components != 0: if self.inputs.failure_mode == 'error': raise ValueError('No components found') components = np.ones((M.shape[0], num_components), dtype=np.float32) * np.nan - return components, basis + return components, basis, metadata def _compute_tSTD(M, x, axis=0): From 17f3e120b5cb48e14000a43203828c78c23b76d0 Mon Sep 17 00:00:00 2001 From: rciric Date: Sat, 19 Jan 2019 12:32:20 -0800 Subject: [PATCH 02/36] expose metadata to interface, fix component selection for multiple masks --- nipype/algorithms/confounds.py | 67 +++++++++++++++++++++++++--------- 1 file changed, 49 insertions(+), 18 deletions(-) diff --git a/nipype/algorithms/confounds.py b/nipype/algorithms/confounds.py index 96ee500fda..c8c8c1ac1d 100644 --- a/nipype/algorithms/confounds.py +++ b/nipype/algorithms/confounds.py @@ -390,7 +390,16 @@ class CompCorInputSpec(BaseInterfaceInputSpec): 'components_file.txt', usedefault=True, desc='Filename to store physiological components') - num_components = traits.Int(6, usedefault=True) # 6 for BOLD, 4 for ASL + num_components = traits.Float(6, usedefault=True, + desc='Number of components to return from the decomposition.' + 'If `num_components` is a positive integer, then ' + '`num_components` components will be retained. If ' + '`num_components` is a fractional value between 0 and 1, then ' + 'the number of components retained will be equal to the minimum ' + 'number of components necessary to explain the provided ' + 'fraction of variance in the masked time series. 
If ' + '`num_components` is -1, then all components will be retained.') + # 6 for BOLD, 4 for ASL pre_filter = traits.Enum( 'polynomial', 'cosine', @@ -418,6 +427,8 @@ class CompCorInputSpec(BaseInterfaceInputSpec): 'unspecified') save_pre_filter = traits.Either( traits.Bool, File, desc='Save pre-filter basis as text file') + save_metadata = traits.Either( + traits.Bool, File, desc='Save component metadata as text file') ignore_initial_volumes = traits.Range( low=0, usedefault=True, @@ -433,6 +444,7 @@ class CompCorOutputSpec(TraitedSpec): components_file = File( exists=True, desc='text file containing the noise components') pre_filter_file = File(desc='text file containing high-pass filter basis') + metadata_file = File(desc='text file containing component metadata') class CompCor(BaseInterface): @@ -548,7 +560,7 @@ def _run_interface(self, runtime): '{} cannot detect repetition time from image - ' 'Set the repetition_time input'.format(self._header)) - components, filter_basis = compute_noise_components( + components, filter_basis, metadata = compute_noise_components( imgseries.get_data(), mask_images, self.inputs.num_components, self.inputs.pre_filter, degree, self.inputs.high_pass_cutoff, TR) @@ -597,6 +609,16 @@ def _run_interface(self, runtime): header='\t'.join(header), comments='') + if self.inputs.save_metadata: + metadata_file = self._list_outputs()['metadata_file'] + np.savetxt( + metadata_file, + np.vstack(metadata.values()).T, + fmt=['%s', b'%.10f', b'%.10f', b'%.10f'], + delimiter='\t', + header='\t'.join(list(metadata.keys())), + comments='') + return runtime def _process_masks(self, mask_images, timeseries=None): @@ -613,6 +635,12 @@ def _list_outputs(self): save_pre_filter = os.path.abspath('pre_filter.tsv') outputs['pre_filter_file'] = save_pre_filter + save_metadata = self.inputs.save_metadata + if save_metadata: + if isinstance(save_metadata, bool): + save_metadata = os.path.abspath('component_metadata.tsv') + outputs['metadata_file'] = save_metadata + return outputs def _make_headers(self, num_col): @@ -1139,7 +1167,7 @@ def combine_mask_files(mask_files, mask_method=None, mask_index=None): return [img] -def compute_noise_components(imgseries, mask_images, num_components=0.5, +def compute_noise_components(imgseries, mask_images, components_criterion=0.5, filter_type=False, degree=0, period_cut=128, repetition_time=None, failure_mode='error'): """Compute the noise components from the imgseries for each mask @@ -1153,11 +1181,11 @@ def compute_noise_components(imgseries, mask_images, num_components=0.5, according to the spatial extent of each mask, and the subset data is then decomposed using principal component analysis. Masks should be coextensive with either anatomical or spatial noise ROIs. - num_components: float + components_criterion: float Number of noise components to return. If this is a decimal value between 0 and 1, then `create_noise_components` will instead return the smallest number of components necessary to explain the indicated - fraction of variance. If `num_components` is -1, then all + fraction of variance. If `components_criterion` is -1, then all components will be returned. 
filter_type: str Type of filter to apply to time series before computing @@ -1204,38 +1232,40 @@ def compute_noise_components(imgseries, mask_images, num_components=0.5, voxel_timecourses, repetition_time, period_cut) elif filter_type in ('polynomial', False): # from paper: - # "The constant and linear trends of the columns in the matrix M were - # removed [prior to ...]" + # "The constant and linear trends of the columns in the matrix M + # were removed [prior to ...]" voxel_timecourses, basis = regress_poly(degree, voxel_timecourses) - # "Voxel time series from the noise ROI (either anatomical or tSTD) were - # placed in a matrix M of size Nxm, with time along the row dimension - # and voxels along the column dimension." + # "Voxel time series from the noise ROI (either anatomical or tSTD) + # were placed in a matrix M of size Nxm, with time along the row + # dimension and voxels along the column dimension." M = voxel_timecourses.T # "[... were removed] prior to column-wise variance normalization." M = M / _compute_tSTD(M, 1.) - # "The covariance matrix C = MMT was constructed and decomposed into its - # principal components using a singular value decomposition." + # "The covariance matrix C = MMT was constructed and decomposed into + # its principal components using a singular value decomposition." try: u, s, _ = fallback_svd(M, full_matrices=False) except np.linalg.LinAlgError: if self.inputs.failure_mode == 'error': raise - if num_components >= 1: - u = np.empty((M.shape[0], num_components), + if components_criterion >= 1: + u = np.empty((M.shape[0], components_criterion), dtype=np.float32) * np.nan else: continue variance_explained = np.array([value**2/np.sum(s**2) for value in s]) cumulative_variance_explained = np.cumsum(variance_explained) - if 0 < num_components < 1: + if 0 < components_criterion < 1: num_components = np.searchsorted(cumulative_variance_explained, - num_components) + 1 - elif num_components == -1: + components_criterion) + 1 + elif components_criterion == -1: num_components = len(s) + else: + num_components = components_criterion if components is None: components = u[:, :num_components] metadata = { @@ -1258,7 +1288,8 @@ def compute_noise_components(imgseries, mask_images, num_components=0.5, if components is None and num_components != 0: if self.inputs.failure_mode == 'error': raise ValueError('No components found') - components = np.ones((M.shape[0], num_components), dtype=np.float32) * np.nan + components = np.ones((M.shape[0], num_components), + dtype=np.float32) * np.nan return components, basis, metadata From 114e6d43f244881136a5d53aff768525d1d987f1 Mon Sep 17 00:00:00 2001 From: rciric Date: Sat, 19 Jan 2019 12:35:42 -0800 Subject: [PATCH 03/36] propagate failure mode if provided --- nipype/algorithms/confounds.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/nipype/algorithms/confounds.py b/nipype/algorithms/confounds.py index c8c8c1ac1d..3d06fdd6ee 100644 --- a/nipype/algorithms/confounds.py +++ b/nipype/algorithms/confounds.py @@ -562,7 +562,8 @@ def _run_interface(self, runtime): components, filter_basis, metadata = compute_noise_components( imgseries.get_data(), mask_images, self.inputs.num_components, - self.inputs.pre_filter, degree, self.inputs.high_pass_cutoff, TR) + self.inputs.pre_filter, degree, self.inputs.high_pass_cutoff, TR, + self.inputs.failure_mode) if skip_vols: old_comp = components From 6f4fc19749d8a11864bb6ff20f728424cf83e716 Mon Sep 17 00:00:00 2001 From: rciric Date: Sat, 19 Jan 2019 13:34:56 -0800 Subject: 
[PATCH 04/36] allow mask naming in metadata --- nipype/algorithms/confounds.py | 28 ++++++++++++++++++---------- 1 file changed, 18 insertions(+), 10 deletions(-) diff --git a/nipype/algorithms/confounds.py b/nipype/algorithms/confounds.py index 3d06fdd6ee..7c2ada89fb 100644 --- a/nipype/algorithms/confounds.py +++ b/nipype/algorithms/confounds.py @@ -386,6 +386,10 @@ class CompCorInputSpec(BaseInterfaceInputSpec): requires=['mask_files'], desc=('Position of mask in `mask_files` to use - ' 'first is the default.')) + mask_names = traits.List(traits.Str, + desc='Names for provided masks (for printing into metadata). ' + 'If provided, it must be as long as the final mask list ' + '(after any merge and indexing operations).') components_file = traits.Str( 'components_file.txt', usedefault=True, @@ -563,7 +567,7 @@ def _run_interface(self, runtime): components, filter_basis, metadata = compute_noise_components( imgseries.get_data(), mask_images, self.inputs.num_components, self.inputs.pre_filter, degree, self.inputs.high_pass_cutoff, TR, - self.inputs.failure_mode) + self.inputs.failure_mode, self.inputs.mask_names) if skip_vols: old_comp = components @@ -612,13 +616,11 @@ def _run_interface(self, runtime): if self.inputs.save_metadata: metadata_file = self._list_outputs()['metadata_file'] - np.savetxt( - metadata_file, - np.vstack(metadata.values()).T, - fmt=['%s', b'%.10f', b'%.10f', b'%.10f'], - delimiter='\t', - header='\t'.join(list(metadata.keys())), - comments='') + with open(metadata_file, 'w') as f: + f.write('{}\t{}\t{}\t{}\n'.format(*list(metadata.keys()))) + for i in zip(*metadata.values()): + f.write('{0[0]}\t{0[1]:.10f}\t{0[2]:.10f}\t' + '{0[3]:.10f}\n'.format(i)) return runtime @@ -650,6 +652,9 @@ def _make_headers(self, num_col): headers = ['{}{:02d}'.format(header, i) for i in range(num_col)] return '\t'.join(headers) + def _print_metadata(self, x, f): + f.write('{0[0]}\t{0[1]:.10f}\t{0[2]:.10f}\t{0[3]:.10f}\n'.format(x)) + class ACompCor(CompCor): """ @@ -1170,7 +1175,8 @@ def combine_mask_files(mask_files, mask_method=None, mask_index=None): def compute_noise_components(imgseries, mask_images, components_criterion=0.5, filter_type=False, degree=0, period_cut=128, - repetition_time=None, failure_mode='error'): + repetition_time=None, failure_mode='error', + mask_names=''): """Compute the noise components from the imgseries for each mask Parameters @@ -1213,7 +1219,9 @@ def compute_noise_components(imgseries, mask_images, components_criterion=0.5, """ components = None basis = np.array([]) - for i, img in enumerate(mask_images): + if not mask_names: + mask_names = range(len(mask_images)) + for i, img in zip(mask_names, mask_images): mask = img.get_data().astype(np.bool).squeeze() if imgseries.shape[:3] != mask.shape: raise ValueError( From 4d2208e874591a83867dfd2db528b4add7617451 Mon Sep 17 00:00:00 2001 From: rciric Date: Sat, 19 Jan 2019 14:48:56 -0800 Subject: [PATCH 05/36] add contributor --- .zenodo.json | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/.zenodo.json b/.zenodo.json index 9fccdcc316..f773f8854f 100644 --- a/.zenodo.json +++ b/.zenodo.json @@ -301,6 +301,11 @@ "name": "Liem, Franz", "orcid": "0000-0003-0646-4810" }, + { + "affiliation": "Stanford University", + "name": "Ciric, Rastko", + "orcid": "0000-0001-6347-7939" + }, { "affiliation": "UniversityHospital Heidelberg, Germany", "name": "Kleesiek, Jens" From bfbde82ee16c0803b16bf3152035d951a33e0305 Mon Sep 17 00:00:00 2001 From: rciric Date: Sat, 19 Jan 2019 15:10:40 -0800 Subject: [PATCH 06/36] 
include component index in metadata --- nipype/algorithms/confounds.py | 37 +++++++++++++++++----------------- 1 file changed, 19 insertions(+), 18 deletions(-) diff --git a/nipype/algorithms/confounds.py b/nipype/algorithms/confounds.py index 7c2ada89fb..0b85e4b3c1 100644 --- a/nipype/algorithms/confounds.py +++ b/nipype/algorithms/confounds.py @@ -578,12 +578,13 @@ def _run_interface(self, runtime): components_file = os.path.join(os.getcwd(), self.inputs.components_file) + components_header = self._make_headers(components.shape[1]) np.savetxt( components_file, components, fmt=b"%.10f", delimiter='\t', - header=self._make_headers(components.shape[1]), + header='\t'.join(components_header), comments='') if self.inputs.pre_filter and self.inputs.save_pre_filter: @@ -617,10 +618,11 @@ def _run_interface(self, runtime): if self.inputs.save_metadata: metadata_file = self._list_outputs()['metadata_file'] with open(metadata_file, 'w') as f: - f.write('{}\t{}\t{}\t{}\n'.format(*list(metadata.keys()))) - for i in zip(*metadata.values()): - f.write('{0[0]}\t{0[1]:.10f}\t{0[2]:.10f}\t' - '{0[3]:.10f}\n'.format(i)) + f.write('{}\t{}\t{}\t{}\t{}\n'.format('component', + *list(metadata.keys()))) + for i in zip(components_header, *metadata.values()): + f.write('{0[0]}\t{0[1]}\t{0[2]:.10f}\t' + '{0[3]:.10f}\t{0[4]:.10f}\n'.format(i)) return runtime @@ -650,10 +652,7 @@ def _make_headers(self, num_col): header = self.inputs.header_prefix if \ isdefined(self.inputs.header_prefix) else self._header headers = ['{}{:02d}'.format(header, i) for i in range(num_col)] - return '\t'.join(headers) - - def _print_metadata(self, x, f): - f.write('{0[0]}\t{0[1]:.10f}\t{0[2]:.10f}\t{0[3]:.10f}\n'.format(x)) + return headers class ACompCor(CompCor): @@ -1274,26 +1273,28 @@ def compute_noise_components(imgseries, mask_images, components_criterion=0.5, elif components_criterion == -1: num_components = len(s) else: - num_components = components_criterion + num_components = int(components_criterion) if components is None: components = u[:, :num_components] metadata = { - 'mask': np.array([i] * len(s)), - 'singular_values': s, - 'variance_explained': variance_explained, - 'cumulative_variance_explained': cumulative_variance_explained + 'mask': np.array([i] * num_components), + 'singular_values': s[:num_components], + 'variance_explained': variance_explained[:num_components], + 'cumulative_variance_explained': + cumulative_variance_explained[:num_components] } else: components = np.hstack((components, u[:, :num_components])) - metadata['mask'] = np.hstack((metadata['mask'], [i] * len(s))) + metadata['mask'] = np.hstack((metadata['mask'], + [i] * num_components)) metadata['singular_values'] = ( - np.hstack((metadata['singular_values'], s))) + np.hstack((metadata['singular_values'], s[:num_components]))) metadata['variance_explained'] = ( np.hstack((metadata['variance_explained'], - variance_explained))) + variance_explained[:num_components]))) metadata['cumulative_variance_explained'] = ( np.hstack((metadata['cumulative_variance_explained'], - cumulative_variance_explained))) + cumulative_variance_explained[:num_components]))) if components is None and num_components != 0: if self.inputs.failure_mode == 'error': raise ValueError('No components found') From 037387901e825bd410acce5cdfa3baa47e637f23 Mon Sep 17 00:00:00 2001 From: rciric Date: Sat, 19 Jan 2019 15:26:48 -0800 Subject: [PATCH 07/36] update autotests and make naming consistent --- nipype/algorithms/confounds.py | 6 +++--- nipype/algorithms/tests/test_auto_ACompCor.py 
| 3 +++ nipype/algorithms/tests/test_auto_TCompCor.py | 3 +++ 3 files changed, 9 insertions(+), 3 deletions(-) diff --git a/nipype/algorithms/confounds.py b/nipype/algorithms/confounds.py index 0b85e4b3c1..d946c9df5a 100644 --- a/nipype/algorithms/confounds.py +++ b/nipype/algorithms/confounds.py @@ -1278,7 +1278,7 @@ def compute_noise_components(imgseries, mask_images, components_criterion=0.5, components = u[:, :num_components] metadata = { 'mask': np.array([i] * num_components), - 'singular_values': s[:num_components], + 'singular_value': s[:num_components], 'variance_explained': variance_explained[:num_components], 'cumulative_variance_explained': cumulative_variance_explained[:num_components] @@ -1287,8 +1287,8 @@ def compute_noise_components(imgseries, mask_images, components_criterion=0.5, components = np.hstack((components, u[:, :num_components])) metadata['mask'] = np.hstack((metadata['mask'], [i] * num_components)) - metadata['singular_values'] = ( - np.hstack((metadata['singular_values'], s[:num_components]))) + metadata['singular_value'] = ( + np.hstack((metadata['singular_value'], s[:num_components]))) metadata['variance_explained'] = ( np.hstack((metadata['variance_explained'], variance_explained[:num_components]))) diff --git a/nipype/algorithms/tests/test_auto_ACompCor.py b/nipype/algorithms/tests/test_auto_ACompCor.py index 235d15da9e..ae69dac4e5 100644 --- a/nipype/algorithms/tests/test_auto_ACompCor.py +++ b/nipype/algorithms/tests/test_auto_ACompCor.py @@ -15,6 +15,7 @@ def test_ACompCor_inputs(): requires=['mask_files'], xor=['merge_method'], ), + mask_names=dict(), merge_method=dict( requires=['mask_files'], xor=['mask_index'], @@ -24,6 +25,7 @@ def test_ACompCor_inputs(): realigned_file=dict(mandatory=True, ), regress_poly_degree=dict(usedefault=True, ), repetition_time=dict(), + save_metadata=dict(), save_pre_filter=dict(), use_regress_poly=dict( deprecated='0.15.0', @@ -38,6 +40,7 @@ def test_ACompCor_inputs(): def test_ACompCor_outputs(): output_map = dict( components_file=dict(), + metadata_file=dict(), pre_filter_file=dict(), ) outputs = ACompCor.output_spec() diff --git a/nipype/algorithms/tests/test_auto_TCompCor.py b/nipype/algorithms/tests/test_auto_TCompCor.py index 59a5b84f76..484f7ca5f4 100644 --- a/nipype/algorithms/tests/test_auto_TCompCor.py +++ b/nipype/algorithms/tests/test_auto_TCompCor.py @@ -15,6 +15,7 @@ def test_TCompCor_inputs(): requires=['mask_files'], xor=['merge_method'], ), + mask_names=dict(), merge_method=dict( requires=['mask_files'], xor=['mask_index'], @@ -25,6 +26,7 @@ def test_TCompCor_inputs(): realigned_file=dict(mandatory=True, ), regress_poly_degree=dict(usedefault=True, ), repetition_time=dict(), + save_metadata=dict(), save_pre_filter=dict(), use_regress_poly=dict( deprecated='0.15.0', @@ -40,6 +42,7 @@ def test_TCompCor_outputs(): output_map = dict( components_file=dict(), high_variance_masks=dict(), + metadata_file=dict(), pre_filter_file=dict(), ) outputs = TCompCor.output_spec() From 2c551d0534f49fabbc03a97bb8ea7d9ac1b61465 Mon Sep 17 00:00:00 2001 From: rciric Date: Sun, 20 Jan 2019 15:06:47 -0800 Subject: [PATCH 08/36] (CompCor) more intuitive interface following review from @effigies --- nipype/algorithms/confounds.py | 40 ++++++++++++++----- nipype/algorithms/tests/test_auto_ACompCor.py | 3 +- nipype/algorithms/tests/test_auto_TCompCor.py | 3 +- 3 files changed, 34 insertions(+), 12 deletions(-) diff --git a/nipype/algorithms/confounds.py b/nipype/algorithms/confounds.py index d946c9df5a..baf96e9fd8 100644 --- 
a/nipype/algorithms/confounds.py +++ b/nipype/algorithms/confounds.py @@ -394,16 +394,22 @@ class CompCorInputSpec(BaseInterfaceInputSpec): 'components_file.txt', usedefault=True, desc='Filename to store physiological components') - num_components = traits.Float(6, usedefault=True, - desc='Number of components to return from the decomposition.' - 'If `num_components` is a positive integer, then ' - '`num_components` components will be retained. If ' - '`num_components` is a fractional value between 0 and 1, then ' + num_components = traits.Either('all', traits.Int, + xor=['variance_threshold'], + desc='Number of components to return from the decomposition. If ' + '`num_components` is `all`, then all components will be ' + 'retained.') + # 6 for BOLD, 4 for ASL + # automatically instantiated to 6 in CompCor below if neither + # `num_components` nor `variance_threshold` is defined (for + # backward compatibility) + variance_threshold = traits.Float(xor=['num_components'], + desc='Select the number of components to be returned automatically ' + 'based on their ability to explain variance in the dataset. ' + '`variance_threshold` is a fractional value between 0 and 1; ' 'the number of components retained will be equal to the minimum ' 'number of components necessary to explain the provided ' - 'fraction of variance in the masked time series. If ' - '`num_components` is -1, then all components will be retained.') - # 6 for BOLD, 4 for ASL + 'fraction of variance in the masked time series.') pre_filter = traits.Enum( 'polynomial', 'cosine', @@ -564,8 +570,20 @@ def _run_interface(self, runtime): '{} cannot detect repetition time from image - ' 'Set the repetition_time input'.format(self._header)) + if isdefined(self.inputs.variance_threshold): + components_criterion = self.inputs.variance_threshold + elif isdefined(self.inputs.num_components): + components_criterion = self.inputs.num_components + else: + components_criterion = 6 + IFLOGGER.warning('`num_components` and `variance_threshold` are ' + 'not defined. Setting number of components to 6 ' + 'for backward compatibility. Please set either ' + '`num_components` or `variance_threshold`, as ' + 'this feature may be deprecated in the future.') + components, filter_basis, metadata = compute_noise_components( - imgseries.get_data(), mask_images, self.inputs.num_components, + imgseries.get_data(), mask_images, components_criterion, self.inputs.pre_filter, degree, self.inputs.high_pass_cutoff, TR, self.inputs.failure_mode, self.inputs.mask_names) @@ -1191,7 +1209,7 @@ def compute_noise_components(imgseries, mask_images, components_criterion=0.5, Number of noise components to return. If this is a decimal value between 0 and 1, then `create_noise_components` will instead return the smallest number of components necessary to explain the indicated - fraction of variance. If `components_criterion` is -1, then all + fraction of variance. If `components_criterion` is `all`, then all components will be returned. 
filter_type: str Type of filter to apply to time series before computing @@ -1218,6 +1236,8 @@ def compute_noise_components(imgseries, mask_images, components_criterion=0.5, """ components = None basis = np.array([]) + if components_criterion == 'all': + components_criterion = -1 if not mask_names: mask_names = range(len(mask_images)) for i, img in zip(mask_names, mask_images): diff --git a/nipype/algorithms/tests/test_auto_ACompCor.py b/nipype/algorithms/tests/test_auto_ACompCor.py index ae69dac4e5..95a9f51a88 100644 --- a/nipype/algorithms/tests/test_auto_ACompCor.py +++ b/nipype/algorithms/tests/test_auto_ACompCor.py @@ -20,7 +20,7 @@ def test_ACompCor_inputs(): requires=['mask_files'], xor=['mask_index'], ), - num_components=dict(usedefault=True, ), + num_components=dict(xor=['variance_threshold'], ), pre_filter=dict(usedefault=True, ), realigned_file=dict(mandatory=True, ), regress_poly_degree=dict(usedefault=True, ), @@ -31,6 +31,7 @@ def test_ACompCor_inputs(): deprecated='0.15.0', new_name='pre_filter', ), + variance_threshold=dict(xor=['num_components'], ), ) inputs = ACompCor.input_spec() diff --git a/nipype/algorithms/tests/test_auto_TCompCor.py b/nipype/algorithms/tests/test_auto_TCompCor.py index 484f7ca5f4..1e94ef4241 100644 --- a/nipype/algorithms/tests/test_auto_TCompCor.py +++ b/nipype/algorithms/tests/test_auto_TCompCor.py @@ -20,7 +20,7 @@ def test_TCompCor_inputs(): requires=['mask_files'], xor=['mask_index'], ), - num_components=dict(usedefault=True, ), + num_components=dict(xor=['variance_threshold'], ), percentile_threshold=dict(usedefault=True, ), pre_filter=dict(usedefault=True, ), realigned_file=dict(mandatory=True, ), @@ -32,6 +32,7 @@ def test_TCompCor_inputs(): deprecated='0.15.0', new_name='pre_filter', ), + variance_threshold=dict(xor=['num_components'], ), ) inputs = TCompCor.input_spec() From a53cd46c4c004f358115e3b04dcb59b7a498d5e0 Mon Sep 17 00:00:00 2001 From: rciric Date: Sun, 20 Jan 2019 15:41:45 -0800 Subject: [PATCH 09/36] manually set `num_components` in test --- nipype/algorithms/tests/test_CompCor.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/nipype/algorithms/tests/test_CompCor.py b/nipype/algorithms/tests/test_CompCor.py index 488ad3c960..b4c4739cfb 100644 --- a/nipype/algorithms/tests/test_CompCor.py +++ b/nipype/algorithms/tests/test_CompCor.py @@ -61,7 +61,7 @@ def test_compcor(self): 'aCompCor') def test_tcompcor(self): - ccinterface = TCompCor( + ccinterface = TCompCor(num_components=6, realigned_file=self.realigned_file, percentile_threshold=0.75) self.run_cc(ccinterface, [['-0.1114536190', '-0.4632908609'], [ '0.4566907310', '0.6983205193' @@ -70,7 +70,8 @@ def test_tcompcor(self): ], ['-0.1342351356', '0.1407855119']], 'tCompCor') def test_tcompcor_no_percentile(self): - ccinterface = TCompCor(realigned_file=self.realigned_file) + ccinterface = TCompCor(num_components=6, + realigned_file=self.realigned_file) ccinterface.run() mask = nb.load('mask_000.nii.gz').get_data() @@ -160,7 +161,6 @@ def run_cc(self, assert ccresult.outputs.components_file == expected_file assert os.path.exists(expected_file) assert os.path.getsize(expected_file) > 0 - assert ccinterface.inputs.num_components == 6 with open(ccresult.outputs.components_file, 'r') as components_file: expected_n_components = min(ccinterface.inputs.num_components, From b811d47a07eb729cc83b983cab6e9981278ae3c4 Mon Sep 17 00:00:00 2001 From: rciric Date: Sun, 20 Jan 2019 15:41:45 -0800 Subject: [PATCH 10/36] manually set `num_components` in test --- 
nipype/algorithms/tests/test_CompCor.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/nipype/algorithms/tests/test_CompCor.py b/nipype/algorithms/tests/test_CompCor.py index 488ad3c960..b20bc3d2c4 100644 --- a/nipype/algorithms/tests/test_CompCor.py +++ b/nipype/algorithms/tests/test_CompCor.py @@ -48,12 +48,14 @@ def test_compcor(self): self.run_cc( CompCor( + num_components=6, realigned_file=self.realigned_file, mask_files=self.mask_files, mask_index=0), expected_components) self.run_cc( ACompCor( + num_components=6, realigned_file=self.realigned_file, mask_files=self.mask_files, mask_index=0, @@ -61,7 +63,7 @@ def test_compcor(self): 'aCompCor') def test_tcompcor(self): - ccinterface = TCompCor( + ccinterface = TCompCor(num_components=6, realigned_file=self.realigned_file, percentile_threshold=0.75) self.run_cc(ccinterface, [['-0.1114536190', '-0.4632908609'], [ '0.4566907310', '0.6983205193' @@ -70,7 +72,8 @@ def test_tcompcor(self): ], ['-0.1342351356', '0.1407855119']], 'tCompCor') def test_tcompcor_no_percentile(self): - ccinterface = TCompCor(realigned_file=self.realigned_file) + ccinterface = TCompCor(num_components=6, + realigned_file=self.realigned_file) ccinterface.run() mask = nb.load('mask_000.nii.gz').get_data() @@ -80,6 +83,7 @@ def test_tcompcor_no_percentile(self): def test_compcor_no_regress_poly(self): self.run_cc( CompCor( + num_components=6, realigned_file=self.realigned_file, mask_files=self.mask_files, mask_index=0, @@ -160,7 +164,6 @@ def run_cc(self, assert ccresult.outputs.components_file == expected_file assert os.path.exists(expected_file) assert os.path.getsize(expected_file) > 0 - assert ccinterface.inputs.num_components == 6 with open(ccresult.outputs.components_file, 'r') as components_file: expected_n_components = min(ccinterface.inputs.num_components, From 577e3957ee11039664b2f3ccdf68fedcd564c928 Mon Sep 17 00:00:00 2001 From: rciric Date: Sun, 20 Jan 2019 18:48:25 -0800 Subject: [PATCH 11/36] add unit test for variance_threshold condition --- nipype/algorithms/tests/test_CompCor.py | 56 ++++++++++++++++++++++--- 1 file changed, 50 insertions(+), 6 deletions(-) diff --git a/nipype/algorithms/tests/test_CompCor.py b/nipype/algorithms/tests/test_CompCor.py index b20bc3d2c4..5fbe641a0b 100644 --- a/nipype/algorithms/tests/test_CompCor.py +++ b/nipype/algorithms/tests/test_CompCor.py @@ -1,6 +1,7 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os +import re import nibabel as nb import numpy as np @@ -62,6 +63,30 @@ def test_compcor(self): components_file='acc_components_file'), expected_components, 'aCompCor') + def test_compcor_variance_threshold_and_metadata(self): + expected_components = [['-0.2027150345', '-0.4954813834'], + ['0.2565929051', '0.7866217875'], + ['-0.3550986008', '-0.0089784905'], + ['0.7512786244', '-0.3599828482'], + ['-0.4500578942', '0.0778209345']] + expected_metadata = { + 'component': 'CompCor00', + 'mask': '0', + 'singular_value': '4.0720553036', + 'variance_explained': '0.5527211465', + 'cumulative_variance_explained': '0.5527211465' + } + ccinterface = CompCor( + variance_threshold=0.7, + realigned_file=self.realigned_file, + mask_files=self.mask_files, + mask_index=1, + save_metadata=True) + self.run_cc(ccinterface=ccinterface, + expected_components=expected_components, + expected_n_components=2, + expected_metadata=expected_metadata) + def test_tcompcor(self): ccinterface = TCompCor(num_components=6, 
realigned_file=self.realigned_file, percentile_threshold=0.75) @@ -155,7 +180,9 @@ def test_tcompcor_multi_mask_no_index(self): def run_cc(self, ccinterface, expected_components, - expected_header='CompCor'): + expected_header='CompCor', + expected_n_components=None, + expected_metadata=None): # run ccresult = ccinterface.run() @@ -166,10 +193,12 @@ def run_cc(self, assert os.path.getsize(expected_file) > 0 with open(ccresult.outputs.components_file, 'r') as components_file: - expected_n_components = min(ccinterface.inputs.num_components, - self.fake_data.shape[3]) + if expected_n_components is None: + expected_n_components = min(ccinterface.inputs.num_components, + self.fake_data.shape[3]) - components_data = [line.split('\t') for line in components_file] + components_data = [re.sub('\n', '', line).split('\t') + for line in components_file] # the first item will be '#', we can throw it out header = components_data.pop(0) @@ -183,9 +212,24 @@ def run_cc(self, num_got_timepoints = len(components_data) assert num_got_timepoints == self.fake_data.shape[3] for index, timepoint in enumerate(components_data): - assert (len(timepoint) == ccinterface.inputs.num_components - or len(timepoint) == self.fake_data.shape[3]) + assert (len(timepoint) == expected_n_components) assert timepoint[:2] == expected_components[index] + + if ccinterface.inputs.save_metadata: + expected_metadata_file = ( + ccinterface._list_outputs()['metadata_file']) + assert ccresult.outputs.metadata_file == expected_metadata_file + assert os.path.exists(expected_metadata_file) + assert os.path.getsize(expected_metadata_file) > 0 + + with open(ccresult.outputs.metadata_file, 'r') as metadata_file: + components_metadata = [re.sub('\n', '', line).split('\t') + for line in metadata_file] + components_metadata = {i: j for i, j in + zip(components_metadata[0], + components_metadata[1])} + assert components_metadata == expected_metadata + return ccresult @staticmethod From 66c754079b11e7b5d54d1b2c331f0f282971ba79 Mon Sep 17 00:00:00 2001 From: rciric Date: Sun, 20 Jan 2019 21:24:43 -0800 Subject: [PATCH 12/36] provide mask name to circumvent test failure --- nipype/algorithms/tests/test_CompCor.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/nipype/algorithms/tests/test_CompCor.py b/nipype/algorithms/tests/test_CompCor.py index 5fbe641a0b..53c0959664 100644 --- a/nipype/algorithms/tests/test_CompCor.py +++ b/nipype/algorithms/tests/test_CompCor.py @@ -71,7 +71,7 @@ def test_compcor_variance_threshold_and_metadata(self): ['-0.4500578942', '0.0778209345']] expected_metadata = { 'component': 'CompCor00', - 'mask': '0', + 'mask': 'mask', 'singular_value': '4.0720553036', 'variance_explained': '0.5527211465', 'cumulative_variance_explained': '0.5527211465' @@ -80,6 +80,7 @@ def test_compcor_variance_threshold_and_metadata(self): variance_threshold=0.7, realigned_file=self.realigned_file, mask_files=self.mask_files, + mask_names=['mask'], mask_index=1, save_metadata=True) self.run_cc(ccinterface=ccinterface, From 0bb009616be85caa154301ae170806542cf388d1 Mon Sep 17 00:00:00 2001 From: rciric Date: Mon, 21 Jan 2019 00:29:56 -0800 Subject: [PATCH 13/36] (CompCor) try using an OrderedDict for metadata --- nipype/algorithms/confounds.py | 20 ++++++++++++-------- 1 file changed, 12 insertions(+), 8 deletions(-) diff --git a/nipype/algorithms/confounds.py b/nipype/algorithms/confounds.py index baf96e9fd8..657a201de5 100644 --- a/nipype/algorithms/confounds.py +++ b/nipype/algorithms/confounds.py @@ -8,6 +8,11 @@ 
absolute_import) from builtins import range +# Py2 compat: http://python-future.org/compatible_idioms.html#collections-counter-and-ordereddict +from future import standard_library +standard_library.install_aliases() +from collections import OrderedDict + import os import os.path as op @@ -1230,7 +1235,7 @@ def compute_noise_components(imgseries, mask_images, components_criterion=0.5, Numpy array containing the requested set of noise components basis: numpy array Numpy array containing the (non-constant) filter regressors - metadata: dict(numpy array) + metadata: OrderedDict{str: numpy array} Dictionary of eigenvalues, fractional explained variances, and cumulative explained variances. """ @@ -1296,13 +1301,12 @@ def compute_noise_components(imgseries, mask_images, components_criterion=0.5, num_components = int(components_criterion) if components is None: components = u[:, :num_components] - metadata = { - 'mask': np.array([i] * num_components), - 'singular_value': s[:num_components], - 'variance_explained': variance_explained[:num_components], - 'cumulative_variance_explained': - cumulative_variance_explained[:num_components] - } + metadata = OrderedDict() + metadata['mask'] = np.array([i] * num_components) + metadata['singular_value'] = s[:num_components] + metadata['variance_explained'] = variance_explained[:num_components] + metadata['cumulative_variance_explained'] = ( + cumulative_variance_explained[:num_components]) else: components = np.hstack((components, u[:, :num_components])) metadata['mask'] = np.hstack((metadata['mask'], From 94bea4a14284801270c2e778e2950270a93b9d98 Mon Sep 17 00:00:00 2001 From: rciric Date: Sun, 3 Feb 2019 23:49:13 -0800 Subject: [PATCH 14/36] first-pass refactor CompCor to SimpleInterface --- nipype/algorithms/confounds.py | 46 +++++++++++++++------------------- 1 file changed, 20 insertions(+), 26 deletions(-) diff --git a/nipype/algorithms/confounds.py b/nipype/algorithms/confounds.py index 657a201de5..712996f0f0 100644 --- a/nipype/algorithms/confounds.py +++ b/nipype/algorithms/confounds.py @@ -24,7 +24,8 @@ from ..external.due import BibTeX from ..interfaces.base import (traits, TraitedSpec, BaseInterface, BaseInterfaceInputSpec, File, isdefined, - InputMultiPath, OutputMultiPath) + InputMultiPath, OutputMultiPath, + SimpleInterface) from ..utils import NUMPY_MMAP from ..utils.misc import normalize_mc_params @@ -462,7 +463,7 @@ class CompCorOutputSpec(TraitedSpec): metadata_file = File(desc='text file containing component metadata') -class CompCor(BaseInterface): +class CompCor(SimpleInterface): """ Interface with core CompCor computation, used in aCompCor and tCompCor @@ -609,9 +610,16 @@ def _run_interface(self, runtime): delimiter='\t', header='\t'.join(components_header), comments='') + self._results['components_file'] = components_file - if self.inputs.pre_filter and self.inputs.save_pre_filter: - pre_filter_file = self._list_outputs()['pre_filter_file'] + save_pre_filter = self.inputs.save_pre_filter + if save_pre_filter: + if isinstance(save_pre_filter, bool): + pre_filter_file = os.path.abspath('pre_filter.tsv') + else: + pre_filter_file = save_pre_filter + self._results['pre_filter_file'] = pre_filter_file + if self.inputs.pre_filter and save_pre_filter: ftype = { 'polynomial': 'Legendre', 'cosine': 'Cosine' @@ -638,8 +646,13 @@ def _run_interface(self, runtime): header='\t'.join(header), comments='') - if self.inputs.save_metadata: - metadata_file = self._list_outputs()['metadata_file'] + save_metadata = self.inputs.save_metadata + if 
save_metadata: + if isinstance(save_metadata, bool): + metadata_file = os.path.abspath('component_metadata.tsv') + else: + metadata_file = save_metadata + self._results['metadata_file'] = metadata_file with open(metadata_file, 'w') as f: f.write('{}\t{}\t{}\t{}\t{}\n'.format('component', *list(metadata.keys()))) @@ -652,25 +665,6 @@ def _run_interface(self, runtime): def _process_masks(self, mask_images, timeseries=None): return mask_images - def _list_outputs(self): - outputs = self._outputs().get() - outputs['components_file'] = os.path.abspath( - self.inputs.components_file) - - save_pre_filter = self.inputs.save_pre_filter - if save_pre_filter: - if isinstance(save_pre_filter, bool): - save_pre_filter = os.path.abspath('pre_filter.tsv') - outputs['pre_filter_file'] = save_pre_filter - - save_metadata = self.inputs.save_metadata - if save_metadata: - if isinstance(save_metadata, bool): - save_metadata = os.path.abspath('component_metadata.tsv') - outputs['metadata_file'] = save_metadata - - return outputs - def _make_headers(self, num_col): header = self.inputs.header_prefix if \ isdefined(self.inputs.header_prefix) else self._header @@ -1229,7 +1223,7 @@ def compute_noise_components(imgseries, mask_images, components_criterion=0.5, period_cut: minimum period (in sec) for DCT high-pass filter repetition_time: time (in sec) between volume acquisitions - Outputs + Returns ------- components: numpy array Numpy array containing the requested set of noise components From addb0e9ece8d0563e2e6c3af2e389bf011dbf839 Mon Sep 17 00:00:00 2001 From: rciric Date: Tue, 5 Feb 2019 23:06:42 -0800 Subject: [PATCH 15/36] return metadata for all components regardless of retention criterion --- nipype/algorithms/confounds.py | 29 ++++++++++++++++++++--------- 1 file changed, 20 insertions(+), 9 deletions(-) diff --git a/nipype/algorithms/confounds.py b/nipype/algorithms/confounds.py index 712996f0f0..0039b3a6e3 100644 --- a/nipype/algorithms/confounds.py +++ b/nipype/algorithms/confounds.py @@ -652,11 +652,15 @@ def _run_interface(self, runtime): metadata_file = os.path.abspath('component_metadata.tsv') else: metadata_file = save_metadata + components_names = np.array(dtype='object_', + object=['dropped' for i in range(len(metadata['mask']))]) + components_names[np.where(metadata['retained'])] = ( + components_header) self._results['metadata_file'] = metadata_file with open(metadata_file, 'w') as f: f.write('{}\t{}\t{}\t{}\t{}\n'.format('component', *list(metadata.keys()))) - for i in zip(components_header, *metadata.values()): + for i in zip(components_names, *metadata.values()): f.write('{0[0]}\t{0[1]}\t{0[2]:.10f}\t' '{0[3]:.10f}\t{0[4]:.10f}\n'.format(i)) @@ -1296,23 +1300,30 @@ def compute_noise_components(imgseries, mask_images, components_criterion=0.5, if components is None: components = u[:, :num_components] metadata = OrderedDict() - metadata['mask'] = np.array([i] * num_components) - metadata['singular_value'] = s[:num_components] - metadata['variance_explained'] = variance_explained[:num_components] + metadata['mask'] = np.array([i] * len(s)) + metadata['singular_value'] = s + metadata['variance_explained'] = variance_explained metadata['cumulative_variance_explained'] = ( - cumulative_variance_explained[:num_components]) + cumulative_variance_explained) + metadata['retained'] = np.array( + [True if i < num_components + else False for i in range(len(s))], dtype='bool') else: components = np.hstack((components, u[:, :num_components])) metadata['mask'] = np.hstack((metadata['mask'], - [i] * 
num_components)) + [i] * len(s))) metadata['singular_value'] = ( - np.hstack((metadata['singular_value'], s[:num_components]))) + np.hstack((metadata['singular_value'], s))) metadata['variance_explained'] = ( np.hstack((metadata['variance_explained'], - variance_explained[:num_components]))) + variance_explained))) metadata['cumulative_variance_explained'] = ( np.hstack((metadata['cumulative_variance_explained'], - cumulative_variance_explained[:num_components]))) + cumulative_variance_explained))) + metadata['retained'] = np.hstack((metadata['retained'], + [True if i < num_components + else False + for i in range(len(s))])) if components is None and num_components != 0: if self.inputs.failure_mode == 'error': raise ValueError('No components found') From b04c9ca59ebd3a69476268c797d7d79087cc8cb6 Mon Sep 17 00:00:00 2001 From: rciric Date: Fri, 8 Feb 2019 11:26:38 -0800 Subject: [PATCH 16/36] @oesteban: limit np array use, clean up conditionals, remove invalid obj --- nipype/algorithms/confounds.py | 24 +++++++++++++----------- 1 file changed, 13 insertions(+), 11 deletions(-) diff --git a/nipype/algorithms/confounds.py b/nipype/algorithms/confounds.py index 0039b3a6e3..d72d25c41d 100644 --- a/nipype/algorithms/confounds.py +++ b/nipype/algorithms/confounds.py @@ -400,7 +400,7 @@ class CompCorInputSpec(BaseInterfaceInputSpec): 'components_file.txt', usedefault=True, desc='Filename to store physiological components') - num_components = traits.Either('all', traits.Int, + num_components = traits.Either('all', traits.Range(low=1), xor=['variance_threshold'], desc='Number of components to return from the decomposition. If ' '`num_components` is `all`, then all components will be ' @@ -1280,7 +1280,7 @@ def compute_noise_components(imgseries, mask_images, components_criterion=0.5, try: u, s, _ = fallback_svd(M, full_matrices=False) except np.linalg.LinAlgError: - if self.inputs.failure_mode == 'error': + if failure_mode == 'error': raise if components_criterion >= 1: u = np.empty((M.shape[0], components_criterion), @@ -1288,26 +1288,28 @@ def compute_noise_components(imgseries, mask_images, components_criterion=0.5, else: continue - variance_explained = np.array([value**2/np.sum(s**2) for value in s]) + variance_explained = [value ** 2 / np.sum(s ** 2) for value in s] cumulative_variance_explained = np.cumsum(variance_explained) + + num_components = int(components_criterion) if 0 < components_criterion < 1: num_components = np.searchsorted(cumulative_variance_explained, components_criterion) + 1 elif components_criterion == -1: num_components = len(s) - else: - num_components = int(components_criterion) + + num_components = int(num_components) + # check whether num_components == 0, break if so if components is None: components = u[:, :num_components] metadata = OrderedDict() - metadata['mask'] = np.array([i] * len(s)) + metadata['mask'] = [i] * len(s) metadata['singular_value'] = s metadata['variance_explained'] = variance_explained metadata['cumulative_variance_explained'] = ( cumulative_variance_explained) - metadata['retained'] = np.array( - [True if i < num_components - else False for i in range(len(s))], dtype='bool') + metadata['retained'] = [ + i < num_components for i in range(len(s))] else: components = np.hstack((components, u[:, :num_components])) metadata['mask'] = np.hstack((metadata['mask'], @@ -1324,8 +1326,8 @@ def compute_noise_components(imgseries, mask_images, components_criterion=0.5, [True if i < num_components else False for i in range(len(s))])) - if components is None and 
num_components != 0: - if self.inputs.failure_mode == 'error': + if components is None: + if failure_mode == 'error': raise ValueError('No components found') components = np.ones((M.shape[0], num_components), dtype=np.float32) * np.nan From e957e87ddc7b741b46991e761f2f132ac674f86b Mon Sep 17 00:00:00 2001 From: rciric Date: Sat, 9 Feb 2019 02:29:28 -0800 Subject: [PATCH 17/36] less np array use; unique names for dropped components --- nipype/algorithms/confounds.py | 32 ++++++++++++++++---------------- 1 file changed, 16 insertions(+), 16 deletions(-) diff --git a/nipype/algorithms/confounds.py b/nipype/algorithms/confounds.py index d72d25c41d..6c6b7cd513 100644 --- a/nipype/algorithms/confounds.py +++ b/nipype/algorithms/confounds.py @@ -652,10 +652,13 @@ def _run_interface(self, runtime): metadata_file = os.path.abspath('component_metadata.tsv') else: metadata_file = save_metadata - components_names = np.array(dtype='object_', - object=['dropped' for i in range(len(metadata['mask']))]) - components_names[np.where(metadata['retained'])] = ( - components_header) + components_names = np.empty(len(metadata['mask']), + dtype='object_') + retained = np.where(metadata['retained']) + not_retained = np.where(np.logical_not(metadata['retained'])) + components_names[retained] = components_header + components_names[not_retained] = ([ + 'dropped{}'.format(i) for i in range(len(not_retained[0]))]) self._results['metadata_file'] = metadata_file with open(metadata_file, 'w') as f: f.write('{}\t{}\t{}\t{}\t{}\n'.format('component', @@ -1288,7 +1291,7 @@ def compute_noise_components(imgseries, mask_images, components_criterion=0.5, else: continue - variance_explained = [value ** 2 / np.sum(s ** 2) for value in s] + variance_explained = (s ** 2) / np.sum(s ** 2) cumulative_variance_explained = np.cumsum(variance_explained) num_components = int(components_criterion) @@ -1299,7 +1302,8 @@ def compute_noise_components(imgseries, mask_images, components_criterion=0.5, num_components = len(s) num_components = int(num_components) - # check whether num_components == 0, break if so + if num_components == 0: + break if components is None: components = u[:, :num_components] metadata = OrderedDict() @@ -1308,12 +1312,10 @@ def compute_noise_components(imgseries, mask_images, components_criterion=0.5, metadata['variance_explained'] = variance_explained metadata['cumulative_variance_explained'] = ( cumulative_variance_explained) - metadata['retained'] = [ - i < num_components for i in range(len(s))] + metadata['retained'] = [i < num_components for i in range(len(s))] else: components = np.hstack((components, u[:, :num_components])) - metadata['mask'] = np.hstack((metadata['mask'], - [i] * len(s))) + metadata['mask'] = metadata['mask'] + [i] * len(s) metadata['singular_value'] = ( np.hstack((metadata['singular_value'], s))) metadata['variance_explained'] = ( @@ -1322,15 +1324,13 @@ def compute_noise_components(imgseries, mask_images, components_criterion=0.5, metadata['cumulative_variance_explained'] = ( np.hstack((metadata['cumulative_variance_explained'], cumulative_variance_explained))) - metadata['retained'] = np.hstack((metadata['retained'], - [True if i < num_components - else False - for i in range(len(s))])) + metadata['retained'] = (metadata['retained'] + + [i < num_components + for i in range(len(s))]) if components is None: if failure_mode == 'error': raise ValueError('No components found') - components = np.ones((M.shape[0], num_components), - dtype=np.float32) * np.nan + components = np.full((M.shape[0], 
num_components), np.NaN) return components, basis, metadata From 797801e92731b5564b9d673ec2de3585587b6898 Mon Sep 17 00:00:00 2001 From: rciric Date: Sat, 9 Feb 2019 04:07:58 -0800 Subject: [PATCH 18/36] ensure absolute path to components file --- nipype/algorithms/confounds.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/nipype/algorithms/confounds.py b/nipype/algorithms/confounds.py index 6c6b7cd513..6a03d5830c 100644 --- a/nipype/algorithms/confounds.py +++ b/nipype/algorithms/confounds.py @@ -610,7 +610,8 @@ def _run_interface(self, runtime): delimiter='\t', header='\t'.join(components_header), comments='') - self._results['components_file'] = components_file + self._results['components_file'] = os.path.abspath( + self.inputs.components_file) save_pre_filter = self.inputs.save_pre_filter if save_pre_filter: From 67a3276aaaf0ca385c50f4ab23b26f7479deb120 Mon Sep 17 00:00:00 2001 From: rciric Date: Sat, 9 Feb 2019 04:31:28 -0800 Subject: [PATCH 19/36] (CompCor) try BaseInterface --- nipype/algorithms/confounds.py | 46 +++++++++++++++++++--------------- 1 file changed, 26 insertions(+), 20 deletions(-) diff --git a/nipype/algorithms/confounds.py b/nipype/algorithms/confounds.py index 6a03d5830c..db5e3cb00a 100644 --- a/nipype/algorithms/confounds.py +++ b/nipype/algorithms/confounds.py @@ -24,8 +24,7 @@ from ..external.due import BibTeX from ..interfaces.base import (traits, TraitedSpec, BaseInterface, BaseInterfaceInputSpec, File, isdefined, - InputMultiPath, OutputMultiPath, - SimpleInterface) + InputMultiPath, OutputMultiPath) from ..utils import NUMPY_MMAP from ..utils.misc import normalize_mc_params @@ -463,7 +462,7 @@ class CompCorOutputSpec(TraitedSpec): metadata_file = File(desc='text file containing component metadata') -class CompCor(SimpleInterface): +class CompCor(BaseInterface): """ Interface with core CompCor computation, used in aCompCor and tCompCor @@ -610,17 +609,9 @@ def _run_interface(self, runtime): delimiter='\t', header='\t'.join(components_header), comments='') - self._results['components_file'] = os.path.abspath( - self.inputs.components_file) - save_pre_filter = self.inputs.save_pre_filter - if save_pre_filter: - if isinstance(save_pre_filter, bool): - pre_filter_file = os.path.abspath('pre_filter.tsv') - else: - pre_filter_file = save_pre_filter - self._results['pre_filter_file'] = pre_filter_file - if self.inputs.pre_filter and save_pre_filter: + if self.inputs.pre_filter and self.inputs.save_pre_filter: + pre_filter_file = self._list_outputs()['pre_filter_file'] ftype = { 'polynomial': 'Legendre', 'cosine': 'Cosine' @@ -647,12 +638,8 @@ def _run_interface(self, runtime): header='\t'.join(header), comments='') - save_metadata = self.inputs.save_metadata - if save_metadata: - if isinstance(save_metadata, bool): - metadata_file = os.path.abspath('component_metadata.tsv') - else: - metadata_file = save_metadata + if self.inputs.save_metadata: + metadata_file = self._list_outputs()['metadata_file'] components_names = np.empty(len(metadata['mask']), dtype='object_') retained = np.where(metadata['retained']) @@ -660,7 +647,6 @@ def _run_interface(self, runtime): components_names[retained] = components_header components_names[not_retained] = ([ 'dropped{}'.format(i) for i in range(len(not_retained[0]))]) - self._results['metadata_file'] = metadata_file with open(metadata_file, 'w') as f: f.write('{}\t{}\t{}\t{}\t{}\n'.format('component', *list(metadata.keys()))) @@ -673,6 +659,26 @@ def _run_interface(self, runtime): def _process_masks(self, 
mask_images, timeseries=None): return mask_images + def _list_outputs(self): + + outputs = self._outputs().get() + outputs['components_file'] = os.path.abspath( + self.inputs.components_file) + + save_pre_filter = self.inputs.save_pre_filter + if save_pre_filter: + if isinstance(save_pre_filter, bool): + save_pre_filter = os.path.abspath('pre_filter.tsv') + outputs['pre_filter_file'] = save_pre_filter + + save_metadata = self.inputs.save_metadata + if save_metadata: + if isinstance(save_metadata, bool): + save_metadata = os.path.abspath('component_metadata.tsv') + outputs['metadata_file'] = save_metadata + + return outputs + def _make_headers(self, num_col): header = self.inputs.header_prefix if \ isdefined(self.inputs.header_prefix) else self._header From fe430f59330ff7b238cc3d5a85bcec412f627b1d Mon Sep 17 00:00:00 2001 From: rciric Date: Sat, 9 Feb 2019 04:45:52 -0800 Subject: [PATCH 20/36] ensure absolute path to components file --- nipype/algorithms/confounds.py | 46 +++++++++++++++------------------- 1 file changed, 20 insertions(+), 26 deletions(-) diff --git a/nipype/algorithms/confounds.py b/nipype/algorithms/confounds.py index db5e3cb00a..6a03d5830c 100644 --- a/nipype/algorithms/confounds.py +++ b/nipype/algorithms/confounds.py @@ -24,7 +24,8 @@ from ..external.due import BibTeX from ..interfaces.base import (traits, TraitedSpec, BaseInterface, BaseInterfaceInputSpec, File, isdefined, - InputMultiPath, OutputMultiPath) + InputMultiPath, OutputMultiPath, + SimpleInterface) from ..utils import NUMPY_MMAP from ..utils.misc import normalize_mc_params @@ -462,7 +463,7 @@ class CompCorOutputSpec(TraitedSpec): metadata_file = File(desc='text file containing component metadata') -class CompCor(BaseInterface): +class CompCor(SimpleInterface): """ Interface with core CompCor computation, used in aCompCor and tCompCor @@ -609,9 +610,17 @@ def _run_interface(self, runtime): delimiter='\t', header='\t'.join(components_header), comments='') + self._results['components_file'] = os.path.abspath( + self.inputs.components_file) - if self.inputs.pre_filter and self.inputs.save_pre_filter: - pre_filter_file = self._list_outputs()['pre_filter_file'] + save_pre_filter = self.inputs.save_pre_filter + if save_pre_filter: + if isinstance(save_pre_filter, bool): + pre_filter_file = os.path.abspath('pre_filter.tsv') + else: + pre_filter_file = save_pre_filter + self._results['pre_filter_file'] = pre_filter_file + if self.inputs.pre_filter and save_pre_filter: ftype = { 'polynomial': 'Legendre', 'cosine': 'Cosine' @@ -638,8 +647,12 @@ def _run_interface(self, runtime): header='\t'.join(header), comments='') - if self.inputs.save_metadata: - metadata_file = self._list_outputs()['metadata_file'] + save_metadata = self.inputs.save_metadata + if save_metadata: + if isinstance(save_metadata, bool): + metadata_file = os.path.abspath('component_metadata.tsv') + else: + metadata_file = save_metadata components_names = np.empty(len(metadata['mask']), dtype='object_') retained = np.where(metadata['retained']) @@ -647,6 +660,7 @@ def _run_interface(self, runtime): components_names[retained] = components_header components_names[not_retained] = ([ 'dropped{}'.format(i) for i in range(len(not_retained[0]))]) + self._results['metadata_file'] = metadata_file with open(metadata_file, 'w') as f: f.write('{}\t{}\t{}\t{}\t{}\n'.format('component', *list(metadata.keys()))) @@ -659,26 +673,6 @@ def _run_interface(self, runtime): def _process_masks(self, mask_images, timeseries=None): return mask_images - def _list_outputs(self): 
- - outputs = self._outputs().get() - outputs['components_file'] = os.path.abspath( - self.inputs.components_file) - - save_pre_filter = self.inputs.save_pre_filter - if save_pre_filter: - if isinstance(save_pre_filter, bool): - save_pre_filter = os.path.abspath('pre_filter.tsv') - outputs['pre_filter_file'] = save_pre_filter - - save_metadata = self.inputs.save_metadata - if save_metadata: - if isinstance(save_metadata, bool): - save_metadata = os.path.abspath('component_metadata.tsv') - outputs['metadata_file'] = save_metadata - - return outputs - def _make_headers(self, num_col): header = self.inputs.header_prefix if \ isdefined(self.inputs.header_prefix) else self._header From 1625bdbf66b1ae20565e6261334c6b57630c7785 Mon Sep 17 00:00:00 2001 From: rciric Date: Fri, 15 Feb 2019 10:58:46 -0800 Subject: [PATCH 21/36] update per @oesteban 's review --- nipype/algorithms/confounds.py | 101 +++++++++++++++------------------ 1 file changed, 46 insertions(+), 55 deletions(-) diff --git a/nipype/algorithms/confounds.py b/nipype/algorithms/confounds.py index 6a03d5830c..b8d344ab03 100644 --- a/nipype/algorithms/confounds.py +++ b/nipype/algorithms/confounds.py @@ -8,13 +8,9 @@ absolute_import) from builtins import range -# Py2 compat: http://python-future.org/compatible_idioms.html#collections-counter-and-ordereddict -from future import standard_library -standard_library.install_aliases() -from collections import OrderedDict - import os import os.path as op +from collections import OrderedDict import nibabel as nb import numpy as np @@ -615,44 +611,41 @@ def _run_interface(self, runtime): save_pre_filter = self.inputs.save_pre_filter if save_pre_filter: - if isinstance(save_pre_filter, bool): + self._results['pre_filter_file'] = save_pre_filter + if save_pre_filter is True: pre_filter_file = os.path.abspath('pre_filter.tsv') - else: - pre_filter_file = save_pre_filter - self._results['pre_filter_file'] = pre_filter_file - if self.inputs.pre_filter and save_pre_filter: - ftype = { - 'polynomial': 'Legendre', - 'cosine': 'Cosine' - }[self.inputs.pre_filter] - ncols = filter_basis.shape[1] if filter_basis.size > 0 else 0 - header = ['{}{:02d}'.format(ftype, i) for i in range(ncols)] - if skip_vols: - old_basis = filter_basis - # nrows defined above - filter_basis = np.zeros( - (nrows, ncols + skip_vols), dtype=filter_basis.dtype) - if old_basis.size > 0: - filter_basis[skip_vols:, :ncols] = old_basis - filter_basis[:skip_vols, -skip_vols:] = np.eye(skip_vols) - header.extend([ - 'NonSteadyStateOutlier{:02d}'.format(i) - for i in range(skip_vols) - ]) - np.savetxt( - pre_filter_file, - filter_basis, - fmt=b'%.10f', - delimiter='\t', - header='\t'.join(header), - comments='') - - save_metadata = self.inputs.save_metadata - if save_metadata: - if isinstance(save_metadata, bool): + if self.inputs.pre_filter: + ftype = { + 'polynomial': 'Legendre', + 'cosine': 'Cosine' + }[self.inputs.pre_filter] + ncols = filter_basis.shape[1] if filter_basis.size > 0 else 0 + header = ['{}{:02d}'.format(ftype, i) for i in range(ncols)] + if skip_vols: + old_basis = filter_basis + # nrows defined above + filter_basis = np.zeros( + (nrows, ncols + skip_vols), dtype=filter_basis.dtype) + if old_basis.size > 0: + filter_basis[skip_vols:, :ncols] = old_basis + filter_basis[:skip_vols, -skip_vols:] = np.eye(skip_vols) + header.extend([ + 'NonSteadyStateOutlier{:02d}'.format(i) + for i in range(skip_vols) + ]) + np.savetxt( + self._results['pre_filter_file'], + filter_basis, + fmt=b'%.10f', + delimiter='\t', + 
header='\t'.join(header), + comments='') + + metadata_file = self.inputs.save_metadata + if metadata_file: + self._results['metadata_file'] = metadata_file + if metadata_file is True: metadata_file = os.path.abspath('component_metadata.tsv') - else: - metadata_file = save_metadata components_names = np.empty(len(metadata['mask']), dtype='object_') retained = np.where(metadata['retained']) @@ -660,7 +653,6 @@ def _run_interface(self, runtime): components_names[retained] = components_header components_names[not_retained] = ([ 'dropped{}'.format(i) for i in range(len(not_retained[0]))]) - self._results['metadata_file'] = metadata_file with open(metadata_file, 'w') as f: f.write('{}\t{}\t{}\t{}\t{}\n'.format('component', *list(metadata.keys()))) @@ -1200,7 +1192,7 @@ def combine_mask_files(mask_files, mask_method=None, mask_index=None): def compute_noise_components(imgseries, mask_images, components_criterion=0.5, filter_type=False, degree=0, period_cut=128, repetition_time=None, failure_mode='error', - mask_names=''): + mask_names=None): """Compute the noise components from the imgseries for each mask Parameters @@ -1245,9 +1237,8 @@ def compute_noise_components(imgseries, mask_images, components_criterion=0.5, basis = np.array([]) if components_criterion == 'all': components_criterion = -1 - if not mask_names: - mask_names = range(len(mask_images)) - for i, img in zip(mask_names, mask_images): + mask_names = mask_names or range(len(mask_images)) + for name, img in zip(mask_names, mask_images): mask = img.get_data().astype(np.bool).squeeze() if imgseries.shape[:3] != mask.shape: raise ValueError( @@ -1267,20 +1258,20 @@ def compute_noise_components(imgseries, mask_images, components_criterion=0.5, voxel_timecourses, repetition_time, period_cut) elif filter_type in ('polynomial', False): # from paper: - # "The constant and linear trends of the columns in the matrix M - # were removed [prior to ...]" + # "The constant and linear trends of the columns in the matrix M were + # removed [prior to ...]" voxel_timecourses, basis = regress_poly(degree, voxel_timecourses) - # "Voxel time series from the noise ROI (either anatomical or tSTD) - # were placed in a matrix M of size Nxm, with time along the row - # dimension and voxels along the column dimension." + # "Voxel time series from the noise ROI (either anatomical or tSTD) were + # placed in a matrix M of size Nxm, with time along the row dimension + # and voxels along the column dimension." M = voxel_timecourses.T # "[... were removed] prior to column-wise variance normalization." M = M / _compute_tSTD(M, 1.) - # "The covariance matrix C = MMT was constructed and decomposed into - # its principal components using a singular value decomposition." + # "The covariance matrix C = MMT was constructed and decomposed into its + # principal components using a singular value decomposition." 
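# A minimal, self-contained sketch (illustrative only, not part of the
# patch) of the variance-driven selection applied below; `M_demo` and the
# 0.5 threshold are hypothetical stand-ins for the normalized matrix M and
# the requested variance fraction:
import numpy as np
M_demo = np.random.rand(50, 200)   # hypothetical time x voxel matrix
_, s_demo, _ = np.linalg.svd(M_demo, full_matrices=False)
variance_demo = (s_demo ** 2) / np.sum(s_demo ** 2)
cumulative_demo = np.cumsum(variance_demo)
# smallest number of components explaining at least half the variance
n_retained_demo = np.searchsorted(cumulative_demo, 0.5) + 1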
try: u, s, _ = fallback_svd(M, full_matrices=False) except np.linalg.LinAlgError: @@ -1308,7 +1299,7 @@ def compute_noise_components(imgseries, mask_images, components_criterion=0.5, if components is None: components = u[:, :num_components] metadata = OrderedDict() - metadata['mask'] = [i] * len(s) + metadata['mask'] = [name] * len(s) metadata['singular_value'] = s metadata['variance_explained'] = variance_explained metadata['cumulative_variance_explained'] = ( @@ -1316,7 +1307,7 @@ def compute_noise_components(imgseries, mask_images, components_criterion=0.5, metadata['retained'] = [i < num_components for i in range(len(s))] else: components = np.hstack((components, u[:, :num_components])) - metadata['mask'] = metadata['mask'] + [i] * len(s) + metadata['mask'] = metadata['mask'] + [name] * len(s) metadata['singular_value'] = ( np.hstack((metadata['singular_value'], s))) metadata['variance_explained'] = ( From 9afb3f5d92afe26676c70c28c049fda701ec1397 Mon Sep 17 00:00:00 2001 From: rciric Date: Fri, 15 Feb 2019 11:35:01 -0800 Subject: [PATCH 22/36] assign output to _results --- nipype/algorithms/confounds.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/nipype/algorithms/confounds.py b/nipype/algorithms/confounds.py index b8d344ab03..b02a7b1201 100644 --- a/nipype/algorithms/confounds.py +++ b/nipype/algorithms/confounds.py @@ -645,7 +645,8 @@ def _run_interface(self, runtime): if metadata_file: self._results['metadata_file'] = metadata_file if metadata_file is True: - metadata_file = os.path.abspath('component_metadata.tsv') + self._results['metadata_file'] = ( + os.path.abspath('component_metadata.tsv')) components_names = np.empty(len(metadata['mask']), dtype='object_') retained = np.where(metadata['retained']) From 689d064bf42b3660e49175ab1b79adc028edbdf8 Mon Sep 17 00:00:00 2001 From: rciric Date: Fri, 15 Feb 2019 11:43:44 -0800 Subject: [PATCH 23/36] assign output to _results --- nipype/algorithms/confounds.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nipype/algorithms/confounds.py b/nipype/algorithms/confounds.py index b02a7b1201..d7f38176cb 100644 --- a/nipype/algorithms/confounds.py +++ b/nipype/algorithms/confounds.py @@ -613,7 +613,7 @@ def _run_interface(self, runtime): if save_pre_filter: self._results['pre_filter_file'] = save_pre_filter if save_pre_filter is True: - pre_filter_file = os.path.abspath('pre_filter.tsv') + self._results['pre_filter_file'] = os.path.abspath('pre_filter.tsv') if self.inputs.pre_filter: ftype = { 'polynomial': 'Legendre', @@ -654,7 +654,7 @@ def _run_interface(self, runtime): components_names[retained] = components_header components_names[not_retained] = ([ 'dropped{}'.format(i) for i in range(len(not_retained[0]))]) - with open(metadata_file, 'w') as f: + with open(self._results['metadata_file'], 'w') as f: f.write('{}\t{}\t{}\t{}\t{}\n'.format('component', *list(metadata.keys()))) for i in zip(components_names, *metadata.values()): From f390bc64ca0db0a2a320f1f97a4e9a62b2202f3e Mon Sep 17 00:00:00 2001 From: oesteban Date: Fri, 15 Feb 2019 17:51:22 -0800 Subject: [PATCH 24/36] some fixes --- nipype/algorithms/confounds.py | 126 +++++++++++++++++---------------- 1 file changed, 66 insertions(+), 60 deletions(-) diff --git a/nipype/algorithms/confounds.py b/nipype/algorithms/confounds.py index d7f38176cb..871a512b34 100644 --- a/nipype/algorithms/confounds.py +++ b/nipype/algorithms/confounds.py @@ -388,7 +388,8 @@ class CompCorInputSpec(BaseInterfaceInputSpec): requires=['mask_files'], 
desc=('Position of mask in `mask_files` to use - ' 'first is the default.')) - mask_names = traits.List(traits.Str, + mask_names = traits.List( + traits.Str, desc='Names for provided masks (for printing into metadata). ' 'If provided, it must be as long as the final mask list ' '(after any merge and indexing operations).') @@ -396,16 +397,17 @@ class CompCorInputSpec(BaseInterfaceInputSpec): 'components_file.txt', usedefault=True, desc='Filename to store physiological components') - num_components = traits.Either('all', traits.Range(low=1), - xor=['variance_threshold'], + num_components = traits.Either( + 'all', traits.Range(low=1), xor=['variance_threshold'], desc='Number of components to return from the decomposition. If ' '`num_components` is `all`, then all components will be ' 'retained.') - # 6 for BOLD, 4 for ASL - # automatically instantiated to 6 in CompCor below if neither - # `num_components` nor `variance_threshold` is defined (for - # backward compatibility) - variance_threshold = traits.Float(xor=['num_components'], + # 6 for BOLD, 4 for ASL + # automatically instantiated to 6 in CompCor below if neither + # `num_components` nor `variance_threshold` is defined (for + # backward compatibility) + variance_threshold = traits.Range( + low=0.0, high=1.0, exclude_low=True, exclude_high=True, xor=['num_components'], desc='Select the number of components to be returned automatically ' 'based on their ability to explain variance in the dataset. ' '`variance_threshold` is a fractional value between 0 and 1; ' @@ -438,9 +440,11 @@ class CompCorInputSpec(BaseInterfaceInputSpec): desc='Repetition time (TR) of series - derived from image header if ' 'unspecified') save_pre_filter = traits.Either( - traits.Bool, File, desc='Save pre-filter basis as text file') + traits.Bool, File, default=False, usedefault=True, + desc='Save pre-filter basis as text file') save_metadata = traits.Either( - traits.Bool, File, desc='Save component metadata as text file') + traits.Bool, File, default=False, usedefault=True, + desc='Save component metadata as text file') ignore_initial_volumes = traits.Range( low=0, usedefault=True, @@ -497,20 +501,20 @@ class CompCor(SimpleInterface): input_spec = CompCorInputSpec output_spec = CompCorOutputSpec references_ = [{ + 'tags': ['method', 'implementation'], 'entry': - BibTeX( - "@article{compcor_2007," - "title = {A component based noise correction method (CompCor) for BOLD and perfusion based}," - "volume = {37}," - "number = {1}," - "doi = {10.1016/j.neuroimage.2007.04.042}," - "urldate = {2016-08-13}," - "journal = {NeuroImage}," - "author = {Behzadi, Yashar and Restom, Khaled and Liau, Joy and Liu, Thomas T.}," - "year = {2007}," - "pages = {90-101},}"), - 'tags': ['method', 'implementation'] - }] + BibTeX("""\ +@article{compcor_2007, + title = {A component based noise correction method (CompCor) for BOLD and perfusion based}, + volume = {37}, + number = {1}, + doi = {10.1016/j.neuroimage.2007.04.042}, + urldate = {2016-08-13}, + journal = {NeuroImage}, + author = {Behzadi, Yashar and Restom, Khaled and Liau, Joy and Liu, Thomas T.}, + year = {2007}, + pages = {90-101} +}""")}] def __init__(self, *args, **kwargs): ''' exactly the same as compcor except the header ''' @@ -606,57 +610,60 @@ def _run_interface(self, runtime): delimiter='\t', header='\t'.join(components_header), comments='') - self._results['components_file'] = os.path.abspath( - self.inputs.components_file) + self._results['components_file'] = os.path.join( + runtime.cwd, 
self.inputs.components_file) + + save_pre_filter = False + if self.inputs.pre_filter in ['polynomial', 'cosine']: + save_pre_filter = self.inputs.save_pre_filter - save_pre_filter = self.inputs.save_pre_filter if save_pre_filter: self._results['pre_filter_file'] = save_pre_filter if save_pre_filter is True: - self._results['pre_filter_file'] = os.path.abspath('pre_filter.tsv') - if self.inputs.pre_filter: - ftype = { - 'polynomial': 'Legendre', - 'cosine': 'Cosine' - }[self.inputs.pre_filter] - ncols = filter_basis.shape[1] if filter_basis.size > 0 else 0 - header = ['{}{:02d}'.format(ftype, i) for i in range(ncols)] - if skip_vols: - old_basis = filter_basis - # nrows defined above - filter_basis = np.zeros( - (nrows, ncols + skip_vols), dtype=filter_basis.dtype) - if old_basis.size > 0: - filter_basis[skip_vols:, :ncols] = old_basis - filter_basis[:skip_vols, -skip_vols:] = np.eye(skip_vols) - header.extend([ - 'NonSteadyStateOutlier{:02d}'.format(i) - for i in range(skip_vols) - ]) - np.savetxt( - self._results['pre_filter_file'], - filter_basis, - fmt=b'%.10f', - delimiter='\t', - header='\t'.join(header), - comments='') + self._results['pre_filter_file'] = os.path.join( + runtime.cwd, 'pre_filter.tsv') + + ftype = { + 'polynomial': 'Legendre', + 'cosine': 'Cosine' + }[self.inputs.pre_filter] + ncols = filter_basis.shape[1] if filter_basis.size > 0 else 0 + header = ['{}{:02d}'.format(ftype, i) for i in range(ncols)] + if skip_vols: + old_basis = filter_basis + # nrows defined above + filter_basis = np.zeros( + (nrows, ncols + skip_vols), dtype=filter_basis.dtype) + if old_basis.size > 0: + filter_basis[skip_vols:, :ncols] = old_basis + filter_basis[:skip_vols, -skip_vols:] = np.eye(skip_vols) + header.extend([ + 'NonSteadyStateOutlier{:02d}'.format(i) + for i in range(skip_vols) + ]) + np.savetxt( + self._results['pre_filter_file'], + filter_basis, + fmt=b'%.10f', + delimiter='\t', + header='\t'.join(header), + comments='') metadata_file = self.inputs.save_metadata if metadata_file: self._results['metadata_file'] = metadata_file if metadata_file is True: self._results['metadata_file'] = ( - os.path.abspath('component_metadata.tsv')) + os.path.join(runtime.cwd, 'component_metadata.tsv')) components_names = np.empty(len(metadata['mask']), - dtype='object_') + dtype='object_') retained = np.where(metadata['retained']) not_retained = np.where(np.logical_not(metadata['retained'])) components_names[retained] = components_header components_names[not_retained] = ([ 'dropped{}'.format(i) for i in range(len(not_retained[0]))]) with open(self._results['metadata_file'], 'w') as f: - f.write('{}\t{}\t{}\t{}\t{}\n'.format('component', - *list(metadata.keys()))) + f.write('\t'.join(['component'] + list(metadata.keys())) + '\n') for i in zip(components_names, *metadata.values()): f.write('{0[0]}\t{0[1]}\t{0[2]:.10f}\t' '{0[3]:.10f}\t{0[4]:.10f}\n'.format(i)) @@ -1317,9 +1324,8 @@ def compute_noise_components(imgseries, mask_images, components_criterion=0.5, metadata['cumulative_variance_explained'] = ( np.hstack((metadata['cumulative_variance_explained'], cumulative_variance_explained))) - metadata['retained'] = (metadata['retained'] - + [i < num_components - for i in range(len(s))]) + metadata['retained'] = ( + metadata['retained'] + [i < num_components for i in range(len(s))]) if components is None: if failure_mode == 'error': raise ValueError('No components found') From ad3d4401b35086549c954d341c4050c630c5e79d Mon Sep 17 00:00:00 2001 From: oesteban Date: Fri, 15 Feb 2019 18:19:19 -0800 Subject: 
[PATCH 25/36] testing pickling of variance_threshold --- nipype/workflows/rsfmri/fsl/resting.py | 1 - .../rsfmri/fsl/tests/test_resting.py | 24 ++++++++++--------- 2 files changed, 13 insertions(+), 12 deletions(-) diff --git a/nipype/workflows/rsfmri/fsl/resting.py b/nipype/workflows/rsfmri/fsl/resting.py index 12d44a83cf..176a0ed6f7 100644 --- a/nipype/workflows/rsfmri/fsl/resting.py +++ b/nipype/workflows/rsfmri/fsl/resting.py @@ -3,7 +3,6 @@ # vi: set ft=python sts=4 ts=4 sw=4 et: from __future__ import (print_function, division, unicode_literals, absolute_import) -from builtins import str from ....interfaces import fsl as fsl # fsl from ....interfaces import utility as util # utility diff --git a/nipype/workflows/rsfmri/fsl/tests/test_resting.py b/nipype/workflows/rsfmri/fsl/tests/test_resting.py index 799041df37..2179a8b93e 100644 --- a/nipype/workflows/rsfmri/fsl/tests/test_resting.py +++ b/nipype/workflows/rsfmri/fsl/tests/test_resting.py @@ -78,7 +78,8 @@ def setup_class(self, tmpdir): def test_create_resting_preproc(self, mock_node, mock_realign_wf): wflow = create_resting_preproc(base_dir=os.getcwd()) - wflow.inputs.inputspec.num_noise_components = self.num_noise_components + # wflow.inputs.inputspec.num_noise_components = self.num_noise_components + wflow.inputs.compcor.variance_threshold = 0.15 mask_in = wflow.get_node('threshold').inputs mask_in.out_file = self.in_filenames['mask_file'] func_in = wflow.get_node('slicetimer').inputs @@ -89,16 +90,17 @@ def test_create_resting_preproc(self, mock_node, mock_realign_wf): # assert expected_file = os.path.abspath(self.out_filenames['components_file']) with open(expected_file, 'r') as components_file: - components_data = [line.split() for line in components_file] - num_got_components = len(components_data) - assert (num_got_components == self.num_noise_components - or num_got_components == self.fake_data.shape[3]) - first_two = [row[:2] for row in components_data[1:]] - assert first_two == [['-0.5172356654', '-0.6973053243'], [ - '0.2574722644', '0.1645270737' - ], ['-0.0806469590', - '0.5156853779'], ['0.7187176051', '-0.3235820287'], - ['-0.3783072450', '0.3406749013']] + components_data = [line.split() + for line in components_file.read().splitlines()] + num_got_components = len(components_data) + assert (num_got_components == self.num_noise_components or + num_got_components == self.fake_data.shape[3]) + first_two = [row[:2] for row in components_data[1:]] + assert first_two == [['-0.5172356654', '-0.6973053243'], + ['0.2574722644', '0.1645270737'], + ['-0.0806469590', '0.5156853779'], + ['0.7187176051', '-0.3235820287'], + ['-0.3783072450', '0.3406749013']] fake_data = np.array([[[[2, 4, 3, 9, 1], [3, 6, 4, 7, 4]], [[8, 3, 4, 6, 2], [4, 0, 4, 4, 2]]], From fd41b74a5432c241c47914ce4bdf22fb20feceaf Mon Sep 17 00:00:00 2001 From: oesteban Date: Fri, 15 Feb 2019 18:35:33 -0800 Subject: [PATCH 26/36] ``traits.Range`` cannot be pickled with traits>=5 and python 2.7 --- nipype/info.py | 3 ++- nipype/workflows/rsfmri/fsl/tests/test_resting.py | 3 +-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/nipype/info.py b/nipype/info.py index 7b1a757789..c6ead490b5 100644 --- a/nipype/info.py +++ b/nipype/info.py @@ -142,7 +142,8 @@ def get_nipype_gitversion(): 'numpy>=%s ; python_version >= "3.7"' % NUMPY_MIN_VERSION_37, 'python-dateutil>=%s' % DATEUTIL_MIN_VERSION, 'scipy>=%s' % SCIPY_MIN_VERSION, - 'traits>=%s' % TRAITS_MIN_VERSION, + 'traits>=%s,<%s ; python_version == "2.7"' % (TRAITS_MIN_VERSION, '5.0.0'), + 'traits>=%s ; 
python_version >= "3.0"' % TRAITS_MIN_VERSION, 'future>=%s' % FUTURE_MIN_VERSION, 'simplejson>=%s' % SIMPLEJSON_MIN_VERSION, 'prov>=%s' % PROV_VERSION, diff --git a/nipype/workflows/rsfmri/fsl/tests/test_resting.py b/nipype/workflows/rsfmri/fsl/tests/test_resting.py index 2179a8b93e..a0ba2439a7 100644 --- a/nipype/workflows/rsfmri/fsl/tests/test_resting.py +++ b/nipype/workflows/rsfmri/fsl/tests/test_resting.py @@ -78,8 +78,7 @@ def setup_class(self, tmpdir): def test_create_resting_preproc(self, mock_node, mock_realign_wf): wflow = create_resting_preproc(base_dir=os.getcwd()) - # wflow.inputs.inputspec.num_noise_components = self.num_noise_components - wflow.inputs.compcor.variance_threshold = 0.15 + wflow.inputs.inputspec.num_noise_components = self.num_noise_components mask_in = wflow.get_node('threshold').inputs mask_in.out_file = self.in_filenames['mask_file'] func_in = wflow.get_node('slicetimer').inputs From a742c9ce7c57ce5547910e0aceacde9b185c48df Mon Sep 17 00:00:00 2001 From: rciric Date: Sat, 16 Feb 2019 00:02:17 -0800 Subject: [PATCH 27/36] pacify codacy --- .../workflows/rsfmri/fsl/tests/test_resting.py | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/nipype/workflows/rsfmri/fsl/tests/test_resting.py b/nipype/workflows/rsfmri/fsl/tests/test_resting.py index a0ba2439a7..a6383c6f79 100644 --- a/nipype/workflows/rsfmri/fsl/tests/test_resting.py +++ b/nipype/workflows/rsfmri/fsl/tests/test_resting.py @@ -92,14 +92,16 @@ def test_create_resting_preproc(self, mock_node, mock_realign_wf): components_data = [line.split() for line in components_file.read().splitlines()] num_got_components = len(components_data) - assert (num_got_components == self.num_noise_components or - num_got_components == self.fake_data.shape[3]) + if not (num_got_components == self.num_noise_components or + num_got_components == self.fake_data.shape[3]): + raise AssertionError() first_two = [row[:2] for row in components_data[1:]] - assert first_two == [['-0.5172356654', '-0.6973053243'], - ['0.2574722644', '0.1645270737'], - ['-0.0806469590', '0.5156853779'], - ['0.7187176051', '-0.3235820287'], - ['-0.3783072450', '0.3406749013']] + if first_two != [['-0.5172356654', '-0.6973053243'], + ['0.2574722644', '0.1645270737'], + ['-0.0806469590', '0.5156853779'], + ['0.7187176051', '-0.3235820287'], + ['-0.3783072450', '0.3406749013']]: + raise AssertionError() fake_data = np.array([[[[2, 4, 3, 9, 1], [3, 6, 4, 7, 4]], [[8, 3, 4, 6, 2], [4, 0, 4, 4, 2]]], From 518a48930aa732513ebb3b93bc3b47031329984c Mon Sep 17 00:00:00 2001 From: rciric Date: Thu, 21 Feb 2019 10:35:09 -0800 Subject: [PATCH 28/36] revert unnecessary squeeze, correct docs --- nipype/algorithms/confounds.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nipype/algorithms/confounds.py b/nipype/algorithms/confounds.py index 871a512b34..a85aa786e8 100644 --- a/nipype/algorithms/confounds.py +++ b/nipype/algorithms/confounds.py @@ -1205,7 +1205,7 @@ def compute_noise_components(imgseries, mask_images, components_criterion=0.5, Parameters ---------- - imgseries: nibabel NIfTI object + imgseries: nibabel image Time series data to be decomposed. mask_images: list List of nibabel images. 
Time series data from `img_series` is subset @@ -1247,7 +1247,7 @@ def compute_noise_components(imgseries, mask_images, components_criterion=0.5, components_criterion = -1 mask_names = mask_names or range(len(mask_images)) for name, img in zip(mask_names, mask_images): - mask = img.get_data().astype(np.bool).squeeze() + mask = img.get_data().astype(np.bool) if imgseries.shape[:3] != mask.shape: raise ValueError( 'Inputs for CompCor, timeseries and mask, do not have ' From deceb95b089ca99c7fca9f7fbf9112b245872093 Mon Sep 17 00:00:00 2001 From: rciric Date: Thu, 21 Feb 2019 18:40:56 -0800 Subject: [PATCH 29/36] revise in accordance with @effigies review --- nipype/algorithms/confounds.py | 29 +++++++++++++++---- .../rsfmri/fsl/tests/test_resting.py | 16 +++++----- 2 files changed, 30 insertions(+), 15 deletions(-) diff --git a/nipype/algorithms/confounds.py b/nipype/algorithms/confounds.py index a85aa786e8..017a9d1c36 100644 --- a/nipype/algorithms/confounds.py +++ b/nipype/algorithms/confounds.py @@ -1224,12 +1224,26 @@ def compute_noise_components(imgseries, mask_images, components_criterion=0.5, 'polynomial' - Legendre polynomial basis 'cosine' - Discrete cosine (DCT) basis False - None (mean-removal only) + failure_mode: str + Action to be taken in the event that any decomposition fails to + identify any components. `error` indicates that the routine should + raise an exception and exit, while any other value indicates that the + routine should return a matrix of NaN values equal in size to the + requested decomposition matrix. + mask_names: list or None + List of names for each image in `mask_images`. This should be equal in + length to `mask_images`, with the ith element of `mask_names` naming + the ith element of `mask_images`. Filter options: - degree: order of polynomial used to remove trends from the timeseries - period_cut: minimum period (in sec) for DCT high-pass filter - repetition_time: time (in sec) between volume acquisitions + degree: int + Order of polynomial used to remove trends from the timeseries + period_cut: float + Minimum period (in sec) for DCT high-pass filter + repetition_time: float + Time (in sec) between volume acquisitions. This must be defined if + the `filter_type` is `cosine`. 
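    A usage sketch consistent with this signature (the file names, mask
    label, and repetition time below are hypothetical; the call reflects
    this patch series rather than a settled public API):

        import nibabel as nb
        from nipype.algorithms.confounds import compute_noise_components

        func_img = nb.load('func.nii.gz')          # hypothetical 4D series
        noise_rois = [nb.load('csf_mask.nii.gz')]  # hypothetical noise ROI
        components, basis, metadata = compute_noise_components(
            func_img, noise_rois, components_criterion=0.5,
            filter_type='cosine', repetition_time=2.0,
            failure_mode='error', mask_names=['CSF'])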
Returns ------- @@ -1262,6 +1276,9 @@ def compute_noise_components(imgseries, mask_images, components_criterion=0.5, # Currently support Legendre-polynomial or cosine or detrending # With no filter, the mean is nonetheless removed (poly w/ degree 0) if filter_type == 'cosine': + if repetition_time is None: + raise ValueError( + 'Repetition time must be provided for cosine filter') voxel_timecourses, basis = cosine_filter( voxel_timecourses, repetition_time, period_cut) elif filter_type in ('polynomial', False): @@ -1286,8 +1303,8 @@ def compute_noise_components(imgseries, mask_images, components_criterion=0.5, if failure_mode == 'error': raise if components_criterion >= 1: - u = np.empty((M.shape[0], components_criterion), - dtype=np.float32) * np.nan + u = np.full((M.shape[0], components_criterion), + np.nan, dtype=np.float32) else: continue @@ -1329,7 +1346,7 @@ def compute_noise_components(imgseries, mask_images, components_criterion=0.5, if components is None: if failure_mode == 'error': raise ValueError('No components found') - components = np.full((M.shape[0], num_components), np.NaN) + components = np.full((M.shape[0], num_components), np.nan) return components, basis, metadata diff --git a/nipype/workflows/rsfmri/fsl/tests/test_resting.py b/nipype/workflows/rsfmri/fsl/tests/test_resting.py index a6383c6f79..a0ba2439a7 100644 --- a/nipype/workflows/rsfmri/fsl/tests/test_resting.py +++ b/nipype/workflows/rsfmri/fsl/tests/test_resting.py @@ -92,16 +92,14 @@ def test_create_resting_preproc(self, mock_node, mock_realign_wf): components_data = [line.split() for line in components_file.read().splitlines()] num_got_components = len(components_data) - if not (num_got_components == self.num_noise_components or - num_got_components == self.fake_data.shape[3]): - raise AssertionError() + assert (num_got_components == self.num_noise_components or + num_got_components == self.fake_data.shape[3]) first_two = [row[:2] for row in components_data[1:]] - if first_two != [['-0.5172356654', '-0.6973053243'], - ['0.2574722644', '0.1645270737'], - ['-0.0806469590', '0.5156853779'], - ['0.7187176051', '-0.3235820287'], - ['-0.3783072450', '0.3406749013']]: - raise AssertionError() + assert first_two == [['-0.5172356654', '-0.6973053243'], + ['0.2574722644', '0.1645270737'], + ['-0.0806469590', '0.5156853779'], + ['0.7187176051', '-0.3235820287'], + ['-0.3783072450', '0.3406749013']] fake_data = np.array([[[[2, 4, 3, 9, 1], [3, 6, 4, 7, 4]], [[8, 3, 4, 6, 2], [4, 0, 4, 4, 2]]], From fa64907561cf7707c713f251b13c1a3b1cbe7dd4 Mon Sep 17 00:00:00 2001 From: rciric Date: Sat, 23 Feb 2019 15:36:51 -0800 Subject: [PATCH 30/36] revise in accordance with @effigies review --- nipype/algorithms/confounds.py | 2 +- nipype/workflows/rsfmri/fsl/tests/test_resting.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/nipype/algorithms/confounds.py b/nipype/algorithms/confounds.py index 017a9d1c36..03b74a340f 100644 --- a/nipype/algorithms/confounds.py +++ b/nipype/algorithms/confounds.py @@ -1306,7 +1306,7 @@ def compute_noise_components(imgseries, mask_images, components_criterion=0.5, u = np.full((M.shape[0], components_criterion), np.nan, dtype=np.float32) else: - continue + u = np.full((M.shape[0], 1), np.nan, dtype=np.float32) variance_explained = (s ** 2) / np.sum(s ** 2) cumulative_variance_explained = np.cumsum(variance_explained) diff --git a/nipype/workflows/rsfmri/fsl/tests/test_resting.py b/nipype/workflows/rsfmri/fsl/tests/test_resting.py index a0ba2439a7..eba73a75b1 100644 --- 
a/nipype/workflows/rsfmri/fsl/tests/test_resting.py +++ b/nipype/workflows/rsfmri/fsl/tests/test_resting.py @@ -89,8 +89,8 @@ def test_create_resting_preproc(self, mock_node, mock_realign_wf): # assert expected_file = os.path.abspath(self.out_filenames['components_file']) with open(expected_file, 'r') as components_file: - components_data = [line.split() - for line in components_file.read().splitlines()] + components_data = [line.rstrip().split() + for line in components_file] num_got_components = len(components_data) assert (num_got_components == self.num_noise_components or num_got_components == self.fake_data.shape[3]) From e6dfe7d2aa5240fb7d68c891c04d78b83cf3c69f Mon Sep 17 00:00:00 2001 From: rciric Date: Fri, 1 Mar 2019 00:48:20 -0800 Subject: [PATCH 31/36] ensure s is defined, support NaN failure mode with empty mask --- nipype/algorithms/confounds.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/nipype/algorithms/confounds.py b/nipype/algorithms/confounds.py index 03b74a340f..e0d8df6f68 100644 --- a/nipype/algorithms/confounds.py +++ b/nipype/algorithms/confounds.py @@ -1299,9 +1299,10 @@ def compute_noise_components(imgseries, mask_images, components_criterion=0.5, # principal components using a singular value decomposition." try: u, s, _ = fallback_svd(M, full_matrices=False) - except np.linalg.LinAlgError: + except (np.linalg.LinAlgError, ValueError): if failure_mode == 'error': raise + s = np.full(M.shape[0], np.nan, dtype=np.float32) if components_criterion >= 1: u = np.full((M.shape[0], components_criterion), np.nan, dtype=np.float32) From 27ed03f3ed420707ea01599e42cbd42ea1e2adf3 Mon Sep 17 00:00:00 2001 From: rciric Date: Wed, 27 Mar 2019 10:03:43 -0700 Subject: [PATCH 32/36] filter handles empty masks, use `squeeze_image` --- nipype/algorithms/confounds.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/nipype/algorithms/confounds.py b/nipype/algorithms/confounds.py index e0d8df6f68..07f1208540 100644 --- a/nipype/algorithms/confounds.py +++ b/nipype/algorithms/confounds.py @@ -1084,6 +1084,8 @@ def is_outlier(points, thresh=3.5): def cosine_filter(data, timestep, period_cut, remove_mean=True, axis=-1): datashape = data.shape timepoints = datashape[axis] + if datashape[0] == 0: + return data, np.array([]) data = data.reshape((-1, timepoints)) @@ -1115,6 +1117,8 @@ def regress_poly(degree, data, remove_mean=True, axis=-1): datashape = data.shape timepoints = datashape[axis] + if datashape[0] == 0: + return data, np.array([]) # Rearrange all voxel-wise time-series in rows data = data.reshape((-1, timepoints)) @@ -1261,7 +1265,7 @@ def compute_noise_components(imgseries, mask_images, components_criterion=0.5, components_criterion = -1 mask_names = mask_names or range(len(mask_images)) for name, img in zip(mask_names, mask_images): - mask = img.get_data().astype(np.bool) + mask = nb.squeeze_image(img).get_data().astype(np.bool) if imgseries.shape[:3] != mask.shape: raise ValueError( 'Inputs for CompCor, timeseries and mask, do not have ' From 82a25c2308df0fa29568d5c27b25bc3ad4d4e4f5 Mon Sep 17 00:00:00 2001 From: rciric Date: Thu, 28 Mar 2019 08:41:05 -0700 Subject: [PATCH 33/36] default to old behaviour for temporal filters --- nipype/algorithms/confounds.py | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/nipype/algorithms/confounds.py b/nipype/algorithms/confounds.py index ad5a6ab8b5..949f510d2e 100644 --- a/nipype/algorithms/confounds.py +++ b/nipype/algorithms/confounds.py @@ -1082,10 +1082,11 
@@ def is_outlier(points, thresh=3.5): return timepoints_to_discard -def cosine_filter(data, timestep, period_cut, remove_mean=True, axis=-1): +def cosine_filter(data, timestep, period_cut, remove_mean=True, axis=-1, + failure_mode='error'): datashape = data.shape timepoints = datashape[axis] - if datashape[0] == 0: + if datashape[0] == 0 and failure_mode != 'error': return data, np.array([]) data = data.reshape((-1, timepoints)) @@ -1105,7 +1106,8 @@ def cosine_filter(data, timestep, period_cut, remove_mean=True, axis=-1): return residuals.reshape(datashape), non_constant_regressors -def regress_poly(degree, data, remove_mean=True, axis=-1): +def regress_poly(degree, data, remove_mean=True, axis=-1, + failure_mode='error'): """ Returns data with degree polynomial regressed out. @@ -1118,7 +1120,7 @@ def regress_poly(degree, data, remove_mean=True, axis=-1): datashape = data.shape timepoints = datashape[axis] - if datashape[0] == 0: + if datashape[0] == 0 and failure_mode != 'error': return data, np.array([]) # Rearrange all voxel-wise time-series in rows @@ -1285,12 +1287,14 @@ def compute_noise_components(imgseries, mask_images, components_criterion=0.5, raise ValueError( 'Repetition time must be provided for cosine filter') voxel_timecourses, basis = cosine_filter( - voxel_timecourses, repetition_time, period_cut) + voxel_timecourses, repetition_time, period_cut, + failure_mode=failure_mode) elif filter_type in ('polynomial', False): # from paper: # "The constant and linear trends of the columns in the matrix M were # removed [prior to ...]" - voxel_timecourses, basis = regress_poly(degree, voxel_timecourses) + voxel_timecourses, basis = regress_poly(degree, voxel_timecourses, + failure_mode=failure_mode) # "Voxel time series from the noise ROI (either anatomical or tSTD) were # placed in a matrix M of size Nxm, with time along the row dimension From 79e840d8763944f5cb1a4406971506b393fcdc06 Mon Sep 17 00:00:00 2001 From: rciric Date: Thu, 18 Apr 2019 23:28:23 -0700 Subject: [PATCH 34/36] integrate @effigies review comments --- nipype/algorithms/confounds.py | 56 ++++++++++++++----------- nipype/algorithms/tests/test_CompCor.py | 5 +-- nipype/info.py | 3 +- 3 files changed, 34 insertions(+), 30 deletions(-) diff --git a/nipype/algorithms/confounds.py b/nipype/algorithms/confounds.py index 949f510d2e..4fb4e1c91e 100644 --- a/nipype/algorithms/confounds.py +++ b/nipype/algorithms/confounds.py @@ -11,6 +11,7 @@ import os import os.path as op from collections import OrderedDict +from itertools import chain import nibabel as nb import numpy as np @@ -1262,11 +1263,18 @@ def compute_noise_components(imgseries, mask_images, components_criterion=0.5, Dictionary of eigenvalues, fractional explained variances, and cumulative explained variances. 
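    For two singular values drawn from a single mask, the assembled mapping
    might look like the following (all values hypothetical, chosen so the
    fractions are mutually consistent):

        from collections import OrderedDict
        import numpy as np

        metadata = OrderedDict([
            ('mask', ['CSF', 'CSF']),
            ('singular_value', np.array([4.07, 2.12])),
            ('variance_explained', np.array([0.79, 0.21])),
            ('cumulative_variance_explained', np.array([0.79, 1.00])),
            ('retained', [True, False]),
        ])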
""" - components = None basis = np.array([]) if components_criterion == 'all': components_criterion = -1 mask_names = mask_names or range(len(mask_images)) + + comp_list = [] + md_mask = [] + md_sv = [] + md_var = [] + md_cumvar = [] + md_retained = [] + for name, img in zip(mask_names, mask_images): mask = nb.squeeze_image(img).get_data().astype(np.bool) if imgseries.shape[:3] != mask.shape: @@ -1331,32 +1339,30 @@ def compute_noise_components(imgseries, mask_images, components_criterion=0.5, num_components = int(num_components) if num_components == 0: break - if components is None: - components = u[:, :num_components] - metadata = OrderedDict() - metadata['mask'] = [name] * len(s) - metadata['singular_value'] = s - metadata['variance_explained'] = variance_explained - metadata['cumulative_variance_explained'] = ( - cumulative_variance_explained) - metadata['retained'] = [i < num_components for i in range(len(s))] - else: - components = np.hstack((components, u[:, :num_components])) - metadata['mask'] = metadata['mask'] + [name] * len(s) - metadata['singular_value'] = ( - np.hstack((metadata['singular_value'], s))) - metadata['variance_explained'] = ( - np.hstack((metadata['variance_explained'], - variance_explained))) - metadata['cumulative_variance_explained'] = ( - np.hstack((metadata['cumulative_variance_explained'], - cumulative_variance_explained))) - metadata['retained'] = ( - metadata['retained'] + [i < num_components for i in range(len(s))]) - if components is None: + + components.append(u[:, :num_components]) + md_mask.append([name] * len(s)) + md_sv.append(s) + md_var.append(variance_explained) + md_cumvar.append(cumulative_variance_explained) + md_retained.append(i < num_components for i in range(len(s))) + + if len(components) > 0: + components = np.hstack(components) + else: if failure_mode == 'error': raise ValueError('No components found') - components = np.full((M.shape[0], num_components), np.nan) + components = np.full((M.shape[0], num_components), + np.nan, dtype=np.float32) + + metadata = OrderedDict( + ('mask', list(chain(*md_mask))), + ('singular_value', np.hstack(md_sv)), + ('variance_explained', np.hstack(md_var)), + ('cumulative_variance_explained', np.hstack(md_cumvar)), + ('retained', list(chain(*md_retained))) + ) + return components, basis, metadata diff --git a/nipype/algorithms/tests/test_CompCor.py b/nipype/algorithms/tests/test_CompCor.py index 4361c3155c..c7b770fb25 100644 --- a/nipype/algorithms/tests/test_CompCor.py +++ b/nipype/algorithms/tests/test_CompCor.py @@ -1,7 +1,6 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os -import re import nibabel as nb import numpy as np @@ -198,7 +197,7 @@ def run_cc(self, expected_n_components = min(ccinterface.inputs.num_components, self.fake_data.shape[3]) - components_data = [re.sub('\n', '', line).split('\t') + components_data = [line.rstrip().split('\t') for line in components_file] # the first item will be '#', we can throw it out @@ -224,7 +223,7 @@ def run_cc(self, assert os.path.getsize(expected_metadata_file) > 0 with open(ccresult.outputs.metadata_file, 'r') as metadata_file: - components_metadata = [re.sub('\n', '', line).split('\t') + components_metadata = [line.rstrip().split('\t') for line in metadata_file] components_metadata = {i: j for i, j in zip(components_metadata[0], diff --git a/nipype/info.py b/nipype/info.py index 2f29f6126d..f9361031bb 100644 --- a/nipype/info.py +++ b/nipype/info.py @@ -141,8 +141,7 @@ def 
get_nipype_gitversion(): 'numpy>=%s ; python_version >= "3.7"' % NUMPY_MIN_VERSION_37, 'python-dateutil>=%s' % DATEUTIL_MIN_VERSION, 'scipy>=%s' % SCIPY_MIN_VERSION, - 'traits>=%s,<%s ; python_version == "2.7"' % (TRAITS_MIN_VERSION, '5.0.0'), - 'traits>=%s ; python_version >= "3.0"' % TRAITS_MIN_VERSION, + 'traits>=%s,!=5.0' % TRAITS_MIN_VERSION, 'future>=%s' % FUTURE_MIN_VERSION, 'simplejson>=%s' % SIMPLEJSON_MIN_VERSION, 'prov>=%s' % PROV_VERSION, From 1b1b6fa2b342acd500f599fe6890b2d683f2ec57 Mon Sep 17 00:00:00 2001 From: rciric Date: Fri, 19 Apr 2019 03:02:30 -0700 Subject: [PATCH 35/36] propagate retention status to metadata; use list instead of generator; correct var name --- nipype/algorithms/confounds.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/nipype/algorithms/confounds.py b/nipype/algorithms/confounds.py index 4fb4e1c91e..a987c98e55 100644 --- a/nipype/algorithms/confounds.py +++ b/nipype/algorithms/confounds.py @@ -667,7 +667,7 @@ def _run_interface(self, runtime): f.write('\t'.join(['component'] + list(metadata.keys())) + '\n') for i in zip(components_names, *metadata.values()): f.write('{0[0]}\t{0[1]}\t{0[2]:.10f}\t' - '{0[3]:.10f}\t{0[4]:.10f}\n'.format(i)) + '{0[3]:.10f}\t{0[4]:.10f}\t{0[5]}\n'.format(i)) return runtime @@ -1268,7 +1268,7 @@ def compute_noise_components(imgseries, mask_images, components_criterion=0.5, components_criterion = -1 mask_names = mask_names or range(len(mask_images)) - comp_list = [] + components = [] md_mask = [] md_sv = [] md_var = [] @@ -1345,8 +1345,8 @@ def compute_noise_components(imgseries, mask_images, components_criterion=0.5, md_sv.append(s) md_var.append(variance_explained) md_cumvar.append(cumulative_variance_explained) - md_retained.append(i < num_components for i in range(len(s))) - + md_retained.append([i < num_components for i in range(len(s))]) + if len(components) > 0: components = np.hstack(components) else: @@ -1355,13 +1355,13 @@ def compute_noise_components(imgseries, mask_images, components_criterion=0.5, components = np.full((M.shape[0], num_components), np.nan, dtype=np.float32) - metadata = OrderedDict( + metadata = OrderedDict([ ('mask', list(chain(*md_mask))), ('singular_value', np.hstack(md_sv)), ('variance_explained', np.hstack(md_var)), ('cumulative_variance_explained', np.hstack(md_cumvar)), ('retained', list(chain(*md_retained))) - ) + ]) return components, basis, metadata From b80a3d7f1cde35573a73246271bd0dcf42dc7f4b Mon Sep 17 00:00:00 2001 From: rciric Date: Fri, 19 Apr 2019 11:06:16 -0700 Subject: [PATCH 36/36] update unit test to include new metadata field --- nipype/algorithms/tests/test_CompCor.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/nipype/algorithms/tests/test_CompCor.py b/nipype/algorithms/tests/test_CompCor.py index c7b770fb25..3aa535dc19 100644 --- a/nipype/algorithms/tests/test_CompCor.py +++ b/nipype/algorithms/tests/test_CompCor.py @@ -73,7 +73,8 @@ def test_compcor_variance_threshold_and_metadata(self): 'mask': 'mask', 'singular_value': '4.0720553036', 'variance_explained': '0.5527211465', - 'cumulative_variance_explained': '0.5527211465' + 'cumulative_variance_explained': '0.5527211465', + 'retained': 'True', } ccinterface = CompCor( variance_threshold=0.7,
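As a closing illustration, a minimal sketch (file name hypothetical) of
reading back the component metadata TSV that these tests inspect, one
column per field written by `_run_interface`:

    with open('component_metadata.tsv') as f:
        rows = [line.rstrip('\n').split('\t') for line in f]
    header, records = rows[0], rows[1:]
    metadata = {key: [rec[i] for rec in records]
                for i, key in enumerate(header)}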