Skip to content
This repository was archived by the owner on Apr 2, 2020. It is now read-only.
Merged
Show file tree
Hide file tree
Changes from 8 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion nistats/contrasts.py
Original file line number Diff line number Diff line change
Expand Up @@ -90,7 +90,7 @@ def compute_contrast(labels, regression_result, con_val, contrast_type=None):
effect_[:, label_mask] = wcbeta
var_[label_mask] = rss

dof_ = regression_result[label_].df_resid
dof_ = regression_result[label_].df_residuals
return Contrast(effect=effect_, variance=var_, dim=dim, dof=dof_,
contrast_type=contrast_type)

Expand Down
4 changes: 2 additions & 2 deletions nistats/first_level_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -133,8 +133,8 @@ def run_glm(Y, X, noise_model='ar1', bins=100, n_jobs=1, verbose=0):

if noise_model == 'ar1':
# compute and discretize the AR1 coefs
ar1 = ((ols_result.resid[1:] * ols_result.resid[:-1]).sum(axis=0) /
(ols_result.resid ** 2).sum(axis=0))
ar1 = ((ols_result.residuals[1:] * ols_result.residuals[:-1]).sum(axis=0) /
(ols_result.residuals ** 2).sum(axis=0))
del ols_result
ar1 = (ar1 * bins).astype(np.int) * 1. / bins
# Fit the AR model according to current AR(1) estimates
Expand Down
23 changes: 15 additions & 8 deletions nistats/model.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@

Author: Bertrand Thirion, 2011--2015
"""
import warnings

import numpy as np

Expand Down Expand Up @@ -72,8 +73,14 @@ def __init__(self, theta, Y, model, cov=None, dispersion=1., nuisance=None,
self.df_total = Y.shape[0]
self.df_model = model.df_model
# put this as a parameter of LikelihoodModel
self.df_resid = self.df_total - self.df_model

self.df_residuals = self.df_total - self.df_model

@setattr_on_read
def df_resid(self):
    """Deprecated alias for ``df_residuals``.

    Returns
    -------
    int
        The value of ``self.df_residuals``.
    """
    # Bug fix: the original adjacent string literals lacked a separating
    # space, producing "...LikelihoodModelResultshas been deprecated...".
    # Also emit FutureWarning explicitly, matching the other deprecation
    # shims in this file (e.g. RegressionResults.wresid).
    warnings.warn("The attribute 'df_resid' from LikelihoodModelResults "
                  "has been deprecated. Please use 'df_residuals'.",
                  FutureWarning,
                  )
    return self.df_residuals

@setattr_on_read
def logL(self):
"""
Expand Down Expand Up @@ -200,7 +207,7 @@ def Tcontrast(self, matrix, store=('t', 'effect', 'sd'), dispersion=None):
if 't' in store:
st_t = np.squeeze(effect * positive_reciprocal(sd))
return TContrastResults(effect=st_effect, t=st_t, sd=st_sd,
df_den=self.df_resid)
df_den=self.df_residuals)

def Fcontrast(self, matrix, dispersion=None, invcov=None):
""" Compute an Fcontrast for a contrast matrix `matrix`.
Expand Down Expand Up @@ -262,7 +269,7 @@ def Fcontrast(self, matrix, dispersion=None, invcov=None):
return FContrastResults(
effect=ctheta, covariance=self.vcov(
matrix=matrix, dispersion=dispersion[np.newaxis]),
F=F, df_den=self.df_resid, df_num=invcov.shape[0])
F=F, df_den=self.df_residuals, df_num=invcov.shape[0])

def conf_int(self, alpha=.05, cols=None, dispersion=None):
''' The confidence interval of the specified theta estimates.
Expand Down Expand Up @@ -306,18 +313,18 @@ def conf_int(self, alpha=.05, cols=None, dispersion=None):

'''
if cols is None:
lower = self.theta - inv_t_cdf(1 - alpha / 2, self.df_resid) *\
lower = self.theta - inv_t_cdf(1 - alpha / 2, self.df_residuals) * \
np.sqrt(np.diag(self.vcov(dispersion=dispersion)))
upper = self.theta + inv_t_cdf(1 - alpha / 2, self.df_resid) *\
upper = self.theta + inv_t_cdf(1 - alpha / 2, self.df_residuals) * \
np.sqrt(np.diag(self.vcov(dispersion=dispersion)))
else:
lower, upper = [], []
for i in cols:
lower.append(
self.theta[i] - inv_t_cdf(1 - alpha / 2, self.df_resid) *
self.theta[i] - inv_t_cdf(1 - alpha / 2, self.df_residuals) *
np.sqrt(self.vcov(column=i, dispersion=dispersion)))
upper.append(
self.theta[i] + inv_t_cdf(1 - alpha / 2, self.df_resid) *
self.theta[i] + inv_t_cdf(1 - alpha / 2, self.df_residuals) *
np.sqrt(self.vcov(column=i, dispersion=dispersion)))
return np.asarray(list(zip(lower, upper)))

Expand Down
78 changes: 68 additions & 10 deletions nistats/regression.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,8 @@

__docformat__ = 'restructuredtext en'

import warnings

import numpy as np

from nibabel.onetime import setattr_on_read
Expand Down Expand Up @@ -58,7 +60,7 @@ class OLSModel(object):
normalized_cov_beta : ndarray
``np.dot(calc_beta, calc_beta.T)``

df_resid : scalar
df_residuals : scalar
Degrees of freedom of the residuals. Number of observations less the
rank of the design.

Expand Down Expand Up @@ -90,7 +92,13 @@ def initialize(self, design):

eps = np.abs(self.design).sum() * np.finfo(np.float).eps
self.df_model = matrix_rank(self.design, eps)
self.df_resid = self.df_total - self.df_model
self.df_residuals = self.df_total - self.df_model

@setattr_on_read
def df_resid(self):
    """Deprecated alias for ``df_residuals``.

    Returns
    -------
    int
        The value of ``self.df_residuals``.
    """
    # Bug fix: the original adjacent string literals lacked a separating
    # space, producing "...OLSModelhas been deprecated...".  Also emit
    # FutureWarning explicitly, consistent with RegressionResults.wresid.
    warnings.warn("The attribute 'df_resid' from OLSModel "
                  "has been deprecated. Please use 'df_residuals'.",
                  FutureWarning,
                  )
    return self.df_residuals

def logL(self, beta, Y, nuisance=None):
r''' Returns the value of the loglikelihood function at beta.
Expand Down Expand Up @@ -257,15 +265,16 @@ def whiten(self, X):
_X[(i + 1):] = _X[(i + 1):] - self.rho[i] * X[0: - (i + 1)]
return _X

from nistats._utils.helpers import replace_parameters

class RegressionResults(LikelihoodModelResults):
"""
This class summarizes the fit of a linear regression model.

It handles the output of contrasts, estimates of covariance, etc.
"""

def __init__(self, theta, Y, model, wY, wresid, cov=None, dispersion=1.,
@replace_parameters({'wresid': 'wresiduals'}, lib_name='Nistats')
def __init__(self, theta, Y, model, wY, wresiduals, cov=None, dispersion=1.,
nuisance=None):
"""See LikelihoodModelResults constructor.

Expand All @@ -275,18 +284,45 @@ def __init__(self, theta, Y, model, wY, wresid, cov=None, dispersion=1.,
LikelihoodModelResults.__init__(self, theta, Y, model, cov,
dispersion, nuisance)
self.wY = wY
self.wresid = wresid
self.wresiduals = wresiduals
self.wdesign = model.wdesign

@setattr_on_read
def wresid(self):
    """Deprecated alias for ``wresiduals``; warns and delegates."""
    deprecation_msg = ("'RegressionResults.wresid' method has been deprecated "
                       "and will be removed. "
                       "Please use 'RegressionResults.wresiduals'.")
    warnings.warn(deprecation_msg, FutureWarning)
    return self.wresiduals

@setattr_on_read
def resid(self):
    """Deprecated alias for ``residuals``; warns and delegates."""
    deprecation_msg = ("'RegressionResults.resid' method has been deprecated "
                       "and will be removed. "
                       "Please use 'RegressionResults.residuals'.")
    warnings.warn(deprecation_msg, FutureWarning)
    return self.residuals

@setattr_on_read
def residuals(self):
    """Residuals of the fit: observed data minus fitted values."""
    fitted = self.predicted
    return self.Y - fitted

@setattr_on_read
def norm_resid(self):
    """Deprecated alias for ``normalized_residuals``; warns and delegates."""
    deprecation_msg = (
        "'RegressionResults.norm_resid' method has been deprecated "
        "and will be removed. "
        "Please use 'RegressionResults.normalized_residuals'."
    )
    warnings.warn(deprecation_msg, FutureWarning)
    return self.normalized_residuals

@setattr_on_read
def normalized_residuals(self):
"""
Residuals, normalized to have unit length.

Expand All @@ -303,7 +339,7 @@ def norm_resid(self):
See: Montgomery and Peck 3.2.1 p. 68
Davidson and MacKinnon 15.2 p 662
"""
return self.resid * positive_reciprocal(np.sqrt(self.dispersion))
return self.residuals * positive_reciprocal(np.sqrt(self.dispersion))

@setattr_on_read
def predicted(self):
Expand All @@ -318,7 +354,7 @@ def predicted(self):
def SSE(self):
"""Error sum of squares. If not from an OLS model this is "pseudo"-SSE.
"""
return (self.wresid ** 2).sum(0)
return (self.wresiduals ** 2).sum(0)

@setattr_on_read
def r_square(self):
Expand All @@ -330,7 +366,7 @@ def r_square(self):
@setattr_on_read
def MSE(self):
""" Mean square (error) """
return self.SSE / self.df_resid
return self.SSE / self.df_residuals


class SimpleRegressionResults(LikelihoodModelResults):
Expand All @@ -353,7 +389,7 @@ def __init__(self, results):
self.df_total = results.Y.shape[0]
self.df_model = results.model.df_model
# put this as a parameter of LikelihoodModel
self.df_resid = self.df_total - self.df_model
self.df_residuals = self.df_total - self.df_model

def logL(self, Y):
"""
Expand All @@ -362,12 +398,34 @@ def logL(self, Y):
raise ValueError('can not use this method for simple results')

def resid(self, Y):
    """Deprecated alias for ``residuals``; warns and delegates."""
    deprecation_msg = (
        "'SimpleRegressionResults.resid()' method has been deprecated "
        "and will be removed. "
        "Please use 'SimpleRegressionResults.residuals()'."
    )
    warnings.warn(deprecation_msg, FutureWarning)
    return self.residuals(Y)

def residuals(self, Y):
    """
    Residuals from the fit.
    """
    # NOTE(review): `predicted` appears later in this class as a plain
    # method (no property decorator visible), in which case
    # `self.predicted` is a bound method and this subtraction would fail.
    # Confirm whether `self.predicted()` is intended, or whether
    # `predicted` is wrapped as a read-only attribute elsewhere.
    return Y - self.predicted

@setattr_on_read
def df_resid(self):
    """Deprecated alias for ``df_residuals``.

    Returns
    -------
    int
        The value of ``self.df_residuals``.
    """
    # Bug fixes: the original message (a) named the wrong class
    # ("OLSModel" — copy-paste error; this is SimpleRegressionResults)
    # and (b) lacked a space between the concatenated string literals.
    # Also emit FutureWarning explicitly, consistent with the other
    # deprecation shims in this module.
    warnings.warn("The attribute 'df_resid' from SimpleRegressionResults "
                  "has been deprecated. Please use 'df_residuals'.",
                  FutureWarning,
                  )
    return self.df_residuals

def norm_resid(self, Y):
    """Deprecated alias for ``normalized_residuals``; warns and delegates."""
    deprecation_msg = (
        "'SimpleRegressionResults.norm_resid' method has been deprecated "
        "and will be removed. "
        "Please use 'SimpleRegressionResults.normalized_residuals'."
    )
    warnings.warn(deprecation_msg, FutureWarning)
    return self.normalized_residuals(Y)

def normalized_residuals(self, Y):
"""
Residuals, normalized to have unit length.

Expand All @@ -384,7 +442,7 @@ def norm_resid(self, Y):
See: Montgomery and Peck 3.2.1 p. 68
Davidson and MacKinnon 15.2 p 662
"""
return self.resid(Y) * positive_reciprocal(np.sqrt(self.dispersion))
return self.residuals(Y) * positive_reciprocal(np.sqrt(self.dispersion))

def predicted(self):
""" Return linear predictor values from a design matrix.
Expand Down
6 changes: 3 additions & 3 deletions nistats/tests/test_first_level_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -606,9 +606,9 @@ def test_first_level_model_residuals():
noise_model='ols')
model.fit(fmri_data, design_matrices=design_matrices)

resid = model.residuals[0]
mean_resids = model.masker_.transform(resid).mean(0)
assert_array_almost_equal(mean_resids, 0)
residuals = model.residuals[0]
mean_residuals = model.masker_.transform(residuals).mean(0)
assert_array_almost_equal(mean_residuals, 0)


def test_first_level_model_predictions_r_square():
Expand Down
21 changes: 13 additions & 8 deletions nistats/tests/test_regression.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
"""

import numpy as np
import pytest

from numpy.testing import (assert_array_almost_equal,
assert_almost_equal,
Expand All @@ -20,17 +21,21 @@
def test_OLS():
    """OLS fit exposes df_residuals/residuals/predicted; old names warn.

    Removes the stale pre-rename assertions (``df_resid`` / ``resid``
    accessed bare) left interleaved from the diff: they duplicate the new
    assertions and would emit uncaptured FutureWarnings (failing under
    ``-W error``).  The deprecated alias is still exercised, but only
    inside ``pytest.warns``.
    """
    model = OLSModel(design=X)
    results = model.fit(Y)
    assert results.df_residuals == 30
    assert results.residuals.shape[0] == 40
    assert results.predicted.shape[0] == 40
    # Deprecated alias must keep working, with a FutureWarning.
    with pytest.warns(FutureWarning):
        assert results.resid.shape[0] == 40


def test_AR():
    """AR(1) fit exposes df_residuals/residuals/predicted; old names warn.

    Removes the stale pre-rename assertions (``df_resid`` / ``resid``
    accessed bare) left interleaved from the diff: they duplicate the new
    assertions and would emit uncaptured FutureWarnings (failing under
    ``-W error``).
    """
    model = ARModel(design=X, rho=0.4)
    results = model.fit(Y)
    assert results.df_residuals == 30
    assert results.residuals.shape[0] == 40
    assert results.predicted.shape[0] == 40
    # Deprecated alias must keep working, with a FutureWarning.
    with pytest.warns(FutureWarning):
        assert results.resid.shape[0] == 40


def test_residuals():
Expand All @@ -42,7 +47,7 @@ def test_residuals():
Xintercept[:, 0] = 1
model = OLSModel(design=Xintercept)
results = model.fit(Y)
assert_almost_equal(results.resid.mean(), 0)
assert_almost_equal(results.residuals.mean(), 0)


def test_predicted_r_square():
Expand All @@ -54,7 +59,7 @@ def test_predicted_r_square():
# rounding errors)
model = OLSModel(design=Xshort)
results = model.fit(Yshort)
assert_almost_equal(results.resid.sum(), 0)
assert_almost_equal(results.residuals.sum(), 0)
assert_array_almost_equal(results.predicted, Yshort)
assert_almost_equal(results.r_square, 1.0)

Expand All @@ -64,12 +69,12 @@ def test_OLS_degenerate():
Xd[:, 0] = Xd[:, 1] + Xd[:, 2]
model = OLSModel(design=Xd)
results = model.fit(Y)
assert results.df_resid == 31
assert results.df_residuals == 31


def test_AR_degenerate():
    """A collinear design column reduces the rank, so df_residuals is 31."""
    degenerate_design = X.copy()
    degenerate_design[:, 0] = degenerate_design[:, 1] + degenerate_design[:, 2]
    results = ARModel(design=degenerate_design, rho=0.9).fit(Y)
    assert results.df_residuals == 31
assert results.df_residuals == 31