@@ -40,7 +40,6 @@ def test_input_estimator_unchanged():
     assert_true(transformer.estimator is est)


-@skip_if_32bit
 def test_feature_importances():
     X, y = datasets.make_classification(
         n_samples=1000, n_features=10, n_informative=3, n_redundant=0,
@@ -59,17 +58,33 @@ def test_feature_importances():
         feature_mask = np.abs(importances) > func(importances)
         assert_array_almost_equal(X_new, X[:, feature_mask])

+
+def test_sample_weight():
+    # Ensure sample weights are passed to underlying estimator
+    X, y = datasets.make_classification(
+        n_samples=100, n_features=10, n_informative=3, n_redundant=0,
+        n_repeated=0, shuffle=False, random_state=0)
+
     # Check with sample weights
     sample_weight = np.ones(y.shape)
     sample_weight[y == 1] *= 100

-    est = RandomForestClassifier(n_estimators=50, random_state=0)
+    est = LogisticRegression(random_state=0, fit_intercept=False)
     transformer = SelectFromModel(estimator=est)
+    transformer.fit(X, y, sample_weight=None)
+    mask = transformer._get_support_mask()
     transformer.fit(X, y, sample_weight=sample_weight)
-    importances = transformer.estimator_.feature_importances_
+    weighted_mask = transformer._get_support_mask()
+    assert not np.all(weighted_mask == mask)
     transformer.fit(X, y, sample_weight=3 * sample_weight)
-    importances_bis = transformer.estimator_.feature_importances_
-    assert_almost_equal(importances, importances_bis)
+    reweighted_mask = transformer._get_support_mask()
+    assert np.all(weighted_mask == reweighted_mask)
+
+
+def test_coef_default_threshold():
+    X, y = datasets.make_classification(
+        n_samples=100, n_features=10, n_informative=3, n_redundant=0,
+        n_repeated=0, shuffle=False, random_state=0)

     # For the Lasso and related models, the threshold defaults to 1e-5
     transformer = SelectFromModel(estimator=Lasso(alpha=0.1))
@@ -80,7 +95,7 @@ def test_feature_importances():


 @skip_if_32bit
-def test_feature_importances_2d_coef():
+def test_2d_coef():
     X, y = datasets.make_classification(
         n_samples=1000, n_features=10, n_informative=3, n_redundant=0,
         n_repeated=0, shuffle=False, random_state=0, n_classes=4)
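For context, a minimal standalone sketch of the behaviour the new test_sample_weight exercises: sample_weight passed to SelectFromModel.fit is forwarded to the wrapped estimator and can change which features are selected, while rescaling the weights by a constant leaves the selection unchanged. It mirrors the test's data setup but uses the public get_support() instead of the private _get_support_mask(); it is an illustrative sketch, not part of the diff.

import numpy as np
from sklearn import datasets
from sklearn.feature_selection import SelectFromModel
from sklearn.linear_model import LogisticRegression

# Same synthetic data as in the test above.
X, y = datasets.make_classification(
    n_samples=100, n_features=10, n_informative=3, n_redundant=0,
    n_repeated=0, shuffle=False, random_state=0)

# Heavily upweight the positive class so the fitted coefficients shift.
sample_weight = np.ones(y.shape)
sample_weight[y == 1] *= 100

est = LogisticRegression(random_state=0, fit_intercept=False)
transformer = SelectFromModel(estimator=est)

transformer.fit(X, y)  # unweighted baseline
mask = transformer.get_support()

transformer.fit(X, y, sample_weight=sample_weight)
weighted_mask = transformer.get_support()
assert not np.array_equal(weighted_mask, mask)  # weighting changes the selection

transformer.fit(X, y, sample_weight=3 * sample_weight)
assert np.array_equal(weighted_mask, transformer.get_support())  # invariant to rescaling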