@@ -1419,3 +1419,53 @@ def test_enet_sample_weight_does_not_overwrite_sample_weight(check_input):
1419
1419
reg .fit (X , y , sample_weight = sample_weight , check_input = check_input )
1420
1420
1421
1421
assert_array_equal (sample_weight , sample_weight_1_25 )
1422
@pytest.mark.parametrize("ridge_alpha", [1e-1, 1.0, 1e6])
@pytest.mark.parametrize("normalize", [True, False])
def test_enet_ridge_consistency(normalize, ridge_alpha):
    # ElasticNet with l1_ratio=0 is pure L2 regularization, so it should
    # recover the Ridge solution once alpha is rescaled by the total sample
    # weight (Ridge does not normalize its penalty by sum of weights).
    #
    # XXX: this test does not pass for weaker regularization (lower values of
    # ridge_alpha): it could be either a problem of ElasticNet or Ridge (less
    # likely) and depends on the dataset statistics: lower values for
    # effective_rank are more problematic in particular.

    rng = np.random.RandomState(42)
    X, y = make_regression(
        n_samples=100,
        n_features=300,
        effective_rank=100,
        n_informative=50,
        random_state=rng,
    )
    # Non-uniform, strictly positive sample weights.
    sw = rng.uniform(low=0.01, high=2, size=X.shape[0])

    ridge = Ridge(
        alpha=ridge_alpha,
        normalize=normalize,
    ).fit(X, y, sample_weight=sw)

    enet = ElasticNet(
        alpha=ridge_alpha / sw.sum(),
        normalize=normalize,
        l1_ratio=0.0,
        max_iter=1000,
    )
    # Even when the ElasticNet model has actually converged, the duality gap
    # convergence criterion is never met when l1_ratio is 0 and for any value
    # of the `tol` parameter. The convergence message should point the user to
    # Ridge instead:
    expected_msg = (
        r"Objective did not converge\. .* "
        r"Linear regression models with null weight for the "
        r"l1 regularization term are more efficiently fitted "
        r"using one of the solvers implemented in "
        r"sklearn\.linear_model\.Ridge/RidgeCV instead\."
    )
    with pytest.warns(ConvergenceWarning, match=expected_msg):
        enet.fit(X, y, sample_weight=sw)

    # Both estimators must land on the same solution.
    assert_allclose(ridge.coef_, enet.coef_)
    assert_allclose(ridge.intercept_, enet.intercept_)
0 commit comments