Commit 9603387

set alpha=0 as default
1 parent 6816dad commit 9603387

File tree: 3 files changed (+46, -63 lines)


src/glum/_glm.py

Lines changed: 10 additions & 10 deletions
@@ -940,7 +940,7 @@ def _set_up_for_fit(self, y: np.ndarray) -> None:
         elif (self.lower_bounds is None) and (self.upper_bounds is None):
             if np.all(np.asarray(self.l1_ratio) == 0):
                 self._solver = "irls-ls"
-            elif getattr(self, "alpha", 1) == 0 and not self.alpha_search:
+            elif getattr(self, "alpha", 0) == 0 and not self.alpha_search:
                 self._solver = "irls-ls"
             else:
                 self._solver = "irls-cd"

@@ -2304,8 +2304,7 @@ def covariance_matrix(
         _expected_information = expected_information

         if (
-            (hasattr(self, "alpha") and self.alpha is None)
-            or (
+            (
                 hasattr(self, "alpha")
                 and isinstance(self.alpha, (int, float))
                 and self.alpha > 0

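The covariance_matrix branch above now treats only a strictly positive scalar alpha as penalized, so an unset alpha no longer trips the special case. A minimal sketch of the resulting usage, assuming glum's public GeneralizedLinearRegressor.covariance_matrix API; the toy data and Poisson family are illustrative assumptions, not part of this commit:

    import numpy as np
    from glum import GeneralizedLinearRegressor

    # Illustrative toy data with a full-column-rank design, as an unpenalized fit requires.
    rng = np.random.default_rng(0)
    X = rng.normal(size=(100, 3))
    y = rng.poisson(np.exp(X @ np.array([0.2, -0.1, 0.3])))

    # With this commit, leaving ``alpha`` unset means an unpenalized GLM, so the
    # covariance matrix (and the standard errors derived from it) applies without
    # a penalization caveat.
    model = GeneralizedLinearRegressor(family="poisson").fit(X, y)
    cov = model.covariance_matrix(X, y)
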
@@ -2914,11 +2913,11 @@ class GeneralizedLinearRegressor(GeneralizedLinearRegressorBase):
     alpha : {float, array-like}, optional (default=None)
         Constant that multiplies the penalty terms and thus determines the
         regularization strength. If ``alpha_search`` is ``False`` (the default),
-        then ``alpha`` must be a scalar or None (equivalent to ``alpha=1.0``).
+        then ``alpha`` must be a scalar or None (equivalent to ``alpha=0``).
         If ``alpha_search`` is ``True``, then ``alpha`` must be an iterable or
         ``None``. See ``alpha_search`` to find how the regularization path is
         set if ``alpha`` is ``None``. See the notes for the exact mathematical
-        meaning of this parameter. ``alpha = 0`` is equivalent to unpenalized
+        meaning of this parameter. ``alpha=0`` is equivalent to unpenalized
         GLMs. In this case, the design matrix ``X`` must have full column rank
         (no collinearities).

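As the updated docstring states, leaving ``alpha`` unset now means an unpenalized GLM rather than ``alpha=1.0``. A hedged usage sketch on illustrative data, contrasting the new default with an explicitly penalized fit (the values and family are assumptions for the example only):

    import numpy as np
    from glum import GeneralizedLinearRegressor

    rng = np.random.default_rng(1)
    X = rng.normal(size=(200, 5))
    y = rng.normal(X @ np.array([1.0, 0.5, 0.0, -0.5, 2.0]))

    # Default after this commit: unpenalized fit; X must have full column rank.
    unpenalized = GeneralizedLinearRegressor(family="normal").fit(X, y)

    # Explicit regularization: a scalar alpha, optionally mixed with L1 via l1_ratio.
    ridge = GeneralizedLinearRegressor(family="normal", alpha=0.1, l1_ratio=0.0).fit(X, y)
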
@@ -3146,10 +3145,11 @@ class GeneralizedLinearRegressor(GeneralizedLinearRegressorBase):

     drop_first : bool, optional (default = False)
         If ``True``, drop the first column when encoding categorical variables.
-        Set this to True when alpha=0 and solver='auto' to prevent an error due to a
-        singular feature matrix. In the case of using a formula with interactions,
-        setting this argument to ``True`` ensures structural full-rankness (it is
-        equivalent to ``ensure_full_rank`` in formulaic and tabmat).
+        Set this to True when ``alpha=0`` and ``solver='auto'`` to prevent an error
+        due to a singular feature matrix. In the case of using a formula with
+        interactions, setting this argument to ``True`` ensures structural
+        full-rankness (it is equivalent to ``ensure_full_rank`` in formulaic and
+        tabmat).

     robust : bool, optional (default = False)
         If true, then robust standard errors are computed by default.
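The ``drop_first`` guidance is easiest to see with a categorical feature: one-hot encoding every level alongside an intercept makes the design matrix singular, which the now-default unpenalized fit cannot handle. A small sketch, assuming a hypothetical toy frame and glum's pandas/categorical support:

    import numpy as np
    import pandas as pd
    from glum import GeneralizedLinearRegressor

    df = pd.DataFrame({
        "group": pd.Categorical(["a", "b", "c", "a", "b", "c"]),
        "x": [0.1, 0.4, 0.2, 0.9, 0.5, 0.3],
    })
    y = np.array([1.0, 2.0, 3.0, 1.2, 2.1, 3.3])

    # With an intercept plus dummies for all three levels the design matrix is
    # singular; drop_first=True restores full column rank so the alpha=0 default
    # can be fit with solver='auto'.
    model = GeneralizedLinearRegressor(family="normal", drop_first=True).fit(df, y)
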
@@ -3573,7 +3573,7 @@ def fit(
             self.coef_ = self.coef_path_[-1]
         else:
             if self.alpha is None:
-                _alpha = 1.0
+                _alpha = 0.0
             else:
                 _alpha = self.alpha
             if _alpha > 0 and self.l1_ratio > 0 and self._solver != "irls-cd":
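Inside ``fit``, ``alpha=None`` now resolves to ``_alpha = 0.0``, so omitting ``alpha`` and passing ``alpha=0`` explicitly should produce the same unpenalized solution; this is consistent with the test changes below, which drop their explicit ``alpha=0`` arguments. A hedged check on illustrative data (not from the commit):

    import numpy as np
    from glum import GeneralizedLinearRegressor

    rng = np.random.default_rng(2)
    X = rng.normal(size=(150, 4))
    y = rng.poisson(np.exp(X @ np.array([0.3, -0.2, 0.1, 0.0])))

    # Both runs use the same solver and tolerances, so the coefficients should agree.
    fit_default = GeneralizedLinearRegressor(family="poisson").fit(X, y)
    fit_explicit = GeneralizedLinearRegressor(family="poisson", alpha=0).fit(X, y)
    np.testing.assert_allclose(fit_default.coef_, fit_explicit.coef_, rtol=1e-6)
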

tests/glm/test_distribution.py

Lines changed: 0 additions & 6 deletions
@@ -296,7 +296,6 @@ def test_poisson_deviance_dispersion_loglihood(weighted):
     # logLik(glm_model) # -7.390977 (df=1)

     regressor = GeneralizedLinearRegressor(
-        alpha=0,
         family="poisson",
         fit_intercept=False,
         gradient_tol=1e-8,

@@ -345,7 +344,6 @@ def test_gamma_deviance_dispersion_loglihood(weighted):
     # logLik(glm_model) # -7.057068 (df=2)

     regressor = GeneralizedLinearRegressor(
-        alpha=0,
         family="gamma",
         fit_intercept=False,
         gradient_tol=1e-8,

@@ -393,7 +391,6 @@ def test_gaussian_deviance_dispersion_loglihood(family, weighted):
     # logLik(glm_model) # -7.863404 (df=2)

     regressor = GeneralizedLinearRegressor(
-        alpha=0,
         family=family,
         fit_intercept=False,
         gradient_tol=1e-8,

@@ -441,7 +438,6 @@ def test_tweedie_deviance_dispersion_loglihood(weighted):
     # logLiktweedie(glm_model) # -8.35485

     regressor = GeneralizedLinearRegressor(
-        alpha=0,
         family=TweedieDistribution(1.5),
         fit_intercept=False,
         gradient_tol=1e-8,

@@ -490,7 +486,6 @@ def test_binomial_deviance_dispersion_loglihood(weighted):
     # logLik(glm_model) # -3.365058 (df=1)

     regressor = GeneralizedLinearRegressor(
-        alpha=0,
         family="binomial",
         fit_intercept=False,
         gradient_tol=1e-8,

@@ -535,7 +530,6 @@ def test_negative_binomial_deviance_dispersion_loglihood(weighted):
     # logLik(glm_model) # -4.187887 (df=1)

     regressor = GeneralizedLinearRegressor(
-        alpha=0,
         family="negative.binomial",
         fit_intercept=False,
         gradient_tol=1e-8,
