Skip to content

Commit

Permalink
Update pixi lockfile (#912)
Browse files Browse the repository at this point in the history
* Update pixi lockfile

* adding Martin's changes again

* pch

* forgot some

* sphinx 8.2 causes issues

---------

Co-authored-by: quant-ranger[bot] <132915763+quant-ranger[bot]@users.noreply.github.com>
Co-authored-by: Marc-Antoine Schmidt <[email protected]>
  • Loading branch information
quant-ranger[bot] and MarcAntoineSchmidtQC authored Feb 19, 2025
1 parent af2676f commit 002989e
Show file tree
Hide file tree
Showing 7 changed files with 5,153 additions and 5,137 deletions.
10,271 changes: 5,146 additions & 5,125 deletions pixi.lock

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion pixi.toml
Original file line number Diff line number Diff line change
Expand Up @@ -102,7 +102,7 @@ make = "*"
matplotlib-base = "*"
nbclassic = ">=0.2.8"
nbsphinx = ">=0.8.3"
sphinx = ">=3.5.3"
sphinx = ">=3.5.3,!=8.2"
sphinx_rtd_theme = "*"
sphinxcontrib-apidoc = "*"
sphinxext-altair = "*"
Expand Down
5 changes: 2 additions & 3 deletions src/glum/_glm.py
Original file line number Diff line number Diff line change
Expand Up @@ -2372,8 +2372,7 @@ def _validate_hyperparameters(self) -> None:
if self.alpha_search:
if not isinstance(self.alpha, Iterable) and self.alpha is not None:
raise ValueError(
"`alpha` should be an Iterable or None when `alpha_search`"
" is True"
"`alpha` should be an Iterable or None when `alpha_search` is True"
)
if self.alpha is not None and (
(np.asarray(self.alpha) < 0).any()
Expand All @@ -2383,7 +2382,7 @@ def _validate_hyperparameters(self) -> None:
if not self.alpha_search:
if not np.isscalar(self.alpha) and self.alpha is not None:
raise ValueError(
"`alpha` should be a scalar or None when `alpha_search`" " is False"
"`alpha` should be a scalar or None when `alpha_search` is False"
)
if self.alpha is not None and (
not isinstance(self.alpha, (int, float)) or self.alpha < 0
Expand Down
2 changes: 1 addition & 1 deletion src/glum/_solvers.py
Original file line number Diff line number Diff line change
Expand Up @@ -878,7 +878,7 @@ def _lbfgs_solver(
)
if info["warnflag"] == 1:
warnings.warn(
"lbfgs failed to converge." " Increase the number of iterations.",
"lbfgs failed to converge. Increase the number of iterations.",
ConvergenceWarning,
)
elif info["warnflag"] == 2:
Expand Down
2 changes: 1 addition & 1 deletion src/glum_benchmarks/bench_liblinear.py
Original file line number Diff line number Diff line change
Expand Up @@ -92,7 +92,7 @@ def liblinear_bench(
solver="liblinear",
)

fit_args = dict(
fit_args = dict( # type: ignore
X=X,
y=dat["y"].astype(np.int64).copy(),
sample_weight=dat.get("sample_weight"),
Expand Down
4 changes: 2 additions & 2 deletions src/glum_benchmarks/util.py
Original file line number Diff line number Diff line change
Expand Up @@ -108,8 +108,8 @@ def get_obj_val(
X_dot_coef += offset

zeros = np.zeros(dat["X"].shape[0])
y = dat["y"].astype(coefs.dtype)
weights = dat.get("weights", np.ones_like(y)).astype(coefs.dtype)
y = dat["y"].astype(coefs.dtype) # type: ignore
weights = dat.get("weights", np.ones_like(y)).astype(coefs.dtype) # type: ignore
weights /= weights.sum()
P1 = l1_ratio * alpha * np.ones_like(coefs)
P2 = (1 - l1_ratio) * alpha * np.ones_like(coefs)
Expand Down
4 changes: 0 additions & 4 deletions tests/glm/test_glm_regressor.py
Original file line number Diff line number Diff line change
Expand Up @@ -2197,10 +2197,6 @@ def test_drop_first_allows_alpha_equals_0():
regressor = GeneralizedLinearRegressor(drop_first=True)
regressor.fit(X, y)

regressor = GeneralizedLinearRegressor() # default is False
with pytest.raises(np.linalg.LinAlgError):
regressor.fit(X, y)


def test_dropping_distinct_categorical_column():
y = np.random.normal(size=10)
Expand Down

0 comments on commit 002989e

Please sign in to comment.