Skip to content

Commit

Permalink
Merge branch 'main' into glum-v3
Browse files Browse the repository at this point in the history
Merge conflicts were due to new black version.
  • Loading branch information
stanmart committed Feb 6, 2024
2 parents 5dfd446 + 6202d6b commit 9c04a08
Show file tree
Hide file tree
Showing 9 changed files with 45 additions and 41 deletions.
14 changes: 8 additions & 6 deletions .github/workflows/build_wheels.yml
Original file line number Diff line number Diff line change
Expand Up @@ -20,9 +20,10 @@ jobs:
uses: pypa/cibuildwheel@<version>   # NOTE: action reference garbled by email obfuscation in page scrape; action is pypa/cibuildwheel (see CIBW_* env vars below), exact version pin unrecoverable
env:
CIBW_ARCHS_MACOS: x86_64 arm64
- uses: actions/upload-artifact@v3
- uses: actions/upload-artifact@v4
with:
path: ./wheelhouse/*.whl
name: wheels-${{ matrix.os }}

build_sdist:
name: Build source distribution
Expand All @@ -37,9 +38,10 @@ jobs:
run: python -m pip install setuptools setuptools-scm wheel Cython numpy scikit-learn
- name: Build sdist
run: python setup.py sdist
- uses: actions/upload-artifact@v3
- uses: actions/upload-artifact@v4
with:
path: dist/*.tar.gz
name: sdist

upload_testpypi:
if: github.event_name == 'release' && github.event.action == 'published'
Expand All @@ -51,9 +53,9 @@ jobs:
permissions:
id-token: write
steps:
- uses: actions/download-artifact@v3
- uses: actions/download-artifact@v4
with:
name: artifact
merge-multiple: true
path: dist
- uses: pypa/gh-action-pypi-publish@<version>   # NOTE: garbled by email obfuscation; TestPyPI upload step uses pypa/gh-action-pypi-publish, exact version pin unrecoverable
with:
Expand All @@ -69,8 +71,8 @@ jobs:
permissions:
id-token: write
steps:
- uses: actions/download-artifact@v3
- uses: actions/download-artifact@v4
with:
name: artifact
path: dist
merge-multiple: true
- uses: pypa/gh-action-pypi-publish@<version>   # NOTE: garbled by email obfuscation; PyPI upload step uses pypa/gh-action-pypi-publish, exact version pin unrecoverable
4 changes: 2 additions & 2 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
@@ -1,13 +1,13 @@
repos:
- repo: https://github.com/Quantco/pre-commit-mirrors-black
rev: 23.12.1
rev: 24.1.1
hooks:
- id: black-conda
args:
- --safe
- --target-version=py39
- repo: https://github.com/Quantco/pre-commit-mirrors-flake8
rev: 6.1.0
rev: 7.0.0
hooks:
- id: flake8-conda
additional_dependencies: [
Expand Down
16 changes: 9 additions & 7 deletions setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -74,9 +74,9 @@
package_dir={"": "src"},
packages=find_packages(
where="src",
include=["glum"]
if os.environ.get("CONDA_BUILD")
else ["glum", "glum_benchmarks"],
include=(
["glum"] if os.environ.get("CONDA_BUILD") else ["glum", "glum_benchmarks"]
),
),
python_requires=">=3.9",
install_requires=[
Expand All @@ -89,13 +89,15 @@
"formulaic>=0.6",
"tabmat>=4.0.0a3",
],
entry_points=None
if os.environ.get("CONDA_BUILD")
else """
entry_points=(
None
if os.environ.get("CONDA_BUILD")
else """
[console_scripts]
glm_benchmarks_run = glum_benchmarks.cli_run:cli_run
glm_benchmarks_analyze = glum_benchmarks.cli_analyze:cli_analyze
""",
"""
),
ext_modules=cythonize(
ext_modules,
annotate=False,
Expand Down
4 changes: 1 addition & 3 deletions src/glum/_distribution.py
Original file line number Diff line number Diff line change
Expand Up @@ -942,9 +942,7 @@ def unit_deviance(self, y: np.ndarray, mu: np.ndarray) -> np.ndarray:
-------
array-like
"""
return 2 * y * (np.arctan(y) - np.arctan(mu)) + np.log(
(1 + mu**2) / (1 + y**2)
)
return 2 * y * (np.arctan(y) - np.arctan(mu)) + np.log((1 + mu**2) / (1 + y**2))


class BinomialDistribution(ExponentialDispersionModel):
Expand Down
22 changes: 12 additions & 10 deletions src/glum/_glm.py
Original file line number Diff line number Diff line change
Expand Up @@ -2759,16 +2759,18 @@ def _expand_categorical_penalties(
return np.array(
list(
chain.from_iterable(
[
elmt
for _ in range(
len(dtype.categories)
+ has_missing_category[col]
- drop_first
)
]
if pd.api.types.is_categorical_dtype(dtype)
else [elmt]
(
[
elmt
for _ in range(
len(dtype.categories)
+ has_missing_category[col]
- drop_first
)
]
if pd.api.types.is_categorical_dtype(dtype)
else [elmt]
)
for elmt, (col, dtype) in zip(
penalty, X.dtypes.items()
)
Expand Down
8 changes: 5 additions & 3 deletions src/glum_benchmarks/bench_liblinear.py
Original file line number Diff line number Diff line change
Expand Up @@ -77,9 +77,11 @@ def liblinear_bench(
model_args = dict(
penalty=pen,
tol=benchmark_convergence_tolerance,
C=1 / (X.shape[0] * alpha)
if reg_multiplier is None
else 1 / (X.shape[0] * alpha * reg_multiplier),
C=(
1 / (X.shape[0] * alpha)
if reg_multiplier is None
else 1 / (X.shape[0] * alpha * reg_multiplier)
),
# Note that when an intercept is fitted, it is subject to regularization, unlike
# other solvers. intercept_scaling helps combat this by inflating the intercept
# column, though too low of a value leaves too much regularization and too high
Expand Down
10 changes: 6 additions & 4 deletions src/glum_benchmarks/cli_run.py
Original file line number Diff line number Diff line change
Expand Up @@ -81,12 +81,14 @@ def cli_run(
for Ln in libraries.keys():
click.echo(f"running problem={Pn} library={Ln}")
new_params = params.update_params(problem_name=Pn, library_name=Ln)
result, regularization_strength_ = execute_problem_library(
result, _ = execute_problem_library(
new_params,
iterations,
defaults["diagnostics_level"]
if params.diagnostics_level is None
else params.diagnostics_level, # type: ignore
(
defaults["diagnostics_level"] # type: ignore
if params.diagnostics_level is None # type: ignore
else params.diagnostics_level # type: ignore
),
)
_save_benchmark_results(
output_dir,
Expand Down
4 changes: 1 addition & 3 deletions src/glum_benchmarks/orig_sklearn_fork/_glm.py
Original file line number Diff line number Diff line change
Expand Up @@ -889,9 +889,7 @@ def unit_variance_derivative(self, mu):
return 2 * mu

def unit_deviance(self, y, mu):
return 2 * y * (np.arctan(y) - np.arctan(mu)) + np.log(
(1 + mu**2) / (1 + y**2)
)
return 2 * y * (np.arctan(y) - np.arctan(mu)) + np.log((1 + mu**2) / (1 + y**2))


class BinomialDistribution(ExponentialDispersionModel):
Expand Down
4 changes: 1 addition & 3 deletions src/glum_benchmarks/util.py
Original file line number Diff line number Diff line change
Expand Up @@ -409,9 +409,7 @@ def clear_cache(force=False):
if cache_location is None:
return

cache_size_limit = float(
os.environ.get("GLM_BENCHMARKS_CACHE_SIZE_LIMIT", 1024**3)
)
cache_size_limit = float(os.environ.get("GLM_BENCHMARKS_CACHE_SIZE_LIMIT", 1024**3))

if force or _get_size_of_cache_directory() > cache_size_limit:
shutil.rmtree(cache_location)
Expand Down

0 comments on commit 9c04a08

Please sign in to comment.