Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
27 commits
Select commit Hold shift + click to select a range
a2cbbae
write up on gradients
palday Dec 29, 2025
b55d622
slight optimization of gradient computation
palday Dec 29, 2025
4aa750e
kb07
palday Dec 29, 2025
2765eef
Spelling mistakes?
dmbates Dec 30, 2025
6440274
Still not passing tests. In write-up made method comparisons fairer.
dmbates Dec 30, 2025
16cbd8e
test fix
palday Dec 30, 2025
f184c88
methods for HessianConfig and hessian!
palday Dec 30, 2025
8e8ee85
format
palday Dec 30, 2025
072bd5d
NEWS
palday Dec 30, 2025
03009d1
oops
palday Dec 30, 2025
0bfd467
docs fix: AoG update
palday Dec 30, 2025
463a7d6
Add information on gradient evaluation
dmbates Jan 3, 2026
ab0a7cb
Short-cut method of gradient evaluation
dmbates Jan 6, 2026
269826e
merge
palday Jan 6, 2026
2e8ac37
Partial gradient for vector-valued r.e.'s
dmbates Jan 9, 2026
37be044
Merge branch 'main' of github.com:JuliaStats/MixedModels.jl into db/p…
palday Jan 13, 2026
cfba091
Expand docs, start src/gradient.jl
dmbates Jan 14, 2026
637917a
Merge branch 'main' of github.com:JuliaStats/MixedModels.jl into db/p…
palday Jan 15, 2026
ad2cb99
Initial, clunky version of blocked grad eval.
dmbates Jan 23, 2026
afc23c6
Update document on gradient evaluation.
dmbates Jan 24, 2026
8f3981e
Fixed, I hope, the initialization of the gradient blocked matrix
dmbates Jan 25, 2026
89ba9cd
Formatting changes
dmbates Jan 25, 2026
aa54be8
Update gradient code and documents
dmbates Feb 2, 2026
b05b788
Adjust tests on gradient
dmbates Feb 3, 2026
a2f9e31
Expand exploration of gradient methods
dmbates Feb 3, 2026
ca8d1a9
Baseline code before correcting gradient! evaluation
dmbates Feb 23, 2026
0304ff8
Merge branches 'db/pa/gradient' and 'main' of github.com:JuliaStats/M…
palday Mar 9, 2026
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions NEWS.md
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
- Additional methods for pre-allocated result arrays and `*Config` instances have been added to the ForwardDiff extension. [#871].
MixedModels v5.3.0 Release Notes
==============================
- Implement `sparseL` as a specialization of `sparsemat`. Replace `_coord` utility with `_findnz` which, in most cases, falls through to `SparseArrays.findnz`. [#880]
Expand Down Expand Up @@ -726,6 +727,7 @@ Package dependencies
[#864]: https://github.com/JuliaStats/MixedModels.jl/issues/864
[#865]: https://github.com/JuliaStats/MixedModels.jl/issues/865
[#867]: https://github.com/JuliaStats/MixedModels.jl/issues/867
[#871]: https://github.com/JuliaStats/MixedModels.jl/issues/871
[#873]: https://github.com/JuliaStats/MixedModels.jl/issues/873
[#875]: https://github.com/JuliaStats/MixedModels.jl/issues/875
[#876]: https://github.com/JuliaStats/MixedModels.jl/issues/876
Expand Down
56 changes: 50 additions & 6 deletions ext/MixedModelsForwardDiffExt.jl
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,10 @@ using LinearAlgebra: LinearAlgebra,
using SparseArrays: SparseArrays, nzrange

# Stuff we're defining in this file
using ForwardDiff: ForwardDiff
using ForwardDiff: ForwardDiff,
Chunk,
GradientConfig,
HessianConfig
using MixedModels: fd_cholUnblocked!,
fd_deviance,
fd_logdet,
Expand Down Expand Up @@ -59,6 +62,16 @@ const FORWARDDIFF = """
should be included is currently still being decided.
"""

#####
##### Gradients
#####

"""
    ForwardDiff.GradientConfig(model::LinearMixedModel, x=model.θ, chunk=Chunk(x))

Construct a `ForwardDiff.GradientConfig` suitable for evaluating the gradient of
the model's objective (as returned by `fd_deviance`) at the parameter vector `x`.
"""
function ForwardDiff.GradientConfig(
    model::LinearMixedModel{T}, x::AbstractVector{T}=model.θ, chunk::Chunk=Chunk(x)
) where {T}
    # The config is tied to the deviance closure so tag checking succeeds later.
    objective = fd_deviance(model)
    return GradientConfig(objective, x, chunk)
end

"""
ForwardDiff.gradient(model::LinearMixedModel)

Expand All @@ -68,9 +81,29 @@ values.
$(FORWARDDIFF)
"""
function ForwardDiff.gradient(
model::LinearMixedModel{T}, θ::Vector{T}=model.θ
model::LinearMixedModel{T}, θ::Vector{T}=model.θ,
cfg::GradientConfig=GradientConfig(model, θ),
check::Val{CHK}=Val(true),
) where {T,CHK}
return ForwardDiff.gradient!(similar(model.θ), model, θ, cfg, check)
end

"""
    ForwardDiff.gradient!(result, model::LinearMixedModel, θ=model.θ, cfg, check)

Evaluate the gradient of the model's objective at `θ`, storing it in `result`.
"""
function ForwardDiff.gradient!(result::AbstractArray,
    model::LinearMixedModel{T}, θ::Vector{T}=model.θ,
    cfg::GradientConfig=GradientConfig(model, θ),
    check::Val{CHK}=Val(true),
) where {T,CHK}
    # Differentiate the deviance closure in place, writing into `result`.
    objective = fd_deviance(model)
    return ForwardDiff.gradient!(result, objective, θ, cfg, check)
end

#####
##### Hessians
#####

"""
    ForwardDiff.HessianConfig(model::LinearMixedModel, x=model.θ, chunk=Chunk(x))

Construct a `ForwardDiff.HessianConfig` suitable for evaluating the Hessian of
the model's objective (as returned by `fd_deviance`) at the parameter vector `x`.
"""
function ForwardDiff.HessianConfig(
    model::LinearMixedModel{T}, x::AbstractVector{T}=model.θ, chunk::Chunk=Chunk(x)
) where {T}
    # NOTE(review): the diff view interleaved an unrelated removed line
    # (`return ForwardDiff.gradient(...)`) here; the method's sole action is
    # building the config from the deviance closure.
    return HessianConfig(fd_deviance(model), x, chunk)
end

"""
Expand All @@ -82,9 +115,20 @@ values.
$(FORWARDDIFF)
"""
function ForwardDiff.hessian(
model::LinearMixedModel{T}, θ::Vector{T}=model.θ
) where {T}
return ForwardDiff.hessian(fd_deviance(model), θ)
model::LinearMixedModel{T}, θ::Vector{T}=model.θ,
cfg::HessianConfig=HessianConfig(model, θ),
check::Val{CHK}=Val(true),
) where {T,CHK}
n = length(θ)
return ForwardDiff.hessian!(Matrix{T}(undef, n, n), model, θ, cfg, check)
end

"""
    ForwardDiff.hessian!(result, model::LinearMixedModel, θ=model.θ, cfg, check)

Evaluate the Hessian of the model's objective at `θ`, storing it in `result`.
"""
function ForwardDiff.hessian!(result::AbstractArray,
    model::LinearMixedModel{T}, θ::Vector{T}=model.θ,
    cfg::HessianConfig=HessianConfig(model, θ),
    check::Val{CHK}=Val(true),
) where {T,CHK}
    # Differentiate the deviance closure in place, writing into `result`.
    objective = fd_deviance(model)
    return ForwardDiff.hessian!(result, objective, θ, cfg, check)
end

#####
Expand Down
4 changes: 4 additions & 0 deletions gradients/.gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
*.html
*\~
*.swp

Loading
Loading