diff --git a/Project.toml b/Project.toml index 4f199888d..a43284ca4 100644 --- a/Project.toml +++ b/Project.toml @@ -57,7 +57,7 @@ NonlinearSolveSymbolicsExt = "Symbolics" NonlinearSolveZygoteExt = "Zygote" [compat] -ADTypes = "0.2.6" +ADTypes = "1.1.0" Aqua = "0.8" ArrayInterface = "7.9" BandedMatrices = "1.5" @@ -69,7 +69,7 @@ Enzyme = "0.11.15, 0.12" FastBroadcast = "0.2.8" FastClosures = "0.3.2" FastLevenbergMarquardt = "0.1" -FiniteDiff = "2.21" +FiniteDiff = "2.22" FixedPointAcceleration = "0.3" ForwardDiff = "0.10.36" LazyArrays = "1.8.2" @@ -83,7 +83,7 @@ NLSolvers = "0.5" NLsolve = "4.5" NaNMath = "1" NonlinearProblemLibrary = "0.1.2" -OrdinaryDiffEq = "6.74" +OrdinaryDiffEq = "6.75" Pkg = "1.10" PrecompileTools = "1.2" Preferences = "1.4" @@ -94,17 +94,17 @@ RecursiveArrayTools = "3.8" Reexport = "1.2" SIAMFANLEquations = "1.0.1" SafeTestsets = "0.1" -SciMLBase = "2.28.0" -SimpleNonlinearSolve = "1.2" +SciMLBase = "2.34.0" +SimpleNonlinearSolve = "1.8" SparseArrays = "1.10" -SparseDiffTools = "2.17" +SparseDiffTools = "2.19" SpeedMapping = "0.3" StableRNGs = "1" -StaticArrays = "1.7" +StaticArrays = "1.9" StaticArraysCore = "1.4" Sundials = "4.23.1" Symbolics = "5.13" -SymbolicIndexingInterface = "0.3.3" +SymbolicIndexingInterface = "0.3.15" Test = "1.10" TimerOutputs = "0.5.23" Zygote = "0.6.69" diff --git a/docs/src/basics/autodiff.md b/docs/src/basics/autodiff.md index 73e096a4b..baa605363 100644 --- a/docs/src/basics/autodiff.md +++ b/docs/src/basics/autodiff.md @@ -3,19 +3,16 @@ ## Summary of Finite Differencing Backends - [`AutoFiniteDiff`](@ref): Finite differencing, not optimal but always applicable. - - [`AutoSparseFiniteDiff`](@ref): Sparse version of [`AutoFiniteDiff`](@ref). ## Summary of Forward Mode AD Backends - [`AutoForwardDiff`](@ref): The best choice for dense problems. - - [`AutoSparseForwardDiff`](@ref): Sparse version of [`AutoForwardDiff`](@ref). - [`AutoPolyesterForwardDiff`](@ref): Might be faster than [`AutoForwardDiff`](@ref) for large problems. Requires `PolyesterForwardDiff.jl` to be installed and loaded. ## Summary of Reverse Mode AD Backends - [`AutoZygote`](@ref): The fastest choice for non-mutating array-based (BLAS) functions. - - [`AutoSparseZygote`](@ref): Sparse version of [`AutoZygote`](@ref). - [`AutoEnzyme`](@ref): Uses `Enzyme.jl` Reverse Mode and should be considered experimental. @@ -26,29 +23,32 @@ !!! note - The `Sparse` versions of the methods refers to automated sparsity detection. These + The sparse versions of the methods refer to automated sparsity detection. These methods automatically discover the sparse Jacobian form from the function `f`. Note that all methods specialize the differentiation on a sparse Jacobian if the sparse Jacobian is given as `prob.f.jac_prototype` in the `NonlinearFunction` definition, and the `AutoSparse` here simply refers to whether this `jac_prototype` should be generated automatically. For more details, see [SparseDiffTools.jl](https://github.com/JuliaDiff/SparseDiffTools.jl) and - [Sparsity Detection Manual Entry](@ref sparsity-detection). + [Sparsity Detection Manual Entry](@ref sparsity-detection), as well as the + documentation of [ADTypes.jl](https://github.com/SciML/ADTypes.jl). 
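A minimal sketch of the convention the note above refers to, assuming ADTypes ≥ 1 (re-exported by NonlinearSolve): sparse differentiation is requested by wrapping a dense backend in `AutoSparse` instead of using a dedicated `AutoSparse*` type.

```julia
using ADTypes

dense_backend = AutoForwardDiff()          # dense forward-mode AD
sparse_backend = AutoSparse(dense_backend) # same backend + automated sparsity detection

# The wrapped dense backend can be recovered again, which is what the
# `__get_nonsparse_ad` change in `src/utils.jl` below relies on:
ADTypes.dense_ad(sparse_backend)           # AutoForwardDiff()
```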
## API Reference +```@docs +AutoSparse +``` + ### Finite Differencing Backends ```@docs AutoFiniteDiff -AutoSparseFiniteDiff ``` ### Forward Mode AD Backends ```@docs AutoForwardDiff -AutoSparseForwardDiff AutoPolyesterForwardDiff ``` @@ -56,7 +56,5 @@ AutoPolyesterForwardDiff ```@docs AutoZygote -AutoSparseZygote AutoEnzyme -NonlinearSolve.AutoSparseEnzyme ``` diff --git a/docs/src/basics/sparsity_detection.md b/docs/src/basics/sparsity_detection.md index 208fc306e..97891be61 100644 --- a/docs/src/basics/sparsity_detection.md +++ b/docs/src/basics/sparsity_detection.md @@ -59,9 +59,9 @@ refer to the documentation there for more details. If you constructed a Nonlinear Solver with a sparse AD type, for example ```julia -NewtonRaphson(; autodiff = AutoSparseForwardDiff()) +NewtonRaphson(; autodiff = AutoSparse(AutoForwardDiff())) # OR -TrustRegion(; autodiff = AutoSparseZygote()) +TrustRegion(; autodiff = AutoSparse(AutoZygote())) ``` then NonlinearSolve will automatically perform matrix coloring and use sparse diff --git a/docs/src/tutorials/large_systems.md b/docs/src/tutorials/large_systems.md index 19e7493d5..4e732b9c0 100644 --- a/docs/src/tutorials/large_systems.md +++ b/docs/src/tutorials/large_systems.md @@ -128,7 +128,7 @@ include: In the next section, we will discuss how to declare a sparse Jacobian and how to use [Symbolics.jl](https://github.com/JuliaSymbolics/Symbolics.jl), to compute exact sparse jacobians. This is triggered if you pass in a sparse autodiff type such as -`AutoSparseForwardDiff()`. If `Symbolics.jl` is loaded, then the default changes to +`AutoSparse(AutoForwardDiff())`. If `Symbolics.jl` is loaded, then the default changes to Symbolic Sparsity Detection. See the manual entry on [Sparsity Detection](@ref sparsity-detection) for more details on the default. 
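A hedged end-to-end sketch of this spelling (the residual and problem below are illustrative stand-ins, not the Brusselator used in this tutorial):

```julia
using NonlinearSolve  # AutoSparse, AutoForwardDiff, etc. are re-exported via ADTypes

f(u, p) = u .* u .- p                        # simple out-of-place residual
prob = NonlinearProblem(f, [1.0, 1.0], 2.0)

# A sparse AD type triggers automated sparsity detection plus matrix coloring:
sol = solve(prob, NewtonRaphson(; autodiff = AutoSparse(AutoForwardDiff())))
```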
@@ -137,13 +137,13 @@ using BenchmarkTools # for @btime @btime solve(prob_brusselator_2d, NewtonRaphson()); @btime solve(prob_brusselator_2d, - NewtonRaphson(; autodiff = AutoSparseForwardDiff(; chunksize = 32))); + NewtonRaphson(; autodiff = AutoSparse(AutoForwardDiff(; chunksize = 32)))); @btime solve(prob_brusselator_2d, - NewtonRaphson(; - autodiff = AutoSparseForwardDiff(; chunksize = 32), linsolve = KLUFactorization())); + NewtonRaphson(; autodiff = AutoSparse(AutoForwardDiff(; chunksize = 32)), + linsolve = KLUFactorization())); @btime solve(prob_brusselator_2d, - NewtonRaphson(; - autodiff = AutoSparseForwardDiff(; chunksize = 32), linsolve = KrylovJL_GMRES())); + NewtonRaphson(; autodiff = AutoSparse(AutoForwardDiff(; chunksize = 32)), + linsolve = KrylovJL_GMRES())); nothing # hide ``` diff --git a/src/NonlinearSolve.jl b/src/NonlinearSolve.jl index 40890d1e3..8e39c6f1d 100644 --- a/src/NonlinearSolve.jl +++ b/src/NonlinearSolve.jl @@ -27,7 +27,7 @@ import PrecompileTools: @recompile_invalidations, @compile_workload, @setup_work import SciMLBase: AbstractNonlinearAlgorithm, JacobianWrapper, AbstractNonlinearProblem, AbstractSciMLOperator, NLStats, _unwrap_val, has_jac, isinplace - import SparseDiffTools: AbstractSparsityDetection, AutoSparseEnzyme + import SparseDiffTools: AbstractSparsityDetection import StaticArraysCore: StaticArray, SVector, SArray, MArray, Size, SMatrix, MMatrix import SymbolicIndexingInterface: SymbolicIndexingInterface, ParameterIndexingProxy, symbolic_container, parameter_values, state_values, @@ -36,9 +36,6 @@ end @reexport using ADTypes, SciMLBase, SimpleNonlinearSolve -const AbstractSparseADType = Union{ADTypes.AbstractSparseFiniteDifferences, - ADTypes.AbstractSparseForwardMode, ADTypes.AbstractSparseReverseMode} - # Type-Inference Friendly Check for Extension Loading is_extension_loaded(::Val) = false @@ -121,18 +118,18 @@ include("default.jl") @compile_workload begin @sync begin - for T in (Float32, Float64), (fn, u0) in nlfuncs - Threads.@spawn NonlinearProblem(fn, T.(u0), T(2)) - end - for (fn, u0) in nlfuncs - Threads.@spawn NonlinearLeastSquaresProblem(fn, u0, 2.0) - end - for prob in probs_nls, alg in nls_algs - Threads.@spawn solve(prob, alg; abstol = 1e-2, verbose = false) - end - for prob in probs_nlls, alg in nlls_algs - Threads.@spawn solve(prob, alg; abstol = 1e-2, verbose = false) - end + for T in (Float32, Float64), (fn, u0) in nlfuncs + Threads.@spawn NonlinearProblem(fn, T.(u0), T(2)) + end + for (fn, u0) in nlfuncs + Threads.@spawn NonlinearLeastSquaresProblem(fn, u0, 2.0) + end + for prob in probs_nls, alg in nls_algs + Threads.@spawn solve(prob, alg; abstol = 1e-2, verbose = false) + end + for prob in probs_nlls, alg in nlls_algs + Threads.@spawn solve(prob, alg; abstol = 1e-2, verbose = false) + end end end end diff --git a/src/adtypes.jl b/src/adtypes.jl index 9ed3107c5..0ee20effb 100644 --- a/src/adtypes.jl +++ b/src/adtypes.jl @@ -22,17 +22,6 @@ error into the derivative estimates. """ AutoFiniteDiff -""" - AutoSparseFiniteDiff() - -Sparse Version of [`AutoFiniteDiff`](@ref) that uses -[FiniteDiff.jl](https://github.com/JuliaDiff/FiniteDiff.jl) and the column color vector of -the Jacobian Matrix to efficiently compute the Sparse Jacobian. 
- - - Supports both inplace and out-of-place functions -""" -AutoSparseFiniteDiff - """ AutoForwardDiff(; chunksize = nothing, tag = nothing) AutoForwardDiff{chunksize, tagType}(tag::tagType) @@ -56,27 +45,6 @@ For type-stability of internal operations, a positive `chunksize` must be provid """ AutoForwardDiff -""" - AutoSparseForwardDiff(; chunksize = nothing, tag = nothing) - AutoSparseForwardDiff{chunksize, tagType}(tag::tagType) - -Sparse Version of [`AutoForwardDiff`](@ref) that uses -[ForwardDiff.jl](https://github.com/JuliaDiff/ForwardDiff.jl) and the column color vector of -the Jacobian Matrix to efficiently compute the Sparse Jacobian. - - - Supports both inplace and out-of-place functions - -For type-stability of internal operations, a positive `chunksize` must be provided. - -### Keyword Arguments - - - `chunksize`: Count of dual numbers that can be propagated simultaneously. Setting this - number to a high value will lead to slowdowns. Use - [`NonlinearSolve.pickchunksize`](@ref) to get a proper value. - - `tag`: Used to avoid perturbation confusion. If set to `nothing`, we use a custom tag. -""" -AutoSparseForwardDiff - """ AutoPolyesterForwardDiff(; chunksize = nothing) @@ -108,20 +76,6 @@ jacobians. """ AutoZygote -""" - AutoSparseZygote() - -Sparse version of [`AutoZygote`](@ref) that uses -[`Zygote.jl`](https://github.com/FluxML/Zygote.jl) and the row color vector of -the Jacobian Matrix to efficiently compute the Sparse Jacobian. - - - Supports only out-of-place functions - -This is efficient only for long jacobians or if the maximum value of the row color vector is -significantly lower than the maximum value of the column color vector. -""" -AutoSparseZygote - """ AutoEnzyme() @@ -134,7 +88,7 @@ and VJP support is currently not implemented. AutoEnzyme """ - AutoSparseEnzyme() + AutoSparse(AutoEnzyme()) Sparse version of [`AutoEnzyme`](@ref) that uses [Enzyme.jl](https://github.com/EnzymeAD/Enzyme.jl) and the row color vector of @@ -142,7 +96,45 @@ the Jacobian Matrix to efficiently compute the Sparse Jacobian. - Supports both inplace and out-of-place functions +This is efficient only for long jacobians or if the maximum value of the row color vector is +significantly lower than the maximum value of the column color vector. + + AutoSparse(AutoFiniteDiff()) + +Sparse Version of [`AutoFiniteDiff`](@ref) that uses +[FiniteDiff.jl](https://github.com/JuliaDiff/FiniteDiff.jl) and the column color vector of +the Jacobian Matrix to efficiently compute the Sparse Jacobian. + + - Supports both inplace and out-of-place functions + + AutoSparse(AutoForwardDiff(; chunksize = nothing, tag = nothing)) + AutoSparse(AutoForwardDiff{chunksize, tagType}(tag::tagType)) + +Sparse Version of [`AutoForwardDiff`](@ref) that uses +[ForwardDiff.jl](https://github.com/JuliaDiff/ForwardDiff.jl) and the column color vector of +the Jacobian Matrix to efficiently compute the Sparse Jacobian. + + - Supports both inplace and out-of-place functions + +For type-stability of internal operations, a positive `chunksize` must be provided. + +### Keyword Arguments + + - `chunksize`: Count of dual numbers that can be propagated simultaneously. Setting this + number to a high value will lead to slowdowns. Use + [`NonlinearSolve.pickchunksize`](@ref) to get a proper value. + + - `tag`: Used to avoid perturbation confusion. If set to `nothing`, we use a custom tag. 
+ + AutoSparse(AutoZygote()) + +Sparse version of [`AutoZygote`](@ref) that uses +[`Zygote.jl`](https://github.com/FluxML/Zygote.jl) and the row color vector of +the Jacobian Matrix to efficiently compute the Sparse Jacobian. + + - Supports only out-of-place functions + This is efficient only for long jacobians or if the maximum value of the row color vector is significantly lower than the maximum value of the column color vector. """ -AutoSparseEnzyme +AutoSparse diff --git a/src/algorithms/trust_region.jl b/src/algorithms/trust_region.jl index ac8f42d35..d68e2d9ec 100644 --- a/src/algorithms/trust_region.jl +++ b/src/algorithms/trust_region.jl @@ -26,13 +26,14 @@ function TrustRegion(; concrete_jac = nothing, linsolve = nothing, precs = DEFAU shrink_factor::Real = 1 // 4, expand_factor::Real = 2 // 1, max_shrink_times::Int = 32, autodiff = nothing, vjp_autodiff = nothing) descent = Dogleg(; linsolve, precs) - if autodiff isa - Union{ADTypes.AbstractForwardMode, ADTypes.AbstractFiniteDifferencesMode} + if autodiff !== nothing && ADTypes.mode(autodiff) isa ADTypes.ForwardMode forward_ad = autodiff else forward_ad = nothing end - if isnothing(vjp_autodiff) && autodiff isa ADTypes.AbstractFiniteDifferencesMode + if isnothing(vjp_autodiff) && + autodiff isa Union{ADTypes.AutoFiniteDiff, ADTypes.AutoFiniteDifferences} + # TODO: why not just ForwardMode? vjp_autodiff = autodiff end trustregion = GenericTrustRegionScheme(; diff --git a/src/core/generalized_first_order.jl b/src/core/generalized_first_order.jl index 7d20b0bfc..c16cf043d 100644 --- a/src/core/generalized_first_order.jl +++ b/src/core/generalized_first_order.jl @@ -55,10 +55,14 @@ function GeneralizedFirstOrderAlgorithm{concrete_jac, name}(; descent, linesearch = missing, trustregion = missing, jacobian_ad = nothing, forward_ad = nothing, reverse_ad = nothing, max_shrink_times::Int = typemax(Int)) where {concrete_jac, name} - forward_ad = ifelse(forward_ad !== nothing, forward_ad, - ifelse(jacobian_ad isa ADTypes.AbstractForwardMode, jacobian_ad, nothing)) - reverse_ad = ifelse(reverse_ad !== nothing, reverse_ad, - ifelse(jacobian_ad isa ADTypes.AbstractReverseMode, jacobian_ad, nothing)) + forward_ad = ifelse(forward_ad !== nothing, + forward_ad, + ifelse(jacobian_ad !== nothing && ADTypes.mode(jacobian_ad) isa ADTypes.ForwardMode, + jacobian_ad, nothing)) + reverse_ad = ifelse(reverse_ad !== nothing, + reverse_ad, + ifelse(jacobian_ad !== nothing && ADTypes.mode(jacobian_ad) isa ADTypes.ReverseMode, + jacobian_ad, nothing)) if linesearch !== missing && !(linesearch isa AbstractNonlinearSolveLineSearchAlgorithm) Base.depwarn("Passing in a `LineSearches.jl` algorithm directly is deprecated. 
\ diff --git a/src/globalization/line_search.jl b/src/globalization/line_search.jl index 5c76d335a..3cd30424f 100644 --- a/src/globalization/line_search.jl +++ b/src/globalization/line_search.jl @@ -122,7 +122,7 @@ function __internal_init( end else autodiff = get_concrete_reverse_ad( - alg.autodiff, prob; check_forward_mode = true) + alg.autodiff, prob; check_reverse_mode = true) vjp_op = VecJacOperator(prob, fu, u; autodiff) if isinplace(prob) g_cache = similar(u) diff --git a/src/internal/approximate_initialization.jl b/src/internal/approximate_initialization.jl index 60f4a7584..a1f36f475 100644 --- a/src/internal/approximate_initialization.jl +++ b/src/internal/approximate_initialization.jl @@ -149,7 +149,7 @@ function __internal_init( prob::AbstractNonlinearProblem, alg::TrueJacobianInitialization, solver, f::F, fu, u, p; linsolve = missing, internalnorm::IN = DEFAULT_NORM, kwargs...) where {F, IN} autodiff = get_concrete_forward_ad( - alg.autodiff, prob; check_reverse_mode = false, kwargs...) + alg.autodiff, prob; check_forward_mode = false, kwargs...) jac_cache = JacobianCache(prob, solver, prob.f, fu, u, p; autodiff, linsolve) J = alg.structure(jac_cache(nothing)) return InitializedApproximateJacobianCache( diff --git a/src/internal/helpers.jl b/src/internal/helpers.jl index fb4af1121..9a18d0817 100644 --- a/src/internal/helpers.jl +++ b/src/internal/helpers.jl @@ -30,17 +30,11 @@ function evaluate_f!!(f::NonlinearFunction{iip}, fu, u, p) where {iip} end # AutoDiff Selection Functions -function get_concrete_forward_ad( - autodiff::Union{ADTypes.AbstractForwardMode, ADTypes.AbstractFiniteDifferencesMode}, - prob, sp::Val{test_sparse} = True, args...; kwargs...) where {test_sparse} - return autodiff -end function get_concrete_forward_ad( autodiff::ADTypes.AbstractADType, prob, sp::Val{test_sparse} = True, - args...; check_reverse_mode = true, kwargs...) where {test_sparse} - if check_reverse_mode - @warn "$(autodiff)::$(typeof(autodiff)) is not a \ - `Abstract(Forward/FiniteDifferences)Mode`. Use with caution." maxlog=1 + args...; check_forward_mode = true, kwargs...) where {test_sparse} + if !isa(ADTypes.mode(autodiff), ADTypes.ForwardMode) && check_forward_mode + @warn "$(autodiff)::$(typeof(autodiff)) is not a `ForwardMode`. Use with caution." maxlog=1 end return autodiff end @@ -53,37 +47,28 @@ function get_concrete_forward_ad( use_sparse_ad = false end ad = if !ForwardDiff.can_dual(eltype(prob.u0)) # Use Finite Differencing - use_sparse_ad ? AutoSparseFiniteDiff() : AutoFiniteDiff() + use_sparse_ad ? AutoSparse(AutoFiniteDiff()) : AutoFiniteDiff() else - (use_sparse_ad ? AutoSparseForwardDiff : AutoForwardDiff)() + use_sparse_ad ? AutoSparse(AutoForwardDiff()) : AutoForwardDiff() end return ad end function get_concrete_reverse_ad( - autodiff::Union{ADTypes.AbstractReverseMode, ADTypes.AbstractFiniteDifferencesMode}, - prob, sp::Val{test_sparse} = True, args...; kwargs...) where {test_sparse} - return autodiff -end -function get_concrete_reverse_ad(autodiff::Union{AutoZygote, AutoSparseZygote}, prob, - sp::Val{test_sparse} = True, args...; kwargs...) where {test_sparse} - if isinplace(prob) + autodiff::ADTypes.AbstractADType, prob, sp::Val{test_sparse} = True, + args...; check_reverse_mode = true, kwargs...) where {test_sparse} + if !isa(ADTypes.mode(autodiff), ADTypes.ReverseMode) && check_reverse_mode + @warn "$(autodiff)::$(typeof(autodiff)) is not a `ReverseMode`. Use with caution." 
maxlog=1 + end + if autodiff isa Union{AutoZygote, AutoSparse{<:AutoZygote}} && isinplace(prob) @warn "Attempting to use Zygote.jl for inplace problems. Switching to FiniteDiff. \ Sparsity even if present will be ignored for correctness purposes. Set \ the reverse ad option to `nothing` to automatically select the best option \ and exploit sparsity." return AutoFiniteDiff() # colorvec confusion will occur if we use FiniteDiff + else + return autodiff end - return autodiff -end -function get_concrete_reverse_ad( - autodiff::ADTypes.AbstractADType, prob, sp::Val{test_sparse} = True, - args...; check_reverse_mode = true, kwargs...) where {test_sparse} - if check_reverse_mode - @warn "$(autodiff)::$(typeof(autodiff)) is not a \ - `Abstract(Forward/FiniteDifferences)Mode`. Use with caution." maxlog=1 - end - return autodiff end function get_concrete_reverse_ad( autodiff, prob, sp::Val{test_sparse} = True, args...; kwargs...) where {test_sparse} @@ -94,9 +79,9 @@ function get_concrete_reverse_ad( use_sparse_ad = false end ad = if isinplace(prob) || !is_extension_loaded(Val(:Zygote)) # Use Finite Differencing - use_sparse_ad ? AutoSparseFiniteDiff() : AutoFiniteDiff() + use_sparse_ad ? AutoSparse(AutoFiniteDiff()) : AutoFiniteDiff() else - use_sparse_ad ? AutoSparseZygote() : AutoZygote() + use_sparse_ad ? AutoSparse(AutoZygote()) : AutoZygote() end return ad end diff --git a/src/internal/jacobian.jl b/src/internal/jacobian.jl index 221bc5d62..b9cd6d352 100644 --- a/src/internal/jacobian.jl +++ b/src/internal/jacobian.jl @@ -56,11 +56,11 @@ function JacobianCache( iip = isinplace(prob) uf = JacobianWrapper{iip}(f, p) - autodiff = get_concrete_forward_ad(autodiff, prob; check_reverse_mode = false) + autodiff = get_concrete_forward_ad(autodiff, prob; check_forward_mode = false) jvp_autodiff = get_concrete_forward_ad( - jvp_autodiff, prob, Val(false); check_reverse_mode = true) + jvp_autodiff, prob, Val(false); check_forward_mode = true) vjp_autodiff = get_concrete_reverse_ad( - vjp_autodiff, prob, Val(false); check_forward_mode = false) + vjp_autodiff, prob, Val(false); check_reverse_mode = false) has_analytic_jac = SciMLBase.has_jac(f) linsolve_needs_jac = concrete_jac(alg) === nothing && (linsolve === missing || @@ -100,7 +100,7 @@ end function JacobianCache( prob, alg, f::F, ::Number, u::Number, p; autodiff = nothing, kwargs...) 
where {F} uf = JacobianWrapper{false}(f, p) - autodiff = get_concrete_forward_ad(autodiff, prob; check_reverse_mode = false) + autodiff = get_concrete_forward_ad(autodiff, prob; check_forward_mode = false) if !(autodiff isa AutoForwardDiff || autodiff isa AutoPolyesterForwardDiff || autodiff isa AutoFiniteDiff) @@ -154,7 +154,7 @@ end # Sparsity Detection Choices @inline __sparsity_detection_alg(_, _) = NoSparsityDetection() -@inline function __sparsity_detection_alg(f::NonlinearFunction, ad::AbstractSparseADType) +@inline function __sparsity_detection_alg(f::NonlinearFunction, ad::AutoSparse) if f.sparsity === nothing if f.jac_prototype === nothing if is_extension_loaded(Val(:Symbolics)) @@ -184,8 +184,10 @@ end end if SciMLBase.has_colorvec(f) - return PrecomputedJacobianColorvec(; jac_prototype, f.colorvec, - partition_by_rows = ad isa ADTypes.AbstractSparseReverseMode) + return PrecomputedJacobianColorvec(; jac_prototype, + f.colorvec, + partition_by_rows = (ad isa AutoSparse && + ADTypes.mode(ad) isa ADTypes.ReverseMode)) else return JacPrototypeSparsityDetection(; jac_prototype) end diff --git a/src/utils.jl b/src/utils.jl index a0cc5744b..9db6e1316 100644 --- a/src/utils.jl +++ b/src/utils.jl @@ -21,7 +21,7 @@ end @inline __needs_concrete_A(::typeof(\)) = true @inline __needs_concrete_A(linsolve) = needs_concrete_A(linsolve) -@inline __maybe_mutable(x, ::AutoSparseEnzyme) = __mutable(x) +@inline __maybe_mutable(x, ::AutoSparse{<:AutoEnzyme}) = __mutable(x) # TODO: remove? @inline __maybe_mutable(x, _) = x @inline @generated function _vec(v) @@ -77,10 +77,7 @@ LazyArrays.applied_axes(::typeof(__zero), x) = axes(x) @inline __maybe_symmetric(x::SciMLOperators.AbstractSciMLOperator) = x # SparseAD --> NonSparseAD -@inline __get_nonsparse_ad(::AutoSparseForwardDiff) = AutoForwardDiff() -@inline __get_nonsparse_ad(::AutoSparsePolyesterForwardDiff) = AutoPolyesterForwardDiff() -@inline __get_nonsparse_ad(::AutoSparseFiniteDiff) = AutoFiniteDiff() -@inline __get_nonsparse_ad(::AutoSparseZygote) = AutoZygote() +@inline __get_nonsparse_ad(backend::AutoSparse) = ADTypes.dense_ad(backend) @inline __get_nonsparse_ad(ad) = ad # Simple Checks diff --git a/test/core/rootfind_tests.jl b/test/core/rootfind_tests.jl index caa776d5a..688678ff2 100644 --- a/test/core/rootfind_tests.jl +++ b/test/core/rootfind_tests.jl @@ -5,11 +5,11 @@ using Reexport function __autosparseenzyme() @static if Sys.iswindows() - @warn "Enzyme on Windows stalls. Using AutoSparseFiniteDiff instead till \ + @warn "Enzyme on Windows stalls. Using AutoSparse(AutoFiniteDiff()) instead till \ https://github.com/EnzymeAD/Enzyme.jl/issues/1236 is resolved." 
- return AutoSparseFiniteDiff() + return AutoSparse(AutoFiniteDiff()) else - return AutoSparseEnzyme() + return AutoSparse(AutoEnzyme()) end end @@ -112,8 +112,8 @@ end @test nlprob_iterator_interface(quadratic_f!, p, Val(true), NewtonRaphson()) ≈ sqrt.(p) @testset "ADType: $(autodiff) u0: $(_nameof(u0))" for autodiff in ( - AutoSparseForwardDiff(), AutoSparseFiniteDiff(), - AutoZygote(), AutoSparseZygote(), __autosparseenzyme()), + AutoSparse(AutoForwardDiff()), AutoSparse(AutoFiniteDiff()), + AutoZygote(), AutoSparse(AutoZygote()), __autosparseenzyme()), u0 in (1.0, [1.0, 1.0]) probN = NonlinearProblem(quadratic_f, u0, 2.0) @@ -175,8 +175,8 @@ end @test nlprob_iterator_interface(quadratic_f!, p, Val(true), TrustRegion()) ≈ sqrt.(p) @testset "ADType: $(autodiff) u0: $(_nameof(u0)) radius_update_scheme: $(radius_update_scheme)" for autodiff in ( - AutoSparseForwardDiff(), AutoSparseFiniteDiff(), - AutoZygote(), AutoSparseZygote(), __autosparseenzyme()), + AutoSparse(AutoForwardDiff()), AutoSparse(AutoFiniteDiff()), + AutoZygote(), AutoSparse(AutoZygote()), __autosparseenzyme()), u0 in (1.0, [1.0, 1.0]), radius_update_scheme in radius_update_schemes @@ -271,8 +271,8 @@ end end @testset "ADType: $(autodiff) u0: $(_nameof(u0))" for autodiff in ( - AutoSparseForwardDiff(), AutoSparseFiniteDiff(), - AutoZygote(), AutoSparseZygote(), __autosparseenzyme()), + AutoSparse(AutoForwardDiff()), AutoSparse(AutoFiniteDiff()), + AutoZygote(), AutoSparse(AutoZygote()), __autosparseenzyme()), u0 in (1.0, [1.0, 1.0]) probN = NonlinearProblem(quadratic_f, u0, 2.0) @@ -453,8 +453,8 @@ end quadratic_f!, p, Val(true), PseudoTransient(; alpha_initial = 10.0)) ≈ sqrt.(p) @testset "ADType: $(autodiff) u0: $(_nameof(u0))" for autodiff in ( - AutoSparseForwardDiff(), AutoSparseFiniteDiff(), - AutoZygote(), AutoSparseZygote(), __autosparseenzyme()), + AutoSparse(AutoForwardDiff()), AutoSparse(AutoFiniteDiff()), + AutoZygote(), AutoSparse(AutoZygote()), __autosparseenzyme()), u0 in (1.0, [1.0, 1.0]) probN = NonlinearProblem(quadratic_f, u0, 2.0) diff --git a/test/misc/bruss_tests.jl b/test/misc/bruss_tests.jl index 08eae5436..43bcc3eb8 100644 --- a/test/misc/bruss_tests.jl +++ b/test/misc/bruss_tests.jl @@ -47,11 +47,11 @@ @test norm(sol.resid, Inf) < 1e-8 sol = solve(prob_brusselator_2d, - NewtonRaphson(autodiff = AutoSparseForwardDiff()); abstol = 1e-8) + NewtonRaphson(autodiff = AutoSparse(AutoForwardDiff())); abstol = 1e-8) @test norm(sol.resid, Inf) < 1e-8 sol = solve(prob_brusselator_2d, - NewtonRaphson(autodiff = AutoSparseFiniteDiff()); abstol = 1e-8) + NewtonRaphson(autodiff = AutoSparse(AutoFiniteDiff())); abstol = 1e-8) @test norm(sol.resid, Inf) < 1e-8 du0 = copy(u0) @@ -69,10 +69,11 @@ @test !all(iszero, jac_prototype) sol = solve(prob_brusselator_2d, - NewtonRaphson(autodiff = AutoSparseFiniteDiff()); abstol = 1e-8) + NewtonRaphson(autodiff = AutoSparse(AutoFiniteDiff())); abstol = 1e-8) @test norm(sol.resid, Inf) < 1e-8 - cache = init(prob_brusselator_2d, NewtonRaphson(; autodiff = AutoSparseForwardDiff())) + cache = init( + prob_brusselator_2d, NewtonRaphson(; autodiff = AutoSparse(AutoForwardDiff()))) @test maximum(cache.jac_cache.jac_cache.coloring.colorvec) == 12 - @test cache.jac_cache.autodiff isa AutoSparseForwardDiff + @test cache.jac_cache.autodiff isa AutoSparse{<:AutoForwardDiff} end
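The source and test changes above lean on a handful of ADTypes ≥ 1 idioms; a short sketch of them (the values in the comments are for illustration and assume `mode`/`dense_ad` behave as they are used in this diff):

```julia
using ADTypes

ad = AutoSparse(AutoZygote())

ADTypes.mode(ad)                 # ReverseMode() — mode-based checks replace the old
                                 # Abstract(Forward/Reverse/FiniteDifferences)Mode unions
ADTypes.dense_ad(ad)             # AutoZygote() — replaces the per-type __get_nonsparse_ad methods
ad isa AutoSparse{<:AutoZygote}  # true — the dispatch/test pattern (cf. AutoSparse{<:AutoEnzyme})
```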
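Separately, per the note kept in `docs/src/basics/autodiff.md`, a sparse `jac_prototype` on the `NonlinearFunction` is honoured by every backend even without `AutoSparse`; a minimal sketch with an assumed toy in-place residual:

```julia
using NonlinearSolve, SparseArrays

f!(du, u, p) = (du .= u .* u .- p)      # residual with a diagonal Jacobian
jac_prototype = spdiagm(0 => ones(2))   # declare the known sparsity pattern up front
prob = NonlinearProblem(NonlinearFunction(f!; jac_prototype), [1.0, 1.0], 2.0)

# Differentiation specializes on the sparse prototype; wrapping in AutoSparse is only
# needed when the pattern should be detected automatically.
sol = solve(prob, NewtonRaphson(; autodiff = AutoForwardDiff()))
```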