Line | Exclusive | Inclusive | Code |
---|---|---|---|
"""
    NewtonRaphson(; concrete_jac = nothing, linsolve = nothing, linesearch = nothing,
        precs = DEFAULT_PRECS, autodiff = nothing, reuse = false, reusetol = 1e-1)

An advanced NewtonRaphson implementation with support for efficient handling of sparse
matrices via colored automatic differentiation and preconditioned linear solvers. Designed
for large-scale and numerically-difficult nonlinear systems.

### Keyword Arguments

  - `autodiff`: determines the backend used for the Jacobian. Note that this argument is
    ignored if an analytical Jacobian is passed, as that will be used instead. Defaults to
    `nothing` which means that a default is selected according to the problem specification!
    Valid choices are types from ADTypes.jl.
  - `concrete_jac`: whether to build a concrete Jacobian. If a Krylov-subspace method is used,
    then the Jacobian will not be constructed and instead direct Jacobian-vector products
    `J*v` are computed using forward-mode automatic differentiation or finite differencing
    tricks (without ever constructing the Jacobian). However, if the Jacobian is still needed,
    for example for a preconditioner, `concrete_jac = true` can be passed in order to force
    the construction of the Jacobian.
  - `linsolve`: the [LinearSolve.jl](https://github.com/SciML/LinearSolve.jl) used for the
    linear solves within the Newton method. Defaults to `nothing`, which means it uses the
    LinearSolve.jl default algorithm choice. For more information on available algorithm
    choices, see the [LinearSolve.jl documentation](https://docs.sciml.ai/LinearSolve/stable/).
  - `precs`: the choice of preconditioners for the linear solver. Defaults to using no
    preconditioners. For more information on specifying preconditioners for LinearSolve
    algorithms, consult the
    [LinearSolve.jl documentation](https://docs.sciml.ai/LinearSolve/stable/).
  - `linesearch`: the line search algorithm to use. Defaults to [`LineSearch()`](@ref),
    which means that no line search is performed. Algorithms from `LineSearches.jl` can be
    used here directly, and they will be converted to the correct `LineSearch`.
  - `reuse`: Determines if the Jacobian is reused between (quasi-)Newton steps. Defaults to
    `false`. If `true` we check how far we stepped with the same Jacobian, and automatically
    take a new Jacobian if we stepped more than `reusetol` or if convergence slows or starts
    to diverge. If `false`, the Jacobian is updated in each step.
  - `reusetol`: Tolerance on the norm of the accumulated step `Δu` taken with a reused
    Jacobian. Once exceeded, a fresh Jacobian is computed. Only used when `reuse = true`.
    Defaults to `1e-1`.
"""
@concrete struct NewtonRaphson{CJ, AD} <: AbstractNewtonAlgorithm{CJ, AD}
    ad::AD          # autodiff backend (ADTypes.jl type or `nothing` for auto-selection)
    linsolve        # LinearSolve.jl algorithm or `nothing` for the default
    precs           # preconditioner constructor for the linear solver
    linesearch      # `LineSearch` wrapper (see the keyword constructor)
    reusetol        # step-size tolerance triggering Jacobian recomputation under reuse
    reuse::Bool     # whether to reuse the Jacobian across steps
end
45 | |||
# Rebuild the algorithm with a (possibly newly selected) autodiff backend `ad`,
# carrying over every other configuration field unchanged.
function set_ad(alg::NewtonRaphson{CJ}, ad) where {CJ}
    return NewtonRaphson{CJ}(ad, alg.linsolve, alg.precs, alg.linesearch, alg.reusetol,
        alg.reuse)
end
54 | |||
# Keyword constructor: normalizes `linesearch` into a `LineSearch` wrapper and lifts
# `concrete_jac` into the type domain via `_unwrap_val`.
function NewtonRaphson(; concrete_jac = nothing, linsolve = nothing, linesearch = nothing,
        precs = DEFAULT_PRECS, autodiff = nothing, reuse = false, reusetol = 1e-1)
    ls = linesearch isa LineSearch ? linesearch : LineSearch(; method = linesearch)
    return NewtonRaphson{_unwrap_val(concrete_jac)}(autodiff, linsolve, precs, ls,
        reusetol, reuse)
end
65 | |||
# Mutable solver state for `NewtonRaphson`, threaded through `__init` / `perform_step!`.
@concrete mutable struct NewtonRaphsonCache{iip} <: AbstractNonlinearSolveCache{iip}
    f                      # the nonlinear function from the problem
    alg                    # the concrete `NewtonRaphson` algorithm
    u                      # current iterate
    fu                     # residual f(u) at the current iterate
    res_norm_prev          # residual norm from the previous step (Jacobian-reuse check)
    u_cache                # copy of the previous iterate
    Δu                     # accumulated step taken with the current (reused) Jacobian
    fu_cache               # scratch residual storage
    du                     # Newton direction J \ fu
    p                      # problem parameters
    uf                     # wrapped function used by the Jacobian machinery
    linsolve               # LinearSolve.jl cache
    J                      # Jacobian (or operator) storage
    jac_cache              # cache for Jacobian evaluation (coloring, etc.)
    force_stop             # flag set when termination is triggered
    maxiters::Int          # iteration budget
    internalnorm           # norm used for residual / step measurements
    retcode::ReturnCode.T  # solver return code
    abstol                 # absolute tolerance (also reused as linear-solve reltol)
    reltol                 # relative tolerance
    prob                   # the originating NonlinearProblem
    stats::NLStats         # counters (f evals, Jacobians, linear solves, ...)
    ls_cache               # line search cache
    tc_cache               # termination condition cache
    trace                  # convergence trace
end
93 | |||
94 | 119 (41 %) |
238 (83 %)
samples spent in __init
119 (50 %) (incl.) when called from #init_call#30 line 528 119 (50 %) (incl.) when called from __init line 94
119 (100 %)
samples spent calling
#__init#44
function SciMLBase.__init(prob::NonlinearProblem{uType, iip}, alg_::NewtonRaphson, args...;
|
|
95 | alias_u0 = false, maxiters = 1000, abstol = nothing, reltol = nothing, | ||
96 | termination_condition = nothing, internalnorm = DEFAULT_NORM, linsolve_kwargs = (;), | ||
97 | kwargs...) where {uType, iip} | ||
98 | alg = get_concrete_algorithm(alg_, prob) | ||
99 | @unpack f, u0, p = prob | ||
100 | u = __maybe_unaliased(u0, alias_u0) | ||
101 | Δu = zero(u) | ||
102 | fu = evaluate_f(prob, u) | ||
103 | res_norm_prev = internalnorm(fu) | ||
104 | 119 (41 %) |
119 (100 %)
samples spent calling
jacobian_caches
uf, linsolve, J, fu_cache, jac_cache, du = jacobian_caches(alg, f, u, p, Val(iip);
|
|
105 | linsolve_kwargs) | ||
106 | |||
107 | abstol, reltol, tc_cache = init_termination_cache(abstol, reltol, fu, u, | ||
108 | termination_condition) | ||
109 | |||
110 | ls_cache = init_linesearch_cache(alg.linesearch, f, u, p, fu, Val(iip)) | ||
111 | trace = init_nonlinearsolve_trace(alg, u, fu, ApplyArray(__zero, J), du; kwargs...) | ||
112 | |||
113 | @bb u_cache = copy(u) | ||
114 | |||
115 | return NewtonRaphsonCache{iip}(f, alg, u, fu, res_norm_prev, u_cache, Δu, fu_cache, du, | ||
116 | p, uf, linsolve, J, | ||
117 | jac_cache, false, maxiters, internalnorm, ReturnCode.Default, abstol, reltol, prob, | ||
118 | NLStats(1, 0, 0, 0, 0), ls_cache, tc_cache, trace) | ||
119 | end | ||
120 | |||
# One (damped) Newton iteration: (re)compute the Jacobian if needed, solve J*du = fu,
# line-search along -du, and update the iterate plus bookkeeping state.
function perform_step!(cache::NewtonRaphsonCache{iip}) where {iip}
    @unpack alg = cache
    @unpack reuse = alg

    if reuse
        # Decide whether the reused Jacobian is still trustworthy: recompute when the
        # residual grew since last step, when the accumulated step exceeds `reusetol`,
        # or when no Jacobian has been computed yet.
        res_norm = cache.internalnorm(cache.fu)
        update = (res_norm > cache.res_norm_prev) ||
                 (cache.internalnorm(cache.Δu) > alg.reusetol)
        if update || cache.stats.njacs == 0
            cache.J = jacobian!!(cache.J, cache)
            # Reset the accumulated step taken with this (fresh) Jacobian.
            cache.Δu .*= false
            # u = u - J \ fu
            linres = dolinsolve(cache, alg.precs, cache.linsolve; A = cache.J,
                b = _vec(cache.fu),
                linu = _vec(cache.du), cache.p, reltol = cache.abstol)
        else
            # Same (factorized) Jacobian, new right-hand side only.
            linres = dolinsolve(cache, alg.precs, cache.linsolve; b = _vec(cache.fu),
                linu = _vec(cache.du), cache.p, reltol = cache.abstol)
        end

        cache.res_norm_prev = res_norm
    else
        cache.J = jacobian!!(cache.J, cache)

        # u = u - J \ fu
        linres = dolinsolve(cache, alg.precs, cache.linsolve; A = cache.J,
            b = _vec(cache.fu),
            linu = _vec(cache.du), cache.p, reltol = cache.abstol)
    end

    cache.linsolve = linres.cache
    cache.du = _restructure(cache.du, linres.u)

    # Line Search
    α = perform_linesearch!(cache.ls_cache, cache.u, cache.du)
    @bb axpy!(-α, cache.du, cache.u)

    evaluate_f(cache, cache.u, cache.p)

    update_trace!(cache, α)
    check_and_update!(cache, cache.fu, cache.u, cache.u_cache)

    # Δu is only consulted by the reuse check above, so only accumulate it when reuse
    # is enabled — this skips an allocating `u - u_cache` on every plain-Newton step.
    if reuse
        @bb axpy!(true, cache.u - cache.u_cache, cache.Δu)
    end
    @bb copyto!(cache.u_cache, cache.u)
    return nothing
end