From 6ff3027e298504975fdf265a3d4b8378b4fee016 Mon Sep 17 00:00:00 2001 From: Douglas Bates Date: Wed, 7 Jul 2021 18:07:43 -0500 Subject: [PATCH 01/23] Add a progress meter for LMM fits --- Project.toml | 2 +- src/linearmixedmodel.jl | 7 ++++++- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/Project.toml b/Project.toml index 49ef70bb9..a5f65a451 100644 --- a/Project.toml +++ b/Project.toml @@ -34,7 +34,7 @@ JSON3 = "1" LazyArtifacts = "1" NLopt = "0.5, 0.6" PooledArrays = "0.5, 1" -ProgressMeter = "1.5" +ProgressMeter = "1.5, 1.6, 1.7" StaticArrays = "0.11, 0.12, 1" StatsBase = "0.31, 0.32, 0.33" StatsFuns = "0.8, 0.9" diff --git a/src/linearmixedmodel.jl b/src/linearmixedmodel.jl index 4df60c5b3..391e2815a 100644 --- a/src/linearmixedmodel.jl +++ b/src/linearmixedmodel.jl @@ -397,15 +397,20 @@ function fit!(m::LinearMixedModel{T}; verbose::Bool = false, REML::Bool = false) end opt = Opt(optsum) optsum.REML = REML + prog = ProgressUnknown(; showspeed=true) + iter = 1 function obj(x, g) isempty(g) || throw(ArgumentError("g should be empty for this objective")) val = objective(updateL!(setθ!(m, x))) + ProgressMeter.next!(prog; showvalues = [(:objective, val),]) + iter += 1 verbose && println(round(val, digits = 5), " ", x) val - end + end NLopt.min_objective!(opt, obj) optsum.finitial = obj(optsum.initial, T[]) fmin, xmin, ret = NLopt.optimize!(opt, copyto!(optsum.final, optsum.initial)) + ProgressMeter.finish!(prog) ## check if small non-negative parameter values can be set to zero xmin_ = copy(xmin) lb = optsum.lowerbd From dbf1c09f8f670006a70083d9b538ae219c8ce2f1 Mon Sep 17 00:00:00 2001 From: Douglas Bates Date: Wed, 7 Jul 2021 18:21:45 -0500 Subject: [PATCH 02/23] Remove unnecessary iteration counter --- src/linearmixedmodel.jl | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/src/linearmixedmodel.jl b/src/linearmixedmodel.jl index 391e2815a..155986783 100644 --- a/src/linearmixedmodel.jl +++ b/src/linearmixedmodel.jl @@ -398,15 +398,13 @@ function fit!(m::LinearMixedModel{T}; verbose::Bool = false, REML::Bool = false) opt = Opt(optsum) optsum.REML = REML prog = ProgressUnknown(; showspeed=true) - iter = 1 function obj(x, g) isempty(g) || throw(ArgumentError("g should be empty for this objective")) val = objective(updateL!(setθ!(m, x))) ProgressMeter.next!(prog; showvalues = [(:objective, val),]) - iter += 1 verbose && println(round(val, digits = 5), " ", x) val - end + end NLopt.min_objective!(opt, obj) optsum.finitial = obj(optsum.initial, T[]) fmin, xmin, ret = NLopt.optimize!(opt, copyto!(optsum.final, optsum.initial)) From 8e38b1772bcb64c468fbbe0d544d4ccd8350f3d1 Mon Sep 17 00:00:00 2001 From: Phillip Alday Date: Thu, 8 Jul 2021 10:00:15 -0500 Subject: [PATCH 03/23] require at least julia 1.6 --- .github/workflows/Tier1.yml | 2 +- NEWS.md | 4 ++++ Project.toml | 2 +- README.md | 8 ++++---- 4 files changed, 10 insertions(+), 6 deletions(-) diff --git a/.github/workflows/Tier1.yml b/.github/workflows/Tier1.yml index 1b07f523c..0554fd5d6 100644 --- a/.github/workflows/Tier1.yml +++ b/.github/workflows/Tier1.yml @@ -19,7 +19,7 @@ jobs: runs-on: ${{ matrix.os }} strategy: matrix: - julia-version: [1.4, 1.6] + julia-version: [1.6] julia-arch: [x64] os: [ubuntu-18.04, macos-10.15, windows-2019] steps: diff --git a/NEWS.md b/NEWS.md index 680fe0696..31ed9161f 100644 --- a/NEWS.md +++ b/NEWS.md @@ -24,6 +24,9 @@ MixedModels v4.0.0 Release Notes * `saveoptsum` and `restoreoptsum!` provide for saving and restoring the `optsum` field of a `LinearMixedModel` as 
a JSON file, allowing for recreating a model fit that may take a long time for parameter optimization. [#506] +* Verbose output now uses `ProgressMeter`, which gives useful information about the timing + of each iteration and does not flood stdio. [#539] +* Support for Julia < 1.6 has been dropped. [#539] Run-time formula syntax ----------------------- @@ -257,3 +260,4 @@ Package dependencies [#524]: https://github.com/JuliaStats/MixedModels.jl/issues/524 [#536]: https://github.com/JuliaStats/MixedModels.jl/issues/536 [#537]: https://github.com/JuliaStats/MixedModels.jl/issues/537 +[#539]: https://github.com/JuliaStats/MixedModels.jl/issues/539 diff --git a/Project.toml b/Project.toml index a5f65a451..fdd50cba8 100644 --- a/Project.toml +++ b/Project.toml @@ -41,7 +41,7 @@ StatsFuns = "0.8, 0.9" StatsModels = "0.6.23" StructTypes = "1" Tables = "1" -julia = "1.4" +julia = "1.6" [extras] DataFrames = "a93c6f00-e57d-5684-b7b6-d8193f3e46c0" diff --git a/README.md b/README.md index 13f9e3aa5..ec4de3ce1 100644 --- a/README.md +++ b/README.md @@ -49,11 +49,11 @@ Typical distribution forms are _Bernoulli_ for binary data or _Poisson_ for coun |OS|OS Version|Arch|Julia|Tier| |:-:|:-:|:-:|:-:|:-:| -|Linux|Ubuntu 18.04|x64|v1.4, 1.5, 1.6|1| -|macOS|Catalina 10.15|x64|v1.4, 1.5, 1.6|1| -|Windows|Server 2019|x64|v1.4, 1.5, 1.6|1| +|Linux|Ubuntu 18.04|x64|v1.6|1| +|macOS|Catalina 10.15|x64v1.6|1| +|Windows|Server 2019|x64|v1.6|1| -Upon release of the next Julia LTS, Tier 1 will become Tier 2 and Julia LTS will become Tier 1. +Note that previous releases still support older Julia versions. ## Version 3.0.0 From e8d0030189274267b7aaf677826c2f451beba319 Mon Sep 17 00:00:00 2001 From: Douglas Bates Date: Thu, 8 Jul 2021 12:28:04 -0500 Subject: [PATCH 04/23] Add progress meter for GLMMs. Change format. --- src/generalizedlinearmixedmodel.jl | 6 +++++- src/linearmixedmodel.jl | 5 ++--- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/src/generalizedlinearmixedmodel.jl b/src/generalizedlinearmixedmodel.jl index 6f781f65b..6af1659e9 100644 --- a/src/generalizedlinearmixedmodel.jl +++ b/src/generalizedlinearmixedmodel.jl @@ -241,7 +241,7 @@ fit( ) """ - fit!(m::GeneralizedLinearMixedModel[, verbose = false, fast = false, nAGQ = 1]) + fit!(m::GeneralizedLinearMixedModel[, verbose=false, fast=false, nAGQ=1, progress=true]) Optimize the objective function for `m`. @@ -254,6 +254,7 @@ function fit!( verbose::Bool = false, fast::Bool = false, nAGQ::Integer = 1, + progress::Bool = true, ) where {T} β = m.β lm = m.LMM @@ -269,16 +270,19 @@ function fit!( optsum.final = copy(optsum.initial) end setpar! = fast ? setθ! : setβθ! 
+ prog = ProgressUnknown("Minimizing"; showspeed=true) function obj(x, g) isempty(g) || throw(ArgumentError("g should be empty for this objective")) val = deviance(pirls!(setpar!(m, x), fast, verbose), nAGQ) verbose && println(round(val, digits = 5), " ", x) + progress && ProgressMeter.next!(prog; showvalues = [(:objective, val),]) val end opt = Opt(optsum) NLopt.min_objective!(opt, obj) optsum.finitial = obj(optsum.initial, T[]) fmin, xmin, ret = NLopt.optimize(opt, copyto!(optsum.final, optsum.initial)) + ProgressMeter.finish!(prog) ## check if very small parameter values bounded below by zero can be set to zero xmin_ = copy(xmin) for i in eachindex(xmin_) diff --git a/src/linearmixedmodel.jl b/src/linearmixedmodel.jl index 155986783..b4e5d4a4b 100644 --- a/src/linearmixedmodel.jl +++ b/src/linearmixedmodel.jl @@ -397,12 +397,11 @@ function fit!(m::LinearMixedModel{T}; verbose::Bool = false, REML::Bool = false) end opt = Opt(optsum) optsum.REML = REML - prog = ProgressUnknown(; showspeed=true) + prog = ProgressUnknown("Minimizing"; showspeed=true) function obj(x, g) isempty(g) || throw(ArgumentError("g should be empty for this objective")) val = objective(updateL!(setθ!(m, x))) - ProgressMeter.next!(prog; showvalues = [(:objective, val),]) - verbose && println(round(val, digits = 5), " ", x) + verbose && ProgressMeter.next!(prog; showvalues = [(:objective, val),]) val end NLopt.min_objective!(opt, obj) From cbd69083e26f988011a84dbf7e715fc091bb88e3 Mon Sep 17 00:00:00 2001 From: Douglas Bates Date: Fri, 9 Jul 2021 11:42:27 -0500 Subject: [PATCH 05/23] Rename verbose to progress, change default to true --- src/generalizedlinearmixedmodel.jl | 4 ++++ src/linearmixedmodel.jl | 27 ++++++++++++++------------- 2 files changed, 18 insertions(+), 13 deletions(-) diff --git a/src/generalizedlinearmixedmodel.jl b/src/generalizedlinearmixedmodel.jl index 6af1659e9..86a06aa1a 100644 --- a/src/generalizedlinearmixedmodel.jl +++ b/src/generalizedlinearmixedmodel.jl @@ -248,6 +248,10 @@ Optimize the objective function for `m`. When `fast` is `true` a potentially much faster but slightly less accurate algorithm, in which `pirls!` optimizes both the random effects and the fixed-effects parameters, is used. + +if `progress` is `true`, the default, a `ProgressMeter.ProgressUnknown` counter is displayed +during the iterations to minimize the deviance. There is a delay before this display is initialized +and it may not be shown at all for models that are optimized quickly. """ function fit!( m::GeneralizedLinearMixedModel{T}; diff --git a/src/linearmixedmodel.jl b/src/linearmixedmodel.jl index b4e5d4a4b..7182f8a60 100644 --- a/src/linearmixedmodel.jl +++ b/src/linearmixedmodel.jl @@ -166,7 +166,7 @@ fit( tbl; wts = [], contrasts = Dict{Symbol,Any}(), - verbose::Bool = false, + progress::Bool = true, REML::Bool = false, ) = fit( LinearMixedModel, @@ -174,7 +174,7 @@ fit( Tables.columntable(tbl), wts = wts, contrasts = contrasts, - verbose = verbose, + progress = progress, REML = REML, ) @@ -184,11 +184,11 @@ fit( tbl::Tables.ColumnTable; wts = wts, contrasts = contrasts, - verbose = verbose, + progress = progress, REML = REML, ) = fit!( LinearMixedModel(f, tbl, contrasts = contrasts, wts = wts), - verbose = verbose, + progress = progress, REML = REML, ) @@ -201,7 +201,7 @@ fit( tbl; wts = [], contrasts = Dict{Symbol,Any}(), - verbose::Bool = false, + progress::Bool = true, REML::Bool = false, offset = [], ) = !isempty(offset) ? 
_offseterr() : fit( @@ -210,7 +210,7 @@ fit( tbl, wts = wts, contrasts = contrasts, - verbose = verbose, + progress = progress, REML = REML, ) @@ -222,7 +222,7 @@ fit( l::IdentityLink; wts = [], contrasts = Dict{Symbol,Any}(), - verbose::Bool = false, + progress::Bool = true, REML::Bool = false, offset = [], fast::Bool = false, @@ -233,7 +233,7 @@ fit( tbl, wts = wts, contrasts = contrasts, - verbose = verbose, + progress = progress, REML = REML, ) @@ -384,12 +384,13 @@ function feL(m::LinearMixedModel) end """ - fit!(m::LinearMixedModel[; verbose::Bool=false, REML::Bool=false]) + fit!(m::LinearMixedModel[; progress::Bool=true, REML::Bool=false]) -Optimize the objective of a `LinearMixedModel`. When `verbose` is `true` the values of the -objective and the parameters are printed on stdout at each function evaluation. +Optimize the objective of a `LinearMixedModel`. When `progress` is `true` a +`ProgressMeter.ProgressUnknown` display is shown during the optimization of the +objective, if the optimization takes more than one second or so. """ -function fit!(m::LinearMixedModel{T}; verbose::Bool = false, REML::Bool = false) where {T} +function fit!(m::LinearMixedModel{T}; progress::Bool=true, REML::Bool=false) where {T} optsum = m.optsum # this doesn't matter for LMM, but it does for GLMM, so let's be consistent if optsum.feval > 0 @@ -401,7 +402,7 @@ function fit!(m::LinearMixedModel{T}; verbose::Bool = false, REML::Bool = false) function obj(x, g) isempty(g) || throw(ArgumentError("g should be empty for this objective")) val = objective(updateL!(setθ!(m, x))) - verbose && ProgressMeter.next!(prog; showvalues = [(:objective, val),]) + progress && ProgressMeter.next!(prog; showvalues = [(:objective, val),]) val end NLopt.min_objective!(opt, obj) From 5696c9169dc19321fe574df22212393db0d69ed3 Mon Sep 17 00:00:00 2001 From: Douglas Bates Date: Sat, 10 Jul 2021 10:31:48 -0500 Subject: [PATCH 06/23] Expand on NEWS entry for #539 --- NEWS.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/NEWS.md b/NEWS.md index 31ed9161f..20bb42ccb 100644 --- a/NEWS.md +++ b/NEWS.md @@ -25,7 +25,8 @@ MixedModels v4.0.0 Release Notes field of a `LinearMixedModel` as a JSON file, allowing for recreating a model fit that may take a long time for parameter optimization. [#506] * Verbose output now uses `ProgressMeter`, which gives useful information about the timing - of each iteration and does not flood stdio. [#539] + of each iteration and does not flood stdio. The `verbose` argument has been renamed `progress` + and the default changed to `true`. [#539] * Support for Julia < 1.6 has been dropped. 
[#539] Run-time formula syntax From 8c3ddffa06b5bd5ba87d27342375d5dfedc3aac9 Mon Sep 17 00:00:00 2001 From: Douglas Bates Date: Sat, 10 Jul 2021 10:42:03 -0500 Subject: [PATCH 07/23] Back-out changes to compat for ProgressMeter --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index fdd50cba8..7287a723d 100644 --- a/Project.toml +++ b/Project.toml @@ -34,7 +34,7 @@ JSON3 = "1" LazyArtifacts = "1" NLopt = "0.5, 0.6" PooledArrays = "0.5, 1" -ProgressMeter = "1.5, 1.6, 1.7" +ProgressMeter = "1.5" StaticArrays = "0.11, 0.12, 1" StatsBase = "0.31, 0.32, 0.33" StatsFuns = "0.8, 0.9" From b002ad205f3934aee8513df5669b7ee265b11249 Mon Sep 17 00:00:00 2001 From: Douglas Bates Date: Mon, 12 Jul 2021 09:03:25 -0500 Subject: [PATCH 08/23] Update src/generalizedlinearmixedmodel.jl Co-authored-by: Phillip Alday --- src/generalizedlinearmixedmodel.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/generalizedlinearmixedmodel.jl b/src/generalizedlinearmixedmodel.jl index 86a06aa1a..5121cdaea 100644 --- a/src/generalizedlinearmixedmodel.jl +++ b/src/generalizedlinearmixedmodel.jl @@ -249,7 +249,7 @@ When `fast` is `true` a potentially much faster but slightly less accurate algor which `pirls!` optimizes both the random effects and the fixed-effects parameters, is used. -if `progress` is `true`, the default, a `ProgressMeter.ProgressUnknown` counter is displayed +If `progress` is `true`, the default, a `ProgressMeter.ProgressUnknown` counter is displayed. during the iterations to minimize the deviance. There is a delay before this display is initialized and it may not be shown at all for models that are optimized quickly. """ From 4c1adcf3a2203ee147aef6d58f581c7a1efb81ab Mon Sep 17 00:00:00 2001 From: Douglas Bates Date: Mon, 12 Jul 2021 09:03:41 -0500 Subject: [PATCH 09/23] Update src/generalizedlinearmixedmodel.jl Co-authored-by: Phillip Alday --- src/generalizedlinearmixedmodel.jl | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/generalizedlinearmixedmodel.jl b/src/generalizedlinearmixedmodel.jl index 5121cdaea..edbc86744 100644 --- a/src/generalizedlinearmixedmodel.jl +++ b/src/generalizedlinearmixedmodel.jl @@ -252,6 +252,8 @@ is used. If `progress` is `true`, the default, a `ProgressMeter.ProgressUnknown` counter is displayed. during the iterations to minimize the deviance. There is a delay before this display is initialized and it may not be shown at all for models that are optimized quickly. + +If `verbose` is `true`, then both the intermediate results of both the nonlinear optimization and PIRLS are also displayed on standard output. """ function fit!( m::GeneralizedLinearMixedModel{T}; From 20807e7d81440b2edcfb67f0651d1074099a8c51 Mon Sep 17 00:00:00 2001 From: Douglas Bates Date: Mon, 12 Jul 2021 12:12:29 -0500 Subject: [PATCH 10/23] Add progress arg to refit! with default of false. 
--- src/generalizedlinearmixedmodel.jl | 15 ++++++++++----- src/linearmixedmodel.jl | 13 +++++++------ 2 files changed, 17 insertions(+), 11 deletions(-) diff --git a/src/generalizedlinearmixedmodel.jl b/src/generalizedlinearmixedmodel.jl index edbc86744..1831666d7 100644 --- a/src/generalizedlinearmixedmodel.jl +++ b/src/generalizedlinearmixedmodel.jl @@ -171,6 +171,7 @@ fit( verbose::Bool = false, fast::Bool = false, nAGQ::Integer = 1, + progress::Bool = true, ) = fit( GeneralizedLinearMixedModel, f, @@ -183,6 +184,7 @@ fit( verbose = verbose, fast = fast, nAGQ = nAGQ, + progress = progress, ) fit( @@ -197,6 +199,7 @@ fit( verbose::Bool = false, fast::Bool = false, nAGQ::Integer = 1, + progress::Bool = true, ) = fit!( GeneralizedLinearMixedModel( f, @@ -210,6 +213,7 @@ fit( verbose = verbose, fast = fast, nAGQ = nAGQ, + progress = progress, ) @@ -226,6 +230,7 @@ fit( REML::Bool = false, fast::Bool = false, nAGQ::Integer = 1, + progress::Bool = true, ) = fit( GeneralizedLinearMixedModel, f, @@ -238,6 +243,7 @@ fit( verbose = verbose, fast = fast, nAGQ = nAGQ, + progress = progress, ) """ @@ -583,21 +589,20 @@ If not specified, the `fast` and `nAGQ` options from the previous fit are used. """ function refit!(m::GeneralizedLinearMixedModel{T}; fast::Bool = (length(m.θ) == length(m.optsum.final)), - nAGQ::Integer = m.optsum.nAGQ) where T + nAGQ::Integer = m.optsum.nAGQ, progress::Bool=false) where T - fit!(unfit!(m); fast=fast, nAGQ=nAGQ) + fit!(unfit!(m); fast=fast, nAGQ=nAGQ, progress=progress) end function refit!(m::GeneralizedLinearMixedModel{T}, y; fast::Bool = (length(m.θ) == length(m.optsum.final)), - nAGQ::Integer = m.optsum.nAGQ) where T + nAGQ::Integer = m.optsum.nAGQ, progress::Bool=false) where T m_resp_y = m.resp.y length(y) == size(m_resp_y, 1) || throw(DimensionMismatch("")) copyto!(m_resp_y, y) - refit!(m) + refit!(m; progress=progress) end - """ setβθ!(m::GeneralizedLinearMixedModel, v) diff --git a/src/linearmixedmodel.jl b/src/linearmixedmodel.jl index 7182f8a60..f718bc34c 100644 --- a/src/linearmixedmodel.jl +++ b/src/linearmixedmodel.jl @@ -775,21 +775,22 @@ function reevaluateAend!(m::LinearMixedModel) end """ - refit!(m::LinearMixedModel[, y::Vector]; REML=m.optsum.REML) + refit!(m::LinearMixedModel[, y::Vector]; REML=m.optsum.REML, progress::Bool=false) Refit the model `m` after installing response `y`. -If `y` is omitted the current response vector is used. +If `y` is omitted the current response vector is used. Note that the default for +`progress` is `false` on refits (whereas it is `true` in the `fit!` methods.) 
""" -function refit!(m::LinearMixedModel; REML=m.optsum.REML) - fit!(unfit!(m); REML=REML) +function refit!(m::LinearMixedModel; REML=m.optsum.REML, progress::Bool=false) + fit!(unfit!(m); REML=REML, progress=progress) end -function refit!(m::LinearMixedModel, y; REML=m.optsum.REML) +function refit!(m::LinearMixedModel, y; REML=m.optsum.REML, progress::Bool=false) resp = m.y length(y) == length(resp) || throw(DimensionMismatch("")) copyto!(resp, y) - refit!(m; REML=REML) + refit!(m; REML=REML, progress=progress) end """ From 5a311178deb795ae93149058174e0908ef576975 Mon Sep 17 00:00:00 2001 From: Douglas Bates Date: Mon, 12 Jul 2021 12:20:05 -0500 Subject: [PATCH 11/23] progress=false in fits; model_response -> response --- test/FactorReTerm.jl | 16 ++++++++-------- test/bootstrap.jl | 6 +++--- test/fit.jl | 8 ++++---- test/grouping.jl | 2 +- test/linalg.jl | 6 +++--- test/matrixterm.jl | 2 +- test/modelcache.jl | 4 ++-- test/pirls.jl | 18 +++++++++--------- test/pls.jl | 12 ++++++------ 9 files changed, 37 insertions(+), 37 deletions(-) diff --git a/test/FactorReTerm.jl b/test/FactorReTerm.jl index 68e83e0bf..d42042de0 100644 --- a/test/FactorReTerm.jl +++ b/test/FactorReTerm.jl @@ -117,8 +117,8 @@ end # equality of RE terms not defined so check that they generate same modelcols @test modelcols(ff1.rhs[end], slp) == modelcols(ff2.rhs[end], slp) - m1 = fit(LMM, f1, slp) - m2 = fit(LMM, f2, slp) + m1 = fit(LMM, f1, slp; progress=false) + m2 = fit(LMM, f2, slp; progress=false) @test all(m1.λ .== m2.λ) @test StatsModels.terms(f2.rhs[end]) == [one, d, s] @@ -140,8 +140,8 @@ end # test that zerocorr actually worked @test mc1.inds == mc2.inds == [1, 4] - m1 = fit(LMM, f1, slp) - m2 = fit(LMM, f2, slp) + m1 = fit(LMM, f1, slp; progress=false) + m2 = fit(LMM, f2, slp; progress=false) @test all(m1.λ .== m2.λ) @test StatsModels.terms(f2.rhs[end]) == [one, d, s] @@ -263,10 +263,10 @@ end # fitted model to test amalgamate and fnames, and equivalence with other formulations psts = dataset("pastes") - m = fit(MixedModel, @formula(strength ~ 1 + (1|batch/cask)), psts) - m2 = fit(MixedModel, @formula(strength ~ 1 + (1|batch) + (1|batch&cask)), psts) - mr = fit(MixedModel, term(:strength) ~ term(1) + (term(1)|term(:batch)/term(:cask)), psts) - m2r = fit(MixedModel, term(:strength) ~ term(1) + (term(1)|term(:batch)) + (term(1)|term(:batch)&term(:cask)), psts) + m = fit(MixedModel, @formula(strength ~ 1 + (1|batch/cask)), psts; progress=false) + m2 = fit(MixedModel, @formula(strength ~ 1 + (1|batch) + (1|batch&cask)), psts; progress=false) + mr = fit(MixedModel, term(:strength) ~ term(1) + (term(1)|term(:batch)/term(:cask)), psts; progress=false) + m2r = fit(MixedModel, term(:strength) ~ term(1) + (term(1)|term(:batch)) + (term(1)|term(:batch)&term(:cask)), psts; progress=false) @test fnames(m) == fnames(m2) == fnames(mr) == fnames(m2r) == (Symbol("batch & cask"), :batch) @test m.λ == m2.λ == mr.λ == m2r.λ diff --git a/test/bootstrap.jl b/test/bootstrap.jl index 3b3d629ff..e9aa3302e 100644 --- a/test/bootstrap.jl +++ b/test/bootstrap.jl @@ -39,14 +39,14 @@ include("modelcache.jl") center(v::AbstractVector) = v .- (sum(v) / length(v)) grouseticks = DataFrame(dataset(:grouseticks)) grouseticks.ch = center(grouseticks.height) - gm4 = fit(MixedModel, only(gfms[:grouseticks]), grouseticks, Poisson(), fast=true) # fails in pirls! with fast=false + gm4 = fit(MixedModel, only(gfms[:grouseticks]), grouseticks, Poisson(), fast=true, progress=false) # fails in pirls! 
with fast=false gm4sim = refit!(simulate!(StableRNG(42), deepcopy(gm4))) @test isapprox(gm4.β, gm4sim.β; atol=norm(stderror(gm4))) end @testset "Binomial" begin cbpp = dataset(:cbpp) - gm2 = fit(MixedModel, first(gfms[:cbpp]), cbpp, Binomial(), wts=float(cbpp.hsz)) + gm2 = fit(MixedModel, first(gfms[:cbpp]), cbpp, Binomial(), wts=float(cbpp.hsz), progress=false) gm2sim = refit!(simulate!(StableRNG(42), deepcopy(gm2)), fast=true) @test isapprox(gm2.β, gm2sim.β; atol=norm(stderror(gm2))) end @@ -102,7 +102,7 @@ end contra = dataset(:contra) # need a model with fast=false to test that we only # copy the optimizer constraints for θ and not β - gm0 = fit(MixedModel, only(gfms[:contra]), contra, Bernoulli(), fast=false) + gm0 = fit(MixedModel, only(gfms[:contra]), contra, Bernoulli(), fast=false, progress=false) bs = parametricbootstrap(StableRNG(42), 100, gm0) # make sure we're not copying @test length(bs.lowerbd) == length(gm0.θ) diff --git a/test/fit.jl b/test/fit.jl index d9b7d5874..622fa506d 100644 --- a/test/fit.jl +++ b/test/fit.jl @@ -1,18 +1,18 @@ using MixedModels, Test @testset "linear" begin - m1 = fit(MixedModel, @formula(yield ~ 1 + (1|batch)), MixedModels.dataset(:dyestuff)) + m1 = fit(MixedModel, @formula(yield ~ 1 + (1|batch)), MixedModels.dataset(:dyestuff); progress=false) @test first(m1.θ) ≈ 0.7525806757718846 rtol=1.0e-5 end @testset "generalized" begin gm1 = fit(MixedModel, @formula(use ~ 1 + urban + livch + age + abs2(age) + (1|dist)), - MixedModels.dataset(:contra), Bernoulli()) + MixedModels.dataset(:contra), Bernoulli(); progress=false) @test deviance(gm1) ≈ 2372.7286 atol=1.0e-3 end @testset "Normal-IdentityLink" begin - @test isa(fit(MixedModel, @formula(yield ~ 1 + (1|batch)), MixedModels.dataset(:dyestuff), Normal()), + @test isa(fit(MixedModel, @formula(yield ~ 1 + (1|batch)), MixedModels.dataset(:dyestuff), Normal(); progress=false), LinearMixedModel) @test_throws(ArgumentError("use LinearMixedModel for Normal distribution with IdentityLink"), fit(GeneralizedLinearMixedModel, @@ -22,6 +22,6 @@ end @testset "Normal Distribution GLMM" begin @test isa(fit(MixedModel, @formula(yield ~ 1 + (1|batch)), MixedModels.dataset(:dyestuff), - Normal(), SqrtLink()), + Normal(), SqrtLink(); progress=false), GeneralizedLinearMixedModel) end diff --git a/test/grouping.jl b/test/grouping.jl index 6bc31d5a0..a6b53f9aa 100644 --- a/test/grouping.jl +++ b/test/grouping.jl @@ -23,5 +23,5 @@ end @test all(t.contrasts.levels[i] == lev for (i,lev) in enumerate(levs)) # @test_throws OutOfMemoryError fit(MixedModel, @formula(y ~ 1 + (1 | grp)), d) - @test fit(MixedModel, @formula(y ~ 1 + (1 | grp)), d, contrasts=Dict(:grp => Grouping())) isa LinearMixedModel + @test fit(MixedModel, @formula(y ~ 1 + (1 | grp)), d, contrasts=Dict(:grp => Grouping()), progress=false) isa LinearMixedModel end diff --git a/test/linalg.jl b/test/linalg.jl index d62608857..26b2c79f0 100644 --- a/test/linalg.jl +++ b/test/linalg.jl @@ -45,8 +45,8 @@ end G = repeat(PooledArray(string.('A':'T')), inner = 2, outer=10), H = repeat(PooledArray(string.('a':'j')), inner=40), ) - m1 = fit(MixedModel, @formula(Y ~ 1 + A + (1+A|G) + (1+A|H)), df) - wm1 = fit(MixedModel, @formula(Y ~ 1+A+(1+A|G)+(1+A|H)), df, wts = ones(400)) + m1 = fit(MixedModel, @formula(Y ~ 1 + A + (1+A|G) + (1+A|H)), df; progress=false) + wm1 = fit(MixedModel, @formula(Y ~ 1+A+(1+A|G)+(1+A|H)), df, wts=ones(400), progress=false) @test loglikelihood(wm1) ≈ loglikelihood(m1) MixedModels.reweight!(wm1, ones(400)) @test loglikelihood(refit!(wm1)) ≈ 
loglikelihood(m1) @@ -93,7 +93,7 @@ end # an example in MixedModels.jl#123 df = gendata(10000, 500) f = @formula(Y ~ (1 + X|H) + (1|G)) - m500 = fit!(LinearMixedModel(f, df)) + m500 = fit!(LinearMixedModel(f, df), progress=false) # the real test here isn't in the theta comparison but in that the fit # completes successfully @test length(m500.u) == 2 diff --git a/test/matrixterm.jl b/test/matrixterm.jl index 2fe31dd02..6ac9ea24c 100644 --- a/test/matrixterm.jl +++ b/test/matrixterm.jl @@ -42,7 +42,7 @@ end @test typeof(X.x) <: SparseMatrixCSC @test X.rank == 28 @test X.cnames == fe.cnames - m1 = fit!(LinearMixedModel(collect(m.y), X, deepcopy(m.reterms), m.formula)) + m1 = fit!(LinearMixedModel(collect(m.y), X, deepcopy(m.reterms), m.formula); progress=false) @test isapprox(m1.θ, m.θ, rtol = 1.0e-5) end diff --git a/test/modelcache.jl b/test/modelcache.jl index 79fd0954e..5512bf860 100644 --- a/test/modelcache.jl +++ b/test/modelcache.jl @@ -38,13 +38,13 @@ using MixedModels: dataset # for some reason it seems necessary to prime the pump in julia-1.6.0-DEV @isdefined(fittedmodels) || const global fittedmodels = Dict{Symbol,Vector{MixedModel}}( - :dyestuff => [fit(MixedModel, only(fms[:dyestuff]), dataset(:dyestuff))] + :dyestuff => [fit(MixedModel, only(fms[:dyestuff]), dataset(:dyestuff); progress=false)] ); @isdefined(allfms) || const global allfms = merge(fms, gfms) function models(nm::Symbol) get!(fittedmodels, nm) do - [fit(MixedModel, f, dataset(nm)) for f in allfms[nm]] + [fit(MixedModel, f, dataset(nm), progress=false) for f in allfms[nm]] end end diff --git a/test/pirls.jl b/test/pirls.jl index 7d929f795..a6e038b24 100644 --- a/test/pirls.jl +++ b/test/pirls.jl @@ -13,7 +13,7 @@ include("modelcache.jl") @testset "contra" begin contra = dataset(:contra) - gm0 = fit(MixedModel, only(gfms[:contra]), contra, Bernoulli(), fast=true) + gm0 = fit(MixedModel, only(gfms[:contra]), contra, Bernoulli(), fast=true, progress=false) @test gm0.lowerbd == zeros(1) @test isapprox(gm0.θ, [0.5720734451352923], atol=0.001) @test !issingular(gm0) @@ -37,7 +37,7 @@ include("modelcache.jl") @test Distribution(gm0) == Distribution(gm0.resp) @test Link(gm0) == Link(gm0.resp) - gm1 = fit(MixedModel, only(gfms[:contra]), contra, Bernoulli()); + gm1 = fit(MixedModel, only(gfms[:contra]), contra, Bernoulli(); progress=false); @test isapprox(gm1.θ, [0.573054], atol=0.005) @test lowerbd(gm1) == vcat(fill(-Inf, 7), 0.) 
@test isapprox(deviance(gm1), 2361.54575, rtol=0.00001) @@ -47,7 +47,7 @@ include("modelcache.jl") @test nobs(gm0) == 1934 refit!(gm0, fast=true, nAGQ=7) @test isapprox(deviance(gm0), 2360.9838, atol=0.001) - gm1 = fit(MixedModel, only(gfms[:contra]), contra, Bernoulli(), nAGQ=7) + gm1 = fit(MixedModel, only(gfms[:contra]), contra, Bernoulli(), nAGQ=7, progress=false) @test isapprox(deviance(gm1), 2360.8760, atol=0.001) @test gm1.β == gm1.beta @test gm1.θ == gm1.theta @@ -55,7 +55,7 @@ include("modelcache.jl") @test length(gm1y) == size(gm1.X, 1) @test eltype(gm1y) == eltype(gm1.X) @test gm1y == (MixedModels.dataset(:contra).use .== "Y") - @test model_response(gm1) == gm1y + @test response(gm1) == gm1y @test !islinear(gm1) @test :θ in propertynames(gm0) @@ -74,7 +74,7 @@ end @testset "cbpp" begin cbpp = dataset(:cbpp) - gm2 = fit(MixedModel, first(gfms[:cbpp]), cbpp, Binomial(), wts=float(cbpp.hsz)) + gm2 = fit(MixedModel, first(gfms[:cbpp]), cbpp, Binomial(), wts=float(cbpp.hsz), progress=false) @test weights(gm2) == cbpp.hsz @test deviance(gm2,true) ≈ 100.09585619892968 atol=0.0001 @test sum(abs2, gm2.u[1]) ≈ 9.723054788538546 atol=0.0001 @@ -102,7 +102,7 @@ end end @testset "verbagg" begin - gm3 = fit(MixedModel, only(gfms[:verbagg]), dataset(:verbagg), Bernoulli()) + gm3 = fit(MixedModel, only(gfms[:verbagg]), dataset(:verbagg), Bernoulli(), progress=false) @test deviance(gm3) ≈ 8151.40 rtol=1e-5 @test lowerbd(gm3) == vcat(fill(-Inf, 6), zeros(2)) @test fitted(gm3) == predict(gm3) @@ -115,7 +115,7 @@ end center(v::AbstractVector) = v .- (sum(v) / length(v)) grouseticks = DataFrame(dataset(:grouseticks)) grouseticks.ch = center(grouseticks.height) - gm4 = fit(MixedModel, only(gfms[:grouseticks]), grouseticks, Poisson(), fast=true) # fails in pirls! with fast=false + gm4 = fit(MixedModel, only(gfms[:grouseticks]), grouseticks, Poisson(), fast=true, progress=false) # fails in pirls! 
with fast=false @test isapprox(deviance(gm4), 851.4046, atol=0.001) # these two values are not well defined at the optimum #@test isapprox(sum(x -> sum(abs2, x), gm4.u), 196.8695297987013, atol=0.1) @@ -152,7 +152,7 @@ end @test deviance(m1) ≈ 193.5587302384811 rtol=1.e-5 @test only(m1.β) ≈ 4.192196439077657 atol=1.e-5 @test only(m1.θ) ≈ 1.838245201739852 atol=1.e-5 - m11 = fit(MixedModel, gform, goldstein, Poisson(), nAGQ=11) + m11 = fit(MixedModel, gform, goldstein, Poisson(), nAGQ=11, progress=false) @test deviance(m11) ≈ 193.51028088736842 rtol=1.e-5 @test only(m11.β) ≈ 4.192196439077657 atol=1.e-5 @test only(m11.θ) ≈ 1.838245201739852 atol=1.e-5 @@ -181,7 +181,7 @@ end # The offset of log.(expected) is to examine the ratio of observed to expected, based on population mmec = dataset(:mmec) mmform = @formula(deaths ~ 1 + uvb + (1|region)) - gm5 = fit(MixedModel, mmform, mmec, Poisson(); offset=log.(mmec.expected), nAGQ=11) + gm5 = fit(MixedModel, mmform, mmec, Poisson(); offset=log.(mmec.expected), nAGQ=11, progress=false) @test isapprox(deviance(gm5), 655.2533533016059, atol=5.e-3) @test isapprox(first(gm5.θ), 0.4121684550775567, atol=1.e-3) @test isapprox(first(gm5.β), -0.13860166843315044, atol=1.e-3) diff --git a/test/pls.jl b/test/pls.jl index f59259654..8c6a7d6a4 100644 --- a/test/pls.jl +++ b/test/pls.jl @@ -90,7 +90,7 @@ end @test fm1.X == ones(30,1) ds = MixedModels.dataset(:dyestuff) @test fm1.y == ds[:yield] - @test model_response(fm1) == ds.yield + @test response(fm1) == ds.yield @test cond(fm1) == ones(1) @test first(leverage(fm1)) ≈ 0.15650534392640486 rtol=1.e-5 @test sum(leverage(fm1)) ≈ 4.695160317792145 rtol=1.e-5 @@ -379,7 +379,7 @@ end @test ρ === -0.0 # test that systematic zero correlations are returned as -0.0 MixedModels.likelihoodratiotest(fm, fmnc) - fmrs = fit(MixedModel, @formula(reaction ~ 1+days + (0+days|subj)), slp); + fmrs = fit(MixedModel, @formula(reaction ~ 1+days + (0+days|subj)), slp; progress=false); @test objective(fmrs) ≈ 1774.080315280528 rtol=0.00001 @test fmrs.θ ≈ [0.24353985679033105] rtol=0.00001 @@ -394,7 +394,7 @@ end # combining [ReMat{T,S1}, ReMat{T,S2}] for S1 ≠ S2 slpcat = (subj = slp.subj, days = PooledArray(string.(slp.days)), reaction = slp.reaction) - fm_cat = fit(MixedModel, @formula(reaction ~ 1+days+(1|subj)+(0+days|subj)),slpcat) + fm_cat = fit(MixedModel, @formula(reaction ~ 1+days+(1|subj)+(0+days|subj)),slpcat; progress=false) @test fm_cat isa LinearMixedModel σρ = fm_cat.σρs @test σρ isa NamedTuple @@ -411,7 +411,7 @@ end @test all(ρs_intercept .=== -0.0) # also works without explicitly dropped intercept - fm_cat2 = fit(MixedModel, @formula(reaction ~ 1+days+(1|subj)+(days|subj)),slpcat) + fm_cat2 = fit(MixedModel, @formula(reaction ~ 1+days+(1|subj)+(days|subj)),slpcat; progress=false) @test fm_cat2 isa LinearMixedModel σρ = fm_cat2.σρs @test σρ isa NamedTuple @@ -511,7 +511,7 @@ end rng = MersenneTwister(0); x = rand(rng, 100); data = (x = x, x2 = 1.5 .* x, y = rand(rng, 100), z = repeat('A':'T', 5)) - model = fit(MixedModel, @formula(y ~ x + x2 + (1|z)), data) + model = fit(MixedModel, @formula(y ~ x + x2 + (1|z)), data; progress=false) @test length(fixef(model)) == 2 @test rank(model) == 2 @test length(coef(model)) == 3 @@ -546,7 +546,7 @@ end @test vcov(m1) ≈ [1.177035 -4.802598; -4.802598 24.664497] atol = 1.e-4 =# - m2 = fit(MixedModel, @formula(a ~ 1 + b + (1|c)), data, wts = data.w1) + m2 = fit(MixedModel, @formula(a ~ 1 + b + (1|c)), data, wts = data.w1, progress=false) @test m2.θ ≈ [0.295181729258352] atol = 
1.e-4 @test stderror(m2) ≈ [0.9640167, 3.6309696] atol = 1.e-4 @test vcov(m2) ≈ [0.9293282 -2.557527; -2.5575267 13.183940] atol = 1.e-4 From c602e6787566902797bdfdcc497eee78e76c86f9 Mon Sep 17 00:00:00 2001 From: Douglas Bates Date: Mon, 12 Jul 2021 12:31:08 -0500 Subject: [PATCH 12/23] Missed one `progress=false` change --- test/pirls.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/pirls.jl b/test/pirls.jl index a6e038b24..adea18d32 100644 --- a/test/pirls.jl +++ b/test/pirls.jl @@ -147,7 +147,7 @@ end gform = @formula(y ~ 1 + (1|group)) m1 = GeneralizedLinearMixedModel(gform, goldstein, Poisson()) @test !isfitted(m1) - fit!(m1) + fit!(m1; progress=false) @test isfitted(m1) @test deviance(m1) ≈ 193.5587302384811 rtol=1.e-5 @test only(m1.β) ≈ 4.192196439077657 atol=1.e-5 From e15dd1c78cca9aa04354f0b54a9702b54dc1f658 Mon Sep 17 00:00:00 2001 From: Phillip Alday Date: Mon, 12 Jul 2021 12:53:28 -0500 Subject: [PATCH 13/23] ProgressMeter compat bump (needed for showspeed) --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index 7287a723d..b5176c889 100644 --- a/Project.toml +++ b/Project.toml @@ -34,7 +34,7 @@ JSON3 = "1" LazyArtifacts = "1" NLopt = "0.5, 0.6" PooledArrays = "0.5, 1" -ProgressMeter = "1.5" +ProgressMeter = "1.7" StaticArrays = "0.11, 0.12, 1" StatsBase = "0.31, 0.32, 0.33" StatsFuns = "0.8, 0.9" From bf56ca28cfd1c1c4c8655c1068c32e6c8ec917a7 Mon Sep 17 00:00:00 2001 From: Phillip Alday Date: Mon, 12 Jul 2021 13:08:04 -0500 Subject: [PATCH 14/23] kwargs... for refit! --- src/generalizedlinearmixedmodel.jl | 19 ++++++++++--------- src/linearmixedmodel.jl | 16 ++++++++-------- 2 files changed, 18 insertions(+), 17 deletions(-) diff --git a/src/generalizedlinearmixedmodel.jl b/src/generalizedlinearmixedmodel.jl index 1831666d7..45893dd95 100644 --- a/src/generalizedlinearmixedmodel.jl +++ b/src/generalizedlinearmixedmodel.jl @@ -577,30 +577,31 @@ LinearAlgebra.rank(m::GeneralizedLinearMixedModel) = m.LMM.feterm.rank """ refit!(m::GeneralizedLinearMixedModel[, y::Vector]; - fast::Bool = (length(m.θ) == length(m.optsum.final)), - nAGQ::Integer = m.optsum.nAGQ)) + fast::Bool = (length(m.θ) == length(m.optsum.final)), + nAGQ::Integer = m.optsum.nAGQ, + kwargs...) Refit the model `m` after installing response `y`. If `y` is omitted the current response vector is used. If not specified, the `fast` and `nAGQ` options from the previous fit are used. - +`kwargs` are the same as [`fit!`](@ref) """ -function refit!(m::GeneralizedLinearMixedModel{T}; +function refit!(m::GeneralizedLinearMixedModel; fast::Bool = (length(m.θ) == length(m.optsum.final)), - nAGQ::Integer = m.optsum.nAGQ, progress::Bool=false) where T + nAGQ::Integer = m.optsum.nAGQ, kwargs...) - fit!(unfit!(m); fast=fast, nAGQ=nAGQ, progress=progress) + fit!(unfit!(m); fast, nAGQ, kwargs...) end -function refit!(m::GeneralizedLinearMixedModel{T}, y; +function refit!(m::GeneralizedLinearMixedModel, y; fast::Bool = (length(m.θ) == length(m.optsum.final)), - nAGQ::Integer = m.optsum.nAGQ, progress::Bool=false) where T + nAGQ::Integer = m.optsum.nAGQ, kwargs...) m_resp_y = m.resp.y length(y) == size(m_resp_y, 1) || throw(DimensionMismatch("")) copyto!(m_resp_y, y) - refit!(m; progress=progress) + refit!(m; fast, nAGQ, kwargs...) 
end """ diff --git a/src/linearmixedmodel.jl b/src/linearmixedmodel.jl index f718bc34c..ccb61f6bd 100644 --- a/src/linearmixedmodel.jl +++ b/src/linearmixedmodel.jl @@ -386,7 +386,7 @@ end """ fit!(m::LinearMixedModel[; progress::Bool=true, REML::Bool=false]) -Optimize the objective of a `LinearMixedModel`. When `progress` is `true` a +Optimize the objective of a `LinearMixedModel`. When `progress` is `true` a `ProgressMeter.ProgressUnknown` display is shown during the optimization of the objective, if the optimization takes more than one second or so. """ @@ -775,22 +775,22 @@ function reevaluateAend!(m::LinearMixedModel) end """ - refit!(m::LinearMixedModel[, y::Vector]; REML=m.optsum.REML, progress::Bool=false) + refit!(m::LinearMixedModel[, y::Vector]; REML=m.optsum.REML, kwargs...) Refit the model `m` after installing response `y`. -If `y` is omitted the current response vector is used. Note that the default for -`progress` is `false` on refits (whereas it is `true` in the `fit!` methods.) +If `y` is omitted the current response vector is used. +`kwargs` are the same as [`fit!`](@ref). """ -function refit!(m::LinearMixedModel; REML=m.optsum.REML, progress::Bool=false) - fit!(unfit!(m); REML=REML, progress=progress) +function refit!(m::LinearMixedModel; REML=m.optsum.REML, kwargs...) + fit!(unfit!(m); REML=REML, kwargs...) end -function refit!(m::LinearMixedModel, y; REML=m.optsum.REML, progress::Bool=false) +function refit!(m::LinearMixedModel, y; REML=m.optsum.REML, kwargs...) resp = m.y length(y) == length(resp) || throw(DimensionMismatch("")) copyto!(resp, y) - refit!(m; REML=REML, progress=progress) + refit!(m; REML=REML, kwargs...) end """ From 7cabb81299dabbd0e7e6571c76200f3b32782ef3 Mon Sep 17 00:00:00 2001 From: Phillip Alday Date: Mon, 12 Jul 2021 13:23:13 -0500 Subject: [PATCH 15/23] simplify second method --- src/generalizedlinearmixedmodel.jl | 6 ++---- src/linearmixedmodel.jl | 4 ++-- 2 files changed, 4 insertions(+), 6 deletions(-) diff --git a/src/generalizedlinearmixedmodel.jl b/src/generalizedlinearmixedmodel.jl index 45893dd95..b5765f5f4 100644 --- a/src/generalizedlinearmixedmodel.jl +++ b/src/generalizedlinearmixedmodel.jl @@ -595,13 +595,11 @@ function refit!(m::GeneralizedLinearMixedModel; fit!(unfit!(m); fast, nAGQ, kwargs...) end -function refit!(m::GeneralizedLinearMixedModel, y; - fast::Bool = (length(m.θ) == length(m.optsum.final)), - nAGQ::Integer = m.optsum.nAGQ, kwargs...) +function refit!(m::GeneralizedLinearMixedModel, y; kwargs...) m_resp_y = m.resp.y length(y) == size(m_resp_y, 1) || throw(DimensionMismatch("")) copyto!(m_resp_y, y) - refit!(m; fast, nAGQ, kwargs...) + refit!(m; kwargs...) end """ diff --git a/src/linearmixedmodel.jl b/src/linearmixedmodel.jl index ccb61f6bd..e0b712f88 100644 --- a/src/linearmixedmodel.jl +++ b/src/linearmixedmodel.jl @@ -786,11 +786,11 @@ function refit!(m::LinearMixedModel; REML=m.optsum.REML, kwargs...) fit!(unfit!(m); REML=REML, kwargs...) end -function refit!(m::LinearMixedModel, y; REML=m.optsum.REML, kwargs...) +function refit!(m::LinearMixedModel, y; kwargs...) resp = m.y length(y) == length(resp) || throw(DimensionMismatch("")) copyto!(resp, y) - refit!(m; REML=REML, kwargs...) + refit!(m; kwargs...) 
end """ From a9d984a0db3b67b479b73aed4fe42745e309bb79 Mon Sep 17 00:00:00 2001 From: Phillip Alday Date: Mon, 12 Jul 2021 13:43:06 -0500 Subject: [PATCH 16/23] fix tests for latent floating point issue --- src/generalizedlinearmixedmodel.jl | 5 +++-- test/pirls.jl | 15 +++++++++++---- 2 files changed, 14 insertions(+), 6 deletions(-) diff --git a/src/generalizedlinearmixedmodel.jl b/src/generalizedlinearmixedmodel.jl index b5765f5f4..21334cb0b 100644 --- a/src/generalizedlinearmixedmodel.jl +++ b/src/generalizedlinearmixedmodel.jl @@ -591,8 +591,9 @@ If not specified, the `fast` and `nAGQ` options from the previous fit are used. function refit!(m::GeneralizedLinearMixedModel; fast::Bool = (length(m.θ) == length(m.optsum.final)), nAGQ::Integer = m.optsum.nAGQ, kwargs...) - - fit!(unfit!(m); fast, nAGQ, kwargs...) + @show fast + @show nAGQ + fit!(unfit!(m); fast=fast, nAGQ=nAGQ, kwargs...) end function refit!(m::GeneralizedLinearMixedModel, y; kwargs...) diff --git a/test/pirls.jl b/test/pirls.jl index adea18d32..e8ae3dd2f 100644 --- a/test/pirls.jl +++ b/test/pirls.jl @@ -74,7 +74,7 @@ end @testset "cbpp" begin cbpp = dataset(:cbpp) - gm2 = fit(MixedModel, first(gfms[:cbpp]), cbpp, Binomial(), wts=float(cbpp.hsz), progress=false) + gm2 = fit(MixedModel, first(gfms[:cbpp]), cbpp, Binomial(); wts=float(cbpp.hsz), progress=false) @test weights(gm2) == cbpp.hsz @test deviance(gm2,true) ≈ 100.09585619892968 atol=0.0001 @test sum(abs2, gm2.u[1]) ≈ 9.723054788538546 atol=0.0001 @@ -91,11 +91,18 @@ end @testset "GLMM refit" begin gm2r = deepcopy(gm2) @test_throws ArgumentError fit!(gm2r) - refit!(gm2r, 1 .- gm2.y; fast=true) - @test gm2r.β ≈ -gm2.β atol=1e-3 + + refit!(gm2r; fast=true, progress=false) + @test length(gm2r.optsum.final) == 1 @test gm2r.θ ≈ gm2.θ atol=1e-3 - refit!(gm2r, 1 .- gm2.y; fast=false) + # swapping successes and failures to give us the same model + # but with opposite signs. healthy ≈ 1 - response(gm2r) + # but defining it in terms of the original values avoids some + # nasty floating point issues + healthy = @. (cbpp.hsz - cbpp.incid) / cbpp.hsz + refit!(gm2r, healthy; fast=false, progress=false) + @test length(gm2r.optsum.final) == 5 @test gm2r.β ≈ -gm2.β atol=1e-3 @test gm2r.θ ≈ gm2.θ atol=1e-3 end From 55a6d38324edd79ad32dffdbb1147ca49c23c649 Mon Sep 17 00:00:00 2001 From: Phillip Alday Date: Mon, 12 Jul 2021 13:46:40 -0500 Subject: [PATCH 17/23] remove debug code --- src/generalizedlinearmixedmodel.jl | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/generalizedlinearmixedmodel.jl b/src/generalizedlinearmixedmodel.jl index 21334cb0b..18827b718 100644 --- a/src/generalizedlinearmixedmodel.jl +++ b/src/generalizedlinearmixedmodel.jl @@ -591,8 +591,6 @@ If not specified, the `fast` and `nAGQ` options from the previous fit are used. function refit!(m::GeneralizedLinearMixedModel; fast::Bool = (length(m.θ) == length(m.optsum.final)), nAGQ::Integer = m.optsum.nAGQ, kwargs...) - @show fast - @show nAGQ fit!(unfit!(m); fast=fast, nAGQ=nAGQ, kwargs...) end From 39c3d0c43fcd17addc127c76ba937d252eb6c07a Mon Sep 17 00:00:00 2001 From: Douglas Bates Date: Mon, 12 Jul 2021 18:48:32 -0500 Subject: [PATCH 18/23] pass progress=false in refit! 
in tests --- test/bootstrap.jl | 16 ++++++++-------- test/linalg.jl | 2 +- test/pirls.jl | 2 +- 3 files changed, 10 insertions(+), 10 deletions(-) diff --git a/test/bootstrap.jl b/test/bootstrap.jl index e9aa3302e..26b430ac9 100644 --- a/test/bootstrap.jl +++ b/test/bootstrap.jl @@ -20,34 +20,34 @@ include("modelcache.jl") resp₀ = copy(response(fm)) # type conversion of ints to floats simulate!(StableRNG(1234321), fm, β=[1], σ=1) - refit!(fm,resp₀) - refit!(simulate!(StableRNG(1234321), fm)) + refit!(fm, resp₀; progress=false) + refit!(simulate!(StableRNG(1234321), fm); progress=false) @test deviance(fm) ≈ 322.6582 atol=0.001 - refit!(fm, float(ds.yield)) + refit!(fm, float(ds.yield), progress=false) # Global/implicit RNG method Random.seed!(1234321) - refit!(simulate!(fm)) + refit!(simulate!(fm); progress=false) # just make sure this worked, don't check fit # (because the RNG can change between Julia versions) @test response(fm) ≠ resp₀ simulate!(fm, θ = fm.θ) - @test_throws DimensionMismatch refit!(fm, zeros(29)) + @test_throws DimensionMismatch refit!(fm, zeros(29); progress=false) # restore the original state - refit!(fm, vec(float.(ds.yield))) + refit!(fm, vec(float.(ds.yield)); progress=false) end @testset "Poisson" begin center(v::AbstractVector) = v .- (sum(v) / length(v)) grouseticks = DataFrame(dataset(:grouseticks)) grouseticks.ch = center(grouseticks.height) gm4 = fit(MixedModel, only(gfms[:grouseticks]), grouseticks, Poisson(), fast=true, progress=false) # fails in pirls! with fast=false - gm4sim = refit!(simulate!(StableRNG(42), deepcopy(gm4))) + gm4sim = refit!(simulate!(StableRNG(42), deepcopy(gm4)); progress=false) @test isapprox(gm4.β, gm4sim.β; atol=norm(stderror(gm4))) end @testset "Binomial" begin cbpp = dataset(:cbpp) gm2 = fit(MixedModel, first(gfms[:cbpp]), cbpp, Binomial(), wts=float(cbpp.hsz), progress=false) - gm2sim = refit!(simulate!(StableRNG(42), deepcopy(gm2)), fast=true) + gm2sim = refit!(simulate!(StableRNG(42), deepcopy(gm2)); fast=true, progress=false) @test isapprox(gm2.β, gm2sim.β; atol=norm(stderror(gm2))) end @testset "_rand with dispersion" begin diff --git a/test/linalg.jl b/test/linalg.jl index 26b2c79f0..4caa4eb10 100644 --- a/test/linalg.jl +++ b/test/linalg.jl @@ -49,7 +49,7 @@ end wm1 = fit(MixedModel, @formula(Y ~ 1+A+(1+A|G)+(1+A|H)), df, wts=ones(400), progress=false) @test loglikelihood(wm1) ≈ loglikelihood(m1) MixedModels.reweight!(wm1, ones(400)) - @test loglikelihood(refit!(wm1)) ≈ loglikelihood(m1) + @test loglikelihood(refit!(wm1, progress=false)) ≈ loglikelihood(m1) end @testset "rankupdate!" begin diff --git a/test/pirls.jl b/test/pirls.jl index e8ae3dd2f..3dd242457 100644 --- a/test/pirls.jl +++ b/test/pirls.jl @@ -45,7 +45,7 @@ include("modelcache.jl") @test dof(gm0) == length(gm0.β) + length(gm0.θ) @test nobs(gm0) == 1934 - refit!(gm0, fast=true, nAGQ=7) + refit!(gm0, fast=true, nAGQ=7, progress=false) @test isapprox(deviance(gm0), 2360.9838, atol=0.001) gm1 = fit(MixedModel, only(gfms[:contra]), contra, Bernoulli(), nAGQ=7, progress=false) @test isapprox(deviance(gm1), 2360.8760, atol=0.001) From 0ee090ff40a56c365cc647c161714f0aa10fafe5 Mon Sep 17 00:00:00 2001 From: Douglas Bates Date: Mon, 12 Jul 2021 19:08:10 -0500 Subject: [PATCH 19/23] More cases of adding progress=false in refit! 
call --- test/mime.jl | 4 ++-- test/optsummary.jl | 2 +- test/pls.jl | 10 +++++----- 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/test/mime.jl b/test/mime.jl index 3492b38be..a5f8c8961 100644 --- a/test/mime.jl +++ b/test/mime.jl @@ -199,5 +199,5 @@ rows & subj & item & fixed \\\\ end # return these models to their fitted state for the cache -refit!(fm1) -refit!(fm0) +refit!(fm1; progress=false) +refit!(fm0; progress=false) diff --git a/test/optsummary.jl b/test/optsummary.jl index da1de11c6..02bbb799e 100644 --- a/test/optsummary.jl +++ b/test/optsummary.jl @@ -8,7 +8,7 @@ include("modelcache.jl") @testset "maxfeval" begin fm1 = LinearMixedModel(first(fms[:sleepstudy]), dataset(:sleepstudy)) fm1.optsum.maxfeval = 1 - @test_logs (:warn, "NLopt optimization failure: MAXEVAL_REACHED") refit!(fm1) + @test_logs (:warn, "NLopt optimization failure: MAXEVAL_REACHED") refit!(fm1; progress=false) @test fm1.optsum.returnvalue == :MAXEVAL_REACHED @test fm1.optsum.feval == 1 end diff --git a/test/pls.jl b/test/pls.jl index 8c6a7d6a4..4305e0b8e 100644 --- a/test/pls.jl +++ b/test/pls.jl @@ -53,7 +53,7 @@ end output = String(take!(io)) @test startswith(output, "rows:") - refit!(fm1) + refit!(fm1; progress=false) @test isfitted(fm1) @test :θ in propertynames(fm1) @@ -118,7 +118,7 @@ end @test startswith(str, "Variance components:") @test vc.s == sdest(fm1) - refit!(fm1, REML=true) + refit!(fm1; REML=true, progress=false) @test objective(fm1) ≈ 319.65427684225216 atol=0.0001 @test_throws ArgumentError loglikelihood(fm1) @test dof_residual(fm1) ≥ 0 @@ -130,7 +130,7 @@ end @test isa(vc, Matrix{Float64}) @test only(vc) ≈ 375.7167775 rtol=1.e-6 # since we're caching the fits, we should get it back to being correctly fitted - refit!(fm1; REML=false) + refit!(fm1; REML=false, progress=false) end @testset "Dyestuff2" begin @@ -147,9 +147,9 @@ end @test logdet(fm) ≈ 0.0 @test issingular(fm) #### modifies the model - refit!(fm, float(MixedModels.dataset(:dyestuff)[:yield])) + refit!(fm, float(MixedModels.dataset(:dyestuff)[:yield]); progress=false) @test objective(fm) ≈ 327.3270598811428 atol=0.001 - refit!(fm, float(MixedModels.dataset(:dyestuff2)[:yield])) # restore the model in the cache + refit!(fm, float(MixedModels.dataset(:dyestuff2)[:yield]); progress=false) # restore the model in the cache end @testset "penicillin" begin From 384af96394ea87f1adbbab28c8c6c022ee98e281 Mon Sep 17 00:00:00 2001 From: Douglas Bates Date: Mon, 12 Jul 2021 19:37:35 -0500 Subject: [PATCH 20/23] add progress=false in more fit and refit! 
calls --- test/fit.jl | 2 +- test/likelihoodratiotest.jl | 22 +++++++++++----------- test/missing.jl | 6 +++--- 3 files changed, 15 insertions(+), 15 deletions(-) diff --git a/test/fit.jl b/test/fit.jl index 622fa506d..48f9f1643 100644 --- a/test/fit.jl +++ b/test/fit.jl @@ -17,7 +17,7 @@ end @test_throws(ArgumentError("use LinearMixedModel for Normal distribution with IdentityLink"), fit(GeneralizedLinearMixedModel, @formula(yield ~ 1 + (1|batch)), - MixedModels.dataset(:dyestuff))) + MixedModels.dataset(:dyestuff); progress=false)) end @testset "Normal Distribution GLMM" begin diff --git a/test/likelihoodratiotest.jl b/test/likelihoodratiotest.jl index e8e47b0dc..c9e9dcddf 100644 --- a/test/likelihoodratiotest.jl +++ b/test/likelihoodratiotest.jl @@ -34,10 +34,10 @@ include("modelcache.jl") # fixed-effects specification in REML and # conversion of internal ArgumentError into @error for StatsModels.isnested kb07 = dataset(:kb07) - m1 = fit(MixedModel, @formula(rt_trunc ~ 1 + prec + (1|subj)), kb07, REML=true) - m2 = fit(MixedModel, @formula(rt_trunc ~ 1 + prec + (1+prec|subj)), kb07, REML=true) + m1 = fit(MixedModel, @formula(rt_trunc ~ 1 + prec + (1|subj)), kb07, REML=true, progress=false) + m2 = fit(MixedModel, @formula(rt_trunc ~ 1 + prec + (1+prec|subj)), kb07, REML=true, progress=false) @test isnested(m1, m2) - m2 = fit(MixedModel, @formula(rt_trunc ~ 1 + (1+prec|subj)), kb07, REML=true) + m2 = fit(MixedModel, @formula(rt_trunc ~ 1 + (1+prec|subj)), kb07, REML=true, progress=false) @test !isnested(m1, m2) end @@ -45,8 +45,8 @@ end @testset "likelihoodratio test" begin slp = dataset(:sleepstudy); - fm0 = fit(MixedModel,@formula(reaction ~ 1 + (1+days|subj)),slp); - fm1 = fit(MixedModel,@formula(reaction ~ 1 + days + (1+days|subj)),slp); + fm0 = fit(MixedModel,@formula(reaction ~ 1 + (1+days|subj)),slp, progress=false); + fm1 = fit(MixedModel,@formula(reaction ~ 1 + days + (1+days|subj)),slp, progress=false); lm0 = lm(@formula(reaction ~ 1), slp) lm1 = lm(@formula(reaction ~ 1 + days), slp) @@ -77,12 +77,12 @@ end @test_throws ArgumentError likelihoodratiotest(lm1, fm0) # mix of REML and ML - fm0 = fit(MixedModel,@formula(reaction ~ 1 + (1+days|subj)),slp, REML=true); + fm0 = fit(MixedModel,@formula(reaction ~ 1 + (1+days|subj)),slp, REML=true, progress=false); @test_throws ArgumentError likelihoodratiotest(fm0,fm1) @test_throws ArgumentError likelihoodratiotest(lm0,fm0) # differing FE with REML - fm1 = fit(MixedModel,@formula(reaction ~ 1 + days + (1+days|subj)),slp, REML=true); + fm1 = fit(MixedModel,@formula(reaction ~ 1 + days + (1+days|subj)),slp, REML=true, progress=false); @test_throws ArgumentError likelihoodratiotest(fm0,fm1) contra = MixedModels.dataset(:contra); @@ -92,8 +92,8 @@ end cc.usenum = ifelse.(cc.use .== "Y", 1 , 0) gmf = glm(@formula(usenum ~ 1+age+urban+livch), cc, Bernoulli()); gmf2 = glm(@formula(usenum ~ 1+age+abs2(age)+urban+livch), cc, Bernoulli()); - gm0 = fit(MixedModel, @formula(use ~ 1+age+urban+livch+(1|urban&dist)), contra, Bernoulli(), fast=true); - gm1 = fit(MixedModel, @formula(use ~ 1+age+abs2(age)+urban+livch+(1|urban&dist)), contra, Bernoulli(), fast=true); + gm0 = fit(MixedModel, @formula(use ~ 1+age+urban+livch+(1|urban&dist)), contra, Bernoulli(), fast=true, progress=false); + gm1 = fit(MixedModel, @formula(use ~ 1+age+abs2(age)+urban+livch+(1|urban&dist)), contra, Bernoulli(), fast=true, progress=false); lrt = likelihoodratiotest(gmf, gm1) @test [-2 * loglikelihood(gmf), deviance(gm1)] ≈ lrt.deviance @@ -112,12 +112,12 @@ end @test 
length(lrt.formulae) == 2 # mismatched links - gm_probit = fit(MixedModel, @formula(use ~ 1+age+urban+livch+(1|urban&dist)), contra, Bernoulli(), ProbitLink(), fast=true); + gm_probit = fit(MixedModel, @formula(use ~ 1+age+urban+livch+(1|urban&dist)), contra, Bernoulli(), ProbitLink(), fast=true, progress=false); @test_throws ArgumentError likelihoodratiotest(gmf, gm_probit) @test_throws ArgumentError likelihoodratiotest(gm0, gm_probit) # mismatched families - gm_poisson = fit(MixedModel, @formula(use ~ 1+age+urban+livch+(1|urban&dist)), contra, Poisson(), fast=true); + gm_poisson = fit(MixedModel, @formula(use ~ 1+age+urban+livch+(1|urban&dist)), contra, Poisson(), fast=true, progress=false); @test_throws ArgumentError likelihoodratiotest(gmf, gm_poisson) @test_throws ArgumentError likelihoodratiotest(gm0, gm_poisson) diff --git a/test/missing.jl b/test/missing.jl index 8da3be5bf..72620a33f 100644 --- a/test/missing.jl +++ b/test/missing.jl @@ -21,14 +21,14 @@ slp[1,:days] = missing @testset "Missing Omit" begin @testset "Missing from unused variables" begin # missing from unused variables should have no impact - m1 = fit(MixedModel, @formula(reaction ~ 1 + (1|subj)), MixedModels.dataset(:sleepstudy)) + m1 = fit(MixedModel, @formula(reaction ~ 1 + (1|subj)), MixedModels.dataset(:sleepstudy), progress=false) m1_missing = fit(MixedModel, @formula(reaction ~ 1 + (1|subj)), slp) @test isapprox(m1.θ, m1_missing.θ, rtol=1.0e-12) end @testset "Missing from used variables" begin - m1 = fit(MixedModel, @formula(reaction ~ 1 + days + (1|subj)), MixedModels.dataset(:sleepstudy)) - m1_missing = fit(MixedModel, @formula(reaction ~ 1 + days + (1|subj)), slp) + m1 = fit(MixedModel, @formula(reaction ~ 1 + days + (1|subj)), MixedModels.dataset(:sleepstudy), progress=false) + m1_missing = fit(MixedModel, @formula(reaction ~ 1 + days + (1|subj)), slp, progress=false) @test nobs(m1) - nobs(m1_missing) == 1 end end From 8902b553bc1974d0979a11936d8bcec5a7b9859c Mon Sep 17 00:00:00 2001 From: Douglas Bates Date: Mon, 12 Jul 2021 19:54:11 -0500 Subject: [PATCH 21/23] yet another progress=false --- test/missing.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/missing.jl b/test/missing.jl index 72620a33f..53d29770f 100644 --- a/test/missing.jl +++ b/test/missing.jl @@ -22,7 +22,7 @@ slp[1,:days] = missing @testset "Missing from unused variables" begin # missing from unused variables should have no impact m1 = fit(MixedModel, @formula(reaction ~ 1 + (1|subj)), MixedModels.dataset(:sleepstudy), progress=false) - m1_missing = fit(MixedModel, @formula(reaction ~ 1 + (1|subj)), slp) + m1_missing = fit(MixedModel, @formula(reaction ~ 1 + (1|subj)), slp, progress=false) @test isapprox(m1.θ, m1_missing.θ, rtol=1.0e-12) end From 1222a512fd46adb7ddfe3751c4a3349a3fd5a0d1 Mon Sep 17 00:00:00 2001 From: Phillip Alday Date: Mon, 12 Jul 2021 22:15:07 -0500 Subject: [PATCH 22/23] disable single-model progress in bootstrap --- src/bootstrap.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/bootstrap.jl b/src/bootstrap.jl index 7051599c9..dbf7a0bec 100644 --- a/src/bootstrap.jl +++ b/src/bootstrap.jl @@ -106,7 +106,7 @@ function parametricbootstrap( lock(rnglock) mod = simulate!(rng, mod, β = β, σ = σ, θ = θ) unlock(rnglock) - refit!(mod) + refit!(mod; progress=false) ( objective = mod.objective, σ = mod.σ, From 491a4d144e597b56948ac568c3670587ef70aca5 Mon Sep 17 00:00:00 2001 From: Phillip Alday Date: Mon, 12 Jul 2021 22:17:56 -0500 Subject: [PATCH 23/23] whack-a-mole + ; before 
kwargs --- test/linalg.jl | 2 +- test/modelcache.jl | 2 +- test/optsummary.jl | 2 +- test/pirls.jl | 14 +++++++------- test/pls.jl | 4 ++-- 5 files changed, 12 insertions(+), 12 deletions(-) diff --git a/test/linalg.jl b/test/linalg.jl index 4caa4eb10..b011c2d3d 100644 --- a/test/linalg.jl +++ b/test/linalg.jl @@ -93,7 +93,7 @@ end # an example in MixedModels.jl#123 df = gendata(10000, 500) f = @formula(Y ~ (1 + X|H) + (1|G)) - m500 = fit!(LinearMixedModel(f, df), progress=false) + m500 = fit!(LinearMixedModel(f, df); progress=false) # the real test here isn't in the theta comparison but in that the fit # completes successfully @test length(m500.u) == 2 diff --git a/test/modelcache.jl b/test/modelcache.jl index 5512bf860..eaaeadee0 100644 --- a/test/modelcache.jl +++ b/test/modelcache.jl @@ -45,6 +45,6 @@ using MixedModels: dataset function models(nm::Symbol) get!(fittedmodels, nm) do - [fit(MixedModel, f, dataset(nm), progress=false) for f in allfms[nm]] + [fit(MixedModel, f, dataset(nm); progress=false) for f in allfms[nm]] end end diff --git a/test/optsummary.jl b/test/optsummary.jl index 02bbb799e..69c4d9897 100644 --- a/test/optsummary.jl +++ b/test/optsummary.jl @@ -19,7 +19,7 @@ include("modelcache.jl") fm1 = LinearMixedModel(last(fms[:kb07]), dataset(:kb07)) maxtime = 1e-6 fm1.optsum.maxtime = maxtime - @test_logs (:warn, "NLopt optimization failure: MAXTIME_REACHED") fit!(fm1) + @test_logs (:warn, "NLopt optimization failure: MAXTIME_REACHED") fit!(fm1; progress=false) @test fm1.optsum.returnvalue == :MAXTIME_REACHED @test fm1.optsum.maxtime == maxtime end diff --git a/test/pirls.jl b/test/pirls.jl index 3dd242457..f306945c5 100644 --- a/test/pirls.jl +++ b/test/pirls.jl @@ -13,7 +13,7 @@ include("modelcache.jl") @testset "contra" begin contra = dataset(:contra) - gm0 = fit(MixedModel, only(gfms[:contra]), contra, Bernoulli(), fast=true, progress=false) + gm0 = fit(MixedModel, only(gfms[:contra]), contra, Bernoulli(); fast=true, progress=false) @test gm0.lowerbd == zeros(1) @test isapprox(gm0.θ, [0.5720734451352923], atol=0.001) @test !issingular(gm0) @@ -45,9 +45,9 @@ include("modelcache.jl") @test dof(gm0) == length(gm0.β) + length(gm0.θ) @test nobs(gm0) == 1934 - refit!(gm0, fast=true, nAGQ=7, progress=false) + refit!(gm0; fast=true, nAGQ=7, progress=false) @test isapprox(deviance(gm0), 2360.9838, atol=0.001) - gm1 = fit(MixedModel, only(gfms[:contra]), contra, Bernoulli(), nAGQ=7, progress=false) + gm1 = fit(MixedModel, only(gfms[:contra]), contra, Bernoulli(); nAGQ=7, progress=false) @test isapprox(deviance(gm1), 2360.8760, atol=0.001) @test gm1.β == gm1.beta @test gm1.θ == gm1.theta @@ -90,7 +90,7 @@ end @testset "GLMM refit" begin gm2r = deepcopy(gm2) - @test_throws ArgumentError fit!(gm2r) + @test_throws ArgumentError fit!(gm2r; progress=false) refit!(gm2r; fast=true, progress=false) @test length(gm2r.optsum.final) == 1 @@ -109,7 +109,7 @@ end end @testset "verbagg" begin - gm3 = fit(MixedModel, only(gfms[:verbagg]), dataset(:verbagg), Bernoulli(), progress=false) + gm3 = fit(MixedModel, only(gfms[:verbagg]), dataset(:verbagg), Bernoulli(); progress=false) @test deviance(gm3) ≈ 8151.40 rtol=1e-5 @test lowerbd(gm3) == vcat(fill(-Inf, 6), zeros(2)) @test fitted(gm3) == predict(gm3) @@ -122,7 +122,7 @@ end center(v::AbstractVector) = v .- (sum(v) / length(v)) grouseticks = DataFrame(dataset(:grouseticks)) grouseticks.ch = center(grouseticks.height) - gm4 = fit(MixedModel, only(gfms[:grouseticks]), grouseticks, Poisson(), fast=true, progress=false) # fails in pirls! 
with fast=false + gm4 = fit(MixedModel, only(gfms[:grouseticks]), grouseticks, Poisson(); fast=true, progress=false) # fails in pirls! with fast=false @test isapprox(deviance(gm4), 851.4046, atol=0.001) # these two values are not well defined at the optimum #@test isapprox(sum(x -> sum(abs2, x), gm4.u), 196.8695297987013, atol=0.1) @@ -159,7 +159,7 @@ end @test deviance(m1) ≈ 193.5587302384811 rtol=1.e-5 @test only(m1.β) ≈ 4.192196439077657 atol=1.e-5 @test only(m1.θ) ≈ 1.838245201739852 atol=1.e-5 - m11 = fit(MixedModel, gform, goldstein, Poisson(), nAGQ=11, progress=false) + m11 = fit(MixedModel, gform, goldstein, Poisson(); nAGQ=11, progress=false) @test deviance(m11) ≈ 193.51028088736842 rtol=1.e-5 @test only(m11.β) ≈ 4.192196439077657 atol=1.e-5 @test only(m11.θ) ≈ 1.838245201739852 atol=1.e-5 diff --git a/test/pls.jl b/test/pls.jl index 4305e0b8e..3f7ed7ff4 100644 --- a/test/pls.jl +++ b/test/pls.jl @@ -540,13 +540,13 @@ end ) #= no need to fit yet another model without weights, but here are the reference values from lme4 - m1 = fit(MixedModel, @formula(a ~ 1 + b + (1|c)), data) + m1 = fit(MixedModel, @formula(a ~ 1 + b + (1|c)), data; progress=false) @test m1.θ ≈ [0.0] @test stderror(m1) ≈ [1.084912, 4.966336] atol = 1.e-4 @test vcov(m1) ≈ [1.177035 -4.802598; -4.802598 24.664497] atol = 1.e-4 =# - m2 = fit(MixedModel, @formula(a ~ 1 + b + (1|c)), data, wts = data.w1, progress=false) + m2 = fit(MixedModel, @formula(a ~ 1 + b + (1|c)), data; wts = data.w1, progress=false) @test m2.θ ≈ [0.295181729258352] atol = 1.e-4 @test stderror(m2) ≈ [0.9640167, 3.6309696] atol = 1.e-4 @test vcov(m2) ≈ [0.9293282 -2.557527; -2.5575267 13.183940] atol = 1.e-4
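
Usage sketch (not part of the patch series above): with these patches applied, the `verbose` keyword of `fit`/`fit!` for `LinearMixedModel` becomes `progress` (default `true`), and `refit!` forwards its keyword arguments on to `fit!`. The formula and dataset below are taken from the package's test files; the example is illustrative only, under the assumption that this PR is applied.

```julia
using MixedModels

slp = MixedModels.dataset(:sleepstudy)
form = @formula(reaction ~ 1 + days + (1 + days | subj))

# Default behavior after this PR: a ProgressMeter.ProgressUnknown counter is
# shown while the objective is minimized (it only appears for fits that take
# more than a second or so).
m = fit(MixedModel, form, slp)

# Suppress the display, as the updated test suite does throughout.
m_quiet = fit(MixedModel, form, slp; progress=false)

# refit! now forwards keyword arguments (here REML and progress) to fit!.
refit!(m; REML=true, progress=false)
```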
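
The same `progress` keyword is threaded through the `GeneralizedLinearMixedModel` methods alongside the existing `fast` and `nAGQ` options, which `refit!` likewise forwards. Another sketch under the same assumption, using the contra dataset and formula from the test files:

```julia
using MixedModels

contra = MixedModels.dataset(:contra)
gform = @formula(use ~ 1 + urban + livch + age + abs2(age) + (1 | dist))

# A quiet fit using the faster nested-optimization (PIRLS) algorithm.
gm = fit(MixedModel, gform, contra, Bernoulli(); fast=true, progress=false)

# Refit with adaptive Gauss-Hermite quadrature; refit! passes fast, nAGQ,
# and progress on to fit!.
refit!(gm; fast=true, nAGQ=7, progress=false)
```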
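
Under the hood, the patches replace per-iteration printing with an open-ended `ProgressMeter.ProgressUnknown` counter that is advanced from inside the NLopt objective callback and finished when the optimizer returns. The stand-alone sketch below shows that pattern outside MixedModels; the `minimize_with_progress` helper and its toy quadratic objective are hypothetical stand-ins, not package code.

```julia
using ProgressMeter

function minimize_with_progress(f, x0; progress::Bool=true, steps::Int=5_000)
    # Open-ended counter with elapsed time and evaluation speed, as in the PR.
    prog = ProgressUnknown("Minimizing"; showspeed=true)
    x = x0
    for _ in 1:steps
        val = f(x)
        # showvalues prints the current objective beside the counter, mirroring
        # the obj(x, g) closures in the patched source files.
        progress && ProgressMeter.next!(prog; showvalues=[(:objective, val)])
        x -= 1e-3 * 2x   # toy gradient-descent step for f(x) = x^2
    end
    ProgressMeter.finish!(prog)
    return x
end

minimize_with_progress(x -> x^2, 5.0)
```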