Skip to content

Commit

Permalink
Merge pull request #7 from tbeason/optim
Browse files Browse the repository at this point in the history
remove optim dep, borrow functionality, update deps
  • Loading branch information
tbeason authored Jun 13, 2022
2 parents 3c4633d + 0a01e6e commit ff9b306
Show file tree
Hide file tree
Showing 3 changed files with 81 additions and 10 deletions.
6 changes: 2 additions & 4 deletions Project.toml
Original file line number Diff line number Diff line change
@@ -1,19 +1,17 @@
name = "NonparametricRegression"
uuid = "db432338-e110-4b7a-9c53-0ace38eb8f7f"
authors = ["Tyler Beason <[email protected]>"]
version = "0.2.0"
version = "0.2.1"

[deps]
DocStringExtensions = "ffbed154-4ef7-542d-bbb7-c09d3a79fcae"
LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
Optim = "429524aa-4258-5aef-a3af-852621145aeb"
StaticArrays = "90137ffa-7385-5640-81b9-e52037218182"
Statistics = "10745b16-79ce-11e8-11f9-7d13ad32a3b2"

[compat]
DocStringExtensions = "0.8, 0.9"
Optim = "1"
StaticArrays = "1.2, 1.3"
StaticArrays = "1.2, 1.3, 1.4"
julia = "1.6"

[extras]
Expand Down
12 changes: 6 additions & 6 deletions src/NonparametricRegression.jl
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,6 @@ module NonparametricRegression
using LinearAlgebra
using Statistics
using StaticArrays
using Optim
using DocStringExtensions


Expand All @@ -19,6 +18,7 @@ export optimalbandwidth, leaveoneoutCV, optimizeAICc
export npregress

include("kernels.jl")
include("univariateopt.jl")


########################################
Expand Down Expand Up @@ -319,8 +319,8 @@ end
function leaveoneoutCV(x, y; kernelfun=NormalKernel, method=:lc, hLB = silvermanbw(y)/100, hUB = silvermanbw(y)*100,trimmed=true)
objfun(h) = leaveoneoutCV_mse(h,x,y; kernelfun, method, trimmed)
opt = optimize(objfun,hLB,hUB)
@assert opt.converged "Convergence failed, cannot find optimal bandwidth."
return Optim.minimizer(opt)
# @assert opt.converged "Convergence failed, cannot find optimal bandwidth."
return opt
end


Expand All @@ -337,8 +337,8 @@ end
function optimizeAICc(x, y; kernelfun=NormalKernel, method=:lc, hLB = silvermanbw(y)/100, hUB = silvermanbw(y)*100)
objfun(h) = estimatorAICc(h,x,y; kernelfun, method)
opt = optimize(objfun,hLB,hUB)
@assert opt.converged "Convergence failed, cannot find optimal bandwidth."
return Optim.minimizer(opt)
# @assert opt.converged "Convergence failed, cannot find optimal bandwidth."
return opt
end


Expand Down Expand Up @@ -402,7 +402,7 @@ The keyword argument `method` can be `:lc` for a local constant estimator (Nadar
The keyword argument `bandwidthselection` should be `:aicc` for the bias-correct AICc method or `:loocv` for leave-one-out cross validation. `hLB` and `hUB` are the lower and upper bounds used when searching for the optimal bandwidth.
The keyword argument `kernelfun` should be a function that constructs a `KernelFunctions.jl` kernel with a given bandwidth. Defaults to `NormalKernel` (defined and exported by this package.)
The keyword argument `kernelfun` should be a function that constructs a kernel with a given bandwidth. Defaults to `NormalKernel` (defined and exported by this package.)
"""
function npregress(x, y, xgrid=x; kernelfun=NormalKernel, method=:lc, bandwidthselection=:aicc, hLB = silvermanbw(y)/100, hUB = silvermanbw(y)*100)
h = optimalbandwidth(x,y; kernelfun, method, bandwidthselection, hLB, hUB)
Expand Down
73 changes: 73 additions & 0 deletions src/univariateopt.jl
Original file line number Diff line number Diff line change
@@ -0,0 +1,73 @@

# borrowed from KernelDensity.jl
# https://github.com/JuliaStats/KernelDensity.jl/blob/master/src/univariate.jl

"""
    optimize(f, x_lower, x_upper; iterations=1000, rel_tol=nothing, abs_tol=nothing)

Minimize the function `f` in the interval `x_lower..x_upper`, using the
[golden-section search](https://en.wikipedia.org/wiki/Golden-section_search).
Return an approximate minimizer `x̃`, or throw an error if such an approximate
minimizer cannot be found.

This algorithm assumes that `-f` is unimodal on the interval `x_lower..x_upper`,
that is to say, there exists a unique `x` in `x_lower..x_upper` such that `f` is
decreasing on `x_lower..x` and increasing on `x..x_upper`.

`rel_tol` and `abs_tol` determine the relative and absolute tolerance, that is
to say, the returned value `x̃` should differ from the actual minimizer `x` at most
`abs_tol + rel_tol * abs(x̃)`. If not manually specified, `rel_tol` and `abs_tol`
default to `sqrt(eps(T))` and `eps(T)` respectively, where `T` is the floating
point type of `x_lower` and `x_upper`.

`iterations` determines the maximum number of iterations allowed before convergence.

# Throws
- `ArgumentError` if `x_lower > x_upper`.
- `ErrorException` if the search does not converge within `iterations` iterations.

This is a private, unexported function, used internally to select the optimal bandwidth
automatically.
"""
function optimize(f, x_lower, x_upper; iterations=1000, rel_tol=nothing, abs_tol=nothing)

    # Bad-argument case: use ArgumentError (not a generic error) and note that
    # equal bounds are allowed — the search then returns the common endpoint.
    x_lower > x_upper && throw(ArgumentError("x_lower must be less than or equal to x_upper"))

    # Work in the common floating-point type of the bounds; `x/1` promotes
    # integer bounds to a float type so eps(T) is well defined.
    T = promote_type(typeof(x_lower/1), typeof(x_upper/1))
    rtol = something(rel_tol, sqrt(eps(T)))
    atol = something(abs_tol, eps(T))

    # Midpoint of the current bracket, plus whether the bracket is already
    # narrower than twice the requested tolerance (so the midpoint is within tol).
    function midpoint_and_convergence(lower, upper)
        midpoint = (lower + upper) / 2
        tol = atol + rtol * midpoint
        midpoint, (upper - lower) <= 2tol
    end

    # 1/φ and 1/φ² — the golden-section ratios that let one probe point be
    # reused at every iteration, costing a single new evaluation of `f`.
    invphi::T = 0.5 * (sqrt(5) - 1)
    invphisq::T = 0.5 * (3 - sqrt(5))

    a::T, b::T = x_lower, x_upper
    h = b - a
    c = a + invphisq * h   # lower interior probe
    d = a + invphi * h     # upper interior probe

    fc, fd = f(c), f(d)

    for _ in 1:iterations
        h *= invphi
        if fc < fd
            # Minimum lies in [a, d]: shrink from the right; old c becomes new d.
            m, converged = midpoint_and_convergence(a, d)
            converged && return m
            b = d
            d, fd = c, fc
            c = a + invphisq * h
            fc = f(c)
        else
            # Minimum lies in [c, b]: shrink from the left; old d becomes new c.
            m, converged = midpoint_and_convergence(c, b)
            converged && return m
            a = c
            c, fc = d, fd
            d = a + invphi * h
            fd = f(d)
        end
    end

    error("Reached maximum number of iterations without convergence.")
end

0 comments on commit ff9b306

Please sign in to comment.