Commit
Consistently refer to TracerSparsityDetector as detector (#191)
instead of `method`
adrhill authored Sep 5, 2024
1 parent 87f026a commit 4637817
Showing 13 changed files with 92 additions and 92 deletions.
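For context, the renamed argument is the sparsity detector passed positionally to the ADTypes-style entry points used throughout these files. A minimal usage sketch of the call sites after this commit (plain illustration, not part of the diff):

```julia
using SparseConnectivityTracer

f(x) = [x[1]^2, x[1] * x[2]]

# The third positional argument is now consistently named `detector`.
detector = TracerSparsityDetector()
J = jacobian_sparsity(f, rand(2), detector)                  # 2×2 sparse Bool pattern
H = hessian_sparsity(x -> sum(abs2, x), rand(2), detector)   # 2×2 sparse Bool pattern
```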
14 changes: 7 additions & 7 deletions benchmark/hessian.jl
@@ -9,10 +9,10 @@ Test cases taken from the article:
> https://www.tandfonline.com/doi/full/10.1080/10556788.2018.1480625
=#

function hessbench(method)
function hessbench(detector)
suite = BenchmarkGroup()
suite["ArrowHead"] = hessbench_arrowhead(method)
suite["RandomSparsity"] = hessbench_randomsparsity(method)
suite["ArrowHead"] = hessbench_arrowhead(detector)
suite["RandomSparsity"] = hessbench_randomsparsity(detector)
return suite
end

@@ -32,7 +32,7 @@ function (ah::ArrowHead)(x::AbstractVector)
end
end

function hessbench_arrowhead(method)
function hessbench_arrowhead(detector)
suite = BenchmarkGroup()
# Commented-out cases (N, K) are included in the JuMP paper linked above,
# but excluded here to accelerate the benchmark suite.
@@ -48,7 +48,7 @@ function hessbench_arrowhead(method)
]
x = rand(N)
f = ArrowHead(K)
suite["N=$N, K=$K"] = @benchmarkable hessian_sparsity($f, $x, $method)
suite["N=$N, K=$K"] = @benchmarkable hessian_sparsity($f, $x, $detector)
end
return suite
end
@@ -70,7 +70,7 @@ function (rs::RandomSparsity)(x::AbstractVector)
end
end

function hessbench_randomsparsity(method)
function hessbench_randomsparsity(detector)
suite = BenchmarkGroup()
# Commented-out cases (N, K) are included in the JuMP paper linked above,
# but excluded here to accelerate the benchmark suite.
@@ -86,7 +86,7 @@ function hessbench_randomsparsity(method)
]
x = rand(N)
f = RandomSparsity(N, K)
suite["N=$N, K=$K"] = @benchmarkable hessian_sparsity($f, $x, $method)
suite["N=$N, K=$K"] = @benchmarkable hessian_sparsity($f, $x, $detector)
end
return suite
end
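The hunks above only rename the benchmark parameter. For readers unfamiliar with the harness, a sketch of how such a suite might be driven with BenchmarkTools (the `include` path and driver code are assumptions, not part of the repository):

```julia
using BenchmarkTools
using SparseConnectivityTracer

include("benchmark/hessian.jl")    # assumed path; defines hessbench and its functors

detector = TracerSparsityDetector()
suite = hessbench(detector)        # BenchmarkGroup keyed by problem size
tune!(suite)                       # optional: choose evaluation parameters
results = run(suite; verbose=true)
```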
22 changes: 11 additions & 11 deletions benchmark/jacobian.jl
@@ -7,11 +7,11 @@ using SparseArrays: sprand
using SimpleDiffEq: ODEProblem, solve, SimpleEuler
using Flux: Conv

function jacbench(method)
function jacbench(detector)
suite = BenchmarkGroup()
suite["SparseMul"] = jacbench_sparsemul(method)
suite["Brusselator"] = jacbench_brusselator(method)
suite["Conv"] = jacbench_conv(method)
suite["SparseMul"] = jacbench_sparsemul(detector)
suite["Brusselator"] = jacbench_brusselator(detector)
suite["Conv"] = jacbench_conv(detector)
return suite
end

@@ -35,48 +35,48 @@ function (ism::IteratedSparseMul)(x::AbstractVector)
return y
end

function jacbench_sparsemul(method)
function jacbench_sparsemul(detector)
suite = BenchmarkGroup()
for n in [50], p in [0.01, 0.25], depth in [5]
x = rand(n)
f = IteratedSparseMul(; n, p, depth)
suite["n=$n, p=$p, depth=$depth"] = @benchmarkable jacobian_sparsity(
$f, $x, $method
$f, $x, $detector
)
end
return suite
end

## Brusselator

function jacbench_brusselator(method)
function jacbench_brusselator(detector)
suite = BenchmarkGroup()
for N in (6, 24)
f! = Brusselator!(N)
x = rand(N, N, 2)
y = similar(x)
suite["operator"]["N=$N"] = @benchmarkable jacobian_sparsity($f!, $y, $x, $method)
suite["operator"]["N=$N"] = @benchmarkable jacobian_sparsity($f!, $y, $x, $detector)
solver = SimpleEuler()
prob = ODEProblem(brusselator_2d_loop!, x, (0.0, 1.0), f!.params)
function brusselator_ode_solve(x)
return solve(ODEProblem(brusselator_2d_loop!, x, (0.0, 1.0), f!.params), solver; dt=0.5).u[end]
end
suite["ODE"]["N=$N"] = @benchmarkable jacobian_sparsity(
$brusselator_ode_solve, $x, $method
$brusselator_ode_solve, $x, $detector
)
end
return suite
end

## Convolution

function jacbench_conv(method)
function jacbench_conv(detector)
# TODO: benchmark local sparsity tracers on LeNet-5 CNN
layer = Conv((5, 5), 3 => 2)
suite = BenchmarkGroup()
for N in (28, 128)
suite["N=$N"] = @benchmarkable jacobian_sparsity(
$layer, $(rand(N, N, 3, 1)), $method
$layer, $(rand(N, N, 3, 1)), $detector
)
end
return suite
22 changes: 11 additions & 11 deletions docs/src/user/global_vs_local.md
@@ -24,28 +24,28 @@ These are computed by [`TracerLocalSparsityDetector`](@ref):

```@repl localvsglobal
using SparseConnectivityTracer
method = TracerLocalSparsityDetector();
detector = TracerLocalSparsityDetector();
f(x) = x[1]*x[2];
jacobian_sparsity(f, [1, 1], method)
jacobian_sparsity(f, [0, 1], method)
jacobian_sparsity(f, [1, 0], method)
jacobian_sparsity(f, [0, 0], method)
jacobian_sparsity(f, [1, 1], detector)
jacobian_sparsity(f, [0, 1], detector)
jacobian_sparsity(f, [1, 0], detector)
jacobian_sparsity(f, [0, 0], detector)
```

In contrast to this, [`TracerSparsityDetector`](@ref) computes a conservative union of the sparsity patterns over all inputs $\mathbf{x} \in \mathbb{R}^2$.
The resulting **global** pattern therefore does not depend on the input.
All of the following function calls are equivalent:

```@repl localvsglobal
method = TracerSparsityDetector();
detector = TracerSparsityDetector();
jacobian_sparsity(f, [1, 1], method)
jacobian_sparsity(f, [0, 1], method)
jacobian_sparsity(f, [1, 0], method)
jacobian_sparsity(f, [0, 0], method)
jacobian_sparsity(f, rand(2), method)
jacobian_sparsity(f, [1, 1], detector)
jacobian_sparsity(f, [0, 1], detector)
jacobian_sparsity(f, [1, 0], detector)
jacobian_sparsity(f, [0, 0], detector)
jacobian_sparsity(f, rand(2), detector)
```
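To make the contrast concrete, a plain-Julia sketch of the patterns these calls are expected to return (expected values are shown as comments; the concrete sparse return type is left unspecified):

```julia
using SparseConnectivityTracer

f(x) = x[1] * x[2]

# Local detection evaluates derivatives at the given point:
# at x = [1, 0], ∂f/∂x₁ = x₂ = 0, so only the second entry is structurally nonzero.
jacobian_sparsity(f, [1, 0], TracerLocalSparsityDetector())  # expected pattern: [0 1]

# Global detection takes the union over all inputs, so both entries appear
# regardless of the point that is passed in.
jacobian_sparsity(f, rand(2), TracerSparsityDetector())      # expected pattern: [1 1]
```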

!!! tip "Global vs. Local"
6 changes: 3 additions & 3 deletions docs/src/user/limitations.md
@@ -24,7 +24,7 @@ it must be written generically enough to accept numbers of type `T<:Real` as (or

```@example notgeneric
using SparseConnectivityTracer
method = TracerSparsityDetector()
detector = TracerSparsityDetector()

relu_bad(x::AbstractFloat) = max(zero(x), x)
outer_function_bad(xs) = sum(relu_bad, xs)
@@ -39,7 +39,7 @@ it must be written generically enough to accept numbers of type `T<:Real` as (or

outer_function_bad(xs)

jacobian_sparsity(outer_function_bad, xs, method)
jacobian_sparsity(outer_function_bad, xs, detector)
```

This is easily fixed by loosening type restrictions or adding an additional method on `Real`:
@@ -51,7 +51,7 @@ it must be written generically enough to accept numbers of type `T<:Real` as (or
```

```@repl notgeneric
jacobian_sparsity(outer_function_good, xs, method)
jacobian_sparsity(outer_function_good, xs, detector)
```
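The definitions of `relu_good` and `outer_function_good` sit in a hunk that is collapsed in this diff; the sketch below shows one plausible form of the generic fix the text describes (the bodies are an assumption, not the actual file contents):

```julia
# Accepting any Real (instead of only AbstractFloat) lets the tracer number
# types used by SparseConnectivityTracer pass through the function.
relu_good(x::Real) = max(zero(x), x)
outer_function_good(xs) = sum(relu_good, xs)
```

With a `Real` method available, the `jacobian_sparsity` call above succeeds instead of raising a method error.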

## Limited control flow
8 changes: 4 additions & 4 deletions test/brusselator.jl
@@ -9,18 +9,18 @@ using Test
# Load definitions of GRADIENT_TRACERS, GRADIENT_PATTERNS, HESSIAN_TRACERS and HESSIAN_PATTERNS
include("tracers_definitions.jl")

function test_brusselator(method::AbstractSparsityDetector)
function test_brusselator(detector::AbstractSparsityDetector)
N = 6
f! = Brusselator!(N)
x = rand(N, N, 2)
y = similar(x)

J = ADTypes.jacobian_sparsity(f!, y, x, method)
J = ADTypes.jacobian_sparsity(f!, y, x, detector)
@test_reference "references/pattern/jacobian/Brusselator.txt" BitMatrix(J)
end

@testset "$T" for T in GRADIENT_TRACERS
method = TracerSparsityDetector(; gradient_tracer_type=T)
test_brusselator(method)
detector = TracerSparsityDetector(; gradient_tracer_type=T)
test_brusselator(detector)
yield()
end
16 changes: 8 additions & 8 deletions test/ext/test_LogExpFunctions.jl
@@ -28,8 +28,8 @@ lef_1_to_1 = union(lef_1_to_1_pos_input, lef_1_to_1_neg_input)
lef_2_to_1 = (xlogy, xlog1py, xexpy, logaddexp, logsubexp)

@testset "Jacobian Global" begin
method = TracerSparsityDetector()
J(f, x) = jacobian_sparsity(f, x, method)
detector = TracerSparsityDetector()
J(f, x) = jacobian_sparsity(f, x, detector)

@testset "1-to-1 functions" begin
@testset "$f" for f in lef_1_to_1
@@ -44,8 +44,8 @@ lef_2_to_1 = (xlogy, xlog1py, xexpy, logaddexp, logsubexp)
end

@testset "Jacobian Local" begin
method = TracerLocalSparsityDetector()
J(f, x) = jacobian_sparsity(f, x, method)
detector = TracerLocalSparsityDetector()
J(f, x) = jacobian_sparsity(f, x, detector)

@testset "1-to-1 functions" begin
@testset "$f" for f in lef_1_to_1_pos_input
@@ -63,8 +63,8 @@ end
end

@testset "Hessian Global" begin
method = TracerSparsityDetector()
H(f, x) = hessian_sparsity(f, x, method)
detector = TracerSparsityDetector()
H(f, x) = hessian_sparsity(f, x, detector)

@testset "1-to-1 functions" begin
@testset "$f" for f in lef_1_to_1
@@ -79,8 +79,8 @@ end
end

@testset "Hessian Local" begin
method = TracerLocalSparsityDetector()
H(f, x) = hessian_sparsity(f, x, method)
detector = TracerLocalSparsityDetector()
H(f, x) = hessian_sparsity(f, x, detector)

@testset "1-to-1 functions" begin
@testset "$f" for f in lef_1_to_1_pos_input
16 changes: 8 additions & 8 deletions test/ext/test_NNlib.jl
@@ -32,17 +32,17 @@ NNLIB_ACTIVATIONS_F = (
NNLIB_ACTIVATIONS = union(NNLIB_ACTIVATIONS_S, NNLIB_ACTIVATIONS_F)

@testset "Jacobian Global" begin
method = TracerSparsityDetector()
J(f, x) = jacobian_sparsity(f, x, method)
detector = TracerSparsityDetector()
J(f, x) = jacobian_sparsity(f, x, detector)

@testset "$f" for f in NNLIB_ACTIVATIONS
@test J(f, 1) ≈ [1;;]
end
end

@testset "Jacobian Local" begin
method = TracerLocalSparsityDetector()
J(f, x) = jacobian_sparsity(f, x, method)
detector = TracerLocalSparsityDetector()
J(f, x) = jacobian_sparsity(f, x, detector)

@test J(NNlib.relu, -1) ≈ [0;;]
@test J(NNlib.relu, 1) ≈ [1;;]
@@ -82,8 +82,8 @@ end
end

@testset "Global Hessian" begin
method = TracerSparsityDetector()
H(f, x) = hessian_sparsity(f, x, method)
detector = TracerSparsityDetector()
H(f, x) = hessian_sparsity(f, x, detector)

@testset "First-order differentiable" begin
@testset "$f" for f in NNLIB_ACTIVATIONS_F
@@ -98,8 +98,8 @@ end
end

@testset "Local Hessian" begin
method = TracerLocalSparsityDetector()
H(f, x) = hessian_sparsity(f, x, method)
detector = TracerLocalSparsityDetector()
H(f, x) = hessian_sparsity(f, x, detector)

@test H(NNlib.relu, -1) ≈ [0;;]
@test H(NNlib.relu, 1) ≈ [0;;]
16 changes: 8 additions & 8 deletions test/ext/test_NaNMath.jl
@@ -22,8 +22,8 @@ nan_1_to_1 = (
)

@testset "Jacobian Global" begin
method = TracerSparsityDetector()
J(f, x) = jacobian_sparsity(f, x, method)
detector = TracerSparsityDetector()
J(f, x) = jacobian_sparsity(f, x, detector)

@testset "1-to-1 functions" begin
@testset "$f" for f in nan_1_to_1
@@ -38,8 +38,8 @@ nan_1_to_1 = (
end

@testset "Jacobian Local" begin
method = TracerLocalSparsityDetector()
J(f, x) = jacobian_sparsity(f, x, method)
detector = TracerLocalSparsityDetector()
J(f, x) = jacobian_sparsity(f, x, detector)

@testset "2-to-1 functions" begin
@test J(x -> NaNMath.max(x[1], x[2]), [1.0, 2.0, 0.0]) == [0 1 0]
@@ -50,8 +50,8 @@ end
end

@testset "Hessian Global" begin
method = TracerSparsityDetector()
H(f, x) = hessian_sparsity(f, x, method)
detector = TracerSparsityDetector()
H(f, x) = hessian_sparsity(f, x, detector)

@testset "1-to-1 functions" begin
@testset "$f" for f in nan_1_to_1
@@ -66,8 +66,8 @@ end
end

@testset "Hessian Local" begin
method = TracerLocalSparsityDetector()
H(f, x) = hessian_sparsity(f, x, method)
detector = TracerLocalSparsityDetector()
H(f, x) = hessian_sparsity(f, x, detector)

@testset "2-to-1 functions" begin
@test H(x -> NaNMath.max(x[1], x[2]), [1.0, 2.0, 0.0]) == zeros(Bool, 3, 3)
16 changes: 8 additions & 8 deletions test/ext/test_SpecialFunctions.jl
@@ -7,22 +7,22 @@ using Test
include("../tracers_definitions.jl")

@testset "Jacobian Global" begin
method = TracerSparsityDetector()
J(f, x) = jacobian_sparsity(f, x, method)
detector = TracerSparsityDetector()
J(f, x) = jacobian_sparsity(f, x, detector)

@test J(x -> erf(x[1]), rand(2)) == [1 0]
@test J(x -> beta(x[1], x[2]), rand(3)) == [1 1 0]
end

# TODO: add tests
# @testset "Jacobian Local" begin
# method = TracerLocalSparsityDetector()
# J(f, x) = jacobian_sparsity(f, x, method)
# detector = TracerLocalSparsityDetector()
# J(f, x) = jacobian_sparsity(f, x, detector)
# end

@testset "Global Hessian" begin
method = TracerSparsityDetector()
H(f, x) = hessian_sparsity(f, x, method)
detector = TracerSparsityDetector()
H(f, x) = hessian_sparsity(f, x, detector)

@test H(x -> erf(x[1]), rand(2)) == [
1 0
@@ -37,6 +37,6 @@

# TODO: add tests
# @testset "Local Hessian" begin
# method = TracerLocalSparsityDetector()
# H(f, x) = hessian_sparsity(f, x, method)
# detector = TracerLocalSparsityDetector()
# H(f, x) = hessian_sparsity(f, x, detector)
# end