diff --git a/benchmark/hessian.jl b/benchmark/hessian.jl
index 38a15f8..46cee26 100644
--- a/benchmark/hessian.jl
+++ b/benchmark/hessian.jl
@@ -48,7 +48,7 @@ function hessbench_arrowhead(method)
     ]
         x = rand(N)
         f = ArrowHead(K)
-        suite["(N=$N, K=$K)"] = @benchmarkable hessian_sparsity($f, $x, $method)
+        suite["N=$N, K=$K"] = @benchmarkable hessian_sparsity($f, $x, $method)
     end
     return suite
 end
@@ -86,7 +86,7 @@ function hessbench_randomsparsity(method)
     ]
         x = rand(N)
         f = RandomSparsity(N, K)
-        suite["(N=$N, K=$K)"] = @benchmarkable hessian_sparsity($f, $x, $method)
+        suite["N=$N, K=$K"] = @benchmarkable hessian_sparsity($f, $x, $method)
     end
     return suite
 end
diff --git a/benchmark/jacobian.jl b/benchmark/jacobian.jl
index d066dc7..0e114a8 100644
--- a/benchmark/jacobian.jl
+++ b/benchmark/jacobian.jl
@@ -40,7 +40,7 @@ function jacbench_sparsemul(method)
     for n in [50], p in [0.01, 0.25], depth in [5]
         x = rand(n)
         f = IteratedSparseMul(; n, p, depth)
-        suite["(n=$n, p=$p, depth=$depth)"] = @benchmarkable jacobian_sparsity(
+        suite["n=$n, p=$p, depth=$depth"] = @benchmarkable jacobian_sparsity(
             $f, $x, $method
         )
     end
@@ -75,7 +75,7 @@ function jacbench_conv(method)
     layer = Conv((5, 5), 3 => 2)
     suite = BenchmarkGroup()
     for N in (28, 128)
-        suite["size=$(N)x$(N)x3"] = @benchmarkable jacobian_sparsity(
+        suite["N=$N"] = @benchmarkable jacobian_sparsity(
             $layer, $(rand(N, N, 3, 1)), $method
         )
     end