From d495923c0b5d1acf12dc040acc7550872eff56ff Mon Sep 17 00:00:00 2001 From: dehann Date: Fri, 10 Mar 2023 15:26:28 -0800 Subject: [PATCH 1/9] drop JSON1 and 2 --- Project.toml | 2 -- src/IncrementalInference.jl | 1 - src/Serialization/services/SerializationMKD.jl | 4 ++-- src/Serialization/services/SerializingDistributions.jl | 8 ++++---- src/services/FGOSUtils.jl | 2 +- test/testPackingMixtures.jl | 4 ++-- 6 files changed, 9 insertions(+), 12 deletions(-) diff --git a/Project.toml b/Project.toml index 5f2c31f34..e3668955d 100644 --- a/Project.toml +++ b/Project.toml @@ -18,8 +18,6 @@ Distributions = "31c24e10-a181-5473-b8eb-7969acd0382f" DocStringExtensions = "ffbed154-4ef7-542d-bbb7-c09d3a79fcae" FileIO = "5789e2e9-d7fb-5bc7-8068-2c6fae9b9549" FunctionalStateMachine = "3e9e306e-7e3c-11e9-12d2-8f8f67a2f951" -JSON = "682c06a0-de6a-54ab-a142-c8b1cf79cde6" -JSON2 = "2535ab7d-5cd8-5a07-80ac-9b1792aadce3" JSON3 = "0f8b85d8-7281-11e9-16c2-39a750bddbf1" KernelDensityEstimate = "2472808a-b354-52ea-a80e-1658a3c6056d" LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e" diff --git a/src/IncrementalInference.jl b/src/IncrementalInference.jl index 9fc48c55c..849e74a6e 100644 --- a/src/IncrementalInference.jl +++ b/src/IncrementalInference.jl @@ -41,7 +41,6 @@ using Dates, ProgressMeter, DocStringExtensions, FunctionalStateMachine, - JSON2, JSON3, Combinatorics, UUIDs, diff --git a/src/Serialization/services/SerializationMKD.jl b/src/Serialization/services/SerializationMKD.jl index a45a65c0f..344b606a7 100644 --- a/src/Serialization/services/SerializationMKD.jl +++ b/src/Serialization/services/SerializationMKD.jl @@ -75,7 +75,7 @@ end function Base.convert(::Type{String}, mkd::ManifoldKernelDensity) # packedMKD = packDistribution(mkd) - return JSON2.write(packedMKD) + return JSON3.write(packedMKD) end # Use general dispatch @@ -86,7 +86,7 @@ end # https://discourse.julialang.org/t/converting-string-to-datatype-with-meta-parse/33024/2 # https://discourse.julialang.org/t/is-there-a-way-to-import-modules-with-a-string/15723/6 function Base.convert(::Type{<:ManifoldKernelDensity}, str::AbstractString) - dtr = JSON2.read(str, PackedManifoldKernelDensity) + dtr = JSON3.read(str, PackedManifoldKernelDensity) return unpackDistribution(dtr) end diff --git a/src/Serialization/services/SerializingDistributions.jl b/src/Serialization/services/SerializingDistributions.jl index 86258c936..8a2e85c5f 100644 --- a/src/Serialization/services/SerializingDistributions.jl +++ b/src/Serialization/services/SerializingDistributions.jl @@ -170,18 +170,18 @@ end # FIXME ON FIRE, must deprecate nested JSON written fields in all serialization # TODO is string necessary, because unpacking templated e.g. 
PackedType{T} has problems, see DFG #668 function convert(::Type{String}, dtr::StringThemSamplableBeliefs) - return JSON2.write(packDistribution(dtr)) + return JSON3.write(packDistribution(dtr)) end function convert(::Type{<:SamplableBelief}, str_obj::AbstractString) # # go from stringified to generic packed (no type info) - _pck = JSON2.read(str_obj) + _pck = JSON3.read(str_obj) # NOTE, get the packed type from strong assumption that field `_type` exists in the - T = DFG.getTypeFromSerializationModule(_pck[:_type]) + T = DFG.getTypeFromSerializationModule(_pck._type) # unpack again to described packedType - pckT = JSON2.read(str_obj, T) + pckT = JSON3.read(str_obj, T) # unpack to regular <:SamplableBelief return unpackDistribution(pckT) diff --git a/src/services/FGOSUtils.jl b/src/services/FGOSUtils.jl index 19f4eb2cf..43e880623 100644 --- a/src/services/FGOSUtils.jl +++ b/src/services/FGOSUtils.jl @@ -606,7 +606,7 @@ Fetch and unpack JSON dictionary stored as a data blob. function fetchDataJSON(dfg::AbstractDFG, varsym::Symbol, lbl::Symbol) gde, rawData = getData(dfg, varsym, lbl) if gde.mimeType == "application/json/octet-stream" - JSON2.read(IOBuffer(rawData)) + JSON3.read(IOBuffer(rawData)) else error("Unknown JSON Blob format $(gde.mimeType)") end diff --git a/test/testPackingMixtures.jl b/test/testPackingMixtures.jl index ea12143e8..404807db1 100644 --- a/test/testPackingMixtures.jl +++ b/test/testPackingMixtures.jl @@ -2,7 +2,7 @@ using IncrementalInference using DistributedFactorGraphs -using JSON +# using JSON ## @@ -27,7 +27,7 @@ pf = packFactor(fg, getFactor(fg, :x0x1f1)) ## -pf_ = JSON.json(pf) +pf_ = JSON3.write(pf) ## From e6c8e808c232ef68fc58405f92412f29136c9af8 Mon Sep 17 00:00:00 2001 From: dehann Date: Fri, 10 Mar 2023 15:40:37 -0800 Subject: [PATCH 2/9] compat json, rm .threadmodel calls --- Project.toml | 2 - src/Deprecated.jl | 167 +++++++++++++++++-------------------- src/services/EvalFactor.jl | 30 +------ 3 files changed, 79 insertions(+), 120 deletions(-) diff --git a/Project.toml b/Project.toml index e3668955d..5bb69a0fd 100644 --- a/Project.toml +++ b/Project.toml @@ -55,8 +55,6 @@ Distributions = "0.24, 0.25" DocStringExtensions = "0.8, 0.9" FileIO = "1" FunctionalStateMachine = "0.2.9" -JSON = "0.21" -JSON2 = "0.3" JSON3 = "1" KernelDensityEstimate = "0.5.6" Manifolds = "0.8.15" diff --git a/src/Deprecated.jl b/src/Deprecated.jl index 767157996..011642969 100644 --- a/src/Deprecated.jl +++ b/src/Deprecated.jl @@ -1,69 +1,34 @@ +## ================================================================================================ +## Manifolds.jl Consolidation +## TODO: Still to be completed and tested. 
+## ================================================================================================ +# struct ManifoldsVector <: Optim.Manifold +# manis::Vector{Manifold} +# end -##============================================================================== -## LEGACY, towards Sidecar -##============================================================================== - -""" -Converter: Prior -> PackedPrior::Dict{String, Any} - -FIXME see DFG #590 for consolidation with Serialization and Marshaling -""" -function convert(::Type{Dict{String, Any}}, prior::IncrementalInference.Prior) - @error("Obsolete, use pack/unpack converters instead") - z = convert(Type{Dict{String, Any}}, prior.Z) - return Packed_Factor([z], "Prior") -end - -""" -Converter: PackedPrior::Dict{String, Any} -> Prior - -FIXME see DFG #590 for consolidation on Serialization and Marshaling -""" -function convert(::Type{<:Prior}, prior::Dict{String, Any}) - @error("Obsolete, use pack/unpack converters instead") - # Genericize to any packed type next. - z = prior["measurement"][1] - z = convert(DFG.getTypeFromSerializationModule(z["distType"]), z) - return Prior(z) -end - -# more legacy, dont delete yet -function Base.getproperty(ccw::CommonConvWrapper, f::Symbol) - if f == :threadmodel - @warn "CommonConvWrapper.threadmodel is obsolete" maxlog=3 - return SingleThreaded - elseif f == :params - @warn "CommonConvWrapper.params is deprecated, use .varValsAll instead" maxlog=3 - return ccw.varValsAll - elseif f == :vartypes - @warn "CommonConvWrapper.vartypes is deprecated, use typeof.(getVariableType.(ccw.fullvariables) instead" maxlog=3 - return typeof.(getVariableType.(ccw.fullvariables)) - else - return getfield(ccw, f) - end -end - - -##============================================================================== -## Deprecate code below before v0.33 -##============================================================================== - -# export setThreadModel! - # introduced for approximate convolution operations -export SingleThreaded, MultiThreaded - -function setThreadModel!(fgl::AbstractDFG; model = IIF.SingleThreaded) - # - @error("Obsolete, ThreadModel types are no longer in use.") - # for (key, id) in fgl.fIDs - # _getCCW(fgl, key).threadmodel = model - # end - return nothing -end +# Base.getindex(mv::ManifoldsVector, inds...) = getindex(mv.mani, inds...) +# Base.setindex!(mv, X, inds...) = setindex!(mv.mani, X, inds...) -# should have been deleted in v0.31 but no harm in keeping this one a bit longer -@deprecate initManual!(w...; kw...) initVariable!(w...; kw...) +# function ManifoldsVector(fg::AbstractDFG, varIds::Vector{Symbol}) +# manis = Bool[] +# for k = varIds +# push!(manis, getVariableType(fg, k) |> getManifold) +# end +# ManifoldsVector(manis) +# end +# function Optim.retract!(manis::ManifoldsVector, x) +# for (i,M) = enumerate(manis) +# x[i] = project(M, x[i]) +# end +# return x +# end +# function Optim.project_tangent!(manis::ManifoldsVector, G, x) +# for (i, M) = enumerate(manis) +# G[i] = project(M, x[i], G) +# end +# return G +# end ##============================================================================== @@ -146,34 +111,58 @@ function solveGraphParametric2( return d, result, flatvar.idx, Σ end -## ================================================================================================ -## Manifolds.jl Consolidation -## TODO: Still to be completed and tested. 
-## ================================================================================================ -# struct ManifoldsVector <: Optim.Manifold -# manis::Vector{Manifold} -# end -# Base.getindex(mv::ManifoldsVector, inds...) = getindex(mv.mani, inds...) -# Base.setindex!(mv, X, inds...) = setindex!(mv.mani, X, inds...) +##============================================================================== +## Deprecate code below before v0.34 +##============================================================================== -# function ManifoldsVector(fg::AbstractDFG, varIds::Vector{Symbol}) -# manis = Bool[] -# for k = varIds -# push!(manis, getVariableType(fg, k) |> getManifold) -# end -# ManifoldsVector(manis) +# function approxConvOnElements!( +# ccwl::Union{CommonConvWrapper{F}, CommonConvWrapper{Mixture{N_, F, S, T}}}, +# elements::Union{Vector{Int}, UnitRange{Int}}, +# ::Type{<:MultiThreaded}, +# _slack = nothing, +# ) where {N_, F <: AbstractRelative, S, T} +# # +# return error( +# "MultiThreaded `approxConvOnElements!` is deprecated and will soon be replaced", +# ) +# # Threads.@threads for n in elements +# # # ccwl.thrid_ = Threads.threadid() +# # ccwl.cpt[Threads.threadid()].particleidx = n + +# # # ccall(:jl_, Nothing, (Any,), "starting loop, thrid_=$(Threads.threadid()), partidx=$(ccwl.cpt[Threads.threadid()].particleidx)") +# # _solveCCWNumeric!( ccwl, _slack=_slack) +# # end +# # nothing # end -# function Optim.retract!(manis::ManifoldsVector, x) -# for (i,M) = enumerate(manis) -# x[i] = project(M, x[i]) -# end -# return x -# end -# function Optim.project_tangent!(manis::ManifoldsVector, G, x) -# for (i, M) = enumerate(manis) -# G[i] = project(M, x[i], G) -# end -# return G +# function approxConvOnElements!( +# ccwl::Union{CommonConvWrapper{F}, CommonConvWrapper{Mixture{N_, F, S, T}}}, +# elements::Union{Vector{Int}, UnitRange{Int}}, +# _slack = nothing, +# ) where {N_, F <: AbstractRelative, S, T} +# # +# return approxConvOnElements!(ccwl, elements, ccwl.threadmodel, _slack) # end + +# more legacy, dont delete yet +function Base.getproperty(ccw::CommonConvWrapper, f::Symbol) + if f == :threadmodel + error("CommonConvWrapper.threadmodel is obsolete") + # return SingleThreaded + elseif f == :params + error("CommonConvWrapper.params is deprecated, use .varValsAll instead") + return ccw.varValsAll + elseif f == :vartypes + @warn "CommonConvWrapper.vartypes is deprecated, use typeof.(getVariableType.(ccw.fullvariables) instead" maxlog=3 + return typeof.(getVariableType.(ccw.fullvariables)) + else + return getfield(ccw, f) + end +end + +##============================================================================== +## Deprecate code below before v0.35 +##============================================================================== + +## \ No newline at end of file diff --git a/src/services/EvalFactor.jl b/src/services/EvalFactor.jl index 5469c3389..3677a24b2 100644 --- a/src/services/EvalFactor.jl +++ b/src/services/EvalFactor.jl @@ -14,27 +14,7 @@ Future work: function approxConvOnElements!( ccwl::Union{CommonConvWrapper{F}, CommonConvWrapper{Mixture{N_, F, S, T}}}, elements::Union{Vector{Int}, UnitRange{Int}}, - ::Type{<:MultiThreaded}, - _slack = nothing, -) where {N_, F <: AbstractRelative, S, T} - # - return error( - "MultiThreaded `approxConvOnElements!` is deprecated and will soon be replaced", - ) - # Threads.@threads for n in elements - # # ccwl.thrid_ = Threads.threadid() - # ccwl.cpt[Threads.threadid()].particleidx = n - - # # ccall(:jl_, Nothing, (Any,), "starting loop, 
thrid_=$(Threads.threadid()), partidx=$(ccwl.cpt[Threads.threadid()].particleidx)") - # _solveCCWNumeric!( ccwl, _slack=_slack) - # end - # nothing -end - -function approxConvOnElements!( - ccwl::Union{CommonConvWrapper{F}, CommonConvWrapper{Mixture{N_, F, S, T}}}, - elements::Union{Vector{Int}, UnitRange{Int}}, - ::Type{<:SingleThreaded}, + # ::Type{<:SingleThreaded}, _slack = nothing, ) where {N_, F <: AbstractRelative, S, T} # @@ -45,14 +25,6 @@ function approxConvOnElements!( return nothing end -function approxConvOnElements!( - ccwl::Union{CommonConvWrapper{F}, CommonConvWrapper{Mixture{N_, F, S, T}}}, - elements::Union{Vector{Int}, UnitRange{Int}}, - _slack = nothing, -) where {N_, F <: AbstractRelative, S, T} - # - return approxConvOnElements!(ccwl, elements, ccwl.threadmodel, _slack) -end """ $SIGNATURES From 36ca7bafbe0f25e19f512b90befd15e5c853d32a Mon Sep 17 00:00:00 2001 From: dehann Date: Fri, 10 Mar 2023 15:45:51 -0800 Subject: [PATCH 3/9] fix #1962, drop dfg19 compat --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index 5bb69a0fd..a05a1d368 100644 --- a/Project.toml +++ b/Project.toml @@ -50,7 +50,7 @@ ApproxManifoldProducts = "0.6.3" BSON = "0.2, 0.3" Combinatorics = "1.0" DataStructures = "0.16, 0.17, 0.18" -DistributedFactorGraphs = "0.19.3, 0.20" +DistributedFactorGraphs = "0.20" Distributions = "0.24, 0.25" DocStringExtensions = "0.8, 0.9" FileIO = "1" From e60fe1613c2a7e86c0884e72e8cc3d1035aa23d5 Mon Sep 17 00:00:00 2001 From: dehann Date: Fri, 10 Mar 2023 16:00:59 -0800 Subject: [PATCH 4/9] fix test bug --- test/testPackingMixtures.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/testPackingMixtures.jl b/test/testPackingMixtures.jl index 404807db1..aa45ecce2 100644 --- a/test/testPackingMixtures.jl +++ b/test/testPackingMixtures.jl @@ -2,7 +2,7 @@ using IncrementalInference using DistributedFactorGraphs -# using JSON +using JSON3 ## From 7068b44bf9fe2e9a9a4ebcfee1e6aebd76eea28e Mon Sep 17 00:00:00 2001 From: dehann Date: Fri, 10 Mar 2023 20:43:46 -0800 Subject: [PATCH 5/9] struct defaults for CCW --- src/entities/FactorOperationalMemory.jl | 74 ++++++++++++++++++++----- src/services/CalcFactor.jl | 43 -------------- 2 files changed, 59 insertions(+), 58 deletions(-) diff --git a/src/entities/FactorOperationalMemory.jl b/src/entities/FactorOperationalMemory.jl index fa981d9e8..1193f0a8b 100644 --- a/src/entities/FactorOperationalMemory.jl +++ b/src/entities/FactorOperationalMemory.jl @@ -80,7 +80,7 @@ Related [`CalcFactor`](@ref), [`CalcFactorMahalanobis`](@ref) """ -struct CommonConvWrapper{ +Base.@kwdef struct CommonConvWrapper{ T <: AbstractFactor, VT <: Tuple, NTP <: Tuple, @@ -101,40 +101,84 @@ struct CommonConvWrapper{ to each hypothesis evaluation event on user function via CalcFactor, #1321 """ varValsAll::NTP """ dummy cache value to be deep copied later for each of the CalcFactor instances """ - dummyCache::CT + dummyCache::CT = nothing # derived config parameters for this factor """ Factor manifold definition for frequent use (not the variables manifolds) """ - manifold::AM + manifold::AM = getManifold(usrfnc!) """ Which dimensions does this factor influence. 
Sensitive (mutable) to both which 'solvefor index' variable and whether the factor is partial dimension """ - partialDims::Vector{<:Integer} + partialDims::Vector{<:Integer} = collect(1:manifold_dimension(manifold)) """ is this a partial constraint as defined by the existance of factor field `.partial::Tuple` """ - partial::Bool + partial::Bool = false """ probability that this factor is wholly incorrect and should be ignored during solving """ - nullhypo::Float64 + nullhypo::Float64 = 0.0 """ inflationSpread particular to this factor (by how much to dispurse the belief initial values before numerical optimization is run). Analogous to stochastic search """ - inflation::Float64 + inflation::Float64 = 3.0 # multihypo specific field containers for recipe of hypotheses to compute """ multi hypothesis settings #NOTE no need for a parameter as type is known from `parseusermultihypo` """ - hypotheses::HP + hypotheses::HP = nothing """ categorical to select which hypothesis is being considered during convolution operation """ - certainhypo::CH + certainhypo::CH = nothing """ subsection indices to select which params should be used for this hypothesis evaluation """ - activehypo::Vector{Int} + activehypo::Vector{Int} = collect(1:length(varValsAll)) # buffers and indices to point numerical computations to specific memory locations """ user defined measurement values for each approxConv operation FIXME make type stable, JT should now be type stable if rest works. SUPER IMPORTANT, if prior=>point or relative=>tangent, see #1661 can be a Vector{<:Tuple} or more direct Vector{<: pointortangenttype} """ - measurement::Vector{MT} + measurement::Vector{MT} = Vector(Vector{Float64}()) """ which index is being solved for in params? """ - varidx::Base.RefValue{Int} + varidx::Base.RefValue{Int} = Ref(1) """ Consolidation from CPT, the actual particle being solved at this moment """ - particleidx::Base.RefValue{Int} + particleidx::Base.RefValue{Int} = Ref(1) """ working memory to store residual for optimization routines """ - res::Vector{Float64} + res::Vector{Float64} = zeros(manifold_dimension(manifold)) """ experimental feature to embed gradient calcs with ccw """ - _gradients::G + _gradients::G = nothing end +function CommonConvWrapper( + usrfnc::T, + fullvariables, #::Tuple ::Vector{<:DFGVariable}; + varValsAll::Tuple, + X::AbstractVector{P}; #TODO remove X completely + # xDim::Int = size(X, 1), + userCache::CT = nothing, + manifold = getManifold(usrfnc), + partialDims::AbstractVector{<:Integer} = 1:length(X), + partial::Bool = false, + nullhypo::Real = 0, + inflation::Real = 3.0, + hypotheses::H = nothing, + certainhypo = nothing, + activehypo = collect(1:length(varValsAll)), + measurement::AbstractVector = Vector(Vector{Float64}()), + varidx::Int = 1, + particleidx::Int = 1, + res::AbstractVector{<:Real} = zeros(manifold_dimension(manifold)), # zDim + gradients = nothing, +) where {T <: AbstractFactor, P, H, CT} + # + return CommonConvWrapper( + usrfnc, + tuple(fullvariables...), + varValsAll, + userCache, + manifold, + partialDims, + partial, + # xDim, + float(nullhypo), + float(inflation), + hypotheses, + certainhypo, + activehypo, + measurement, + Ref(varidx), + Ref(particleidx), + res, + gradients, + ) +end + # diff --git a/src/services/CalcFactor.jl b/src/services/CalcFactor.jl index c5a3e053d..44d2dde0a 100644 --- a/src/services/CalcFactor.jl +++ b/src/services/CalcFactor.jl @@ -189,49 +189,6 @@ end ## FactorOperationalMemory helper constructors ## 
============================================================================================= -function CommonConvWrapper( - usrfnc::T, - fullvariables, #::Tuple ::Vector{<:DFGVariable}; - varValsAll::Tuple, - X::AbstractVector{P}; #TODO remove X completely - # xDim::Int = size(X, 1), - userCache::CT = nothing, - manifold = getManifold(usrfnc), - partialDims::AbstractVector{<:Integer} = 1:length(X), - partial::Bool = false, - nullhypo::Real = 0, - inflation::Real = 3.0, - hypotheses::H = nothing, - certainhypo = nothing, - activehypo = collect(1:length(varValsAll)), - measurement::AbstractVector = Vector(Vector{Float64}()), - varidx::Int = 1, - particleidx::Int = 1, - res::AbstractVector{<:Real} = zeros(manifold_dimension(manifold)), # zDim - gradients = nothing, -) where {T <: AbstractFactor, P, H, CT} - # - return CommonConvWrapper( - usrfnc, - tuple(fullvariables...), - varValsAll, - userCache, - manifold, - partialDims, - partial, - # xDim, - Float64(nullhypo), - inflation, - hypotheses, - certainhypo, - activehypo, - measurement, - Ref(varidx), - Ref(particleidx), - res, - gradients, - ) -end # the same as legacy, getManifold(ccwl.usrfnc!) getManifold(ccwl::CommonConvWrapper) = ccwl.manifold From 037c0c4548cb5b0d6adf44b3913448d77ad4b984 Mon Sep 17 00:00:00 2001 From: dehann Date: Fri, 10 Mar 2023 21:09:16 -0800 Subject: [PATCH 6/9] attemptGradients on true only --- .../services/DispatchPackedConversions.jl | 1 + src/services/CalcFactor.jl | 39 +++++++++++-------- src/services/FactorGraph.jl | 1 + 3 files changed, 24 insertions(+), 17 deletions(-) diff --git a/src/Serialization/services/DispatchPackedConversions.jl b/src/Serialization/services/DispatchPackedConversions.jl index 9ea913f3c..2c0655523 100644 --- a/src/Serialization/services/DispatchPackedConversions.jl +++ b/src/Serialization/services/DispatchPackedConversions.jl @@ -50,6 +50,7 @@ function reconstFactorData( certainhypo = packed.certainhypo, inflation = packed.inflation, userCache, + attemptGradients = getSolverParams(dfg).attemptGradients, # Block recursion if NoSolverParams or if set to not attempt gradients. _blockRecursion= getSolverParams(dfg) isa NoSolverParams || diff --git a/src/services/CalcFactor.jl b/src/services/CalcFactor.jl index 44d2dde0a..1fa1061c0 100644 --- a/src/services/CalcFactor.jl +++ b/src/services/CalcFactor.jl @@ -366,6 +366,7 @@ function _prepCCW( inflation::Real = 0.0, solveKey::Symbol = :default, _blockRecursion::Bool = false, + attemptGradients::Bool = true, userCache::CT = nothing, ) where {T <: AbstractFactor, CT} # @@ -416,14 +417,18 @@ function _prepCCW( varTypes = getVariableType.(fullvariables) # as per struct CommonConvWrapper - gradients = attemptGradientPrep( - varTypes, - usrfnc, - _varValsAll, - multihypo, - meas_single, - _blockRecursion, - ) + _gradients = if attemptGradients + attemptGradientPrep( + varTypes, + usrfnc, + _varValsAll, + multihypo, + meas_single, + _blockRecursion, + ) + else + nothing + end # variable Types pttypes = getVariableType.(Xi) .|> getPointType @@ -432,21 +437,21 @@ function _prepCCW( @warn "_prepCCW PointType is not concrete $PointType" maxlog=50 end - return CommonConvWrapper( - usrfnc, + # PointType[], + return CommonConvWrapper(; + usrfnc! 
= usrfnc, fullvariables, - _varValsAll, - PointType[]; - userCache, # should be higher in args list - manifold, # should be higher in args list + varValsAll = _varValsAll, + dummyCache = userCache, + manifold, partialDims, partial, - nullhypo, - inflation, + nullhypo = float(nullhypo), + inflation = float(inflation), hypotheses = multihypo, certainhypo, measurement, - gradients, + _gradients, ) end diff --git a/src/services/FactorGraph.jl b/src/services/FactorGraph.jl index dec804161..bcd246f52 100644 --- a/src/services/FactorGraph.jl +++ b/src/services/FactorGraph.jl @@ -733,6 +733,7 @@ function getDefaultFactorData( multihypo = mhcat, nullhypo = nh, inflation, + attemptGradients = getSolverParams(dfg).attemptGradients, _blockRecursion, userCache, ) From 86e4bb0355c035161f6bb8d5d99d66dba7f8de0d Mon Sep 17 00:00:00 2001 From: dehann Date: Fri, 10 Mar 2023 21:18:16 -0800 Subject: [PATCH 7/9] drop helper constructor for CCW --- src/Deprecated.jl | 44 +++++++++++++++++++++++++ src/entities/FactorOperationalMemory.jl | 44 ------------------------- test/testGradientUtils.jl | 12 +++++++ 3 files changed, 56 insertions(+), 44 deletions(-) diff --git a/src/Deprecated.jl b/src/Deprecated.jl index 011642969..edf037478 100644 --- a/src/Deprecated.jl +++ b/src/Deprecated.jl @@ -116,6 +116,50 @@ end ## Deprecate code below before v0.34 ##============================================================================== +# function CommonConvWrapper( +# usrfnc::T, +# fullvariables, #::Tuple ::Vector{<:DFGVariable}; +# varValsAll::Tuple, +# X::AbstractVector{P}; #TODO remove X completely +# # xDim::Int = size(X, 1), +# userCache::CT = nothing, +# manifold = getManifold(usrfnc), +# partialDims::AbstractVector{<:Integer} = 1:length(X), +# partial::Bool = false, +# nullhypo::Real = 0, +# inflation::Real = 3.0, +# hypotheses::H = nothing, +# certainhypo = nothing, +# activehypo = collect(1:length(varValsAll)), +# measurement::AbstractVector = Vector(Vector{Float64}()), +# varidx::Int = 1, +# particleidx::Int = 1, +# res::AbstractVector{<:Real} = zeros(manifold_dimension(manifold)), # zDim +# gradients = nothing, +# ) where {T <: AbstractFactor, P, H, CT} +# # +# return CommonConvWrapper( +# usrfnc, +# tuple(fullvariables...), +# varValsAll, +# userCache, +# manifold, +# partialDims, +# partial, +# # xDim, +# float(nullhypo), +# float(inflation), +# hypotheses, +# certainhypo, +# activehypo, +# measurement, +# Ref(varidx), +# Ref(particleidx), +# res, +# gradients, +# ) +# end + # function approxConvOnElements!( # ccwl::Union{CommonConvWrapper{F}, CommonConvWrapper{Mixture{N_, F, S, T}}}, # elements::Union{Vector{Int}, UnitRange{Int}}, diff --git a/src/entities/FactorOperationalMemory.jl b/src/entities/FactorOperationalMemory.jl index 1193f0a8b..0d172e24c 100644 --- a/src/entities/FactorOperationalMemory.jl +++ b/src/entities/FactorOperationalMemory.jl @@ -137,48 +137,4 @@ Base.@kwdef struct CommonConvWrapper{ end -function CommonConvWrapper( - usrfnc::T, - fullvariables, #::Tuple ::Vector{<:DFGVariable}; - varValsAll::Tuple, - X::AbstractVector{P}; #TODO remove X completely - # xDim::Int = size(X, 1), - userCache::CT = nothing, - manifold = getManifold(usrfnc), - partialDims::AbstractVector{<:Integer} = 1:length(X), - partial::Bool = false, - nullhypo::Real = 0, - inflation::Real = 3.0, - hypotheses::H = nothing, - certainhypo = nothing, - activehypo = collect(1:length(varValsAll)), - measurement::AbstractVector = Vector(Vector{Float64}()), - varidx::Int = 1, - particleidx::Int = 1, - res::AbstractVector{<:Real} 
= zeros(manifold_dimension(manifold)), # zDim - gradients = nothing, -) where {T <: AbstractFactor, P, H, CT} - # - return CommonConvWrapper( - usrfnc, - tuple(fullvariables...), - varValsAll, - userCache, - manifold, - partialDims, - partial, - # xDim, - float(nullhypo), - float(inflation), - hypotheses, - certainhypo, - activehypo, - measurement, - Ref(varidx), - Ref(particleidx), - res, - gradients, - ) -end - # diff --git a/test/testGradientUtils.jl b/test/testGradientUtils.jl index 98c1efd5e..b33b82363 100644 --- a/test/testGradientUtils.jl +++ b/test/testGradientUtils.jl @@ -135,4 +135,16 @@ coord_2 = IIF._evalFactorTemporary!(fct, varTypes, 2, measurement, pts ) end +@testset "Enable SolverParams.attemptGradients" begin +## + +fg = generateGraph_LineStep(; + solverParams = SolverParams(; + attemptGradients=true + ) +) + +## +end + # \ No newline at end of file From ac10f380bd3ca357d0e673f15ab2ed2e8e280007 Mon Sep 17 00:00:00 2001 From: dehann Date: Fri, 10 Mar 2023 21:30:55 -0800 Subject: [PATCH 8/9] fix test bug --- test/testSaveLoadDFG.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/testSaveLoadDFG.jl b/test/testSaveLoadDFG.jl index f9898da28..75441ac85 100644 --- a/test/testSaveLoadDFG.jl +++ b/test/testSaveLoadDFG.jl @@ -13,7 +13,6 @@ addFactor!(fg, [:x2;:x3;:x4], LinearRelative(Normal()), multihypo=[1.0;0.6;0.4]) saveFolder = "/tmp/dfg_test" saveDFG(fg, saveFolder) -# VERSION above 1.0.x hack required since Julia 1.0 does not seem to havfunction `splitpath` retDFG = initfg() retDFG = loadDFG!(retDFG, saveFolder) @@ -56,6 +55,7 @@ saveFolder = "/tmp/dfg_test" saveDFG(fg, saveFolder) retDFG = initfg() +getSolverParams(retDFG).attemptGradients = true loadDFG!(retDFG, saveFolder) Base.rm(saveFolder*".tar.gz") From af67e30fdcf7b30e8e54a5a5fc046439ba6e5ae2 Mon Sep 17 00:00:00 2001 From: dehann Date: Fri, 10 Mar 2023 21:35:59 -0800 Subject: [PATCH 9/9] fix test --- test/testGradientUtils.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/testGradientUtils.jl b/test/testGradientUtils.jl index b33b82363..f26fa5cf7 100644 --- a/test/testGradientUtils.jl +++ b/test/testGradientUtils.jl @@ -138,7 +138,7 @@ end @testset "Enable SolverParams.attemptGradients" begin ## -fg = generateGraph_LineStep(; +fg = generateGraph_LineStep(4; solverParams = SolverParams(; attemptGradients=true )
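
A usage sketch of the JSON3 read/write pattern that patches 1 and 2 migrate to (stringify a packed type, read it back generically to discover `_type`, then re-read with the concrete type). The PackedExample type and its fields below are illustrative stand-ins, not part of IncrementalInference:

using JSON3
using StructTypes

# illustrative stand-in for a packed type such as PackedManifoldKernelDensity
struct PackedExample
  _type::String
  mu::Vector{Float64}
end
StructTypes.StructType(::Type{PackedExample}) = StructTypes.Struct()

packed = PackedExample("PackedExample", [0.0, 1.0])
str = JSON3.write(packed)                 # stringified packed object

generic = JSON3.read(str)                 # untyped JSON3.Object
generic._type                             # property access, replacing the JSON2-era _pck[:_type]

typed = JSON3.read(str, PackedExample)    # second, typed read once _type is resolved
typed.mu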
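
The Base.getproperty overload kept in Deprecated.jl (patch 2) follows the usual field-forwarding pattern: obsolete names either error outright or warn and forward to the new field, and everything else falls through to getfield. A self-contained sketch with made-up names, not the package's own types:

struct NewThing
  varValsAll::Vector{Float64}
end

function Base.getproperty(x::NewThing, f::Symbol)
  if f == :threadmodel
    error("NewThing.threadmodel is obsolete")                        # hard break
  elseif f == :params
    @warn "NewThing.params is deprecated, use .varValsAll instead" maxlog = 3
    return getfield(x, :varValsAll)                                  # warn and forward
  else
    return getfield(x, f)                                            # real fields unchanged
  end
end

NewThing([1.0, 2.0]).params   # warns, then returns the varValsAll vector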
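
Patches 5 and 7 rewrite CommonConvWrapper as a Base.@kwdef struct so most fields carry defaults, several computed from earlier fields (e.g. res = zeros(manifold_dimension(manifold))), which is what lets the old helper constructor be dropped. A minimal sketch of that pattern with illustrative names:

Base.@kwdef struct ExampleWrapper{T}
  usrfnc!::T                                   # no default: must be supplied
  dims::Int = 3
  partialDims::Vector{Int} = collect(1:dims)   # default computed from an earlier field
  res::Vector{Float64} = zeros(dims)
  varidx::Base.RefValue{Int} = Ref(1)          # Ref gives a mutable slot in an immutable struct
end

w = ExampleWrapper(; usrfnc! = sin, dims = 2)
w.varidx[] = 5     # still assignable through the Ref
w.partialDims      # [1, 2]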
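
Patch 6 gates gradient preparation in _prepCCW behind SolverParams.attemptGradients (also consulted when factors are rebuilt from serialized data), and the new tests enable it explicitly. Usage follows those tests:

using IncrementalInference

# opt in at construction time, as in test/testGradientUtils.jl
fg = generateGraph_LineStep(4;
  solverParams = SolverParams(; attemptGradients = true),
)

# or toggle it on an existing graph before loading serialized factors,
# as in test/testSaveLoadDFG.jl
getSolverParams(fg).attemptGradients = true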