diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 47ae6844f..c3524460b 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -9,7 +9,7 @@ on: workflow_dispatch: jobs: test-stable: - name: Julia ${{ matrix.version }} - ${{ matrix.os }} - ${{ matrix.arch }} - ${{ matrix.group }} + name: JL${{ matrix.version }} - ${{ matrix.arch }} - ${{ matrix.group }} - ${{ matrix.os }} runs-on: ${{ matrix.os }} env: JULIA_PKG_SERVER: "" @@ -19,7 +19,6 @@ jobs: version: - '1.6' - '1.8' - - 'nightly' os: - ubuntu-latest arch: @@ -47,19 +46,26 @@ jobs: if: ${{ matrix.version != 'nightly' }} upstream-dev: - #if: github.ref != 'refs/heads/release**' name: Upstream Dev runs-on: ubuntu-latest env: JULIA_PKG_SERVER: "" + strategy: + fail-fast: false + matrix: + arch: + - x64 + version: + - '1.8' + group: + - 'basic_functional_group' + - 'test_cases_group' steps: - uses: actions/checkout@v2 - - uses: julia-actions/setup-julia@v1 with: - version: 1.7 - arch: x64 - + version: ${{ matrix.version }} + arch: ${{ matrix.arch }} - uses: actions/cache@v1 env: cache-name: cache-artifacts @@ -70,14 +76,44 @@ jobs: ${{ runner.os }}-test-${{ env.cache-name }}- ${{ runner.os }}-test- ${{ runner.os }}- - - run: | git config --global user.name Tester git config --global user.email te@st.er - - name: Upstream Dev + env: + IIF_TEST_GROUP: ${{ matrix.group }} run: | julia --project=@. --check-bounds=yes -e 'using Pkg; Pkg.add(PackageSpec(name="ApproxManifoldProducts",rev="master"));' julia --project=@. --check-bounds=yes -e 'using Pkg; Pkg.add(PackageSpec(name="DistributedFactorGraphs",rev="master"));' julia --project=@. --check-bounds=yes -e 'using Pkg; Pkg.test("IncrementalInference"; coverage=false)' shell: bash + + test-debug-group: + needs: [ upstream-dev ] + name: JL${{ matrix.version }} - ${{ matrix.group }} - ${{ matrix.os }} + runs-on: ${{ matrix.os }} + env: + JULIA_PKG_SERVER: "" + strategy: + fail-fast: false + matrix: + os: + - ubuntu-latest + version: + - '1.8' + arch: + - x64 + group: + - 'tmp_debug_group' + continue-on-error: true + steps: + - uses: actions/checkout@v2 + - uses: julia-actions/setup-julia@v1 + with: + version: ${{ matrix.version }} + arch: ${{ matrix.arch }} + - uses: julia-actions/cache@v1 + - uses: julia-actions/julia-buildpkg@v1 + - uses: julia-actions/julia-runtest@v1 + env: + IIF_TEST_GROUP: ${{ matrix.group }} diff --git a/LICENSE b/LICENSE index 017c0cdf2..8025ac9cc 100644 --- a/LICENSE +++ b/LICENSE @@ -1,6 +1,6 @@ The MIT License (MIT) -Copyright (c) 2019 +Copyright (c) 2019 - 2022 Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal diff --git a/Project.toml b/Project.toml index 44b97320e..592c1a184 100644 --- a/Project.toml +++ b/Project.toml @@ -2,7 +2,7 @@ name = "IncrementalInference" uuid = "904591bb-b899-562f-9e6f-b8df64c7d480" keywords = ["MM-iSAMv2", "Bayes tree", "junction tree", "Bayes network", "variable elimination", "graphical models", "SLAM", "inference", "sum-product", "belief-propagation"] desc = "Implements the Multimodal-iSAMv2 algorithm." -version = "0.30.5" +version = "0.30.6" [deps] ApproxManifoldProducts = "9bbbb610-88a1-53cd-9763-118ce10c1f89" diff --git a/src/ManifoldsExtentions.jl b/src/ManifoldsExtentions.jl index d32683fc5..514ac5ff2 100644 --- a/src/ManifoldsExtentions.jl +++ b/src/ManifoldsExtentions.jl @@ -12,6 +12,30 @@ function Manifolds.manifold_dimension(M::NPowerManifold) return manifold_dimension(M.manifold) * M.N end +function Manifolds.get_vector!(M::NPowerManifold, Y, p, c, B::AbstractBasis) + dim = manifold_dimension(M.manifold) + rep_size = representation_size(M.manifold) + v_iter = 1 + for i in Manifolds.get_iterator(M) + Y[i] = get_vector( + M.manifold, + Manifolds._read(M, rep_size, p, i), + view(c,v_iter:(v_iter + dim - 1)), + B, + ) + v_iter += dim + end + return Y +end + +function Manifolds.exp!(M::NPowerManifold, q, p, X) + rep_size = representation_size(M.manifold) + for i in Manifolds.get_iterator(M) + q[i] = exp(M.manifold, Manifolds._read(M, rep_size, p, i), Manifolds._read(M, rep_size, X, i)) + end + return q +end + ## ================================================================================================ ## ArrayPartition getPointIdentity (identity_element) ## ================================================================================================ diff --git a/src/ParametricUtils.jl b/src/ParametricUtils.jl index a0286538c..e809c5b47 100644 --- a/src/ParametricUtils.jl +++ b/src/ParametricUtils.jl @@ -13,11 +13,12 @@ Related [`CalcFactor`](@ref) """ -struct CalcFactorMahalanobis{N, S<:Union{Nothing,AbstractMaxMixtureSolver}} +struct CalcFactorMahalanobis{N, D, L, S<:Union{Nothing,AbstractMaxMixtureSolver}} + faclbl::Symbol calcfactor!::CalcFactor varOrder::Vector{Symbol} meas::NTuple{N, <:AbstractArray} - iΣ::NTuple{N, Matrix{Float64}} + iΣ::NTuple{N, SMatrix{D,D,Float64,L}} specialAlg::S end # struct CalcFactorMahalanobis{CF<:CalcFactor, S<:Union{Nothing,AbstractMaxMixtureSolver}, N} @@ -135,7 +136,7 @@ function CalcFactorMahalanobis(fg, fct::DFGFactor) _meas, _iΣ = getMeasurementParametric(fac_func) M = getManifold(getFactorType(fct)) - + dims = manifold_dimension(M) ϵ = getPointIdentity(M) if typeof(_meas) <: Tuple @@ -144,12 +145,12 @@ function CalcFactorMahalanobis(fg, fct::DFGFactor) elseif fac_func isa ManifoldPrior _measX = (_meas,) else - _measX = (hat(M, ϵ, _meas),) + _measX = (convert(typeof(ϵ), get_vector(M, ϵ, _meas, DefaultOrthogonalBasis())),) end meas = fac_func isa AbstractPrior ? map(X->exp(M, ϵ, X), _measX) : _measX - iΣ = typeof(_iΣ) <: Tuple ? _iΣ : (_iΣ,) + iΣ = convert.(SMatrix{dims,dims}, typeof(_iΣ) <: Tuple ? _iΣ : (_iΣ,)) cache = preambleCache(fg, getVariable.(fg, varOrder), getFactorType(fct)) @@ -172,14 +173,14 @@ function CalcFactorMahalanobis(fg, fct::DFGFactor) special = nothing end - return CalcFactorMahalanobis(calcf, varOrder, meas, iΣ, special) + return CalcFactorMahalanobis(fct.label, calcf, varOrder, meas, iΣ, special) end # This is where the actual parametric calculation happens, CalcFactor equivalent for parametric -function (cfp::CalcFactorMahalanobis{1,Nothing})(variables...) # AbstractArray{T} where T <: Real +function (cfp::CalcFactorMahalanobis{1,D,L,Nothing})(variables...) where {D,L}# AbstractArray{T} where T <: Real # call the user function - res = cfp.calcfactor!(cfp.meas[1], variables...) + res = cfp.calcfactor!(cfp.meas..., variables...) # 1/2*log(1/( sqrt(det(Σ)*(2pi)^k) )) ## k = dim(μ) return res' * cfp.iΣ[1] * res end @@ -193,6 +194,21 @@ function calcFactorMahalanobisDict(fg) return calcFactors end +# Base.eltype(::Type{<:CalcFactorMahalanobis}) = CalcFactorMahalanobis + +# function calcFactorMahalanobisArray(fg) +# cfps = map(getFactors(fg)) do fct +# CalcFactorMahalanobis(fg, fct) +# end +# types = collect(Set(typeof.(cfps))) +# cfparr = ArrayPartition(map(x->Vector{x}(), types)...) +# for cfp in cfps +# idx = findfirst(==(typeof(cfp)), types) +# push!(cfparr.x[idx], cfp) +# end +# return cfparr +# end + ## ================================================================================================ ## ================================================================================================ ## New Parametric refactor WIP @@ -290,6 +306,7 @@ struct GraphSolveContainer varTypesIds::OrderedDict{DataType, Vector{Symbol}} cfdict::OrderedDict{Symbol, CalcFactorMahalanobis} varOrderDict::OrderedDict{Symbol, Tuple{Int, Vararg{Int,}}} + # cfarr::AbstractVector # TODO maybe <: AbstractVector(CalcFactorMahalanobis) end @@ -309,6 +326,8 @@ function GraphSolveContainer(fg) varOrderDict[fid] = tuple(var_idx...) end + # cfarr = calcFactorMahalanobisArray(fg) + # return GraphSolveContainer(M, buffs, varTypes, varTypesIds, cfd, varOrderDict, cfarr) return GraphSolveContainer(M, buffs, varTypes, varTypesIds, cfd, varOrderDict) end @@ -322,7 +341,7 @@ function getGraphSolveCache!(gsc::GraphSolveContainer, ::Type{T}) where T<:Real return val end -function _toPoints2!(@nospecialize(M::AbstractManifold), buffs::GraphSolveBuffers{T,U}, Xc::Vector{T}) where {T,U} +function _toPoints2!(M::AbstractManifold, buffs::GraphSolveBuffers{T,U}, Xc::Vector{T}) where {T,U} ϵ = buffs.ϵ p = buffs.p X = buffs.X @@ -331,9 +350,32 @@ function _toPoints2!(@nospecialize(M::AbstractManifold), buffs::GraphSolveBuffer return p::U end -cost_cfp(cfp::CalcFactorMahalanobis, p, vi::NTuple{1,Int}) = cfp(p[vi[1]]) -cost_cfp(cfp::CalcFactorMahalanobis, p, vi::NTuple{2,Int}) = cfp(p[vi[1]], p[vi[2]]) -cost_cfp(cfp::CalcFactorMahalanobis, p, vi::NTuple{3,Int}) = cfp(p[vi[1]], p[vi[2]], p[vi[3]]) +cost_cfp(@nospecialize(cfp::CalcFactorMahalanobis), @nospecialize(p::AbstractArray), vi::NTuple{1,Int}) = cfp(p[vi[1]]) +cost_cfp(@nospecialize(cfp::CalcFactorMahalanobis), @nospecialize(p::AbstractArray), vi::NTuple{2,Int}) = cfp(p[vi[1]], p[vi[2]]) +cost_cfp(@nospecialize(cfp::CalcFactorMahalanobis), @nospecialize(p::AbstractArray), vi::NTuple{3,Int}) = cfp(p[vi[1]], p[vi[2]], p[vi[3]]) + +# function (gsc::GraphSolveContainer)(f::Vector{T}, Xc::Vector{T}, ::Val{true}) where T <: Real +# # +# buffs = getGraphSolveCache!(gsc, T) + +# cfdict = gsc.cfdict +# varOrderDict = gsc.varOrderDict + +# M = gsc.M + +# p = _toPoints2!(M, buffs, Xc) + + +# for (i,(fid, cfp)) in enumerate(cfdict) +# varOrder_idx = varOrderDict[fid] + +# # call the user function +# f[i] = cost_cfp(cfp, p, varOrder_idx)/2 +# end + +# return f +# end + # the cost function function (gsc::GraphSolveContainer)(Xc::Vector{T}) where T <: Real @@ -347,6 +389,27 @@ function (gsc::GraphSolveContainer)(Xc::Vector{T}) where T <: Real p = _toPoints2!(M, buffs, Xc) + obj = mapreduce(+, cfdict) do (fid, cfp) + + varOrder_idx = varOrderDict[fid] + # call the user function + cost_cfp(cfp, p, varOrder_idx) + end + + return obj/2 +end + +function (gsc::GraphSolveContainer)(Xc::Vector{T}, ::MultiThreaded) where T <: Real + # + buffs = getGraphSolveCache!(gsc, T) + + cfdict = gsc.cfdict + varOrderDict = gsc.varOrderDict + + M = gsc.M + + p = _toPoints2!(M, buffs, Xc) + #NOTE multi threaded option obj = zeros(T,(Threads.nthreads())) Threads.@threads for fid in collect(keys(cfdict)) @@ -673,6 +736,24 @@ end ## ================================================================================================ ## SANDBOX of usefull development functions to be cleaned up +""" + $SIGNATURES +Update the parametric solver data value and covariance. +""" +function updateSolverDataParametric! end + +function updateSolverDataParametric!(v::DFGVariable, val::AbstractArray{<:Real}, cov::Matrix; solveKey::Symbol=:parametric ) + vnd = getSolverData(v, solveKey) + return updateSolverDataParametric!(vnd, val, cov) +end + +function updateSolverDataParametric!(vnd::VariableNodeData, val::AbstractArray{<:Real}, cov::Matrix) + # fill in the variable node data value + vnd.val[1] = val + #calculate and fill in covariance + vnd.bw .= cov + return vnd +end """ $SIGNATURES @@ -696,8 +777,18 @@ end """ $SIGNATURES Initialize the parametric solver data from a different solution in `fromkey`. + +DevNotes +- TODO, keyword `force` not wired up yet. """ -function initParametricFrom!(fg::AbstractDFG, fromkey::Symbol = :default; parkey::Symbol = :parametric, onepoint=false) +function initParametricFrom!( + fg::AbstractDFG, + fromkey::Symbol = :default; + parkey::Symbol = :parametric, + onepoint=false, + force::Bool=false + ) + # if onepoint for v in getVariables(fg) fromvnd = getSolverData(v, fromkey) @@ -736,19 +827,16 @@ end $SIGNATURES Update the fg from solution in vardict and add MeanMaxPPE (all just mean). Usefull for plotting """ -function updateParametricSolution!(sfg, vardict) +function updateParametricSolution!(sfg, vardict; solveKey::Symbol=:parametric) for (v,val) in vardict - vnd = getSolverData(getVariable(sfg, v), :parametric) - # fill in the variable node data value - vnd.val[1] = val.val - #calculate and fill in covariance - vnd.bw = val.cov + vnd = getSolverData(getVariable(sfg, v), solveKey) + # Update the variable node data value and covariance + updateSolverDataParametric!(vnd, val.val, val.cov) #fill in ppe as mean - Xc = getCoordinates(getVariableType(sfg, v), val.val) - - ppe = MeanMaxPPE(:parametric, Xc, Xc, Xc) - getPPEDict(getVariable(sfg, v))[:parametric] = ppe + Xc = collect(getCoordinates(getVariableType(sfg, v), val.val)) + ppe = MeanMaxPPE(solveKey, Xc, Xc, Xc) + getPPEDict(getVariable(sfg, v))[solveKey] = ppe end end @@ -774,6 +862,81 @@ function createMvNormal(v::DFGVariable, key=:parametric) end end +#TODO this is still experimental and a POC +function getInitOrderParametric(fg; startIdx::Symbol = lsfPriors(fg)[1]) + + order = DFG.traverseGraphTopologicalSort(fg, startIdx) + filter!(order) do l + isVariable(fg, l) + end + return order + +end + +function autoinitParametric!( + fg, + varorderIds=getInitOrderParametric(fg); + reinit=false, + algorithm=Optim.NelderMead, + algorithmkwargs = (initial_simplex = Optim.AffineSimplexer(0.025, 0.1),) + ) + @showprogress for vIdx=varorderIds + autoinitParametric!(fg, vIdx; reinit, algorithm, algorithmkwargs) + end + return nothing +end + +function autoinitParametric!( + dfg::AbstractDFG, + initme::Symbol; + kwargs... + ) + autoinitParametric!(dfg, getVariable(dfg,initme); kwargs...) +end + + +function autoinitParametric!( + dfg::AbstractDFG, + xi::DFGVariable; + solveKey = :parametric, + reinit::Bool=false, + kwargs... + ) + # + + initme = getLabel(xi) + vnd = getSolverData(xi, solveKey) + # don't initialize a variable more than once + if reinit || !isInitialized(xi, solveKey) + + # frontals - initme + # separators - inifrom + + initfrom = ls2(dfg, initme) + filter!(initfrom) do vl + isInitialized(dfg, vl, solveKey) + end + + vardict, result, flatvars, Σ = solveConditionalsParametric(dfg, [initme], initfrom; kwargs...) + + val,cov = vardict[initme] + + updateSolverDataParametric!(vnd, val, cov) + + vnd.initialized = true + #fill in ppe as mean + Xc = getCoordinates(getVariableType(xi), val) + ppe = MeanMaxPPE(:parametric, Xc, Xc, Xc) + getPPEDict(xi)[:parametric] = ppe + + # updateVariableSolverData!(dfg, xi, solveKey, true; warn_if_absent=false) + # updateVariableSolverData!(dfg, xi.label, getSolverData(xi, solveKey), :graphinit, true, Symbol[]; warn_if_absent=false) + else + result = nothing + end + + return result#isInitialized(xi, solveKey) +end # diff --git a/src/services/GraphInit.jl b/src/services/GraphInit.jl index 8ed3593fd..e244c6b54 100644 --- a/src/services/GraphInit.jl +++ b/src/services/GraphInit.jl @@ -70,60 +70,6 @@ function factorCanInitFromOtherVars(dfg::AbstractDFG, end -function autoinitParametric!( - dfg::AbstractDFG, - xi::DFGVariable; - solveKey = :parametric, - reinit::Bool=false, - ) - # - - initme = getLabel(xi) - vnd = getSolverData(xi, solveKey) - # don't initialize a variable more than once - if reinit || !isInitialized(xi, solveKey) - - # frontals - initme - # separators - inifrom - - initfrom = ls2(dfg, initme) - filter!(initfrom) do vl - isInitialized(dfg, vl, solveKey) - end - - vardict, result, flatvars, Σ = solveConditionalsParametric(dfg, [initme], initfrom) - - val,cov = vardict[initme] - - # fill in the variable node data value - vnd.val[1] = val - #calculate and fill in covariance - vnd.bw = cov - - vnd.initialized = true - #fill in ppe as mean - Xc = getCoordinates(getVariableType(xi), val) - ppe = MeanMaxPPE(:parametric, Xc, Xc, Xc) - getPPEDict(xi)[:parametric] = ppe - - # updateVariableSolverData!(dfg, xi, solveKey, true; warn_if_absent=false) - # updateVariableSolverData!(dfg, xi.label, getSolverData(xi, solveKey), :graphinit, true, Symbol[]; warn_if_absent=false) - else - result = nothing - end - - return result#isInitialized(xi, solveKey) -end - - -function autoinitParametric!( - dfg::AbstractDFG, - initme::Symbol; - kwargs... - ) - autoinitParametric!(dfg, getVariable(dfg,initme); kwargs...) -end - """ $(SIGNATURES) @@ -452,13 +398,17 @@ function initAll!(dfg::AbstractDFG, # May have to first add the solveKey VNDs if they are not yet available for sym in syms - var = getVariable(dfg, sym) + vari = getVariable(dfg, sym) + varType = getVariableType(vari) |> _variableType # does SolverData exist for this solveKey? - if !( solveKey in listSolveKeys(var) ) - varType = getVariableType(var) + vsolveKeys = listSolveKeys(vari) + if !_parametricInit && !( solveKey in vsolveKeys ) # accept complete defaults for a novel solveKey - setDefaultNodeData!(var, 0, N, getDimension(varType), solveKey=solveKey, - initialized=false, varType=varType, dontmargin=false) + setDefaultNodeData!(vari, 0, N, getDimension(varType); solveKey, + initialized=false, varType) + end + if _parametricInit && !(:parametric in vsolveKeys) + setDefaultNodeDataParametric!(vari, varType; initialized=false) end end diff --git a/src/services/MaxMixture.jl b/src/services/MaxMixture.jl index 03092fc25..42b834b5c 100644 --- a/src/services/MaxMixture.jl +++ b/src/services/MaxMixture.jl @@ -53,7 +53,7 @@ end # end -function (cfp::CalcFactorMahalanobis{N, MaxMixture})(variables...) where N +function (cfp::CalcFactorMahalanobis{N, D, L, MaxMixture})(variables...) where {N,D,L} r = [_calcFactorMahalanobis(cfp, cfp.meas[i], cfp.iΣ[i], variables...) for i = 1:length(cfp.meas)] @@ -82,7 +82,7 @@ struct MaxNullhypo <: AbstractMaxMixtureSolver nullhypo::Float64 end -function (cfp::CalcFactorMahalanobis{N, MaxMultihypo})(X1, L1, L2) where N +function (cfp::CalcFactorMahalanobis{N, D, L, MaxMultihypo})(X1, L1, L2) where {N,D,L} mh = cfp.specialAlg.multihypo @assert length(mh) == 3 "multihypo $mh not supported with parametric, length should be 3" @assert mh[1] == 0 "multihypo $mh not supported with parametric, first should be 0" @@ -100,7 +100,7 @@ function (cfp::CalcFactorMahalanobis{N, MaxMultihypo})(X1, L1, L2) where N end -function (cfp::CalcFactorMahalanobis{N, MaxNullhypo})(X1, X2) where N +function (cfp::CalcFactorMahalanobis{N, D, L, MaxNullhypo})(X1, X2) where {N,D,L} nh = cfp.specialAlg.nullhypo @assert nh > 0 "nullhypo $nh not as expected" diff --git a/src/services/TetherUtils.jl b/src/services/TetherUtils.jl index 5fb5abd3a..786c7c6c0 100644 --- a/src/services/TetherUtils.jl +++ b/src/services/TetherUtils.jl @@ -109,7 +109,9 @@ Notes - Returns mean value as coordinates DevNotes -- TODO consolidate with similar `approxConv` +- TODO consolidate with similar [`approxConvBelief`](@ref) +- TODO compare consolidate with [`solveParametricConditionals`](@ref) +- TODO compare consolidate with [`solveFactorParametric`](@ref) Related: diff --git a/test/runtests.jl b/test/runtests.jl index 76f3c7c7d..2c907c409 100644 --- a/test/runtests.jl +++ b/test/runtests.jl @@ -4,6 +4,10 @@ TEST_GROUP = get(ENV, "IIF_TEST_GROUP", "all") # temporarily moved to start (for debugging) #... +if TEST_GROUP in ["all", "tmp_debug_group"] +include("testMultiHypo3Door.jl") +include("priorusetest.jl") +end if TEST_GROUP in ["all", "basic_functional_group"] include("testSphereMani.jl") @@ -73,8 +77,6 @@ include("testEuclidDistance.jl") end if TEST_GROUP in ["all", "test_cases_group"] -include("testMultiHypo3Door.jl") -include("priorusetest.jl") include("testnullhypothesis.jl") include("testVariousNSolveSize.jl") include("testExplicitMultihypo.jl")