From 7a7d6a97d0528856ce292c10223101b84fb981a4 Mon Sep 17 00:00:00 2001
From: dehann <dehann.fourie@gmail.com>
Date: Fri, 11 Feb 2022 20:22:40 -0500
Subject: [PATCH 1/3] Remove pre-v0.27 deprecations

---
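Note: this patch only drops the pre-v0.27 deprecation shims from
src/Deprecated.jl; downstream code should already be on the replacement API.
As a reminder, a minimal sketch of the replacement calls implied by the
@deprecate mappings removed below (the variable label and point count are
placeholders; exact keyword options depend on the installed IIF/AMP versions):

using IncrementalInference

fg = initfg()
addVariable!(fg, :x0, ContinuousScalar)

# manikde! now takes the variable type first (was manikde!(pts, ContinuousScalar))
pts = [randn(1) for _ in 1:100]
X0 = manikde!(ContinuousScalar, pts)
initManual!(fg, :x0, X0)

# getBelief replaces the removed getKDE alias
B = getBelief(fg, :x0)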
 src/Deprecated.jl | 456 ----------------------------------------------
 1 file changed, 456 deletions(-)

diff --git a/src/Deprecated.jl b/src/Deprecated.jl
index d8d606c7e..bd76586e8 100644
--- a/src/Deprecated.jl
+++ b/src/Deprecated.jl
@@ -214,462 +214,6 @@ end
 @deprecate generateCanonicalFG_TestSymbolic(w...;kw...) generateGraph_TestSymbolic(w...;kw...)
 
 
-##==============================================================================
-## Deprecate code below before v0.27
-##==============================================================================
-
-convert(::Type{<:SamplableBelief}, obj::PackedUniform) = unpackDistribution(obj)
-
-
-Base.convert(::Type{<:Tuple}, ::InstanceType{Manifolds.Euclidean{Tuple{N}, ℝ}} ) where N = tuple([:Euclid for i in 1:N]...)
-Base.convert(::Type{<:Tuple}, ::InstanceType{Manifolds.Circle{ℝ}})  = error("#FIXME")#(:Circular,)
-Base.convert(::Type{<:Tuple}, ::InstanceType{Manifolds.RealCircleGroup})  = (:Circular,)
-
-@deprecate manikde!(pts::AbstractVector, bws::Vector{<:Real}, variableType::Union{InstanceType{<:InferenceVariable}, InstanceType{<:AbstractFactor}} ) manikde!(variableType,pts,bws)
-@deprecate manikde!(pts::AbstractVector, varType::Union{InstanceType{<:InferenceVariable}, InstanceType{<:AbstractFactor}}) manikde!(varType, pts)
-
-# function getSample(cf::CalcFactor{<:AbstractRelativeRoots})
-#   M = getManifold(cf.factor)
-#   if hasfield(typeof(cf.factor), :Z)
-#     return sampleTangent(M, cf.factor.Z)
-#   else
-#     error("""Factor $(typeof(cf.factor)) does not have a field `Z`, to use the default `getSample` method, use `Z` for the measurement. 
-#               Alternatively, provide a `getSample` method. See IIF issue #1441 and Custom Factors in the Caesar documentation.""")
-#   end
-# end
-
-# function getSample(cf::CalcFactor{<:CircularCircular})
-#   # return (sampleTangent(getManifold(cf.factor), cf.factor.Z), )
-#   # FIXME workaround for issue #TBD with manifolds CircularGroup
-#   return sampleTangent(getManifold(cf.factor), cf.factor.Z, [0.0])
-# end
-
-# function getSample(cf::CalcFactor{<:LinearRelative})
-#   # _samplemakevec(z::Real) = [z;]
-#   # _samplemakevec(z::AbstractVector{<:Real}) = z
-#   return sampleTangent(getManifold(cf.factor), cf.factor.Z)
-# end
-
-# function getSample(cf::CalcFactor{<:EuclidDistance})
-#   rand(cf.factor.Z, 1)
-# end
-
-@deprecate CalcFactor(x1,x2,x3,x4,x5,x6) CalcFactor(x1,x2,x3,x4,x5,x6, true)
-
-@deprecate ensureAllInitialized!(w...;kw...) initAll!(w...;kw...)
-
-# """
-#     $SIGNATURES
-
-# Sample points from a regular grid scalar field level set.
-
-# Notes
-# - modifies first argument roi to save on memory allocations
-# - user should duplicate if with a deepcopy if needed
-# """
-# function sampleLevelSetGaussian!( roi::AbstractMatrix{<:Real},
-#                                   sigma::Real,
-#                                   x_grid::AbstractVector{<:Real}, 
-#                                   y_grid::AbstractVector{<:Real};
-#                                   sigma_scale::Real=3  )
-#   #
-#   # make Gaussian
-#   roi .^= 2
-#   roi .*= 0.5/(sigma^2)
-#     # truncate at sigma_scale*sigma
-#     _roi = thres .- roi
-
-#   sampleHeatmap(roi, x_grid, y_grid, sigma_scale^2)
-# end
-
-
-# @deprecate getLevelSetSigma(data::AbstractMatrix{<:Real}, level::Real, w...; kw...) sampleLevelSetGaussian!(data.-level, w...; kw...)
-
-getOutNeighbors(w...;kw...) = error("Obsolete, use DFG.getNeighbors instead.")
-
-# # TODO -- there should be a better way, without retrieving full vertex
-# # TODO -- Deprecated for DFG -- must update
-# function getOutNeighbors(dfg::T, v::V; needdata::Bool=false, ready::Union{Nothing, Int}=nothing, backendset::Union{Nothing, Int}=nothing)::Vector{Symbol} where {T <: AbstractDFG, V <: DFGNode}
-#   @warn "TODO: needdata is currently ignored. Symbols are returned."
-#   nodes = getNeighbors(dfg, v, ready=ready, backendset=backendset)
-#   return nodes
-# end
-# function getOutNeighbors(dfg::T, vertSym::Symbol; needdata::Bool=false, ready::Int=1, backendset::Int=1 )::Vector{Symbol} where {T <: AbstractDFG, V <: DFGNode}
-#   @warn "TODO: needdata is currently ignored. Symbols are returned."
-#   nodes = getNeighbors(dfg, vertSym, ready=ready, backendset=backendset)
-#   return nodes
-# end
-
-# SolverParams(;dimID::Int=0,
-#               registeredModuleFunctions=nothing,
-#               reference=nothing,
-#               stateless::Bool=false,
-#               qfl::Int=99999999999,
-#               isfixedlag::Bool=false,
-#               limitfixeddown::Bool=false,
-#               incremental::Bool=true,
-#               useMsgLikelihoods::Bool=false,
-#               upsolve::Bool=true,
-#               downsolve::Bool=true,
-#               drawtree::Bool=false,
-#               drawCSMIters::Bool=true,
-#               showtree::Bool=false,
-#               drawtreerate::Float64=0.5,
-#               dbg::Bool=false,
-#               async::Bool=false,
-#               limititers::Int=500,
-#               N::Int=100,
-#               multiproc::Bool=1 < nprocs(),
-#               logpath::String="/tmp/caesar/$(now())",
-#               graphinit::Bool=true,
-#               treeinit::Bool=false,
-#               limittreeinit_iters::Int=10,
-#               algorithms::Vector{Symbol}=[:default],
-#               spreadNH::Real=3.0,
-#               inflation::Real=5.0,
-#               inflateCycles::Int=3,
-#               gibbsIters::Int=3,
-#               maxincidence::Int=500,
-#               alwaysFreshMeasurements::Bool=true,
-#               attemptGradients::Bool=true,
-#               devParams::Dict{Symbol,String}=Dict{Symbol,String}()
-#             ) = begin useMsgLikelihoods==true && @warn "useMsgLikelihoods is under development, use with care, see #1010"
-#                 SolverParams( dimID,
-#                               registeredModuleFunctions,
-#                               reference,
-#                               stateless,
-#                               qfl,
-#                               isfixedlag,
-#                               limitfixeddown,
-#                               incremental,
-#                               useMsgLikelihoods,
-#                               upsolve,
-#                               downsolve,
-#                               drawtree,
-#                               drawCSMIters,
-#                               showtree,
-#                               drawtreerate,
-#                               dbg,
-#                               async,
-#                               limititers,
-#                               N,
-#                               multiproc,
-#                               logpath,
-#                               graphinit,
-#                               treeinit,
-#                               limittreeinit_iters,
-#                               algorithms,
-#                               spreadNH,
-#                               inflation,
-#                               inflateCycles,
-#                               gibbsIters,
-#                               maxincidence,
-#                               alwaysFreshMeasurements,
-#                               attemptGradients,
-#                               devParams )
-#             end
-#
-
-@deprecate setVariableInferDim!(w...;kw...) setIPC!(w...;kw...)
-
-# moved to IncrementalInference/attic
-# include("CSMOccuranceUtils.jl")
-
-@deprecate prepgenericconvolution(w...;kw...) _prepCCW(w...;kw...)
-
-function getSample(cf::CalcFactor, N::Int) 
-  # Base.depwarn("`getSample(cf, N)` is deprecated, use `getSample(cf)`", :getSample)
-  error("`getSample(cf, N)` is deprecated, use `getSample(cf)`")
-end
-
-# """
-#     $SIGNATURES
-
-# Build an approximate density `[Y|X,DX,.]=[X|Y,DX][DX|.]` as proposed by the conditional convolution.
-
-# Notes
-# - Assume both are on circular manifold, `manikde!(pts, (:Circular,))`
-# """
-# function approxConvCircular(pX::ManifoldKernelDensity, 
-#                             pDX::ManifoldKernelDensity; N::Int=100)
-#   #
-
-#   # building basic factor graph
-#   tfg = initfg()
-#   addVariable!(tfg, :s1, Sphere1)
-#   addVariable!(tfg, :s2, Sphere1)
-#   addFactor!(tfg, [:s1;:s2], Sphere1Sphere1(pDX), graphinit=false)
-#   initManual!(tfg,:s1, pX)
-
-#   # solve for outgoing proposal value
-#   approxConv(tfg,:s1s2f1,:s2)
-# end
-
-# function approxConvCircular(pX::ManifoldKernelDensity, 
-#                             pDX::SamplableBelief; N::Int=100)
-#   #
-#   pts = reshape(rand(pDX, N), 1, :)
-#   pC = manikde!(pts, Sphere1)
-#   approxConvCircular(pX, pC)
-# end
-
-
-# function approxConvCircular(pX::SamplableBelief, 
-#                             pDX::ManifoldKernelDensity; N::Int=100)
-#   #
-#     pts = reshape(rand(pX, N), 1, :)
-#   pC = manikde!(pts, Sphere1)
-#   approxConvCircular(pC, pDX)
-# end
-
-
-
-@deprecate printGraphSummary(dfg::AbstractDFG, logger=ConsoleLogger()) show(logger.stream, MIME("text/plain"), dfg)
-@deprecate printSummary(dfg::AbstractDFG, logger=ConsoleLogger()) show(logger.stream, MIME("text/plain"), dfg)
-
-
-# """
-#     $SIGNATURES
-
-# Print basic summary of graph to `logger=ConsoleLogger()`.
-# """
-# function printGraphSummary(dfg::G, logger=ConsoleLogger())::Nothing where {G <: AbstractDFG}
-#     vars = ls(dfg)
-#     fcts = lsf(dfg)
-
-#     prio = lsfPriors(dfg)
-
-#     isinit = map(x->isInitialized(dfg,x), vars)
-#     infdim = map(x->getVariableInferredDim(dfg, x), vars)
-#     numedges = map(v->length(ls(dfg, v)), vars)
-#     numfed = map(fc->length(ls(dfg, fc)), fcts)
-#     vardims = map(v->getDimension(getVariable(dfg, v)), vars)
-#     fctdims = map(v->getDimension(getFactor(dfg, v)), fcts)
-#     priodims = map(v->getDimension(getFactor(dfg, v)), prio)
-
-#     with_logger(logger) do
-#       @info "Distributed Factor Graph summary:"
-#       @info "  num variables:    $(length(vars))"
-#       @info "  num factors:      $(length(fcts)), w/ $(length(prio)) priors"
-#       @info "  var initialized:  $(sum(isinit))"
-#       @info ""
-#       @info "  var num edges: min. $(minimum(numedges)) | mean $(round(Statistics.mean(numedges),digits=2)) | 90% $(round(quantile(numedges,0.9),digits=2)) | max. $(maximum(numedges))"
-#       @info "  fct num edges: min. $(minimum(numfed)) | mean $(round(Statistics.mean(numfed),digits=2)) | 90% $(round(quantile(numfed,0.9),digits=2)) | max. $(maximum(numfed))"
-#       @info "  Variable dims: min. $(minimum(vardims)) | mean $(round(Statistics.mean(vardims),digits=2)) | 90% $(round(quantile(vardims,0.9),digits=2)) | max. $(maximum(vardims))"
-#       @info "  Factor dims:   min. $(minimum(fctdims)) | mean $(round(Statistics.mean(fctdims),digits=2)) | 90% $(round(quantile(fctdims,0.9),digits=2)) | max. $(maximum(fctdims))"
-#       @info "  Prior dimens:  min. $(minimum(priodims)) | mean $(round(Statistics.mean(priodims),digits=2)) | 90% $(round(quantile(priodims,0.9),digits=2)) | max. $(maximum(priodims))"
-#       @info "  var infr'dims: min. $(minimum(infdim)) | mean $(round(Statistics.mean(infdim),digits=2)) | 90% $(round(quantile(infdim,0.9),digits=2)) | max. $(maximum(infdim))"
-#     end
-#     nothing
-# end
-
-# """
-#     $SIGNATURES
-
-# Print basic summary of graph to `logger=ConsoleLogger()`.
-# """
-# function printSummary(dfg::G, logger=ConsoleLogger()) where G <: AbstractDFG
-#     printGraphSummary(dfg, logger)
-# end
-
-
-
-# """
-#     $SIGNATURES
-
-# Calculate both measured and predicted relative variable values, starting with `from` at zeros up to `to::Symbol`.
-
-# Notes
-# - assume single variable separators only.
-
-# DevNotes
-# - TODO better consolidate with [`approxConvBelief`](@ref) which can now also work with factor chains.
-# """
-# function accumulateFactorChain( dfg::AbstractDFG,
-#                                 from::Symbol,
-#                                 to::Symbol,
-#                                 fsyms::Vector{Symbol}=findFactorsBetweenNaive(dfg, from, to);
-#                                 initval=zeros(size(getVal(dfg, from))))
-
-#   # get associated variables
-#   svars = union(ls.(dfg, fsyms)...)
-
-#   # use subgraph copys to do calculations
-#   tfg_meas = buildSubgraph(dfg, [svars;fsyms])
-#   tfg_pred = buildSubgraph(dfg, [svars;fsyms])
-
-#   # drive variable values manually to ensure no additional stochastics are introduced.
-#   nextvar = from
-#   initManual!(tfg_meas, nextvar, initval)
-#   initManual!(tfg_pred, nextvar, initval)
-
-#   # nextfct = fsyms[1] # for debugging
-#   for nextfct in fsyms
-#     nextvars = setdiff(ls(tfg_meas,nextfct),[nextvar])
-#     @assert length(nextvars) == 1 "accumulateFactorChain requires each factor pair to separated by a single variable"
-#     nextvar = nextvars[1]
-#     meas, pred = approxDeconv(dfg, nextfct) # solveFactorMeasurements
-#     pts_meas = approxConv(tfg_meas, nextfct, nextvar, (meas,ones(Int,100),collect(1:100)))
-#     pts_pred = approxConv(tfg_pred, nextfct, nextvar, (pred,ones(Int,100),collect(1:100)))
-#     initManual!(tfg_meas, nextvar, pts_meas)
-#     initManual!(tfg_pred, nextvar, pts_pred)
-#   end
-#   return getVal(tfg_meas,nextvar), getVal(tfg_pred,nextvar)
-# end
-
-
-# # TODO should this be consolidated with regular approxConv?
-# # TODO, perhaps pass Xi::Vector{DFGVariable} instead?
-# function approxConvBinary(arr::Vector{Vector{Float64}},
-#                           meas::AbstractFactor,
-#                           outdims::Int,
-#                           fmd::FactorMetadata,
-#                           measurement::Tuple=(Vector{Vector{Float64}}(),);
-#                           varidx::Int=2,
-#                           N::Int=length(arr),
-#                           vnds=DFGVariable[],
-#                           _slack=nothing )
-#   #
-#   # N = N == 0 ? size(arr,2) : N
-#   pts = [zeros(outdims) for _ in 1:N];
-#   ptsArr = Vector{Vector{Vector{Float64}}}()
-#   push!(ptsArr,arr)
-#   push!(ptsArr,pts)
-
-#   fmd.arrRef = ptsArr
-
-#   # TODO consolidate with ccwl??
-#   # FIXME do not divert Mixture for sampling
-#   # cf = _buildCalcFactorMixture(ccwl, fmd, 1, ccwl.measurement, ARR) # TODO perhaps 0 is safer
-#   # FIXME 0, 0, ()
-#   cf = CalcFactor( meas, fmd, 0, 0, (), ptsArr)
-
-#   measurement = length(measurement[1]) == 0 ? sampleFactor(cf, N) : measurement
-#   # measurement = size(measurement[1],2) == 0 ? sampleFactor(meas, N, fmd, vnds) : measurement
-
-#   zDim = length(measurement[1][1])
-#   ccw = CommonConvWrapper(meas, ptsArr[varidx], zDim, ptsArr, fmd, varidx=varidx, measurement=measurement)  # N=> size(measurement[1],2)
-
-#   for n in 1:N
-#     ccw.cpt[Threads.threadid()].particleidx = n
-#     _solveCCWNumeric!( ccw, _slack=_slack )
-#   end
-#   return pts
-# end
-
-@deprecate getParametricMeasurement(w...;kw...) getMeasurementParametric(w...;kw...)
-
-# function prtslperr(s)
-#   println(s)
-#   sleep(0.1)
-#   error(s)
-# end
-
-@deprecate getKDE(w...;kw...) getBelief(w...;kw...)
-
-@deprecate findRelatedFromPotential(w...;kw...) (calcProposalBelief(w...;kw...),)
-
-# function generateNullhypoEntropy( val::AbstractMatrix{<:Real},
-#                                   maxlen::Int,
-#                                   spreadfactor::Real=10  )
-#   #
-#   # covD = sqrt.(vec(Statistics.var(val,dims=2))) .+ 1e-3
-#   # cVar = diagm((spreadfactor*covD).^2)
-#   len = size(val, 1)
-#   cVar = diagm((spreadfactor*ones(len)).^2)
-#   mu = zeros( len )
-#   MvNormal( mu, cVar )
-# end
-
-# @deprecate calcFactorResidualTemporary( fct::AbstractRelative, measurement::Tuple, T_param_args...; kw...) calcFactorResidualTemporary(fct, measurement, tuple(((x->x[1]).(T_param_args))...), tuple(((x->x[2]).(T_param_args))...); kw...)
-
-# @deprecate _buildGraphByFactorAndTypes!(fct::AbstractFactor, TypeParams_vec...; kw...) _buildGraphByFactorAndTypes!(fct, (x->x[1]).(TypeParams_vec), (x->x[2]).(TypeParams_vec) ; kw...)
-
-# @deprecate _evalFactorTemporary!( fct::AbstractFactor, sfidx::Int, meas_single::Tuple, TypeParams_args...; kw...) _evalFactorTemporary!( fct,sfidx,meas_single, (x->x[1]).(TypeParams_args), (x->x[2]).(TypeParams_args); kw...)
-
-# MOVED TO AMP v0.4.6
-# # NOTE SWITCHED TO ON-MANIFOLD VERSION, but this used to give deviation
-# calcVariableCovarianceBasic(M::AbstractManifold, ptsArr::Vector{Vector{Float64}})
-#   # cannot calculate the stdev from uninitialized state
-#   # FIXME assume point type is only Vector{Float} at this time
-#   @cast arr[i,j] := ptsArr[j][i]
-#   msst = Statistics.std(arr, dims=2)
-#   # FIXME use adaptive scale, see #802
-#   msst_ = 0 < sum(1e-10 .< msst) ? maximum(msst) : 1.0
-#   return msst_
-# end
-
-@deprecate testFactorResidualBinary(fct, T1::InstanceType{InferenceVariable}, T2::InstanceType{InferenceVariable}, param1, param2, meas) calcFactorResidualTemporary(fct, (T1,T2), meas, (param1,param2))
-
-# """
-#     $SIGNATURES
-#
-# Return the directly achievable dimensionality of solve for each variable in a clique.
-#
-# Related
-#
-# getFactorSolvableDim
-# """
-# function getCliqVarPossibleDim(dfg::G,
-#                                     cliq::TreeClique,
-#                                     saturate::Bool=true,
-#                                     fraction::Bool=true  )::Dict{Symbol, Float64}
-#   #
-#   # variables and factors associated with this clique
-#   vars = getCliqAllVarIds(cliq)
-#   # fcts = getCliqAllFactIds(cliq)
-#   # rows = length(fcts)
-#   cols = length(vars)
-#
-#   # for output result
-#   dict = Dict{Symbol,Float64}()
-#
-#   for j in 1:cols
-#     dict[vars[j]] += getVariablePossibleDim(dfg, vars[j])
-#   end
-#
-# end
-
-
-# getManifold(::InstanceType{LinearRelative{N}}) where {N} = Euclidean(N)
-# getManifolds(::T) where {T <: LinearRelative} = convert(Tuple, getManifold(T))
-# getManifolds(::Type{<:T}) where {T <: LinearRelative} = convert(Tuple, getManifold(T))
-# getManifolds(fctType::Type{LinearRelative}) = getManifolds(getDomain(fctType))
-
-# # FIXME, why is Manifolds depdendent on the solveKey?? Should just be at DFGVariable level?
-
-# getManifolds(vd::VariableNodeData) = getVariableType(vd) |> getManifolds
-# getManifolds(::DFGVariable{T}) where T <: InferenceVariable = getManifolds(T)
-# getManifolds(dfg::AbstractDFG, sym::Symbol) = getManifolds(getVariable(dfg, sym))
-#
-# getManifolds(vartype::InferenceVariable) = vartype.manifolds
-# getManifolds(vartype::Type{<: InferenceVariable}) = getManifolds(vartype())
-
-
-# import DistributedFactorGraphs: getfnctype
-# # TODO: Refactor - was is das?
-# function getfnctype(data::GenericFunctionNodeData)
-#   if typeof(data).name.name == :VariableNodeData
-#     return VariableNodeData
-#   end
-#   return data.fnc.usrfnc!
-# end
-#
-# function getfnctype(fact::DFGFactor; solveKey::Symbol=:default)
-#   data = getData(fact) # TODO , solveKey=solveKey)
-#   return getfnctype(data)
-# end
-#
-# function getfnctype(dfg::T, lbl::Symbol; solveKey::Symbol=:default) where T <: AbstractDFG
-#   getfnctype(getFactor(dfg, exvertid))
-# end
-
-# function manikde!(ptsArr::Vector{Vector{Float64}}, manis::Tuple)
-#   arr = Matrix{Float64}(undef, length(ptsArr[1]), length(ptsArr))
-#   @cast arr[i,j] = ptsArr[j][i]
-#   manikde!(arr, manis)
-# end
-
 
 
 #

From b1d7d1dd7721c7875a8e55343c8b1761d1845824 Mon Sep 17 00:00:00 2001
From: dehann <dehann.fourie@gmail.com>
Date: Sat, 12 Feb 2022 00:30:30 -0500
Subject: [PATCH 2/3] Compat bump and deprecation fixes

---
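Note: besides the ApproxManifoldProducts compat bump and the test reordering,
the partial-constraint tests switch from the (now removed)
findRelatedFromPotential to calcProposalBelief, which this patch also exports.
A hedged sketch of the call pattern used in test/testpartialconstraint.jl
below; the factor label, target variable, and N are placeholders:

thefac = getFactor(fg, :x1x2f1)
# old: keepaside, = findRelatedFromPotential(fg, thefac, :x2, N=N)
keepaside = calcProposalBelief(fg, thefac, :x2, N=N)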
 Project.toml                     |  2 +-
 src/ExportAPI.jl                 |  1 +
 src/IncrementalInference.jl      |  6 +++---
 test/runtests.jl                 | 31 ++++++++++++++++++-------------
 test/testBasicForwardConvolve.jl |  4 ++--
 test/testpartialconstraint.jl    |  8 ++++----
 6 files changed, 29 insertions(+), 23 deletions(-)

diff --git a/Project.toml b/Project.toml
index b2fc02105..b6f1ecc9c 100644
--- a/Project.toml
+++ b/Project.toml
@@ -42,7 +42,7 @@ TimeZones = "f269a46b-ccf7-5d73-abea-4c690281aa53"
 UUIDs = "cf7118a7-6976-5b1a-9a39-7adc72f591a4"
 
 [compat]
-ApproxManifoldProducts = "0.4.23"
+ApproxManifoldProducts = "0.4.24"
 BSON = "0.2, 0.3"
 Combinatorics = "1.0"
 DataStructures = "0.16, 0.17, 0.18"
diff --git a/src/ExportAPI.jl b/src/ExportAPI.jl
index 5208f13ae..7c3d3b1b5 100644
--- a/src/ExportAPI.jl
+++ b/src/ExportAPI.jl
@@ -209,6 +209,7 @@ export
 
   # Bayes (Junction) Tree
   evalFactor,
+  calcProposalBelief,
   approxConvBelief,
   approxConv,
 
diff --git a/src/IncrementalInference.jl b/src/IncrementalInference.jl
index ee10613f5..98f20cfad 100644
--- a/src/IncrementalInference.jl
+++ b/src/IncrementalInference.jl
@@ -90,9 +90,9 @@ const FSM = FunctionalStateMachine
 const IIF = IncrementalInference
 
 
-const InstanceType{T} = Union{Type{<:T},T}
-const NothingUnion{T} = Union{Nothing, T}
-const BeliefArray{T} = Union{Array{T,2}, Adjoint{T, Array{T,2}} } # TBD deprecate?
+const InstanceType{T} = Union{Type{<:T},<:T}
+const NothingUnion{T} = Union{Nothing, <:T}
+const BeliefArray{T} = Union{<:AbstractMatrix{<:T}, <:Adjoint{<:T, AbstractMatrix{<:T}} } # TBD deprecate?
 
 ## =============================
 # API Exports
diff --git a/test/runtests.jl b/test/runtests.jl
index 157cea005..645ef2faf 100644
--- a/test/runtests.jl
+++ b/test/runtests.jl
@@ -7,19 +7,19 @@ include("testSphereMani.jl")
 include("testSpecialOrthogonalMani.jl")
 include("testSpecialEuclidean2Mani.jl")
 
+# start as basic as possible and build from there
+include("typeReturnMemRef.jl")
 include("testDistributionsGeneric.jl")
 include("testHeatmapGridDensity.jl")
 include("testCliqSolveDbgUtils.jl")
+include("basicGraphsOperations.jl")
+
 include("TestModuleFunctions.jl")
 include("testCompareVariablesFactors.jl")
-include("typeReturnMemRef.jl")
-include("testApproxConv.jl")
-include("basicGraphsOperations.jl")
-include("testMixturePrior.jl")
-include("testGradientUtils.jl")
-include("testPartialFactors.jl")
-include("testPartialPrior.jl")
+include("saveconvertertypes.jl")
+include("testgraphpackingconverters.jl")
 include("testSaveLoadDFG.jl")
+
 include("testJunctionTreeConstruction.jl")
 include("testBayesTreeiSAM2Example.jl")
 include("testTreeFunctions.jl")
@@ -27,18 +27,23 @@ include("testTreeFunctions.jl")
 #FIXME fails on MetaBayesTree
 include("testTreeSaveLoad.jl")
 
+include("testGradientUtils.jl")
 include("testFactorGradients.jl")
-include("testpartialconstraint.jl")
-include("testPartialNH.jl")
-
 include("testSpecialSampler.jl") # TODO, rename, refine
-include("saveconvertertypes.jl")
-include("testgraphpackingconverters.jl")
 include("testNLsolve.jl")
 include("testCommonConvWrapper.jl")
+include("testFactorMetadata.jl")
+
+include("testApproxConv.jl")
 include("testBasicForwardConvolve.jl")
 include("testDefaultDeconv.jl")
-include("testFactorMetadata.jl")
+
+include("testPartialFactors.jl")
+include("testPartialPrior.jl")
+include("testpartialconstraint.jl")
+include("testPartialNH.jl")
+include("testMixturePrior.jl")
+
 include("testStateMachine.jl")
 include("testBasicCSM.jl")
 include("testCliqueFactors.jl")
diff --git a/test/testBasicForwardConvolve.jl b/test/testBasicForwardConvolve.jl
index 32171bfbc..e1c44f5a1 100644
--- a/test/testBasicForwardConvolve.jl
+++ b/test/testBasicForwardConvolve.jl
@@ -45,9 +45,9 @@ X1_ = forwardConvolve(X0, statemodel)
 
 ## measure -- product of beliefs, using `ApproxManifoldProducts.jl`
 
-predX1 = manikde!(X1_, ContinuousScalar)
+predX1 = manikde!(ContinuousScalar, X1_)
 z3 = Normal(9.5,0.75)
-measX1 = manikde!([rand(z3,1) for _ in 1:100], ContinuousScalar)
+measX1 = manikde!(ContinuousScalar, [rand(z3,1) for _ in 1:100])
 
 # do actual product
 posterioriX1 = predX1 * measX1
diff --git a/test/testpartialconstraint.jl b/test/testpartialconstraint.jl
index b3dc40c65..8334936dc 100644
--- a/test/testpartialconstraint.jl
+++ b/test/testpartialconstraint.jl
@@ -201,8 +201,8 @@ end
 
 # keep previous values to ensure funciton evaluation is modifying correct data fields
 
-@warn "restore findRelatedFromPotential as testset!"
-# @testset "test findRelatedFromPotential..." begin
+@warn "restore calcProposalBelief as testset!"
+# @testset "test calcProposalBelief..." begin
 # global v2, fg, f3, f4, N
 
 
@@ -210,7 +210,7 @@ thefac = getFactor(fg, :x1x2f1)
 
 X2lpts_ = getVal(getVariable(fg, :x2))
 @cast X2lpts[i,j] := X2lpts_[j][i]
-keepaside, = findRelatedFromPotential(fg, thefac, :x2, N=N)
+keepaside, = (calcProposalBelief(fg, thefac, :x2, N=N),)
 @test Ndim(keepaside) == 2
 lpts_ = getPoints(keepaside, false)
 @cast lpts[i,j] := lpts_[j][i]
@@ -232,7 +232,7 @@ memcheck_ = getVal(v2)
 
 X2lpts_ = getVal(v2)
 @cast X2lpts[i,j] := X2lpts_[j][i]
-p4, = findRelatedFromPotential(fg, f4, v2.label, N=N)
+p4 = calcProposalBelief(fg, f4, v2.label, N=N)
 @test Ndim(p4) == 2
 lpts_ = getPoints(keepaside, false)
 @cast lpts[i,j] := lpts_[j][i]

From 732aee83348a804809a80dd12bb76d01aefa9afd Mon Sep 17 00:00:00 2001
From: dehann <dehann.fourie@gmail.com>
Date: Sat, 12 Feb 2022 01:48:46 -0500
Subject: [PATCH 3/3] Fix deprecated calls in examples and tests

---
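Note: kde!(::TreeBelief) moves behind a @deprecate shim in favor of
manikde!(em::TreeBelief), and the attic examples and tests are updated to the
getBelief / variable-type-first manikde! API. A short sketch, assuming `em` is
a TreeBelief taken from an incoming clique message:

# old: bel = kde!(em)
bel = manikde!(em)    # same as convert(ManifoldKernelDensity, em)

# and in the examples/tests, e.g.
x0_m = getKDEMean(getBelief(getVariable(fg, :x0)))[1]   # was getKDE(...)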
 attic/examples/MultiHypo2Door.jl     | 16 +++++++-------
 attic/examples/MultiHypo3Door.jl     | 18 ++++++++--------
 attic/examples/squarefixedpoint.jl   |  2 +-
 src/Deprecated.jl                    |  2 ++
 src/FactorGraph.jl                   |  6 +++---
 src/TreeMessageUtils.jl              | 15 +------------
 test/priorusetest.jl                 | 16 +++++++-------
 test/testCommonConvWrapper.jl        |  4 ++--
 test/testEuclidDistance.jl           |  4 ++--
 test/testMixtureLinearConditional.jl |  2 +-
 test/testMultihypoAndChain.jl        |  2 +-
 test/testProductReproducable.jl      | 32 ++++++++++++++--------------
 12 files changed, 54 insertions(+), 65 deletions(-)

diff --git a/attic/examples/MultiHypo2Door.jl b/attic/examples/MultiHypo2Door.jl
index c6f06507f..d6f4861ec 100644
--- a/attic/examples/MultiHypo2Door.jl
+++ b/attic/examples/MultiHypo2Door.jl
@@ -124,25 +124,25 @@ solveTree!(fg, tree)
 
 stuff = treeProductUp(fg, tree, :l0, :x0)
 X0 = manikde!(stuff[1], (:Euclid,))
-plotKDE([X0; getKDE(fg, :x0)], c=["red";"green"])
+plotKDE([X0; getBelief(fg, :x0)], c=["red";"green"])
 setValKDE!(fg, :x0, X0)
 
 
 stuff = treeProductUp(fg, tree, :l0, :x1)
 X1 = manikde!(stuff[1], (:Euclid,))
-plotKDE([X1; getKDE(fg, :x1)], c=["red";"green"])
+plotKDE([X1; getBelief(fg, :x1)], c=["red";"green"])
 setValKDE!(fg, :x1, X1)
 
 
 stuff = treeProductUp(fg, tree, :l0, :l0)
 L0 = manikde!(stuff[1], (:Euclid,))
-plotKDE([L0; getKDE(fg, :l0)], c=["red";"green"])
+plotKDE([L0; getBelief(fg, :l0)], c=["red";"green"])
 setValKDE!(fg, :l0, L0)
 
 
 stuff = treeProductUp(fg, tree, :l0, :l1)
 L1 = manikde!(stuff[1], (:Euclid,))
-plotKDE([L1; getKDE(fg, :l1)], c=["red";"green"])
+plotKDE([L1; getBelief(fg, :l1)], c=["red";"green"])
 setValKDE!(fg, :l1, L1)
 
 
@@ -168,14 +168,14 @@ setValKDE!(fg, :l1, L1)
 # :x0l0l1f1, :x0x1f1
 ptsX0, = predictbelief(fg, :x0, [:x0l0l1f1; :x0x1f1])
 X0 = manikde!(ptsX0, (:Euclid,))
-plotKDE([X0; getKDE(fg, :x0)], c=["red";"green"])
+plotKDE([X0; getBelief(fg, :x0)], c=["red";"green"])
 setValKDE!(fg, :x0, X0)
 
 # get factors for :x1 in clique2:
 # :x1l0l1f1, :x0x1f1
 ptsX1, = predictbelief(fg, :x1, [:x1l0l1f1, :x0x1f1])
 X1 = manikde!(ptsX1, (:Euclid,))
-plotKDE([X1; getKDE(fg, :x1)], c=["red";"green"])
+plotKDE([X1; getBelief(fg, :x1)], c=["red";"green"])
 setValKDE!(fg, :x1, X1)
 
 
@@ -183,7 +183,7 @@ setValKDE!(fg, :x1, X1)
 # :x0l0l1f1, :x1l0l1f1, :l0f1
 ptsL0, = predictbelief(fg, :l0, [:x0l0l1f1, :x1l0l1f1, :l0f1])
 L0 = manikde!(ptsL0, (:Euclid,))
-plotKDE([L0; getKDE(fg, :l0)], c=["red";"green"])
+plotKDE([L0; getBelief(fg, :l0)], c=["red";"green"])
 setValKDE!(fg, :l0, L0)
 
 
@@ -191,7 +191,7 @@ setValKDE!(fg, :l0, L0)
 # :x0l0l1f1, :x1l0l1f1, :l1f1
 ptsL1, = predictbelief(fg, :l1, [:x0l0l1f1, :x1l0l1f1, :l1f1])
 L1 = manikde!(ptsL1, (:Euclid,))
-plotKDE([L1; getKDE(fg, :l1)], c=["red";"green"])
+plotKDE([L1; getBelief(fg, :l1)], c=["red";"green"])
 setValKDE!(fg, :l1, L1)
 
 
diff --git a/attic/examples/MultiHypo3Door.jl b/attic/examples/MultiHypo3Door.jl
index a0edd0684..37081f477 100644
--- a/attic/examples/MultiHypo3Door.jl
+++ b/attic/examples/MultiHypo3Door.jl
@@ -127,7 +127,7 @@ solveTree!(fg, tree)
 plotLocalProduct(fg, :x1, sidelength=20cm)
 ptsX1, = predictbelief(fg, :x1, [:x1l0l1l2f1, :x0x1f1])
 X1 = manikde!(ptsX1, (:Euclid,))
-plotKDE([X1; getKDE(fg, :x1)], c=["red";"green"])
+plotKDE([X1; getBelief(fg, :x1)], c=["red";"green"])
 setValKDE!(fg, :x1, X1)
 
 
@@ -136,7 +136,7 @@ setValKDE!(fg, :x1, X1)
 plotLocalProduct(fg, :x0, sidelength=20cm)
 ptsX0, = predictbelief(fg, :x0, [:x0l0l1l2f1; :x0x1f1])
 X0 = manikde!(ptsX0, (:Euclid,))
-plotKDE([X0; getKDE(fg, :x0)], c=["red";"green"])
+plotKDE([X0; getBelief(fg, :x0)], c=["red";"green"])
 setValKDE!(fg, :x0, X0)
 
 
@@ -145,7 +145,7 @@ setValKDE!(fg, :x0, X0)
 plotLocalProduct(fg, :l2, sidelength=20cm)
 ptsL2, = predictbelief(fg, :l2, [:x0l0l1l2f1, :x1l0l1l2f1, :l2f1])
 L2 = manikde!(ptsL2, (:Euclid,))
-plotKDE([L2; getKDE(fg, :l2)], c=["red";"green"])
+plotKDE([L2; getBelief(fg, :l2)], c=["red";"green"])
 setValKDE!(fg, :l2, L2)
 
 
@@ -154,7 +154,7 @@ setValKDE!(fg, :l2, L2)
 plotLocalProduct(fg, :l1, sidelength=20cm)
 ptsL1, = predictbelief(fg, :l1, [:x0l0l1l2f1, :x1l0l1l2f1, :l1f1])
 L1 = manikde!(ptsL1, (:Euclid,))
-plotKDE([L1; getKDE(fg, :l1)], c=["red";"green"])
+plotKDE([L1; getBelief(fg, :l1)], c=["red";"green"])
 setValKDE!(fg, :l1, L1)
 
 
@@ -163,7 +163,7 @@ setValKDE!(fg, :l1, L1)
 plotLocalProduct(fg, :l0, sidelength=20cm)
 ptsL0, = predictbelief(fg, :l0, [:x0l0l1l2f1, :x1l0l1l2f1, :l0f1])
 L0 = manikde!(ptsL0, (:Euclid,))
-plotKDE([L0; getKDE(fg, :l0)], c=["red";"green"])
+plotKDE([L0; getBelief(fg, :l0)], c=["red";"green"])
 setValKDE!(fg, :l0, L0)
 
 
@@ -173,15 +173,15 @@ setValKDE!(fg, :l0, L0)
 
 ptsX2, = predictbelief(fg, :x2, [:x1x2f1;])
 X2 = manikde!(ptsX2, (:Euclid,))
-plotKDE([X2; getKDE(fg, :x2)], c=["red";"green"])
+plotKDE([X2; getBelief(fg, :x2)], c=["red";"green"])
 setValKDE!(fg, :x2, X2)
 
 
-upmsgX1 = deepcopy(getKDE(fg, :x1))
+upmsgX1 = deepcopy(getBelief(fg, :x1))
 ptsX1 = approxConv(fg, :x1x2f1, :x1)
 pX1 = manikde!(ptsX1, (:Euclid,))
 X1 = manifoldProduct([upmsgX1; pX1], (:Euclid,))
-plotKDE([X1; getKDE(fg, :x1)], c=["red";"green"])
+plotKDE([X1; getBelief(fg, :x1)], c=["red";"green"])
 setValKDE!(fg, :x1, X1)
 
 
@@ -194,7 +194,7 @@ setValKDE!(fg, :x1, X1)
 # :x0l0l1l2f1, :x1l0l1l2f1, :l1f1
 ptsL1, = predictbelief(fg, :l1, [:x0l0l1l2f1, :x1l0l1l2f1, :l1f1])
 L1 = manikde!(ptsL1, (:Euclid,))
-plotKDE([L1; getKDE(fg, :l1)], c=["red";"green"])
+plotKDE([L1; getBelief(fg, :l1)], c=["red";"green"])
 setValKDE!(fg, :l1, L1)
 
 
diff --git a/attic/examples/squarefixedpoint.jl b/attic/examples/squarefixedpoint.jl
index 56381722a..5e1dae232 100644
--- a/attic/examples/squarefixedpoint.jl
+++ b/attic/examples/squarefixedpoint.jl
@@ -41,4 +41,4 @@ initManual!(fg, :x, randn(1,100))
 tree = solveTree!(fg)
 
 ## plot the result
-plotKDE(map(x->getKDE(fg,x), [:x; :xy]))
+plotKDE(map(x->getBelief(fg,x), [:x; :xy]))
diff --git a/src/Deprecated.jl b/src/Deprecated.jl
index bd76586e8..466c5d4c9 100644
--- a/src/Deprecated.jl
+++ b/src/Deprecated.jl
@@ -32,6 +32,8 @@ end
 ## Deprecate code below before v0.29
 ##==============================================================================
 
+@deprecate kde!(em::TreeBelief) manikde!(em)
+
 # DFG v0.18/19
 export FunctorInferenceType, PackedInferenceType
 
diff --git a/src/FactorGraph.jl b/src/FactorGraph.jl
index 155f8f1f0..5aed5cb6f 100644
--- a/src/FactorGraph.jl
+++ b/src/FactorGraph.jl
@@ -316,7 +316,7 @@ function DefaultNodeDataParametric( dodims::Int,
   # this should be the only function allocating memory for the node points
   if false && initialized
     error("not implemented yet")
-    # pN = AMP.manikde!(randn(dims, N), variableType.manifolds);
+    # pN = AMP.manikde!(variableType.manifold, randn(dims, N));
     #
     # sp = Int[0;] #round.(Int,range(dodims,stop=dodims+dims-1,length=dims))
     # gbw = getBW(pN)[:,1]
@@ -392,12 +392,12 @@ function setDefaultNodeData!( v::DFGVariable,
 end
 # if size(initval,2) < N && size(initval, 1) == dims
 #   @warn "setDefaultNodeData! -- deprecated use of stdev."
-#   p = AMP.manikde!(initval,diag(stdev), varType.manifolds);
+#   p = manikde!(varType.manifold, initval,diag(stdev));
 #   pN = resample(p,N)
 # if size(initval,2) < N && size(initval, 1) != dims
   # @info "Node value memory allocated but not initialized"
 # else
-#   pN = AMP.manikde!(initval, varType.manifolds)
+#   pN = manikde!(varType.manifold, initval)
 # end
 # dims = size(initval,1) # rows indicate dimensions
 
diff --git a/src/TreeMessageUtils.jl b/src/TreeMessageUtils.jl
index 40d7d3e74..c150e3ab9 100644
--- a/src/TreeMessageUtils.jl
+++ b/src/TreeMessageUtils.jl
@@ -13,19 +13,6 @@ export addLikelihoodsDifferentialCHILD!
 
 convert(::Type{<:ManifoldKernelDensity}, src::TreeBelief) = manikde!(getManifold(src.variableType), src.val, bw=src.bw[:,1])
 
-"""
-    $(SIGNATURES)
-
-Construct a BallTreeDensity KDE object from an IIF.TreeBelief object.
-
-Related
-
-manikde!, getKDE, getKDEMax, getKDEMean, TreeBelief
-"""
-function kde!(em::TreeBelief)
-  # return AMP.manikde!(em.val, em.bw, em.manifolds)
-  return convert(ManifoldKernelDensity, em)
-end
 manikde!(em::TreeBelief) = convert(ManifoldKernelDensity, em)
 
 
@@ -258,7 +245,7 @@ function addLikelihoodsDifferential!( msgs::LikelihoodMessage,
   #     afc = addFactor!(tfg, [sym1_;sym2_], nfct, graphinit=false, tags=[:DUMMY;])
   #     # calculate the general deconvolution between variables
   #     pts = solveFactorMeasurements(tfg, afc.label)
-  #     newBel = manikde!(pts[1], getManifolds(nfactype))
+  #     newBel = manikde!(getManifold(nfactype), pts[1])
   #     # replace dummy factor with real deconv factor using manikde approx belief measurement
   #     fullFct = nfactype(newBel)
   #     deleteFactor!(tfg, afc.label)
diff --git a/test/priorusetest.jl b/test/priorusetest.jl
index 0f943ac82..eb2940ce9 100644
--- a/test/priorusetest.jl
+++ b/test/priorusetest.jl
@@ -36,9 +36,9 @@ addFactor!(fg, [:x1; :x2], LinearRelative(Normal(0.0, 0.01)))
 
 #solve
 tree = solveTree!(fg)
-x0_m = getKDEMean(getKDE(getVariable(fg, :x0)))[1]
-x1_m = getKDEMean(getKDE(getVariable(fg, :x1)))[1]
-x2_m = getKDEMean(getKDE(getVariable(fg, :x2)))[1]
+x0_m = getKDEMean(getBelief(getVariable(fg, :x0)))[1]
+x1_m = getKDEMean(getBelief(getVariable(fg, :x1)))[1]
+x2_m = getKDEMean(getBelief(getVariable(fg, :x2)))[1]
 
 @info ("Testing means = 0 with 2 priors:\ngraphinit=$graphinit\nMeans: x0: $(x0_m), x1: $x1_m, x2: $x2_m")
 
@@ -90,11 +90,11 @@ addFactor!(fg, [:x2; :l1], LinearRelative(Normal(0, 0.01)))
 #solve
 tree = solveTree!(fg)
 
-x0_m = getKDEMean(getKDE(getVariable(fg, :x0)))[1]
-x1_m = getKDEMean(getKDE(getVariable(fg, :x1)))[1]
-x2_m = getKDEMean(getKDE(getVariable(fg, :x2)))[1]
-l0_m = getKDEMean(getKDE(getVariable(fg, :l0)))[1]
-l1_m = getKDEMean(getKDE(getVariable(fg, :l1)))[1]
+x0_m = getKDEMean(getBelief(getVariable(fg, :x0)))[1]
+x1_m = getKDEMean(getBelief(getVariable(fg, :x1)))[1]
+x2_m = getKDEMean(getBelief(getVariable(fg, :x2)))[1]
+l0_m = getKDEMean(getBelief(getVariable(fg, :l0)))[1]
+l1_m = getKDEMean(getBelief(getVariable(fg, :l1)))[1]
 
 @info ("Testing means = 0 with 2 priors:\ngraphinit=$graphinit\nMeans: x0: $(x0_m), x1: $x1_m, x2: $x2_m, l0: $l0_m, l1: $l1_m")
 
diff --git a/test/testCommonConvWrapper.jl b/test/testCommonConvWrapper.jl
index 0746814c8..cdbb04808 100644
--- a/test/testCommonConvWrapper.jl
+++ b/test/testCommonConvWrapper.jl
@@ -154,7 +154,7 @@ end
 
 N=100
 p1 = [randn(1) for _ in 1:N]
-d1 = manikde!(p1, Euclidean(1))
+d1 = manikde!(Euclidean(1), p1)
 p2 = [randn(1) for _ in 1:N]
 t = Vector{Vector{Vector{Float64}}}()
 push!(t,p1)
@@ -165,7 +165,7 @@ fg = initfg()
 v1=addVariable!(fg, :x1, ContinuousScalar, N=N)
 v2=addVariable!(fg, :x2, ContinuousScalar, N=N)
 bws = getBW(d1)[:,1]
-f1 = addFactor!(fg, [v1], Prior(manikde!(p1, bws, TranslationGroup(1))) )
+f1 = addFactor!(fg, [v1], Prior(manikde!(TranslationGroup(1), p1, bw=bws)) )
 
 odo = Pose1Pose1Test(Normal(100.0,1.0))
 f2 = addFactor!(fg, [v1;v2], odo)
diff --git a/test/testEuclidDistance.jl b/test/testEuclidDistance.jl
index ec6c1d983..d1be82a28 100644
--- a/test/testEuclidDistance.jl
+++ b/test/testEuclidDistance.jl
@@ -250,7 +250,7 @@ end
 ##
 
 # pts = approxConv(fg, :x2l1f1, :l1)
-# plotKDE(manikde!(pts, ContinuousEuclid{2}))
+# plotKDE(manikde!(ContinuousEuclid{2}, pts))
 # plotLocalProduct(fg, :l1, levels=3)
 
 
@@ -277,4 +277,4 @@ end
 
 # initManual!(sfg, :l1, pts)
 # pts = approxConv(sfg, :x2l1f1, :l1)
-# plotKDE(manikde!(pts, ContinuousEuclid{2}))
\ No newline at end of file
+# plotKDE(manikde!(ContinuousEuclid{2}, pts))
\ No newline at end of file
diff --git a/test/testMixtureLinearConditional.jl b/test/testMixtureLinearConditional.jl
index 92d30e9a0..fc50a3326 100644
--- a/test/testMixtureLinearConditional.jl
+++ b/test/testMixtureLinearConditional.jl
@@ -86,7 +86,7 @@ addVariable!(fg, :x1, ContinuousScalar)
 mp = Mixture(Prior, [Normal(); Normal(10,1)], [0.5;0.5])
 f0 = addFactor!(fg, [:x0;], mp)
 
-mr = Mixture(LinearRelative, (fancy=manikde!([randn(1) for _ in 1:75], ContinuousEuclid(1)), naive=Normal(0,10)), [0.4;0.6])
+mr = Mixture(LinearRelative, (fancy=manikde!(ContinuousEuclid(1), [randn(1) for _ in 1:75]), naive=Normal(0,10)), [0.4;0.6])
 f1 = addFactor!(fg, [:x0;:x1], mr)
 
 ##
diff --git a/test/testMultihypoAndChain.jl b/test/testMultihypoAndChain.jl
index aaf4cc942..2bdf43a50 100644
--- a/test/testMultihypoAndChain.jl
+++ b/test/testMultihypoAndChain.jl
@@ -78,7 +78,7 @@ tree = solveTree!(fg, eliminationOrder=eo) #, smtasks=smtasks, recordcliqs=ls(fg
 L2 = getBelief(fg, :l2)
 npts = length(getPoints(L2))
 pts = [2.0.+0.1*randn(1) for _ in 1:npts]
-L2_ = manikde!(pts, ContinuousScalar)
+L2_ = manikde!(ContinuousScalar, pts)
 
 # test that there is at least a mode present
 @test mmd(L2_, L2, ContinuousScalar) < 1e-3
diff --git a/test/testProductReproducable.jl b/test/testProductReproducable.jl
index add834426..f7139c61b 100644
--- a/test/testProductReproducable.jl
+++ b/test/testProductReproducable.jl
@@ -28,17 +28,17 @@ initAll!(fg)
 tree = solveTree!(fg)
 
 
-@test (Statistics.mean(getPoints(getKDE(fg, :a)))- 0 |> abs) < 3
-@test (Statistics.mean(getPoints(getKDE(fg, :b)))-10 |> abs) < 4
-@test (Statistics.mean(getPoints(getKDE(fg, :c)))-20 |> abs) < 4
-@test (Statistics.mean(getPoints(getKDE(fg, :d)))-30 |> abs) < 5
-@test (Statistics.mean(getPoints(getKDE(fg, :e)))-40 |> abs) < 5
+@test (Statistics.mean(getPoints(getBelief(fg, :a)))- 0 |> abs) < 3
+@test (Statistics.mean(getPoints(getBelief(fg, :b)))-10 |> abs) < 4
+@test (Statistics.mean(getPoints(getBelief(fg, :c)))-20 |> abs) < 4
+@test (Statistics.mean(getPoints(getBelief(fg, :d)))-30 |> abs) < 5
+@test (Statistics.mean(getPoints(getBelief(fg, :e)))-40 |> abs) < 5
 
-@test 0.3 < Statistics.std(getPoints(getKDE(fg, :a))) < 2
-@test 0.5 < Statistics.std(getPoints(getKDE(fg, :b))) < 4
-@test 0.9 < Statistics.std(getPoints(getKDE(fg, :c))) < 6
-@test 1.2 < Statistics.std(getPoints(getKDE(fg, :d))) < 7
-@test 1.5 < Statistics.std(getPoints(getKDE(fg, :e))) < 8
+@test 0.3 < Statistics.std(getPoints(getBelief(fg, :a))) < 2
+@test 0.5 < Statistics.std(getPoints(getBelief(fg, :b))) < 4
+@test 0.9 < Statistics.std(getPoints(getBelief(fg, :c))) < 6
+@test 1.2 < Statistics.std(getPoints(getBelief(fg, :d))) < 7
+@test 1.5 < Statistics.std(getPoints(getBelief(fg, :e))) < 8
 
 
 # drawTree(tree, show=true)
@@ -61,25 +61,25 @@ addFactor!(fg, [:a;:b], LinearRelative(Normal(10, 1)), graphinit=false)
 initManual!(fg, :a, randn(1,100))
 initManual!(fg, :b, 10 .+randn(1,100))
 
-A = getKDE(fg, :a)
-B = getKDE(fg, :b)
+A = getBelief(fg, :a)
+B = getBelief(fg, :b)
 # plotKDE(fg, [:a; :b])
 
 # repeat many times to ensure the means stay put and covariances spread out
 for i in 1:10
   pts = approxConv(fg, :abf1, :b)
-  B_ = manikde!(pts,ContinuousScalar)
+  B_ = manikde!(ContinuousScalar, pts)
   # plotKDE([B_; B])
   initManual!(fg, :b, B_)
 
   pts = approxConv(fg, :abf1, :a)
-  A_ = manikde!(pts, ContinuousScalar)
+  A_ = manikde!(ContinuousScalar, pts)
   # plotKDE([A_; A])
   initManual!(fg, :a, A_)
 end
 
-A_ = getKDE(fg, :a)
-B_ = getKDE(fg, :b)
+A_ = getBelief(fg, :a)
+B_ = getBelief(fg, :b)
 # plotKDE([A_; B_; A; B])
 
 @test (Statistics.mean(getPoints(A)) |> abs) < 1