diff --git a/NEWS.md b/NEWS.md index 78aa0a98f..fae3539eb 100644 --- a/NEWS.md +++ b/NEWS.md @@ -11,7 +11,7 @@ Alternatively, either use the Github Blame, or the Github `/compare/v0.18.0...v0 The list below highlights major breaking changes, and please note that significant efforts are made to properly deprecate old code/APIs according to normal semver workflow -- i.e. breaking changes go through at least one deprecatation (via warnings) on the dominant number in the version number. E.g. v0.18 -> v0.19 (warnings) -> v0.20 (breaking). -# Major Changes in v0.27 +# Changes in v0.27 - InMemDFGType is deprecated in favor of LocalDFG (exported from DistributedFactorGraphs). - Factor serialization is now top level JSON only #1476. @@ -23,14 +23,16 @@ The list below highlights major breaking changes, and please note that significa - Refactor getDimension and sampling, #1463. - Language upgrades on `qr` for Julia 1.7, #1464. - Various other fixes and upgrades, https://github.com/JuliaRobotics/IncrementalInference.jl/milestone/111?closed=1 +- Add distribution serialization for Rayleigh. +- Add `Position{N}` and `Position1`..`Position4` as new standard and aliases for `ContinuousScalar`, `ContinuousEuclid{N}`. -# Major changes in v0.26 +# Changes in v0.26 - Standarding (non-binding) easy factor dipatch cases so measurement field is under `.Z` (#1441). - `CalcFactor._allowThreads` can now be used as workaround for `Threads` yield blocking issue during first run (#1451). - Canonical graph generator API change to `generateGraph_ABC` (#1454). -# Major changes in v0.25 +# Changes in v0.25 - Changed API to `testFactorResidualBinary(fct, meas::Tuple, (T_i, param_i),...)` to grow beyond binary. - PPE methods used keyword `method::AbstractPointParametricType` which is now replaced with the keyword `ppeType`. 
@@ -46,23 +48,23 @@ The list below highlights major breaking changes, and please note that significa - `solveTree!` / `solveGraph!` now returns just one value `tree<:AbstractBayesTree`. Previous version returned three values, `tree, smt, hist` (#1379). - **Note for v0.25.5** Serialization of newly introduced type `PackedHeatmapGridDensity` changed from v0.25.4, unlikely have yet been used publically, therefore emphasizing fastest possible standardization in this case (even though this particular event does not strictly follow semver). General usage and operation is effectively unchanged,see #1435. -# Major changes in v0.24 +# Changes in v0.24 - Update compat for ManifoldsBase.jl v0.11 with `AbstractManifold`. - Transition to only `getManifold` (instead of `getManifolds`), thereby moving towards exclusively using Manifolds.jl, see #1234. - Deprecate use of `getFactorMean`, use `IIF.getParametricMeasurement` instead. - Upstreamed `is/set Marginalized` to DFG (#1269). -# Major changes in v0.23 +# Changes in v0.23 - New `@defVariable` only uses `ManifoldsBase.Manifold` as base abstraction for variable types. -# Major changes in v0.22 +# Changes in v0.22 - Work in progress toward `ManifoldsBase.Manifold` as base abstraction for variable types. -# Major changes in v0.21 +# Changes in v0.21 - `CalcResidual` no longer takes a `residual` as input parameter and should return `residual`, see #467 . -# Major changes in v0.20 +# Changes in v0.20 - The user factor API call strategy has been simplified via `CalcResidual`, see #467 for details. - User factor API for `getSample` and `.specialsampler` has been standardized via `CalcResidual` (#927) -- for ongoing work please follow #1099 and #1094 and #1069. 
diff --git a/Project.toml b/Project.toml index 1195832ce..d3e7b2e25 100644 --- a/Project.toml +++ b/Project.toml @@ -2,7 +2,7 @@ name = "IncrementalInference" uuid = "904591bb-b899-562f-9e6f-b8df64c7d480" keywords = ["MM-iSAMv2", "Bayes tree", "junction tree", "Bayes network", "variable elimination", "graphical models", "SLAM", "inference", "sum-product", "belief-propagation"] desc = "Implements the Multimodal-iSAMv2 algorithm." -version = "0.27.1" +version = "0.27.2" [deps] ApproxManifoldProducts = "9bbbb610-88a1-53cd-9763-118ce10c1f89" @@ -55,14 +55,14 @@ JSON2 = "0.3" KernelDensityEstimate = "0.5.6" Manifolds = "0.7" ManifoldsBase = "0.12.6" -MetaGraphs = "0.6.4, 0.7" +MetaGraphs = "0.7" NLSolversBase = "7.6" NLsolve = "3, 4" -Optim = "0.22, 1.0" +Optim = "1" ProgressMeter = "1" Reexport = "1" Requires = "1" -StaticArrays = "0.15, 1" +StaticArrays = "1" StatsBase = "0.32, 0.33" TensorCast = "0.3.3, 0.4" TimeZones = "1.3.1" diff --git a/src/DefaultNodeTypes.jl b/src/DefaultNodeTypes.jl index 942f92373..7cdff3d08 100644 --- a/src/DefaultNodeTypes.jl +++ b/src/DefaultNodeTypes.jl @@ -11,8 +11,8 @@ Notes """ selectFactorType(Modl::Module, T1::Type{<:InferenceVariable}, T2::Type{<:InferenceVariable}) = getfield(Modl, Symbol(T1, T2)) selectFactorType(T1::Type{<:InferenceVariable}, T2::Type{<:InferenceVariable}) = selectFactorType(typeof(T1()).name.module, T1, T2) -selectFactorType(T1::Type{<:ContinuousScalar}, T2::Type{<:ContinuousScalar}) = LinearRelative{1} -selectFactorType(T1::Type{<:ContinuousEuclid{N}}, T2::Type{<:ContinuousEuclid{N}}) where N = LinearRelative{N} +selectFactorType(T1::Type{<:Position1}, T2::Type{<:Position1}) = LinearRelative{1} +selectFactorType(T1::Type{<:Position{N}}, T2::Type{<:Position{N}}) where N = LinearRelative{N} selectFactorType(T1::InferenceVariable, T2::InferenceVariable) = selectFactorType(typeof(T1), typeof(T2)) selectFactorType(dfg::AbstractDFG, s1::Symbol, s2::Symbol) = selectFactorType( getVariableType(dfg, s1), 
getVariableType(dfg, s2) ) diff --git a/src/ExportAPI.jl b/src/ExportAPI.jl index 7c3d3b1b5..2e6d63e77 100644 --- a/src/ExportAPI.jl +++ b/src/ExportAPI.jl @@ -232,8 +232,9 @@ export rand, fastnorm, - # new wrapper (experimental) + # Factor operational memory CommonConvWrapper, + CalcFactor, getCliqVarInitOrderUp, getCliqNumAssocFactorsPerVar, @@ -294,8 +295,8 @@ export setVariableRefence!, reshapeVec2Mat -export ContinuousScalar -export ContinuousEuclid +export Position, Position1, Position2, Position3, Position4 +export ContinuousScalar, ContinuousEuclid # TODO figure out if this will be deprecated, Caesar.jl #807 export Circular, Circle # serializing distributions @@ -305,6 +306,7 @@ export PackedUniform, PackedNormal export PackedZeroMeanDiagNormal, PackedZeroMeanFullNormal, PackedDiagNormal, PackedFullNormal export PackedManifoldKernelDensity export PackedAliasingScalarSampler, PackedHeatmapGridDensity, PackedLevelSetGridNormal +export PackedRayleigh export Mixture, PackedMixture diff --git a/src/FactorGraph.jl b/src/FactorGraph.jl index 5aed5cb6f..0a7e85c75 100644 --- a/src/FactorGraph.jl +++ b/src/FactorGraph.jl @@ -596,7 +596,7 @@ function getDefaultFactorData(dfg::AbstractDFG, edgeIDs = Int[], solveInProgress = 0, inflation::Real=getSolverParams(dfg).inflation, - _blockRecursion::Bool=false ) where T <: FunctorInferenceType + _blockRecursion::Bool=false ) where T <: AbstractFactor # # prepare multihypo particulars @@ -657,7 +657,7 @@ end """ $(SIGNATURES) -Add factor with user defined type <: FunctorInferenceType to the factor graph +Add factor with user defined type `<:AbstractFactor` to the factor graph object. Define whether the automatic initialization of variables should be performed. Use order sensitive `multihypo` keyword argument to define if any variables are related to data association uncertainty. 
diff --git a/src/Factors/Mixture.jl b/src/Factors/Mixture.jl index 2d7d08fbe..98d357ab7 100644 --- a/src/Factors/Mixture.jl +++ b/src/Factors/Mixture.jl @@ -33,7 +33,7 @@ mlr = Mixture(LinearRelative, addFactor!(fg, [:x0;:x1], mlr) ``` """ -struct Mixture{N, F<:FunctorInferenceType, S, T<:Tuple} <: FunctorInferenceType +struct Mixture{N, F<:AbstractFactor, S, T<:Tuple} <: AbstractFactor mechanics::F components::NamedTuple{S,T} diversity::Distributions.Categorical @@ -44,21 +44,21 @@ end Mixture(f::Type{F}, z::NamedTuple{S,T}, - c::Distributions.DiscreteNonParametric ) where {F<:FunctorInferenceType, S, T} = Mixture{length(z),F,S,T}(f(LinearAlgebra.I), z, c, size( rand(z[1],1), 1), zeros(Int, 0)) + c::Distributions.DiscreteNonParametric ) where {F<:AbstractFactor, S, T} = Mixture{length(z),F,S,T}(f(LinearAlgebra.I), z, c, size( rand(z[1],1), 1), zeros(Int, 0)) Mixture(f::F, z::NamedTuple{S,T}, - c::Distributions.DiscreteNonParametric ) where {F<:FunctorInferenceType, S, T} = Mixture{length(z),F,S,T}(f, z, c, size( rand(z[1],1), 1), zeros(Int, 0)) + c::Distributions.DiscreteNonParametric ) where {F<:AbstractFactor, S, T} = Mixture{length(z),F,S,T}(f, z, c, size( rand(z[1],1), 1), zeros(Int, 0)) Mixture(f::Union{F,Type{F}},z::NamedTuple{S,T}, - c::AbstractVector{<:Real}) where {F<:FunctorInferenceType,S,T} = Mixture(f, z, Categorical([c...]) ) + c::AbstractVector{<:Real}) where {F<:AbstractFactor,S,T} = Mixture(f, z, Categorical([c...]) ) Mixture(f::Union{F,Type{F}}, z::NamedTuple{S,T}, - c::NTuple{N,<:Real}) where {N,F<:FunctorInferenceType,S,T} = Mixture(f, z, [c...] ) + c::NTuple{N,<:Real}) where {N,F<:AbstractFactor,S,T} = Mixture(f, z, [c...] 
) Mixture(f::Union{F,Type{F}}, z::Tuple, - c::Union{<:Distributions.DiscreteNonParametric, <:AbstractVector{<:Real}, <:NTuple{N,<:Real}} ) where {F<:FunctorInferenceType, N} = Mixture(f,NamedTuple{_defaultNamesMixtures(length(z))}(z), c ) + c::Union{<:Distributions.DiscreteNonParametric, <:AbstractVector{<:Real}, <:NTuple{N,<:Real}} ) where {F<:AbstractFactor, N} = Mixture(f,NamedTuple{_defaultNamesMixtures(length(z))}(z), c ) Mixture(f::Union{F,Type{F}}, z::AbstractVector{<:SamplableBelief}, - c::Union{<:Distributions.DiscreteNonParametric, <:AbstractVector{<:Real}, <:NTuple{N,<:Real}} ) where {F <: FunctorInferenceType, N} = Mixture(f,(z...,), c ) + c::Union{<:Distributions.DiscreteNonParametric, <:AbstractVector{<:Real}, <:NTuple{N,<:Real}} ) where {F <: AbstractFactor, N} = Mixture(f,(z...,), c ) function Base.resize!(mp::Mixture, s::Int) diff --git a/src/Flux/FluxModelsDistribution.jl b/src/Flux/FluxModelsDistribution.jl index 58b62065f..693135aa1 100644 --- a/src/Flux/FluxModelsDistribution.jl +++ b/src/Flux/FluxModelsDistribution.jl @@ -112,7 +112,7 @@ Related Mixture, FluxModelsDistribution """ -function MixtureFluxModels( F_::FunctorInferenceType, +function MixtureFluxModels( F_::AbstractFactor, nnModels::Vector{P}, inDim::NTuple{ID,Int}, data::D, @@ -147,7 +147,7 @@ end MixtureFluxModels(::Type{F}, w...; - kw...) where F <: FunctorInferenceType = MixtureFluxModels(F(LinearAlgebra.I),w...;kw...) + kw...) where F <: AbstractFactor = MixtureFluxModels(F(LinearAlgebra.I),w...;kw...) 
diff --git a/src/Serialization/entities/SerializingDistributions.jl b/src/Serialization/entities/SerializingDistributions.jl index f8cc3c4b7..b26969bf7 100644 --- a/src/Serialization/entities/SerializingDistributions.jl +++ b/src/Serialization/entities/SerializingDistributions.jl @@ -53,6 +53,9 @@ Base.@kwdef struct PackedFullNormal <: PackedSamplableBelief cov::Vector{Float64} = ones(1) end - +Base.@kwdef struct PackedRayleigh <: PackedSamplableBelief + _type::String = "IncrementalInference.PackedRayleigh" + sigma::Float64 = 1.0 +end # \ No newline at end of file diff --git a/src/Serialization/services/SerializingDistributions.jl b/src/Serialization/services/SerializingDistributions.jl index cd42b91bd..75e3c5e4f 100644 --- a/src/Serialization/services/SerializingDistributions.jl +++ b/src/Serialization/services/SerializingDistributions.jl @@ -9,6 +9,7 @@ packDistribution(dtr::ZeroMeanDiagNormal) = PackedZeroMeanDiagNormal(; diag=dtr. packDistribution(dtr::ZeroMeanFullNormal) = PackedZeroMeanFullNormal(; cov=dtr.Σ.mat[:] ) packDistribution(dtr::DiagNormal) = PackedDiagNormal(; mu=dtr.μ, diag=dtr.Σ.diag ) packDistribution(dtr::FullNormal) = PackedFullNormal(; mu=dtr.μ, cov=dtr.Σ.mat[:] ) +packDistribution(dtr::Rayleigh) = PackedRayleigh(; sigma=dtr.σ ) packDistribution(dtr::AliasingScalarSampler) = PackedAliasingScalarSampler(; domain=dtr.domain, weights=dtr.weights.values ) @@ -58,10 +59,11 @@ packDistribution(dtr::LevelSetGridNormal) = PackedLevelSetGridNormal( "Increment unpackDistribution(dtr::PackedCategorical) = Categorical( dtr.p ./ sum(dtr.p) ) unpackDistribution(dtr::PackedUniform) = Uniform(dtr.a, dtr.b ) unpackDistribution(dtr::PackedNormal) = Normal( dtr.mu, dtr.sigma ) -unpackDistribution(dtr::PackedZeroMeanDiagNormal) = MvNormal( sqrt.(dtr.diag) ) +unpackDistribution(dtr::PackedZeroMeanDiagNormal) = MvNormal( LinearAlgebra.Diagonal(map(abs2, sqrt.(dtr.diag))) ) # sqrt.(dtr.diag) unpackDistribution(dtr::PackedZeroMeanFullNormal) = MvNormal( 
reshape(dtr.cov, length(dtr.mu), :) ) unpackDistribution(dtr::PackedDiagNormal) = MvNormal( dtr.mu, sqrt.(dtr.diag) ) unpackDistribution(dtr::PackedFullNormal) = MvNormal( dtr.mu, reshape(dtr.cov, length(dtr.mu), :) ) +unpackDistribution(dtr::PackedRayleigh) = Rayleigh( dtr.sigma ) unpackDistribution(dtr::PackedAliasingScalarSampler) = AliasingScalarSampler( dtr.domain, dtr.weights ./ sum(dtr.weights) ) diff --git a/src/Variables/DefaultVariables.jl b/src/Variables/DefaultVariables.jl index 1de2d8197..96d410bf8 100644 --- a/src/Variables/DefaultVariables.jl +++ b/src/Variables/DefaultVariables.jl @@ -2,37 +2,41 @@ ## Euclid 1 -""" -$(TYPEDEF) -Most basic continuous scalar variable in a `::DFG.AbstractDFG` object. -DevNotes -- TODO Consolidate with ContinuousEuclid{1} """ -@defVariable ContinuousScalar TranslationGroup(1) [0.0;] - + $TYPEDEF - -""" - ContinuousEuclid{N} -Continuous Euclidean variable of dimension `N`. +Continuous Euclidean variable of dimension `N` representing a Position in cartesian space. """ -struct ContinuousEuclid{N} <: InferenceVariable end +struct Position{N} <: InferenceVariable end -ContinuousEuclid(x::Int) = ContinuousEuclid{x}() +Position(N::Int) = Position{N}() # not sure if these overloads are necessary since DFG 775? 
-DFG.getManifold(::InstanceType{ContinuousEuclid{N}}) where N = TranslationGroup(N) -DFG.getDimension(val::InstanceType{ContinuousEuclid{N}}) where N = manifold_dimension(getManifold(val)) +DFG.getManifold(::InstanceType{Position{N}}) where N = TranslationGroup(N) +DFG.getDimension(val::InstanceType{Position{N}}) where N = manifold_dimension(getManifold(val)) +DFG.getPointType(::Type{Position{N}}) where N = Vector{Float64} +DFG.getPointIdentity(M_::Type{Position{N}}) where N = zeros(N) # identity_element(getManifold(M_), zeros(N)) +Base.convert(::Type{<:ManifoldsBase.AbstractManifold}, ::InstanceType{Position{N}}) where N = TranslationGroup(N) -DFG.getPointType(::Type{ContinuousEuclid{N}}) where N = Vector{Float64} -DFG.getPointIdentity(M_::Type{ContinuousEuclid{N}}) where N = zeros(N) # identity_element(getManifold(M_), zeros(N)) +# +""" +$(TYPEDEF) + +Most basic continuous scalar variable in a `::DFG.AbstractDFG` object. -Base.convert(::Type{<:ManifoldsBase.AbstractManifold}, ::InstanceType{ContinuousEuclid{N}}) where N = TranslationGroup(N) +Alias of `Position{1}` +""" +const ContinuousScalar = Position{1} +const ContinuousEuclid{N} = Position{N} +const Position1 = Position{1} +const Position2 = Position{2} +const Position3 = Position{3} +const Position4 = Position{4} ## Circular @@ -45,6 +49,4 @@ Circular is a `Manifolds.Circle{ℝ}` mechanization of one rotation, with `theta @defVariable Circular RealCircleGroup() [0.0;] - - # \ No newline at end of file diff --git a/src/entities/FactorOperationalMemory.jl b/src/entities/FactorOperationalMemory.jl index 76fb23965..03734561e 100644 --- a/src/entities/FactorOperationalMemory.jl +++ b/src/entities/FactorOperationalMemory.jl @@ -1,7 +1,3 @@ -import Base: convert -import Base: == - -export CalcFactor """ @@ -12,13 +8,12 @@ User factor interface method for computing the residual values of factors. 
Notes - Also see #467 on API consolidation -```juila +```julia function (cf::CalcFactor{<:LinearRelative})(res::AbstractVector{<:Real}, z, xi, xj) cf.metadata.variablelist cf.metadata.targetvariable - cf.metadata.usercache - generic on-manifold residual function - + cf.cache + # generic on-manifold residual function return distance(z, distance(xj, xi)) end ``` @@ -108,6 +103,7 @@ mutable struct FactorMetadata{FV<:AbstractVector{<:DFGVariable}, # label of which variable is being solved for solvefor::Symbol # for type specific user data, see (? #784) + # OBSOLETE? Replaced by CalcFactor.cache cachedata::CD end diff --git a/src/services/EvalFactor.jl b/src/services/EvalFactor.jl index 9166b3d97..812e0ba5f 100644 --- a/src/services/EvalFactor.jl +++ b/src/services/EvalFactor.jl @@ -299,7 +299,7 @@ end Multiple dispatch wrapper for `<:AbstractRelativeRoots` types, to prepare and execute the general approximate convolution with user defined factor residual functions. This method also supports multihypothesis operations as one mechanism to introduce new modality into the proposal beliefs. -Planned changes will fold null hypothesis in as a standard feature and no longer appear as a separate `InferenceType`. +Planned changes will fold null hypothesis in as a standard feature and no longer appear as a separate `InferenceVariable`. 
""" function evalPotentialSpecific( Xi::AbstractVector{<:DFGVariable}, ccwl::CommonConvWrapper{T}, diff --git a/test/testPackedDistributions.jl b/test/testPackedDistributions.jl index 2dec109dd..6ed4f79d7 100644 --- a/test/testPackedDistributions.jl +++ b/test/testPackedDistributions.jl @@ -161,6 +161,24 @@ upck = unpackDistribution(packed) end +@testset "Packing of Rayleigh" begin +## + +r = Rayleigh(1.1) +r_ = packDistribution(r) + +@test r_ isa PackedSamplableBelief +@test r_ isa PackedRayleigh + +r__ = unpackDistribution(r_) + +@test r__ isa Rayleigh +@test isapprox(r.σ, r__.σ) + +## +end + + ## Legacy tests # @testset "hard-coded test of PackedPrior to Prior" begin