Merge branch 'hybrid/pruning' into feature/nonlinear-hybrid
commit db569091f2
@@ -30,11 +30,14 @@ namespace gtsam {
 /**
  * @brief A conditional of gaussian mixtures indexed by discrete variables, as
- * part of a Bayes Network.
+ * part of a Bayes Network. This is the result of the elimination of a
+ * continuous variable in a hybrid scheme, such that the remaining variables
+ * are discrete+continuous.
  *
- * Represents the conditional density P(X | M, Z) where X is a continuous random
- * variable, M is the selection of discrete variables corresponding to a subset
- * of the Gaussian variables and Z is parent of this node
+ * Represents the conditional density P(X | M, Z) where X is the set of
+ * continuous random variables, M is the selection of discrete variables
+ * corresponding to a subset of the Gaussian variables, and Z is the parent of
+ * this node.
  *
  * The probability P(x|y,z,...) is proportional to
  * \f$ \sum_i k_i \exp - \frac{1}{2} |R_i x - (d_i - S_i y - T_i z - ...)|^2 \f$
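For reference, the Doxygen formula above can be read as a sum over the assignments m of the discrete variables M, with one Gaussian term per leaf of the mixture (a restatement of the documented density, not part of the diff itself):

    P(x \mid y, z, \ldots) \;\propto\; \sum_{m} k_m \exp\!\left(-\tfrac{1}{2}\,\lVert R_m x - (d_m - S_m y - T_m z - \cdots)\rVert^{2}\right)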
@@ -10,7 +10,123 @@
  * @file HybridBayesNet.cpp
  * @brief A bayes net of Gaussian Conditionals indexed by discrete keys.
  * @author Fan Jiang
+ * @author Varun Agrawal
  * @date January 2022
  */

 #include <gtsam/hybrid/HybridBayesNet.h>
+
+namespace gtsam {
+
+/* ************************************************************************* */
+/// Return the DiscreteKey vector as a set.
+static std::set<DiscreteKey> DiscreteKeysAsSet(const DiscreteKeys &dkeys) {
+  std::set<DiscreteKey> s;
+  s.insert(dkeys.begin(), dkeys.end());
+  return s;
+}
+
+/* ************************************************************************* */
+HybridBayesNet HybridBayesNet::prune(
+    const DecisionTreeFactor::shared_ptr &discreteFactor) const {
+  /* To prune, we visit every leaf in the GaussianMixture.
+   * For each leaf, using the assignment we check the discrete decision tree
+   * for a 0.0 probability, and if so, set the leaf to a nullptr.
+   *
+   * We can later check the GaussianMixture for just nullptrs.
+   */
+
+  HybridBayesNet prunedBayesNetFragment;
+
+  // Functional which loops over all assignments and creates a set of
+  // GaussianConditionals
+  auto pruner = [&](const Assignment<Key> &choices,
+                    const GaussianConditional::shared_ptr &conditional)
+      -> GaussianConditional::shared_ptr {
+    // Typecast so we can use this to get the probability value
+    DiscreteValues values(choices);
+
+    if ((*discreteFactor)(values) == 0.0) {
+      // empty aka null pointer
+      boost::shared_ptr<GaussianConditional> null;
+      return null;
+    } else {
+      return conditional;
+    }
+  };
+
+  // Go through all the conditionals in the
+  // Bayes Net and prune them as per discreteFactor.
+  for (size_t i = 0; i < this->size(); i++) {
+    HybridConditional::shared_ptr conditional = this->at(i);
+
+    GaussianMixture::shared_ptr gaussianMixture =
+        boost::dynamic_pointer_cast<GaussianMixture>(conditional->inner());
+
+    if (gaussianMixture) {
+      // We may have mixtures with fewer discrete keys than discreteFactor,
+      // so we skip those since the label assignment does not exist.
+      auto gmKeySet = DiscreteKeysAsSet(gaussianMixture->discreteKeys());
+      auto dfKeySet = DiscreteKeysAsSet(discreteFactor->discreteKeys());
+      if (gmKeySet != dfKeySet) {
+        // Add the gaussianMixture which doesn't have to be pruned.
+        prunedBayesNetFragment.push_back(
+            boost::make_shared<HybridConditional>(gaussianMixture));
+        continue;
+      }
+
+      // Run the pruning to get a new, pruned tree
+      GaussianMixture::Conditionals prunedTree =
+          gaussianMixture->conditionals().apply(pruner);
+
+      DiscreteKeys discreteKeys = gaussianMixture->discreteKeys();
+      // reverse keys to get a natural ordering
+      std::reverse(discreteKeys.begin(), discreteKeys.end());
+
+      // Convert from boost::iterator_range to KeyVector
+      // so we can pass it to the constructor.
+      KeyVector frontals(gaussianMixture->frontals().begin(),
+                         gaussianMixture->frontals().end()),
+          parents(gaussianMixture->parents().begin(),
+                  gaussianMixture->parents().end());
+
+      // Create the new gaussian mixture and add it to the bayes net.
+      auto prunedGaussianMixture = boost::make_shared<GaussianMixture>(
+          frontals, parents, discreteKeys, prunedTree);
+
+      // Type-erase and add to the pruned Bayes Net fragment.
+      prunedBayesNetFragment.push_back(
+          boost::make_shared<HybridConditional>(prunedGaussianMixture));
+
+    } else {
+      // Add the non-GaussianMixture conditional
+      prunedBayesNetFragment.push_back(conditional);
+    }
+  }
+
+  return prunedBayesNetFragment;
+}
+
+/* ************************************************************************* */
+GaussianMixture::shared_ptr HybridBayesNet::atGaussian(size_t i) const {
+  return boost::dynamic_pointer_cast<GaussianMixture>(factors_.at(i)->inner());
+}
+
+/* ************************************************************************* */
+DiscreteConditional::shared_ptr HybridBayesNet::atDiscrete(size_t i) const {
+  return boost::dynamic_pointer_cast<DiscreteConditional>(
+      factors_.at(i)->inner());
+}
+
+/* ************************************************************************* */
+GaussianBayesNet HybridBayesNet::choose(
+    const DiscreteValues &assignment) const {
+  GaussianBayesNet gbn;
+  for (size_t idx = 0; idx < size(); idx++) {
+    GaussianMixture gm = *this->atGaussian(idx);
+    gbn.push_back(gm(assignment));
+  }
+  return gbn;
+}
+
+}  // namespace gtsam
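For orientation, a minimal usage sketch of the prune and choose API introduced above; the Bayes net, the discrete probability factor, and the assignment are assumed inputs, and the sketch is not part of the commit:

    #include <gtsam/discrete/DecisionTreeFactor.h>
    #include <gtsam/hybrid/HybridBayesNet.h>

    namespace example {

    // Prune zero-probability mixture leaves, then fix the discrete assignment
    // to obtain a purely Gaussian Bayes net.
    gtsam::GaussianBayesNet PruneAndChoose(
        const gtsam::HybridBayesNet &hbn,
        const gtsam::DecisionTreeFactor::shared_ptr &discreteProbs,
        const gtsam::DiscreteValues &assignment) {
      // Leaves with probability 0.0 in discreteProbs become null conditionals
      // inside each GaussianMixture of the returned fragment.
      gtsam::HybridBayesNet pruned = hbn.prune(discreteProbs);

      // Select one Gaussian conditional per mixture via the assignment.
      return pruned.choose(assignment);
    }

    }  // namespace example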
@@ -17,8 +17,10 @@

 #pragma once

+#include <gtsam/discrete/DecisionTreeFactor.h>
 #include <gtsam/hybrid/HybridConditional.h>
 #include <gtsam/inference/BayesNet.h>
+#include <gtsam/linear/GaussianBayesNet.h>

 namespace gtsam {
@@ -36,6 +38,34 @@ class GTSAM_EXPORT HybridBayesNet : public BayesNet<HybridConditional> {

   /** Construct empty bayes net */
   HybridBayesNet() = default;

+  /// Prune the Hybrid Bayes Net given the discrete decision tree.
+  HybridBayesNet prune(
+      const DecisionTreeFactor::shared_ptr &discreteFactor) const;
+
+  /// Add HybridConditional to Bayes Net
+  using Base::add;
+
+  /// Add a discrete conditional to the Bayes Net.
+  void add(const DiscreteKey &key, const std::string &table) {
+    push_back(
+        HybridConditional(boost::make_shared<DiscreteConditional>(key, table)));
+  }
+
+  /// Get a specific Gaussian mixture by index `i`.
+  GaussianMixture::shared_ptr atGaussian(size_t i) const;
+
+  /// Get a specific discrete conditional by index `i`.
+  DiscreteConditional::shared_ptr atDiscrete(size_t i) const;
+
+  /**
+   * @brief Get the Gaussian Bayes Net which corresponds to a specific discrete
+   * value assignment.
+   *
+   * @param assignment The discrete value assignment for the discrete keys.
+   * @return GaussianBayesNet
+   */
+  GaussianBayesNet choose(const DiscreteValues &assignment) const;
 };

 }  // namespace gtsam
@@ -78,7 +78,8 @@ bool HybridFactor::equals(const HybridFactor &lf, double tol) const {
   const This *e = dynamic_cast<const This *>(&lf);
   return e != nullptr && Base::equals(*e, tol) &&
          isDiscrete_ == e->isDiscrete_ && isContinuous_ == e->isContinuous_ &&
-         isHybrid_ == e->isHybrid_ && nrContinuous_ == e->nrContinuous_;
+         isHybrid_ == e->isHybrid_ && continuousKeys_ == e->continuousKeys_ &&
+         discreteKeys_ == e->discreteKeys_;
 }

 /* ************************************************************************ */
@@ -50,8 +50,8 @@ class GTSAM_EXPORT HybridFactor : public Factor {
   size_t nrContinuous_ = 0;

  protected:
+  // Set of DiscreteKeys for this factor.
   DiscreteKeys discreteKeys_;

   /// Record continuous keys for book-keeping
   KeyVector continuousKeys_;
@@ -120,10 +120,13 @@ class GTSAM_EXPORT HybridFactor : public Factor {
   bool isHybrid() const { return isHybrid_; }

   /// Return the number of continuous variables in this factor.
-  size_t nrContinuous() const { return nrContinuous_; }
+  size_t nrContinuous() const { return continuousKeys_.size(); }

-  /// Return vector of discrete keys.
-  DiscreteKeys discreteKeys() const { return discreteKeys_; }
+  /// Return the discrete keys for this factor.
+  const DiscreteKeys &discreteKeys() const { return discreteKeys_; }
+
+  /// Return only the continuous keys for this factor.
+  const KeyVector &continuousKeys() const { return continuousKeys_; }

   /// @}
 };
@@ -11,7 +11,7 @@

 /**
  * @file HybridFactorGraph.h
- * @brief Nonlinear hybrid factor graph that uses type erasure
+ * @brief Hybrid factor graph base class that uses type erasure
  * @author Varun Agrawal
  * @date May 28, 2022
  */
@@ -56,10 +56,9 @@ struct EliminationTraits<HybridGaussianFactorGraph> {
   typedef HybridBayesNet
       BayesNetType;  ///< Type of Bayes net from sequential elimination
   typedef HybridEliminationTree
       EliminationTreeType;  ///< Type of elimination tree
   typedef HybridBayesTree BayesTreeType;  ///< Type of Bayes tree
-  typedef HybridJunctionTree
-      JunctionTreeType;  ///< Type of Junction tree
+  typedef HybridJunctionTree JunctionTreeType;  ///< Type of Junction tree
   /// The default dense elimination function
   static std::pair<boost::shared_ptr<ConditionalType>,
                    boost::shared_ptr<FactorType> >
@@ -136,8 +135,7 @@ class GTSAM_EXPORT HybridGaussianFactorGraph
   template <typename FACTOR>
   IsGaussian<FACTOR> push_gaussian(
       const boost::shared_ptr<FACTOR>& gaussianFactor) {
-    Base::Base::push_back(
-        boost::make_shared<HybridGaussianFactor>(gaussianFactor));
+    Base::push_back(boost::make_shared<HybridGaussianFactor>(gaussianFactor));
   }

   /// Construct a factor and add (shared pointer to it) to factor graph.
@@ -33,11 +33,21 @@

 #pragma once

-using gtsam::symbol_shorthand::C;
-using gtsam::symbol_shorthand::X;
-
 namespace gtsam {

+using symbol_shorthand::C;
+using symbol_shorthand::M;
+using symbol_shorthand::X;
+
+/**
+ * @brief Create a switching system chain. A switching system is a continuous
+ * system which depends on a discrete mode at each time step of the chain.
+ *
+ * @param n The number of chain elements.
+ * @param keyFunc The functional to help specify the continuous key.
+ * @param dKeyFunc The functional to help specify the discrete key.
+ * @return HybridGaussianFactorGraph::shared_ptr
+ */
 inline HybridGaussianFactorGraph::shared_ptr makeSwitchingChain(
     size_t n, std::function<Key(int)> keyFunc = X,
     std::function<Key(int)> dKeyFunc = C) {
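A small sketch of calling this helper, assuming it lives in the test header "Switching.h" used elsewhere in this change (hypothetical example, not part of the diff):

    #include "Switching.h"

    int main() {
      // Build a short switching chain: continuous states connected by factors
      // that are selected by one discrete mode per step.
      auto chain = gtsam::makeSwitchingChain(5);
      chain->print("switching chain");
      return 0;
    }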
@@ -63,9 +73,19 @@ inline HybridGaussianFactorGraph::shared_ptr makeSwitchingChain(
   return boost::make_shared<HybridGaussianFactorGraph>(std::move(hfg));
 }

+/**
+ * @brief Return the ordering as a binary tree such that all parent nodes are
+ * above their children.
+ *
+ * This will result in a nested dissection Bayes tree after elimination.
+ *
+ * @param input The original ordering.
+ * @return std::pair<KeyVector, std::vector<int>>
+ */
 inline std::pair<KeyVector, std::vector<int>> makeBinaryOrdering(
     std::vector<Key> &input) {
   KeyVector new_order;

   std::vector<int> levels(input.size());
   std::function<void(std::vector<Key>::iterator, std::vector<Key>::iterator,
                      int)>
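Similarly, a hypothetical call pattern for makeBinaryOrdering, mirroring how the tests below use it (not part of the diff):

    #include <tuple>
    #include <vector>

    #include "Switching.h"

    void BinaryOrderingExample() {
      // Given a natural ordering of keys, produce a binary-tree ordering plus
      // the level of each key in that tree.
      std::vector<gtsam::Key> natural = {gtsam::X(1), gtsam::X(2), gtsam::X(3),
                                         gtsam::X(4), gtsam::X(5)};
      gtsam::KeyVector ordered;
      std::vector<int> levels;
      std::tie(ordered, levels) = gtsam::makeBinaryOrdering(natural);
    }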
@@ -88,7 +108,7 @@ inline std::pair<KeyVector, std::vector<int>> makeBinaryOrdering(

   bsg(input.begin(), input.end(), 0);
   std::reverse(new_order.begin(), new_order.end());
-  // std::reverse(levels.begin(), levels.end());
   return {new_order, levels};
 }
@@ -58,26 +58,30 @@ using gtsam::symbol_shorthand::X;
 using gtsam::symbol_shorthand::Y;

 /* ************************************************************************* */
-TEST(HybridGaussianFactorGraph, creation) {
-  HybridConditional test;
+TEST(HybridGaussianFactorGraph, Creation) {
+  HybridConditional conditional;

   HybridGaussianFactorGraph hfg;

-  hfg.add(HybridGaussianFactor(JacobianFactor(0, I_3x3, Z_3x1)));
+  hfg.add(HybridGaussianFactor(JacobianFactor(X(0), I_3x3, Z_3x1)));

-  GaussianMixture clgc(
-      {X(0)}, {X(1)}, DiscreteKeys(DiscreteKey{C(0), 2}),
-      GaussianMixture::Conditionals(
-          C(0),
-          boost::make_shared<GaussianConditional>(X(0), Z_3x1, I_3x3, X(1),
-                                                  I_3x3),
-          boost::make_shared<GaussianConditional>(X(0), Vector3::Ones(), I_3x3,
-                                                  X(1), I_3x3)));
-  GTSAM_PRINT(clgc);
+  // Define a gaussian mixture conditional P(x0|x1, c0) and add it to the
+  // factor graph
+  GaussianMixture gm({X(0)}, {X(1)}, DiscreteKeys(DiscreteKey{C(0), 2}),
+                     GaussianMixture::Conditionals(
+                         C(0),
+                         boost::make_shared<GaussianConditional>(
+                             X(0), Z_3x1, I_3x3, X(1), I_3x3),
+                         boost::make_shared<GaussianConditional>(
+                             X(0), Vector3::Ones(), I_3x3, X(1), I_3x3)));
+  hfg.add(gm);
+
+  EXPECT_LONGS_EQUAL(2, hfg.size());
 }

 /* ************************************************************************* */
-TEST(HybridGaussianFactorGraph, eliminate) {
+TEST(HybridGaussianFactorGraph, EliminateSequential) {
+  // Test elimination of a single variable.
   HybridGaussianFactorGraph hfg;

   hfg.add(HybridGaussianFactor(JacobianFactor(0, I_3x3, Z_3x1)));
@@ -88,11 +92,13 @@ TEST(HybridGaussianFactorGraph, eliminate) {
 }

 /* ************************************************************************* */
-TEST(HybridGaussianFactorGraph, eliminateMultifrontal) {
+TEST(HybridGaussianFactorGraph, EliminateMultifrontal) {
+  // Test multifrontal elimination
   HybridGaussianFactorGraph hfg;

   DiscreteKey c(C(1), 2);

+  // Add priors on x0 and c1
   hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1));
   hfg.add(HybridDiscreteFactor(DecisionTreeFactor(c, {2, 8})));
@@ -110,9 +116,12 @@ TEST(HybridGaussianFactorGraph, eliminateFullSequentialEqualChance) {

   DiscreteKey c1(C(1), 2);

+  // Add prior on x0
   hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1));
+  // Add factor between x0 and x1
   hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1));

+  // Add a gaussian mixture factor ϕ(x1, c1)
   DecisionTree<Key, GaussianFactor::shared_ptr> dt(
       C(1), boost::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1),
       boost::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones()));
@@ -134,26 +143,26 @@ TEST(HybridGaussianFactorGraph, eliminateFullSequentialSimple) {

   DiscreteKey c1(C(1), 2);

+  // Add prior on x0
   hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1));
+  // Add factor between x0 and x1
   hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1));

   DecisionTree<Key, GaussianFactor::shared_ptr> dt(
       C(1), boost::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1),
       boost::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones()));

   hfg.add(GaussianMixtureFactor({X(1)}, {c1}, dt));
-  // hfg.add(GaussianMixtureFactor({X(0)}, {c1}, dt));
+  // Discrete probability table for c1
   hfg.add(HybridDiscreteFactor(DecisionTreeFactor(c1, {2, 8})));
+  // Joint discrete probability table for c1, c2
   hfg.add(HybridDiscreteFactor(
       DecisionTreeFactor({{C(1), 2}, {C(2), 2}}, "1 2 3 4")));
-  // hfg.add(HybridDiscreteFactor(DecisionTreeFactor({{C(2), 2}, {C(3), 2}}, "1
-  // 2 3 4"))); hfg.add(HybridDiscreteFactor(DecisionTreeFactor({{C(3), 2},
-  // {C(1), 2}}, "1 2 2 1")));

   auto result = hfg.eliminateSequential(
       Ordering::ColamdConstrainedLast(hfg, {C(1), C(2)}));

-  GTSAM_PRINT(*result);
+  EXPECT_LONGS_EQUAL(4, result->size());
 }

 /* ************************************************************************* */
@@ -165,28 +174,19 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalSimple) {
   hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1));
   hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1));

-  // DecisionTree<Key, GaussianFactor::shared_ptr> dt(
-  //     C(1), boost::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1),
-  //     boost::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones()));
-
-  // hfg.add(GaussianMixtureFactor({X(1)}, {c1}, dt));
   hfg.add(GaussianMixtureFactor::FromFactors(
       {X(1)}, {{C(1), 2}},
       {boost::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1),
        boost::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones())}));

-  // hfg.add(GaussianMixtureFactor({X(0)}, {c1}, dt));
   hfg.add(DecisionTreeFactor(c1, {2, 8}));
   hfg.add(DecisionTreeFactor({{C(1), 2}, {C(2), 2}}, "1 2 3 4"));
-  // hfg.add(HybridDiscreteFactor(DecisionTreeFactor({{C(2), 2}, {C(3), 2}}, "1
-  // 2 3 4"))); hfg.add(HybridDiscreteFactor(DecisionTreeFactor({{C(3), 2},
-  // {C(1), 2}}, "1 2 2 1")));

   auto result = hfg.eliminateMultifrontal(
       Ordering::ColamdConstrainedLast(hfg, {C(1), C(2)}));

-  GTSAM_PRINT(*result);
-  GTSAM_PRINT(*result->marginalFactor(C(2)));
+  // GTSAM_PRINT(*result);
+  // GTSAM_PRINT(*result->marginalFactor(C(2)));
 }

 /* ************************************************************************* */
@@ -195,30 +195,28 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalCLG) {

   DiscreteKey c(C(1), 2);

+  // Prior on x0
   hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1));
+  // Factor between x0-x1
   hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1));

+  // Decision tree with different modes on x1
   DecisionTree<Key, GaussianFactor::shared_ptr> dt(
       C(1), boost::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1),
       boost::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones()));

+  // Hybrid factor P(x1|c1)
   hfg.add(GaussianMixtureFactor({X(1)}, {c}, dt));
+  // Prior factor on c1
   hfg.add(HybridDiscreteFactor(DecisionTreeFactor(c, {2, 8})));
-  // hfg.add(HybridDiscreteFactor(DecisionTreeFactor({{C(1), 2}, {C(2), 2}}, "1
-  // 2 3 4")));

+  // Get a constrained ordering keeping c1 last
   auto ordering_full = Ordering::ColamdConstrainedLast(hfg, {C(1)});

+  // Returns a Hybrid Bayes Tree with distribution P(x0|x1)P(x1|c1)P(c1)
   HybridBayesTree::shared_ptr hbt = hfg.eliminateMultifrontal(ordering_full);

-  GTSAM_PRINT(*hbt);
-  /*
-  Explanation: the Junction tree will need to reeliminate to get to the marginal
-  on X(1), which is not possible because it involves eliminating discrete before
-  continuous. The solution to this, however, is in Murphy02. TLDR is that this
-  is 1. expensive and 2. inexact. neverless it is doable. And I believe that we
-  should do this.
-  */
+  EXPECT_LONGS_EQUAL(3, hbt->size());
 }

 /* ************************************************************************* */
@@ -233,10 +231,6 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalTwoClique) {
   hfg.add(JacobianFactor(X(1), I_3x3, X(2), -I_3x3, Z_3x1));

   {
-    // DecisionTree<Key, GaussianFactor::shared_ptr> dt(
-    //     C(0), boost::make_shared<JacobianFactor>(X(0), I_3x3, Z_3x1),
-    //     boost::make_shared<JacobianFactor>(X(0), I_3x3, Vector3::Ones()));
-
     hfg.add(GaussianMixtureFactor::FromFactors(
         {X(0)}, {{C(0), 2}},
         {boost::make_shared<JacobianFactor>(X(0), I_3x3, Z_3x1),
@@ -249,7 +243,6 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalTwoClique) {
     hfg.add(GaussianMixtureFactor({X(2)}, {{C(1), 2}}, dt1));
   }

-  // hfg.add(HybridDiscreteFactor(DecisionTreeFactor(c, {2, 8})));
   hfg.add(HybridDiscreteFactor(
       DecisionTreeFactor({{C(1), 2}, {C(2), 2}}, "1 2 3 4")));
@@ -273,26 +266,37 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalTwoClique) {
   auto ordering_full =
       Ordering::ColamdConstrainedLast(hfg, {C(0), C(1), C(2), C(3)});

-  GTSAM_PRINT(ordering_full);
-
   HybridBayesTree::shared_ptr hbt;
   HybridGaussianFactorGraph::shared_ptr remaining;
   std::tie(hbt, remaining) = hfg.eliminatePartialMultifrontal(ordering_full);

-  GTSAM_PRINT(*hbt);
-
-  GTSAM_PRINT(*remaining);
-
-  hbt->dot(std::cout);
+  // GTSAM_PRINT(*hbt);
+  // GTSAM_PRINT(*remaining);
+
+  // hbt->marginalFactor(X(1))->print("HBT: ");
   /*
-  Explanation: the Junction tree will need to reeliminate to get to the marginal
-  on X(1), which is not possible because it involves eliminating discrete before
-  continuous. The solution to this, however, is in Murphy02. TLDR is that this
-  is 1. expensive and 2. inexact. neverless it is doable. And I believe that we
-  should do this.
+  (Fan) Explanation: the Junction tree will need to reeliminate to get to the
+  marginal on X(1), which is not possible because it involves eliminating
+  discrete before continuous. The solution to this, however, is in Murphy02.
+  TLDR is that this is 1. expensive and 2. inexact. nevertheless it is doable.
+  And I believe that we should do this.
   */
 }

+void dotPrint(const HybridGaussianFactorGraph::shared_ptr &hfg,
+              const HybridBayesTree::shared_ptr &hbt,
+              const Ordering &ordering) {
+  DotWriter dw;
+  dw.positionHints['c'] = 2;
+  dw.positionHints['x'] = 1;
+  std::cout << hfg->dot(DefaultKeyFormatter, dw);
+  std::cout << "\n";
+  hbt->dot(std::cout);
+
+  std::cout << "\n";
+  std::cout << hfg->eliminateSequential(ordering)->dot(DefaultKeyFormatter, dw);
+}
+
 /* ************************************************************************* */
 // TODO(fan): make a graph like Varun's paper one
 TEST(HybridGaussianFactorGraph, Switching) {
@@ -326,9 +330,6 @@ TEST(HybridGaussianFactorGraph, Switching) {
     for (auto &l : lvls) {
       l = -l;
     }
-    std::copy(lvls.begin(), lvls.end(),
-              std::ostream_iterator<int>(std::cout, ","));
-    std::cout << "\n";
   }
   {
     std::vector<int> naturalC(N - 1);
@@ -342,63 +343,19 @@ TEST(HybridGaussianFactorGraph, Switching) {
     // std::copy(ordC.begin(), ordC.end(), std::back_inserter(ordering));
     std::tie(ndC, lvls) = makeBinaryOrdering(ordC);
     std::copy(ndC.begin(), ndC.end(), std::back_inserter(ordering));
-    std::copy(lvls.begin(), lvls.end(),
-              std::ostream_iterator<int>(std::cout, ","));
   }
   auto ordering_full = Ordering(ordering);

-  // auto ordering_full =
-  //     Ordering();
-
-  // for (int i = 1; i <= 9; i++) {
-  //   ordering_full.push_back(X(i));
-  // }
-
-  // for (int i = 1; i < 9; i++) {
-  //   ordering_full.push_back(C(i));
-  // }
-
-  // auto ordering_full =
-  //     Ordering(KeyVector{X(1), X(4), X(2), X(6), X(9), X(8), X(3), X(7),
-  //     X(5),
-  //     C(1), C(2), C(3), C(4), C(5), C(6), C(7), C(8)});
-
   // GTSAM_PRINT(*hfg);
-  GTSAM_PRINT(ordering_full);
+  // GTSAM_PRINT(ordering_full);

   HybridBayesTree::shared_ptr hbt;
   HybridGaussianFactorGraph::shared_ptr remaining;
   std::tie(hbt, remaining) = hfg->eliminatePartialMultifrontal(ordering_full);

   // GTSAM_PRINT(*hbt);

   // GTSAM_PRINT(*remaining);
+  // hbt->marginalFactor(C(11))->print("HBT: ");
-  {
-    DotWriter dw;
-    dw.positionHints['c'] = 2;
-    dw.positionHints['x'] = 1;
-    std::cout << hfg->dot(DefaultKeyFormatter, dw);
-    std::cout << "\n";
-    hbt->dot(std::cout);
-  }
-
-  {
-    DotWriter dw;
-    // dw.positionHints['c'] = 2;
-    // dw.positionHints['x'] = 1;
-    std::cout << "\n";
-    std::cout << hfg->eliminateSequential(ordering_full)
-                     ->dot(DefaultKeyFormatter, dw);
-  }
-  /*
-  Explanation: the Junction tree will need to reeliminate to get to the marginal
-  on X(1), which is not possible because it involves eliminating discrete before
-  continuous. The solution to this, however, is in Murphy02. TLDR is that this
-  is 1. expensive and 2. inexact. neverless it is doable. And I believe that we
-  should do this.
-  */
-  hbt->marginalFactor(C(11))->print("HBT: ");
 }

 /* ************************************************************************* */
@@ -434,9 +391,6 @@ TEST(HybridGaussianFactorGraph, SwitchingISAM) {
     for (auto &l : lvls) {
       l = -l;
     }
-    std::copy(lvls.begin(), lvls.end(),
-              std::ostream_iterator<int>(std::cout, ","));
-    std::cout << "\n";
   }
   {
     std::vector<int> naturalC(N - 1);
@@ -450,40 +404,16 @@ TEST(HybridGaussianFactorGraph, SwitchingISAM) {
     // std::copy(ordC.begin(), ordC.end(), std::back_inserter(ordering));
     std::tie(ndC, lvls) = makeBinaryOrdering(ordC);
     std::copy(ndC.begin(), ndC.end(), std::back_inserter(ordering));
-    std::copy(lvls.begin(), lvls.end(),
-              std::ostream_iterator<int>(std::cout, ","));
   }
   auto ordering_full = Ordering(ordering);

   // GTSAM_PRINT(*hfg);
-  GTSAM_PRINT(ordering_full);
+  // GTSAM_PRINT(ordering_full);

   HybridBayesTree::shared_ptr hbt;
   HybridGaussianFactorGraph::shared_ptr remaining;
   std::tie(hbt, remaining) = hfg->eliminatePartialMultifrontal(ordering_full);

-  // GTSAM_PRINT(*hbt);
-
-  // GTSAM_PRINT(*remaining);
-
-  {
-    DotWriter dw;
-    dw.positionHints['c'] = 2;
-    dw.positionHints['x'] = 1;
-    std::cout << hfg->dot(DefaultKeyFormatter, dw);
-    std::cout << "\n";
-    hbt->dot(std::cout);
-  }
-
-  {
-    DotWriter dw;
-    // dw.positionHints['c'] = 2;
-    // dw.positionHints['x'] = 1;
-    std::cout << "\n";
-    std::cout << hfg->eliminateSequential(ordering_full)
-                     ->dot(DefaultKeyFormatter, dw);
-  }
-
   auto new_fg = makeSwitchingChain(12);
   auto isam = HybridGaussianISAM(*hbt);
@@ -492,13 +422,14 @@ TEST(HybridGaussianFactorGraph, SwitchingISAM) {
     factorGraph.push_back(new_fg->at(new_fg->size() - 2));
     factorGraph.push_back(new_fg->at(new_fg->size() - 1));
     isam.update(factorGraph);
-    std::cout << isam.dot();
-    isam.marginalFactor(C(11))->print();
+    // std::cout << isam.dot();
+    // isam.marginalFactor(C(11))->print();
   }
 }

 /* ************************************************************************* */
-TEST(HybridGaussianFactorGraph, SwitchingTwoVar) {
+// TODO(Varun) Actually test something!
+TEST_DISABLED(HybridGaussianFactorGraph, SwitchingTwoVar) {
   const int N = 7;
   auto hfg = makeSwitchingChain(N, X);
   hfg->push_back(*makeSwitchingChain(N, Y, D));
@@ -517,21 +448,6 @@ TEST(HybridGaussianFactorGraph, SwitchingTwoVar) {
     ordX.emplace_back(Y(i));
   }

-  // {
-  //   KeyVector ndX;
-  //   std::vector<int> lvls;
-  //   std::tie(ndX, lvls) = makeBinaryOrdering(naturalX);
-  //   std::copy(ndX.begin(), ndX.end(), std::back_inserter(ordering));
-  //   std::copy(lvls.begin(), lvls.end(),
-  //             std::ostream_iterator<int>(std::cout, ","));
-  //   std::cout << "\n";
-
-  //   for (size_t i = 0; i < N; i++) {
-  //     ordX.emplace_back(X(ndX[i]));
-  //     ordX.emplace_back(Y(ndX[i]));
-  //   }
-  // }
-
   for (size_t i = 1; i <= N - 1; i++) {
     ordX.emplace_back(C(i));
   }
@@ -0,0 +1,93 @@
+/* ----------------------------------------------------------------------------
+
+ * GTSAM Copyright 2010, Georgia Tech Research Corporation,
+ * Atlanta, Georgia 30332-0415
+ * All Rights Reserved
+ * Authors: Frank Dellaert, et al. (see THANKS for the full author list)
+
+ * See LICENSE for the license information
+
+ * -------------------------------------------------------------------------- */
+
+/**
+ * @file testHybridBayesNet.cpp
+ * @brief Unit tests for HybridBayesNet
+ * @author Varun Agrawal
+ * @author Fan Jiang
+ * @author Frank Dellaert
+ * @date December 2021
+ */
+
+#include <gtsam/hybrid/HybridBayesNet.h>
+
+#include "Switching.h"
+
+// Include for test suite
+#include <CppUnitLite/TestHarness.h>
+
+using namespace std;
+using namespace gtsam;
+using noiseModel::Isotropic;
+using symbol_shorthand::M;
+using symbol_shorthand::X;
+
+static const DiscreteKey Asia(0, 2);
+
+/* ****************************************************************************/
+// Test creation
+TEST(HybridBayesNet, Creation) {
+  HybridBayesNet bayesNet;
+
+  bayesNet.add(Asia, "99/1");
+
+  DiscreteConditional expected(Asia, "99/1");
+
+  CHECK(bayesNet.atDiscrete(0));
+  auto& df = *bayesNet.atDiscrete(0);
+  EXPECT(df.equals(expected));
+}
+
+/* ****************************************************************************/
+// Test choosing an assignment of conditionals
+TEST(HybridBayesNet, Choose) {
+  Switching s(4);
+
+  Ordering ordering;
+  for (auto&& kvp : s.linearizationPoint) {
+    ordering += kvp.key;
+  }
+
+  HybridBayesNet::shared_ptr hybridBayesNet;
+  HybridGaussianFactorGraph::shared_ptr remainingFactorGraph;
+  std::tie(hybridBayesNet, remainingFactorGraph) =
+      s.linearizedFactorGraph.eliminatePartialSequential(ordering);
+
+  DiscreteValues assignment;
+  assignment[M(1)] = 1;
+  assignment[M(2)] = 1;
+  assignment[M(3)] = 0;
+
+  GaussianBayesNet gbn = hybridBayesNet->choose(assignment);
+
+  EXPECT_LONGS_EQUAL(4, gbn.size());
+
+  EXPECT(assert_equal(*(*boost::dynamic_pointer_cast<GaussianMixture>(
+                          hybridBayesNet->atGaussian(0)))(assignment),
+                      *gbn.at(0)));
+  EXPECT(assert_equal(*(*boost::dynamic_pointer_cast<GaussianMixture>(
+                          hybridBayesNet->atGaussian(1)))(assignment),
+                      *gbn.at(1)));
+  EXPECT(assert_equal(*(*boost::dynamic_pointer_cast<GaussianMixture>(
+                          hybridBayesNet->atGaussian(2)))(assignment),
+                      *gbn.at(2)));
+  EXPECT(assert_equal(*(*boost::dynamic_pointer_cast<GaussianMixture>(
+                          hybridBayesNet->atGaussian(3)))(assignment),
+                      *gbn.at(3)));
+}
+
+/* ************************************************************************* */
+int main() {
+  TestResult tr;
+  return TestRegistry::runAllTests(tr);
+}
+/* ************************************************************************* */