address review comments
parent 5e2cdfdd3b
commit 239412956c
@@ -557,9 +557,9 @@ HybridGaussianFactorGraph::eliminateHybridSequential(
     const boost::optional<Ordering> continuous,
     const boost::optional<Ordering> discrete, const Eliminate &function,
     OptionalVariableIndex variableIndex) const {
-  Ordering continuous_ordering =
+  const Ordering continuous_ordering =
       continuous ? *continuous : Ordering(this->continuousKeys());
-  Ordering discrete_ordering =
+  const Ordering discrete_ordering =
       discrete ? *discrete : Ordering(this->discreteKeys());

   // Eliminate continuous
@@ -570,7 +570,8 @@ HybridGaussianFactorGraph::eliminateHybridSequential(
           function, variableIndex);

   // Get the last continuous conditional which will have all the discrete keys
-  auto last_conditional = bayesNet->at(bayesNet->size() - 1);
+  HybridConditional::shared_ptr last_conditional =
+      bayesNet->at(bayesNet->size() - 1);
   DiscreteKeys discrete_keys = last_conditional->discreteKeys();

   // If not discrete variables, return the eliminated bayes net.
@@ -578,9 +579,11 @@ HybridGaussianFactorGraph::eliminateHybridSequential(
     return bayesNet;
   }

-  AlgebraicDecisionTree<Key> probPrimeTree =
+  // DecisionTree for P'(X|M, Z) for all mode sequences M
+  const AlgebraicDecisionTree<Key> probPrimeTree =
       this->continuousProbPrimes(discrete_keys, bayesNet);

+  // Add the model selection factor P(M|Z)
   discreteGraph->add(DecisionTreeFactor(discrete_keys, probPrimeTree));

   // Perform discrete elimination
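Read together with the two hunks above, the sequential path is now: eliminate the continuous variables, read the discrete keys off the last continuous conditional, turn the per-mode unnormalized probabilities into a model-selection factor, and only then eliminate the discrete variables. A condensed sketch of that flow, using only calls that appear in these hunks; the setup of bayesNet and discreteGraph is assumed from the surrounding, unchanged code:

    // Sketch only, not a drop-in copy of the method body.
    // 1. Eliminate only the continuous variables -> hybrid Bayes net over X given M.
    // 2. The last continuous conditional is conditioned on every discrete key.
    HybridConditional::shared_ptr last_conditional =
        bayesNet->at(bayesNet->size() - 1);
    DiscreteKeys discrete_keys = last_conditional->discreteKeys();

    // 3. P'(X|M, Z) for every mode sequence M becomes the model-selection factor P(M|Z).
    const AlgebraicDecisionTree<Key> probPrimeTree =
        this->continuousProbPrimes(discrete_keys, bayesNet);
    discreteGraph->add(DecisionTreeFactor(discrete_keys, probPrimeTree));

    // 4. Eliminate the discrete variables and append the result to bayesNet.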
@@ -622,9 +625,9 @@ HybridGaussianFactorGraph::eliminateHybridMultifrontal(
     const boost::optional<Ordering> continuous,
     const boost::optional<Ordering> discrete, const Eliminate &function,
     OptionalVariableIndex variableIndex) const {
-  Ordering continuous_ordering =
+  const Ordering continuous_ordering =
      continuous ? *continuous : Ordering(this->continuousKeys());
-  Ordering discrete_ordering =
+  const Ordering discrete_ordering =
      discrete ? *discrete : Ordering(this->discreteKeys());

   // Eliminate continuous
@@ -635,9 +638,9 @@ HybridGaussianFactorGraph::eliminateHybridMultifrontal(
           function, variableIndex);

   // Get the last continuous conditional which will have all the discrete
-  Key last_continuous_key =
-      continuous_ordering.at(continuous_ordering.size() - 1);
-  auto last_conditional = (*bayesTree)[last_continuous_key]->conditional();
+  const Key last_continuous_key = continuous_ordering.back();
+  HybridConditional::shared_ptr last_conditional =
+      (*bayesTree)[last_continuous_key]->conditional();
   DiscreteKeys discrete_keys = last_conditional->discreteKeys();

   // If not discrete variables, return the eliminated bayes net.
@@ -645,16 +648,24 @@ HybridGaussianFactorGraph::eliminateHybridMultifrontal(
     return bayesTree;
   }

-  AlgebraicDecisionTree<Key> probPrimeTree =
+  // DecisionTree for P'(X|M, Z) for all mode sequences M
+  const AlgebraicDecisionTree<Key> probPrimeTree =
       this->continuousProbPrimes(discrete_keys, bayesTree);

+  // Add the model selection factor P(M|Z)
   discreteGraph->add(DecisionTreeFactor(discrete_keys, probPrimeTree));

-  auto updatedBayesTree =
+  // Eliminate discrete variables to get the discrete bayes tree.
+  // This bayes tree will be updated with the
+  // continuous variables as the child nodes.
+  HybridBayesTree::shared_ptr updatedBayesTree =
       discreteGraph->BaseEliminateable::eliminateMultifrontal(discrete_ordering,
                                                               function);

-  auto discrete_clique = (*updatedBayesTree)[discrete_ordering.at(0)];
+  // Get the clique with all the discrete keys.
+  // There should only be 1 clique.
+  const HybridBayesTree::sharedClique discrete_clique =
+      (*updatedBayesTree)[discrete_ordering.at(0)];

   std::set<HybridBayesTreeClique::shared_ptr> clique_set;
   for (auto node : bayesTree->nodes()) {
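The multifrontal variant ends differently: discrete elimination produces its own Bayes tree, and the continuous cliques from the first elimination are re-parented under it, starting with the loop this hunk ends on. A minimal sketch of that re-parenting, mirroring the test later in this diff; the variable names come from the hunk and the exact continuation of the method is not shown here:

    // Collect the cliques of the continuous-only Bayes tree.
    std::set<HybridBayesTreeClique::shared_ptr> clique_set;
    for (auto node : bayesTree->nodes()) {
      clique_set.insert(node.second);
    }

    // Hang each continuous clique off the discrete tree: cliques whose parents
    // are the discrete frontals attach directly to the discrete clique, the
    // rest keep their original parent (detached first so addClique can re-add
    // them). std::find requires <algorithm>.
    for (auto clique : clique_set) {
      if (clique->conditional()->parents() ==
          discrete_clique->conditional()->frontals()) {
        updatedBayesTree->addClique(clique, discrete_clique);
      } else {
        auto it = std::find(clique->parent()->children.begin(),
                            clique->parent()->children.end(), clique);
        clique->parent()->children.erase(it);
        updatedBayesTree->addClique(clique, clique->parent());
      }
    }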
@@ -217,8 +217,10 @@ class GTSAM_EXPORT HybridGaussianFactorGraph
       const DiscreteValues& discreteValues) const;

   /**
-   * @brief Compute the VectorValues solution for the continuous variables for
-   * each mode.
+   * @brief Helper method to compute the VectorValues solution for the
+   * continuous variables for each discrete mode.
+   * Used as a helper to compute q(\mu | M, Z) which is used by
+   * both P(X | M, Z) and P(M | Z).
    *
    * @tparam BAYES Template on the type of Bayes graph, either a bayes net or a
    * bayes tree.
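One way to read the new docstring, consistent with the tests later in this diff (which compare the model-selection entries against probPrime evaluated at the per-mode optimum); the exact normalization constant is not specified by this change:

    \mu_M = \arg\max_X \; P'(X \mid M, Z), \qquad
    P(M \mid Z) \;\propto\; q(\mu_M \mid M, Z) = P'(\mu_M \mid M, Z).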
@@ -141,7 +141,6 @@ TEST(HybridBayesTree, Optimize) {
   DiscreteKeys discrete_keys = {{M(0), 2}, {M(1), 2}, {M(2), 2}};
   vector<double> probs = {0.012519475, 0.041280228, 0.075018647, 0.081663656,
                           0.037152205, 0.12248971,  0.07349729,  0.08};
-  AlgebraicDecisionTree<Key> potentials(discrete_keys, probs);
   dfg.emplace_shared<DecisionTreeFactor>(discrete_keys, probs);

   DiscreteValues expectedMPE = dfg.optimize();
@@ -79,6 +79,8 @@ TEST(HybridEstimation, Incremental) {
   // Ground truth discrete seq
   std::vector<size_t> discrete_seq = {1, 1, 0, 0, 0, 1, 1, 1, 1, 0,
                                       1, 1, 1, 0, 0, 1, 1, 0, 0, 0};
+  // Switching example of robot moving in 1D with given measurements and equal
+  // mode priors.
   Switching switching(K, 1.0, 0.1, measurements, "1/1 1/1");
   HybridSmoother smoother;
   HybridNonlinearFactorGraph graph;
@@ -136,7 +138,7 @@ TEST(HybridEstimation, Incremental) {
  * @param between_sigma Noise model sigma for the between factor.
  * @return GaussianFactorGraph::shared_ptr
  */
-GaussianFactorGraph::shared_ptr specificProblem(
+GaussianFactorGraph::shared_ptr specificModesFactorGraph(
     size_t K, const std::vector<double>& measurements,
     const std::vector<size_t>& discrete_seq, double measurement_sigma = 0.1,
     double between_sigma = 1.0) {
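The rename makes the helper's job explicit: given a fixed mode sequence it builds the ordinary, non-hybrid Gaussian factor graph for that sequence. A usage sketch with the values the Probability test below uses; the {1, 1, 0} sequence is the one that test treats as the designed ground truth:

    // Build and solve the linear problem for one fixed mode sequence.
    const size_t K = 4;
    const std::vector<double> measurements = {0, 1, 2, 2};
    const std::vector<size_t> discrete_seq = {1, 1, 0};

    GaussianFactorGraph::shared_ptr linear_graph = specificModesFactorGraph(
        K, measurements, discrete_seq, /*measurement_sigma=*/0.1,
        /*between_sigma=*/1.0);

    auto bayes_net = linear_graph->eliminateSequential();
    VectorValues values = bayes_net->optimize();
    double prob_prime = linear_graph->probPrime(values);  // unnormalized P'(x* | M, Z)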
@@ -184,7 +186,7 @@ std::vector<size_t> getDiscreteSequence(size_t x) {
 }

 /**
- * @brief Helper method to get the probPrimeTree
+ * @brief Helper method to get the tree of unnormalized probabilities
  * as per the new elimination scheme.
  *
  * @param graph The HybridGaussianFactorGraph to eliminate.
@@ -242,18 +244,15 @@ AlgebraicDecisionTree<Key> probPrimeTree(
 TEST(HybridEstimation, Probability) {
   constexpr size_t K = 4;
   std::vector<double> measurements = {0, 1, 2, 2};

-  // This is the correct sequence
-  // std::vector<size_t> discrete_seq = {1, 1, 0};
-
   double between_sigma = 1.0, measurement_sigma = 0.1;

   std::vector<double> expected_errors, expected_prob_primes;
+  std::map<size_t, std::vector<size_t>> discrete_seq_map;
   for (size_t i = 0; i < pow(2, K - 1); i++) {
-    std::vector<size_t> discrete_seq = getDiscreteSequence<K>(i);
+    discrete_seq_map[i] = getDiscreteSequence<K>(i);

-    GaussianFactorGraph::shared_ptr linear_graph = specificProblem(
-        K, measurements, discrete_seq, measurement_sigma, between_sigma);
+    GaussianFactorGraph::shared_ptr linear_graph = specificModesFactorGraph(
+        K, measurements, discrete_seq_map[i], measurement_sigma, between_sigma);

     auto bayes_net = linear_graph->eliminateSequential();
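getDiscreteSequence<K>(i), defined earlier in this file (see the hunk above), enumerates the K-1 binary modes encoded by the loop index. A hypothetical sketch of what such a helper looks like; the actual bit order in the file may differ:

    // Hypothetical sketch: decode index x into a (K - 1)-long binary mode sequence.
    template <size_t K>
    std::vector<size_t> getDiscreteSequence(size_t x) {
      std::vector<size_t> seq(K - 1, 0);
      for (size_t k = 0; k < K - 1; k++) {
        seq[k] = (x >> k) & 1;  // assumed bit order: least-significant bit is mode 0
      }
      return seq;
    }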
@@ -263,7 +262,10 @@ TEST(HybridEstimation, Probability) {
     expected_prob_primes.push_back(linear_graph->probPrime(values));
   }

-  Switching switching(K, between_sigma, measurement_sigma, measurements);
+  // Switching example of robot moving in 1D with given measurements and equal
+  // mode priors.
+  Switching switching(K, between_sigma, measurement_sigma, measurements,
+                      "1/1 1/1");
   auto graph = switching.linearizedFactorGraph;
   Ordering ordering = getOrdering(graph, HybridGaussianFactorGraph());
@@ -298,26 +300,30 @@ TEST(HybridEstimation, Probability) {
   // Test if the probPrimeTree matches the probability of
   // the individual factor graphs
   for (size_t i = 0; i < pow(2, K - 1); i++) {
-    std::vector<size_t> discrete_seq = getDiscreteSequence<K>(i);
     Assignment<Key> discrete_assignment;
-    for (size_t v = 0; v < discrete_seq.size(); v++) {
-      discrete_assignment[M(v)] = discrete_seq[v];
+    for (size_t v = 0; v < discrete_seq_map[i].size(); v++) {
+      discrete_assignment[M(v)] = discrete_seq_map[i][v];
     }
     EXPECT_DOUBLES_EQUAL(expected_prob_primes.at(i),
                          probPrimeTree(discrete_assignment), 1e-8);
   }

-  // remainingGraph->add(DecisionTreeFactor(discrete_keys, probPrimeTree));
+  discreteGraph->add(DecisionTreeFactor(discrete_keys, probPrimeTree));

-  // Ordering discrete(graph.discreteKeys());
-  // // remainingGraph->print("remainingGraph");
-  // // discrete.print();
-  // auto discreteBayesNet = remainingGraph->eliminateSequential(discrete);
-  // bayesNet->add(*discreteBayesNet);
-  // // bayesNet->print();
+  Ordering discrete(graph.discreteKeys());
+  auto discreteBayesNet =
+      discreteGraph->BaseEliminateable::eliminateSequential(discrete);
+  bayesNet->add(*discreteBayesNet);

-  // HybridValues hybrid_values = bayesNet->optimize();
-  // hybrid_values.discrete().print();
+  HybridValues hybrid_values = bayesNet->optimize();
+
+  // This is the correct sequence as designed
+  DiscreteValues discrete_seq;
+  discrete_seq[M(0)] = 1;
+  discrete_seq[M(1)] = 1;
+  discrete_seq[M(2)] = 0;
+
+  EXPECT(assert_equal(discrete_seq, hybrid_values.discrete()));
 }

 /****************************************************************************/
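In equation form, the loop at the top of this hunk asserts, for every mode sequence M_i enumerated by discrete_seq_map:

    probPrimeTree(M_i) \;=\; P'(x^{*}_{M_i} \mid M_i, Z), \qquad
    x^{*}_{M_i} \;=\; \operatorname*{arg\,min}_{x} \; \mathrm{error}_{M_i}(x),

i.e. every leaf of the model-selection tree equals linear_graph->probPrime(values) for the factor graph built with that fixed mode sequence, to a tolerance of 1e-8. The discrete elimination that follows then recovers the designed sequence M(0)=1, M(1)=1, M(2)=0 as the MPE.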
@@ -330,31 +336,34 @@ TEST(HybridEstimation, ProbabilityMultifrontal) {
   constexpr size_t K = 4;
   std::vector<double> measurements = {0, 1, 2, 2};

-  // This is the correct sequence
-  // std::vector<size_t> discrete_seq = {1, 1, 0};
-
   double between_sigma = 1.0, measurement_sigma = 0.1;

+  // For each discrete mode sequence, create the individual factor graphs and
+  // optimize each.
   std::vector<double> expected_errors, expected_prob_primes;
+  std::map<size_t, std::vector<size_t>> discrete_seq_map;
   for (size_t i = 0; i < pow(2, K - 1); i++) {
-    std::vector<size_t> discrete_seq = getDiscreteSequence<K>(i);
+    discrete_seq_map[i] = getDiscreteSequence<K>(i);

-    GaussianFactorGraph::shared_ptr linear_graph = specificProblem(
-        K, measurements, discrete_seq, measurement_sigma, between_sigma);
+    GaussianFactorGraph::shared_ptr linear_graph = specificModesFactorGraph(
+        K, measurements, discrete_seq_map[i], measurement_sigma, between_sigma);

     auto bayes_tree = linear_graph->eliminateMultifrontal();

     VectorValues values = bayes_tree->optimize();

-    std::cout << i << " " << linear_graph->error(values) << std::endl;
     expected_errors.push_back(linear_graph->error(values));
     expected_prob_primes.push_back(linear_graph->probPrime(values));
   }

-  Switching switching(K, between_sigma, measurement_sigma, measurements);
+  // Switching example of robot moving in 1D with given measurements and equal
+  // mode priors.
+  Switching switching(K, between_sigma, measurement_sigma, measurements,
+                      "1/1 1/1");
   auto graph = switching.linearizedFactorGraph;
   Ordering ordering = getOrdering(graph, HybridGaussianFactorGraph());

+  // Get the tree of unnormalized probabilities for each mode sequence.
   AlgebraicDecisionTree<Key> expected_probPrimeTree = probPrimeTree(graph);

   // Eliminate continuous
@@ -379,10 +388,9 @@ TEST(HybridEstimation, ProbabilityMultifrontal) {
   // Test if the probPrimeTree matches the probability of
   // the individual factor graphs
   for (size_t i = 0; i < pow(2, K - 1); i++) {
-    std::vector<size_t> discrete_seq = getDiscreteSequence<K>(i);
     Assignment<Key> discrete_assignment;
-    for (size_t v = 0; v < discrete_seq.size(); v++) {
-      discrete_assignment[M(v)] = discrete_seq[v];
+    for (size_t v = 0; v < discrete_seq_map[i].size(); v++) {
+      discrete_assignment[M(v)] = discrete_seq_map[i][v];
     }
     EXPECT_DOUBLES_EQUAL(expected_prob_primes.at(i),
                          probPrimeTree(discrete_assignment), 1e-8);
@@ -390,13 +398,44 @@ TEST(HybridEstimation, ProbabilityMultifrontal) {

   discreteGraph->add(DecisionTreeFactor(discrete_keys, probPrimeTree));

-  // Ordering discrete(graph.discreteKeys());
-  // auto discreteBayesTree = discreteGraph->eliminateMultifrontal(discrete);
-  // // DiscreteBayesTree should have only 1 clique
-  // bayesTree->addClique((*discreteBayesTree)[discrete.at(0)]);
+  Ordering discrete(graph.discreteKeys());
+  auto discreteBayesTree =
+      discreteGraph->BaseEliminateable::eliminateMultifrontal(discrete);

-  // // HybridValues hybrid_values = bayesNet->optimize();
-  // // hybrid_values.discrete().print();
+  EXPECT_LONGS_EQUAL(1, discreteBayesTree->size());
+  // DiscreteBayesTree should have only 1 clique
+  auto discrete_clique = (*discreteBayesTree)[discrete.at(0)];
+
+  std::set<HybridBayesTreeClique::shared_ptr> clique_set;
+  for (auto node : bayesTree->nodes()) {
+    clique_set.insert(node.second);
+  }
+
+  // Set the root of the bayes tree as the discrete clique
+  for (auto clique : clique_set) {
+    if (clique->conditional()->parents() ==
+        discrete_clique->conditional()->frontals()) {
+      discreteBayesTree->addClique(clique, discrete_clique);
+
+    } else {
+      // Remove the clique from the children of the parents since it will get
+      // added again in addClique.
+      auto clique_it = std::find(clique->parent()->children.begin(),
+                                 clique->parent()->children.end(), clique);
+      clique->parent()->children.erase(clique_it);
+      discreteBayesTree->addClique(clique, clique->parent());
+    }
+  }
+
+  HybridValues hybrid_values = discreteBayesTree->optimize();
+
+  // This is the correct sequence as designed
+  DiscreteValues discrete_seq;
+  discrete_seq[M(0)] = 1;
+  discrete_seq[M(1)] = 1;
+  discrete_seq[M(2)] = 0;
+
+  EXPECT(assert_equal(discrete_seq, hybrid_values.discrete()));
 }

 /* ************************************************************************* */
@@ -176,7 +176,7 @@ TEST(HybridGaussianElimination, IncrementalInference) {

   auto discreteConditional = isam[M(1)]->conditional()->asDiscreteConditional();

-  // Test if the probability values are as expected with regression tests.
+  // Test the probability values with regression tests.
   DiscreteValues assignment;
   EXPECT(assert_equal(0.166667, m00_prob, 1e-5));
   assignment[M(0)] = 0;
@@ -195,7 +195,7 @@ TEST(HybridNonlinearISAM, IncrementalInference) {
   auto discreteConditional =
       bayesTree[M(1)]->conditional()->asDiscreteConditional();

-  // Test if the probability values are as expected with regression tests.
+  // Test the probability values with regression tests.
   DiscreteValues assignment;
   EXPECT(assert_equal(0.166667, m00_prob, 1e-5));
   assignment[M(0)] = 0;