update tests

release/4.3a0
Varun Agrawal 2023-01-06 10:12:50 -05:00
parent e43fd3e8ca
commit 74998336d9
8 changed files with 220 additions and 103 deletions
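The pattern repeated across these test updates: hybrid elimination is now called without an explicit ordering, and where an ordering is still wanted it comes from the free function HybridOrdering rather than the old getHybridOrdering() member. A minimal sketch of the two call styles, assuming the Switching test fixture from Switching.h and the GTSAM hybrid API exercised in the hunks below:

// Sketch only: assumes the Switching fixture and GTSAM hybrid headers used by these tests.
#include <gtsam/hybrid/HybridBayesNet.h>
#include <gtsam/hybrid/HybridBayesTree.h>
#include <gtsam/hybrid/HybridGaussianFactorGraph.h>
#include <gtsam/inference/Ordering.h>

#include "Switching.h"

using namespace gtsam;

void eliminationCallStyles() {
  Switching s(4);
  const HybridGaussianFactorGraph& fg = s.linearizedFactorGraph;

  // New style: no explicit ordering is passed to hybrid elimination.
  HybridBayesNet::shared_ptr bn = fg.eliminateSequential();
  HybridBayesTree::shared_ptr bt = fg.eliminateMultifrontal();

  // Where a hybrid (discrete-last) ordering is still needed explicitly,
  // the free function HybridOrdering replaces getHybridOrdering().
  const Ordering ordering = HybridOrdering(fg);
  HybridBayesTree::shared_ptr bt2 = fg.eliminateMultifrontal(ordering);
}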


@@ -185,9 +185,8 @@ TEST(HybridBayesNet, OptimizeAssignment) {
 TEST(HybridBayesNet, Optimize) {
   Switching s(4, 1.0, 0.1, {0, 1, 2, 3}, "1/1 1/1");

-  Ordering hybridOrdering = s.linearizedFactorGraph.getHybridOrdering();
   HybridBayesNet::shared_ptr hybridBayesNet =
-      s.linearizedFactorGraph.eliminateSequential(hybridOrdering);
+      s.linearizedFactorGraph.eliminateSequential();

   HybridValues delta = hybridBayesNet->optimize();
@@ -212,9 +211,8 @@ TEST(HybridBayesNet, Optimize) {
 TEST(HybridBayesNet, Error) {
   Switching s(3);

-  Ordering hybridOrdering = s.linearizedFactorGraph.getHybridOrdering();
   HybridBayesNet::shared_ptr hybridBayesNet =
-      s.linearizedFactorGraph.eliminateSequential(hybridOrdering);
+      s.linearizedFactorGraph.eliminateSequential();

   HybridValues delta = hybridBayesNet->optimize();
   auto error_tree = hybridBayesNet->error(delta.continuous());
@@ -266,9 +264,8 @@ TEST(HybridBayesNet, Error) {
 TEST(HybridBayesNet, Prune) {
   Switching s(4);

-  Ordering hybridOrdering = s.linearizedFactorGraph.getHybridOrdering();
   HybridBayesNet::shared_ptr hybridBayesNet =
-      s.linearizedFactorGraph.eliminateSequential(hybridOrdering);
+      s.linearizedFactorGraph.eliminateSequential();

   HybridValues delta = hybridBayesNet->optimize();
@@ -284,9 +281,8 @@ TEST(HybridBayesNet, Prune) {
 TEST(HybridBayesNet, UpdateDiscreteConditionals) {
   Switching s(4);

-  Ordering hybridOrdering = s.linearizedFactorGraph.getHybridOrdering();
   HybridBayesNet::shared_ptr hybridBayesNet =
-      s.linearizedFactorGraph.eliminateSequential(hybridOrdering);
+      s.linearizedFactorGraph.eliminateSequential();

   size_t maxNrLeaves = 3;
   auto discreteConditionals = hybridBayesNet->discreteConditionals();
@@ -353,8 +349,7 @@ TEST(HybridBayesNet, Sampling) {
   // Create the factor graph from the nonlinear factor graph.
   HybridGaussianFactorGraph::shared_ptr fg = nfg.linearize(initial);

   // Eliminate into BN
-  Ordering ordering = fg->getHybridOrdering();
-  HybridBayesNet::shared_ptr bn = fg->eliminateSequential(ordering);
+  HybridBayesNet::shared_ptr bn = fg->eliminateSequential();

   // Set up sampling
   std::mt19937_64 gen(11);


@@ -37,9 +37,8 @@ using symbol_shorthand::X;
 TEST(HybridBayesTree, OptimizeMultifrontal) {
   Switching s(4);

-  Ordering hybridOrdering = s.linearizedFactorGraph.getHybridOrdering();
   HybridBayesTree::shared_ptr hybridBayesTree =
-      s.linearizedFactorGraph.eliminateMultifrontal(hybridOrdering);
+      s.linearizedFactorGraph.eliminateMultifrontal();

   HybridValues delta = hybridBayesTree->optimize();

   VectorValues expectedValues;
@@ -203,17 +202,7 @@ TEST(HybridBayesTree, Choose) {
   GaussianBayesTree gbt = isam.choose(assignment);

-  Ordering ordering;
-  ordering += X(0);
-  ordering += X(1);
-  ordering += X(2);
-  ordering += X(3);
-  ordering += M(0);
-  ordering += M(1);
-  ordering += M(2);
-
-  // TODO(Varun) get segfault if ordering not provided
-  auto bayesTree = s.linearizedFactorGraph.eliminateMultifrontal(ordering);
+  auto bayesTree = s.linearizedFactorGraph.eliminateMultifrontal();

   auto expected_gbt = bayesTree->choose(assignment);


@@ -90,7 +90,7 @@ TEST(HybridEstimation, Full) {
   }

   HybridBayesNet::shared_ptr bayesNet =
-      graph.eliminateSequential(hybridOrdering);
+      graph.eliminateSequential();

   EXPECT_LONGS_EQUAL(2 * K - 1, bayesNet->size());
@@ -481,8 +481,7 @@ TEST(HybridEstimation, CorrectnessViaSampling) {
   const auto fg = createHybridGaussianFactorGraph();

   // 2. Eliminate into BN
-  const Ordering ordering = fg->getHybridOrdering();
-  const HybridBayesNet::shared_ptr bn = fg->eliminateSequential(ordering);
+  const HybridBayesNet::shared_ptr bn = fg->eliminateSequential();

   // Set up sampling
   std::mt19937_64 rng(11);


@@ -130,8 +130,7 @@ TEST(HybridGaussianFactorGraph, eliminateFullSequentialEqualChance) {
   hfg.add(GaussianMixtureFactor({X(1)}, {m1}, dt));

-  auto result =
-      hfg.eliminateSequential(Ordering::ColamdConstrainedLast(hfg, {M(1)}));
+  auto result = hfg.eliminateSequential();

   auto dc = result->at(2)->asDiscrete();
   DiscreteValues dv;
@@ -161,8 +160,7 @@ TEST(HybridGaussianFactorGraph, eliminateFullSequentialSimple) {
   // Joint discrete probability table for c1, c2
   hfg.add(DecisionTreeFactor({{M(1), 2}, {M(2), 2}}, "1 2 3 4"));

-  HybridBayesNet::shared_ptr result = hfg.eliminateSequential(
-      Ordering::ColamdConstrainedLast(hfg, {M(1), M(2)}));
+  HybridBayesNet::shared_ptr result = hfg.eliminateSequential();

   // There are 4 variables (2 continuous + 2 discrete) in the bayes net.
   EXPECT_LONGS_EQUAL(4, result->size());
@@ -187,8 +185,7 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalSimple) {
   // variable throws segfault
   // hfg.add(DecisionTreeFactor({{M(1), 2}, {M(2), 2}}, "1 2 3 4"));

-  HybridBayesTree::shared_ptr result =
-      hfg.eliminateMultifrontal(hfg.getHybridOrdering());
+  HybridBayesTree::shared_ptr result = hfg.eliminateMultifrontal();

   // The bayes tree should have 3 cliques
   EXPECT_LONGS_EQUAL(3, result->size());
@@ -218,7 +215,7 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalCLG) {
   hfg.add(HybridDiscreteFactor(DecisionTreeFactor(m, {2, 8})));

   // Get a constrained ordering keeping c1 last
-  auto ordering_full = hfg.getHybridOrdering();
+  auto ordering_full = HybridOrdering(hfg);

   // Returns a Hybrid Bayes Tree with distribution P(x0|x1)P(x1|c1)P(c1)
   HybridBayesTree::shared_ptr hbt = hfg.eliminateMultifrontal(ordering_full);
@@ -518,8 +515,7 @@ TEST(HybridGaussianFactorGraph, optimize) {
   hfg.add(GaussianMixtureFactor({X(1)}, {c1}, dt));

-  auto result =
-      hfg.eliminateSequential(Ordering::ColamdConstrainedLast(hfg, {C(1)}));
+  auto result = hfg.eliminateSequential();

   HybridValues hv = result->optimize();
@@ -572,9 +568,7 @@ TEST(HybridGaussianFactorGraph, ErrorAndProbPrime) {
   HybridGaussianFactorGraph graph = s.linearizedFactorGraph;

-  Ordering hybridOrdering = graph.getHybridOrdering();
-  HybridBayesNet::shared_ptr hybridBayesNet =
-      graph.eliminateSequential(hybridOrdering);
+  HybridBayesNet::shared_ptr hybridBayesNet = graph.eliminateSequential();

   const HybridValues delta = hybridBayesNet->optimize();
   const double error = graph.error(delta);
@@ -593,9 +587,7 @@ TEST(HybridGaussianFactorGraph, ErrorAndProbPrimeTree) {
   HybridGaussianFactorGraph graph = s.linearizedFactorGraph;

-  Ordering hybridOrdering = graph.getHybridOrdering();
-  HybridBayesNet::shared_ptr hybridBayesNet =
-      graph.eliminateSequential(hybridOrdering);
+  HybridBayesNet::shared_ptr hybridBayesNet = graph.eliminateSequential();

   HybridValues delta = hybridBayesNet->optimize();
   auto error_tree = graph.error(delta.continuous());
@@ -684,10 +676,7 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1) {
   expectedBayesNet.emplace_back(new DiscreteConditional(mode, "74/26"));

   // Test elimination
-  Ordering ordering;
-  ordering.push_back(X(0));
-  ordering.push_back(M(0));
-  const auto posterior = fg.eliminateSequential(ordering);
+  const auto posterior = fg.eliminateSequential();
   EXPECT(assert_equal(expectedBayesNet, *posterior, 0.01));
 }
@@ -719,10 +708,7 @@ TEST(HybridGaussianFactorGraph, EliminateTiny2) {
   expectedBayesNet.emplace_back(new DiscreteConditional(mode, "23/77"));

   // Test elimination
-  Ordering ordering;
-  ordering.push_back(X(0));
-  ordering.push_back(M(0));
-  const auto posterior = fg.eliminateSequential(ordering);
+  const auto posterior = fg.eliminateSequential();
   EXPECT(assert_equal(expectedBayesNet, *posterior, 0.01));
 }
@@ -741,11 +727,7 @@ TEST(HybridGaussianFactorGraph, EliminateTiny22) {
   EXPECT_LONGS_EQUAL(5, fg.size());

   // Test elimination
-  Ordering ordering;
-  ordering.push_back(X(0));
-  ordering.push_back(M(0));
-  ordering.push_back(M(1));
-  const auto posterior = fg.eliminateSequential(ordering);
+  const auto posterior = fg.eliminateSequential();

   // Compute the log-ratio between the Bayes net and the factor graph.
   auto compute_ratio = [&](HybridValues *sample) -> double {


@@ -0,0 +1,158 @@
/* ----------------------------------------------------------------------------
 * GTSAM Copyright 2010, Georgia Tech Research Corporation,
 * Atlanta, Georgia 30332-0415
 * All Rights Reserved
 * Authors: Frank Dellaert, et al. (see THANKS for the full author list)
 * See LICENSE for the license information
 * -------------------------------------------------------------------------- */

/**
 * @file testHybridPruning.cpp
 * @brief Unit tests for end-to-end Hybrid Estimation
 * @author Varun Agrawal
 */

#include <gtsam/discrete/DiscreteBayesNet.h>
#include <gtsam/geometry/Pose2.h>
#include <gtsam/hybrid/HybridBayesNet.h>
#include <gtsam/hybrid/HybridNonlinearFactorGraph.h>
#include <gtsam/hybrid/HybridNonlinearISAM.h>
#include <gtsam/hybrid/HybridSmoother.h>
#include <gtsam/hybrid/MixtureFactor.h>
#include <gtsam/inference/Symbol.h>
#include <gtsam/linear/GaussianBayesNet.h>
#include <gtsam/linear/GaussianBayesTree.h>
#include <gtsam/linear/GaussianFactorGraph.h>
#include <gtsam/linear/JacobianFactor.h>
#include <gtsam/linear/NoiseModel.h>
#include <gtsam/nonlinear/NonlinearFactorGraph.h>
#include <gtsam/nonlinear/PriorFactor.h>
#include <gtsam/slam/BetweenFactor.h>

// Include for test suite
#include <CppUnitLite/TestHarness.h>

#include "Switching.h"

using namespace std;
using namespace gtsam;
using symbol_shorthand::X;

/****************************************************************************/
// Test approximate inference with an additional pruning step.
TEST_DISABLED(HybridPruning, ISAM) {
  size_t K = 16;
  std::vector<double> measurements = {0, 1, 2, 2, 2, 2,  3,  4,  5,  6,  6,
                                      7, 8, 9, 9, 9, 10, 11, 11, 11, 11};
  // Ground truth discrete seq
  std::vector<size_t> discrete_seq = {1, 1, 0, 0, 0, 1, 1, 1, 1, 0,
                                      1, 1, 1, 0, 0, 1, 1, 0, 0, 0};
  // Switching example of robot moving in 1D
  // with given measurements and equal mode priors.
  Switching switching(K, 1.0, 0.1, measurements, "1/1 1/1");
  HybridNonlinearISAM isam;
  HybridNonlinearFactorGraph graph;
  Values initial;

  // Add the X(0) prior
  graph.push_back(switching.nonlinearFactorGraph.at(0));
  initial.insert(X(0), switching.linearizationPoint.at<double>(X(0)));

  HybridGaussianFactorGraph linearized;
  HybridGaussianFactorGraph bayesNet;

  for (size_t k = 1; k < K; k++) {
    // Motion Model
    graph.push_back(switching.nonlinearFactorGraph.at(k));
    // Measurement
    graph.push_back(switching.nonlinearFactorGraph.at(k + K - 1));

    initial.insert(X(k), switching.linearizationPoint.at<double>(X(k)));

    isam.update(graph, initial, 3);

    graph.resize(0);
    initial.clear();
  }

  Values result = isam.estimate();
  DiscreteValues assignment = isam.assignment();

  DiscreteValues expected_discrete;
  for (size_t k = 0; k < K - 1; k++) {
    expected_discrete[M(k)] = discrete_seq[k];
  }
  std::cout << "\n\n\nNonlinear Version!!\n\n" << std::endl;
  GTSAM_PRINT(expected_discrete);
  GTSAM_PRINT(assignment);
  EXPECT(assert_equal(expected_discrete, assignment));

  Values expected_continuous;
  for (size_t k = 0; k < K; k++) {
    expected_continuous.insert(X(k), measurements[k]);
  }
  EXPECT(assert_equal(expected_continuous, result));
}

/****************************************************************************/
// Test approximate inference with an additional pruning step.
TEST(HybridPruning, GaussianISAM) {
  size_t K = 16;
  std::vector<double> measurements = {0, 1, 2, 2, 2, 2,  3,  4,  5,  6,  6,
                                      7, 8, 9, 9, 9, 10, 11, 11, 11, 11};
  // Ground truth discrete seq
  std::vector<size_t> discrete_seq = {1, 1, 0, 0, 0, 1, 1, 1, 1, 0,
                                      1, 1, 1, 0, 0, 1, 1, 0, 0, 0};
  // Switching example of robot moving in 1D
  // with given measurements and equal mode priors.
  Switching switching(K, 1.0, 0.1, measurements, "1/1 1/1");
  HybridGaussianISAM isam;
  HybridGaussianFactorGraph graph;
  Values initial;

  // Add the X(0) prior
  graph.push_back(switching.linearizedFactorGraph.at(0));
  initial.insert(X(0), switching.linearizationPoint.at<double>(X(0)));

  HybridGaussianFactorGraph linearized;
  HybridGaussianFactorGraph bayesNet;

  for (size_t k = 1; k < K; k++) {
    // Motion Model
    graph.push_back(switching.linearizedFactorGraph.at(k));
    // Measurement
    graph.push_back(switching.linearizedFactorGraph.at(k + K - 1));

    // initial.insert(X(k), switching.linearizationPoint.at<double>(X(k)));

    isam.update(graph, 3);

    graph.resize(0);
    // initial.clear();
  }

  HybridValues values = isam.optimize();

  DiscreteValues expected_discrete;
  for (size_t k = 0; k < K - 1; k++) {
    expected_discrete[M(k)] = discrete_seq[k];
  }
  EXPECT(assert_equal(expected_discrete, values.discrete()));

  // Values expected_continuous;
  // for (size_t k = 0; k < K; k++) {
  //   expected_continuous.insert(X(k), measurements[k]);
  // }
  // EXPECT(assert_equal(expected_continuous, result));
}

/* ************************************************************************* */
int main() {
  TestResult tr;
  return TestRegistry::runAllTests(tr);
}
/* ************************************************************************* */


@@ -150,8 +150,7 @@ TEST(HybridSerialization, GaussianMixture) {
 // Test HybridBayesNet serialization.
 TEST(HybridSerialization, HybridBayesNet) {
   Switching s(2);
-  Ordering ordering = s.linearizedFactorGraph.getHybridOrdering();
-  HybridBayesNet hbn = *(s.linearizedFactorGraph.eliminateSequential(ordering));
+  HybridBayesNet hbn = *(s.linearizedFactorGraph.eliminateSequential());

   EXPECT(equalsObj<HybridBayesNet>(hbn));
   EXPECT(equalsXML<HybridBayesNet>(hbn));
@@ -162,9 +161,7 @@ TEST(HybridSerialization, HybridBayesNet) {
 // Test HybridBayesTree serialization.
 TEST(HybridSerialization, HybridBayesTree) {
   Switching s(2);
-  Ordering ordering = s.linearizedFactorGraph.getHybridOrdering();
-  HybridBayesTree hbt =
-      *(s.linearizedFactorGraph.eliminateMultifrontal(ordering));
+  HybridBayesTree hbt = *(s.linearizedFactorGraph.eliminateMultifrontal());

   EXPECT(equalsObj<HybridBayesTree>(hbt));
   EXPECT(equalsXML<HybridBayesTree>(hbt));


@@ -25,7 +25,6 @@ from gtsam import (DiscreteConditional, DiscreteKeys, GaussianConditional,
 class TestHybridGaussianFactorGraph(GtsamTestCase):
     """Unit tests for HybridGaussianFactorGraph."""

     def test_create(self):
         """Test construction of hybrid factor graph."""
         model = noiseModel.Unit.Create(3)
@@ -42,9 +41,7 @@ class TestHybridGaussianFactorGraph(GtsamTestCase):
         hfg.push_back(jf2)
         hfg.push_back(gmf)

-        hbn = hfg.eliminateSequential(
-            Ordering.ColamdConstrainedLastHybridGaussianFactorGraph(
-                hfg, [C(0)]))
+        hbn = hfg.eliminateSequential()

         self.assertEqual(hbn.size(), 2)
@@ -74,15 +71,14 @@ class TestHybridGaussianFactorGraph(GtsamTestCase):
         dtf = gtsam.DecisionTreeFactor([(C(0), 2)], "0 1")
         hfg.push_back(dtf)

-        hbn = hfg.eliminateSequential(
-            Ordering.ColamdConstrainedLastHybridGaussianFactorGraph(
-                hfg, [C(0)]))
+        hbn = hfg.eliminateSequential()

         hv = hbn.optimize()
         self.assertEqual(hv.atDiscrete(C(0)), 1)

     @staticmethod
-    def tiny(num_measurements: int = 1, prior_mean: float = 5.0,
+    def tiny(num_measurements: int = 1,
+             prior_mean: float = 5.0,
              prior_sigma: float = 0.5) -> HybridBayesNet:
         """
         Create a tiny two variable hybrid model which represents
@@ -129,20 +125,23 @@ class TestHybridGaussianFactorGraph(GtsamTestCase):
         bayesNet2 = self.tiny(prior_sigma=5.0, num_measurements=1)
         # bn1: # 1/sqrt(2*pi*0.5^2)
         # bn2: # 1/sqrt(2*pi*5.0^2)
-        expected_ratio = np.sqrt(2*np.pi*5.0**2)/np.sqrt(2*np.pi*0.5**2)
+        expected_ratio = np.sqrt(2 * np.pi * 5.0**2) / np.sqrt(
+            2 * np.pi * 0.5**2)

         mean0 = HybridValues()
         mean0.insert(X(0), [5.0])
         mean0.insert(Z(0), [5.0])
         mean0.insert(M(0), 0)
         self.assertAlmostEqual(bayesNet1.evaluate(mean0) /
-                               bayesNet2.evaluate(mean0), expected_ratio,
+                               bayesNet2.evaluate(mean0),
+                               expected_ratio,
                                delta=1e-9)

         mean1 = HybridValues()
         mean1.insert(X(0), [5.0])
         mean1.insert(Z(0), [5.0])
         mean1.insert(M(0), 1)
         self.assertAlmostEqual(bayesNet1.evaluate(mean1) /
-                               bayesNet2.evaluate(mean1), expected_ratio,
+                               bayesNet2.evaluate(mean1),
+                               expected_ratio,
                                delta=1e-9)

     @staticmethod
@@ -171,11 +170,13 @@ class TestHybridGaussianFactorGraph(GtsamTestCase):
         return fg

     @classmethod
-    def estimate_marginals(cls, target, proposal_density: HybridBayesNet,
+    def estimate_marginals(cls,
+                           target,
+                           proposal_density: HybridBayesNet,
                            N=10000):
         """Do importance sampling to estimate discrete marginal P(mode)."""
         # Allocate space for marginals on mode.
-        marginals = np.zeros((2,))
+        marginals = np.zeros((2, ))

         # Do importance sampling.
         for s in range(N):
@@ -210,14 +211,15 @@ class TestHybridGaussianFactorGraph(GtsamTestCase):
             return bayesNet.evaluate(x)

         # Create proposal density on (x0, mode), making sure it has same mean:
-        posterior_information = 1/(prior_sigma**2) + 1/(0.5**2)
+        posterior_information = 1 / (prior_sigma**2) + 1 / (0.5**2)
         posterior_sigma = posterior_information**(-0.5)
-        proposal_density = self.tiny(
-            num_measurements=0, prior_mean=5.0, prior_sigma=posterior_sigma)
+        proposal_density = self.tiny(num_measurements=0,
+                                     prior_mean=5.0,
+                                     prior_sigma=posterior_sigma)

         # Estimate marginals using importance sampling.
-        marginals = self.estimate_marginals(
-            target=unnormalized_posterior, proposal_density=proposal_density)
+        marginals = self.estimate_marginals(target=unnormalized_posterior,
+                                            proposal_density=proposal_density)

         # print(f"True mode: {values.atDiscrete(M(0))}")
         # print(f"P(mode=0; Z) = {marginals[0]}")
         # print(f"P(mode=1; Z) = {marginals[1]}")
@@ -230,10 +232,7 @@ class TestHybridGaussianFactorGraph(GtsamTestCase):
         self.assertEqual(fg.size(), 3)

         # Test elimination.
-        ordering = gtsam.Ordering()
-        ordering.push_back(X(0))
-        ordering.push_back(M(0))
-        posterior = fg.eliminateSequential(ordering)
+        posterior = fg.eliminateSequential()

         def true_posterior(x):
             """Posterior from elimination."""
@@ -241,8 +240,8 @@ class TestHybridGaussianFactorGraph(GtsamTestCase):
             return posterior.evaluate(x)

         # Estimate marginals using importance sampling.
-        marginals = self.estimate_marginals(
-            target=true_posterior, proposal_density=proposal_density)
+        marginals = self.estimate_marginals(target=true_posterior,
+                                            proposal_density=proposal_density)

         # print(f"True mode: {values.atDiscrete(M(0))}")
         # print(f"P(mode=0; z0) = {marginals[0]}")
         # print(f"P(mode=1; z0) = {marginals[1]}")
@@ -253,8 +252,7 @@ class TestHybridGaussianFactorGraph(GtsamTestCase):
     @staticmethod
     def calculate_ratio(bayesNet: HybridBayesNet,
-                        fg: HybridGaussianFactorGraph,
-                        sample: HybridValues):
+                        fg: HybridGaussianFactorGraph, sample: HybridValues):
         """Calculate ratio between Bayes net and factor graph."""
         return bayesNet.evaluate(sample) / fg.probPrime(sample) if \
             fg.probPrime(sample) > 0 else 0
@@ -285,14 +283,15 @@ class TestHybridGaussianFactorGraph(GtsamTestCase):
             return bayesNet.evaluate(x)

         # Create proposal density on (x0, mode), making sure it has same mean:
-        posterior_information = 1/(prior_sigma**2) + 2.0/(3.0**2)
+        posterior_information = 1 / (prior_sigma**2) + 2.0 / (3.0**2)
         posterior_sigma = posterior_information**(-0.5)
-        proposal_density = self.tiny(
-            num_measurements=0, prior_mean=5.0, prior_sigma=posterior_sigma)
+        proposal_density = self.tiny(num_measurements=0,
+                                     prior_mean=5.0,
+                                     prior_sigma=posterior_sigma)

         # Estimate marginals using importance sampling.
-        marginals = self.estimate_marginals(
-            target=unnormalized_posterior, proposal_density=proposal_density)
+        marginals = self.estimate_marginals(target=unnormalized_posterior,
+                                            proposal_density=proposal_density)

         # print(f"True mode: {values.atDiscrete(M(0))}")
         # print(f"P(mode=0; Z) = {marginals[0]}")
         # print(f"P(mode=1; Z) = {marginals[1]}")
@@ -319,10 +318,7 @@ class TestHybridGaussianFactorGraph(GtsamTestCase):
         self.assertAlmostEqual(ratio, expected_ratio)

         # Test elimination.
-        ordering = gtsam.Ordering()
-        ordering.push_back(X(0))
-        ordering.push_back(M(0))
-        posterior = fg.eliminateSequential(ordering)
+        posterior = fg.eliminateSequential()

         # Calculate ratio between Bayes net probability and the factor graph:
         expected_ratio = self.calculate_ratio(posterior, fg, values)


@@ -14,22 +14,27 @@ from __future__ import print_function
 import unittest

-import gtsam
 import numpy as np
 from gtsam.symbol_shorthand import C, X
 from gtsam.utils.test_case import GtsamTestCase

+import gtsam

 class TestHybridGaussianFactorGraph(GtsamTestCase):
     """Unit tests for HybridGaussianFactorGraph."""

     def test_nonlinear_hybrid(self):
         nlfg = gtsam.HybridNonlinearFactorGraph()
         dk = gtsam.DiscreteKeys()
         dk.push_back((10, 2))
-        nlfg.add(gtsam.BetweenFactorPoint3(1, 2, gtsam.Point3(1, 2, 3), gtsam.noiseModel.Diagonal.Variances([1, 1, 1])))
         nlfg.add(
-            gtsam.PriorFactorPoint3(2, gtsam.Point3(1, 2, 3), gtsam.noiseModel.Diagonal.Variances([0.5, 0.5, 0.5])))
+            gtsam.BetweenFactorPoint3(
+                1, 2, gtsam.Point3(1, 2, 3),
+                gtsam.noiseModel.Diagonal.Variances([1, 1, 1])))
+        nlfg.add(
+            gtsam.PriorFactorPoint3(
+                2, gtsam.Point3(1, 2, 3),
+                gtsam.noiseModel.Diagonal.Variances([0.5, 0.5, 0.5])))
         nlfg.push_back(
             gtsam.MixtureFactor([1], dk, [
                 gtsam.PriorFactorPoint3(1, gtsam.Point3(0, 0, 0),
@@ -42,11 +47,7 @@ class TestHybridGaussianFactorGraph(GtsamTestCase):
         values.insert_point3(1, gtsam.Point3(0, 0, 0))
         values.insert_point3(2, gtsam.Point3(2, 3, 1))
         hfg = nlfg.linearize(values)
-        o = gtsam.Ordering()
-        o.push_back(1)
-        o.push_back(2)
-        o.push_back(10)
-        hbn = hfg.eliminateSequential(o)
+        hbn = hfg.eliminateSequential()
         hbv = hbn.optimize()
         self.assertEqual(hbv.atDiscrete(10), 0)