update HybridSmoother to be more like HybridISAM, compute ordering if not given

parent 29b245d1dc
commit 488dd7838f
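In effect, callers may now omit the elimination ordering and let the smoother compute one, as HybridISAM does. A minimal sketch of the two call styles after this change; the `smoother`, `graph`, and `initial` objects are assumed to be set up as in the IncrementalSmoother test further down, and the pruning bound of 3 leaves also mirrors that test:

// Assumed setup (see the IncrementalSmoother test below): a HybridSmoother,
// a nonlinear factor graph `graph`, and a linearization point `initial`.
HybridSmoother smoother;
HybridGaussianFactorGraph linearized = *graph.linearize(initial);

// Option 1: omit the ordering; update() computes one via getOrdering(graph).
smoother.update(linearized, 3);

// Option 2: pass an ordering explicitly; note it is now the last argument.
Ordering ordering = smoother.getOrdering(linearized);
smoother.update(linearized, 3, ordering);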
@@ -57,8 +57,16 @@ Ordering HybridSmoother::getOrdering(
 /* ************************************************************************* */
 void HybridSmoother::update(HybridGaussianFactorGraph graph,
-                            const Ordering &ordering,
-                            std::optional<size_t> maxNrLeaves) {
+                            std::optional<size_t> maxNrLeaves,
+                            const std::optional<Ordering> given_ordering) {
+  Ordering ordering;
+  // If no ordering provided, then we compute one
+  if (!given_ordering.has_value()) {
+    ordering = this->getOrdering(graph);
+  } else {
+    ordering = *given_ordering;
+  }
+
   // Add the necessary conditionals from the previous timestep(s).
   std::tie(graph, hybridBayesNet_) =
       addConditionals(graph, hybridBayesNet_, ordering);

@@ -44,13 +44,14 @@ class HybridSmoother {
   * corresponding to the pruned choices.
   *
   * @param graph The new factors, should be linear only
-  * @param ordering The ordering for elimination, only continuous vars are
-  * allowed
   * @param maxNrLeaves The maximum number of leaves in the new discrete factor,
   * if applicable
+  * @param given_ordering The (optional) ordering for elimination, only
+  * continuous variables are allowed
   */
-  void update(HybridGaussianFactorGraph graph, const Ordering& ordering,
-              std::optional<size_t> maxNrLeaves = {});
+  void update(HybridGaussianFactorGraph graph,
+              std::optional<size_t> maxNrLeaves = {},
+              const std::optional<Ordering> given_ordering = {});

  Ordering getOrdering(const HybridGaussianFactorGraph& newFactors);

@@ -74,4 +75,4 @@ class HybridSmoother {
   const HybridBayesNet& hybridBayesNet() const;
 };

-};  // namespace gtsam
+}  // namespace gtsam
@@ -46,35 +46,6 @@ using namespace gtsam;
 using symbol_shorthand::X;
 using symbol_shorthand::Z;

-Ordering getOrdering(HybridGaussianFactorGraph& factors,
-                     const HybridGaussianFactorGraph& newFactors) {
-  factors.push_back(newFactors);
-  // Get all the discrete keys from the factors
-  KeySet allDiscrete = factors.discreteKeySet();
-
-  // Create KeyVector with continuous keys followed by discrete keys.
-  KeyVector newKeysDiscreteLast;
-  const KeySet newFactorKeys = newFactors.keys();
-  // Insert continuous keys first.
-  for (auto& k : newFactorKeys) {
-    if (!allDiscrete.exists(k)) {
-      newKeysDiscreteLast.push_back(k);
-    }
-  }
-
-  // Insert discrete keys at the end
-  std::copy(allDiscrete.begin(), allDiscrete.end(),
-            std::back_inserter(newKeysDiscreteLast));
-
-  const VariableIndex index(factors);
-
-  // Get an ordering where the new keys are eliminated last
-  Ordering ordering = Ordering::ColamdConstrainedLast(
-      index, KeyVector(newKeysDiscreteLast.begin(), newKeysDiscreteLast.end()),
-      true);
-  return ordering;
-}
-
 TEST(HybridEstimation, Full) {
   size_t K = 6;
   std::vector<double> measurements = {0, 1, 2, 2, 2, 3};
@@ -117,7 +88,7 @@ TEST(HybridEstimation, Full) {

 /****************************************************************************/
 // Test approximate inference with an additional pruning step.
-TEST(HybridEstimation, Incremental) {
+TEST(HybridEstimation, IncrementalSmoother) {
   size_t K = 15;
   std::vector<double> measurements = {0, 1, 2, 2, 2, 2, 3, 4, 5, 6, 6,
                                       7, 8, 9, 9, 9, 10, 11, 11, 11, 11};
@@ -136,7 +107,6 @@ TEST(HybridEstimation, Incremental) {
   initial.insert(X(0), switching.linearizationPoint.at<double>(X(0)));

   HybridGaussianFactorGraph linearized;
-  HybridGaussianFactorGraph bayesNet;

   for (size_t k = 1; k < K; k++) {
     // Motion Model
@@ -146,11 +116,10 @@ TEST(HybridEstimation, Incremental) {

     initial.insert(X(k), switching.linearizationPoint.at<double>(X(k)));

-    bayesNet = smoother.hybridBayesNet();
     linearized = *graph.linearize(initial);
-    Ordering ordering = getOrdering(bayesNet, linearized);
+    Ordering ordering = smoother.getOrdering(linearized);

-    smoother.update(linearized, ordering, 3);
+    smoother.update(linearized, 3, ordering);
     graph.resize(0);
   }

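For reference, a sketch of how the inner loop of the renamed IncrementalSmoother test reads once the hunks above are applied; the per-step factor construction is not shown in the diff and is summarized here as a comment, assumed unchanged:

for (size_t k = 1; k < K; k++) {
  // Motion model and measurement factors for step k are added to `graph`
  // in unchanged lines that the diff does not show.
  initial.insert(X(k), switching.linearizationPoint.at<double>(X(k)));

  linearized = *graph.linearize(initial);
  Ordering ordering = smoother.getOrdering(linearized);

  smoother.update(linearized, 3, ordering);
  graph.resize(0);
}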