ISAM2 compiling and fixed several issues but still some unit tests failing

parent 6843ee8227
commit 789f2bee97
@@ -234,7 +234,10 @@ namespace gtsam {
  template<class CLIQUE>
  void BayesTree<CLIQUE>::fillNodesIndex(const sharedClique& subtree) {
    // Add each frontal variable of this root node
-   BOOST_FOREACH(const Key& j, subtree->conditional()->frontals()) { nodes_[j] = subtree; }
+   BOOST_FOREACH(const Key& j, subtree->conditional()->frontals()) {
+     bool inserted = nodes_.insert(std::make_pair(j, subtree)).second;
+     assert(inserted); (void)inserted;
+   }
    // Fill index for each child
    typedef typename BayesTree<CLIQUE>::sharedClique sharedClique;
    BOOST_FOREACH(const sharedClique& child, subtree->children) {
@@ -336,7 +339,7 @@ namespace gtsam {
    // Factor the shortcuts to be conditioned on the full root
    // Get the set of variables to eliminate, which is C1\B.
    gttic(Full_root_factoring);
-   shared_ptr p_C1_B; {
+   boost::shared_ptr<EliminationTraitsType::BayesTreeType> p_C1_B; {
      std::vector<Index> C1_minus_B; {
        FastSet<Index> C1_minus_B_set(C1->conditional()->beginParents(), C1->conditional()->endParents());
        BOOST_FOREACH(const Index j, *B->conditional()) {
@@ -348,7 +351,7 @@ namespace gtsam {
      boost::tie(p_C1_B, temp_remaining) =
        FactorGraphType(p_C1_Bred).eliminatePartialMultifrontal(Ordering(C1_minus_B), function);
    }
-   shared_ptr p_C2_B; {
+   boost::shared_ptr<EliminationTraitsType::BayesTreeType> p_C2_B; {
      std::vector<Index> C2_minus_B; {
        FastSet<Index> C2_minus_B_set(C2->conditional()->beginParents(), C2->conditional()->endParents());
        BOOST_FOREACH(const Index j, *B->conditional()) {
@@ -422,7 +425,7 @@ namespace gtsam {
  void BayesTree<CLIQUE>::removePath(sharedClique clique, BayesNetType& bn, Cliques& orphans)
  {
    // base case is NULL, if so we do nothing and return empties above
-   if (clique!=NULL) {
+   if (clique) {

      // remove the clique from orphans in case it has been added earlier
      orphans.remove(clique);
@@ -30,126 +30,60 @@ namespace gtsam {
/* ************************************************************************* */
void ISAM2::Impl::AddVariables(
    const Values& newTheta, Values& theta, VectorValues& delta,
-   VectorValues& deltaNewton, VectorValues& RgProd, vector<bool>& replacedKeys,
-   Ordering& ordering, const KeyFormatter& keyFormatter) {
+   VectorValues& deltaNewton, VectorValues& RgProd,
+   const KeyFormatter& keyFormatter)
+{
  const bool debug = ISDEBUG("ISAM2 AddVariables");

  theta.insert(newTheta);
  if(debug) newTheta.print("The new variables are: ");
- // Add the new keys onto the ordering, add zeros to the delta for the new variables
- std::vector<Index> dims(newTheta.dims(*newTheta.orderingArbitrary()));
- if(debug) cout << "New variables have total dimensionality " << accumulate(dims.begin(), dims.end(), 0) << endl;
- const size_t originalnVars = delta.size();
- delta.append(dims);
- deltaNewton.append(dims);
- RgProd.append(dims);
- for(Index j = originalnVars; j < delta.size(); ++j) {
-   delta[j].setZero();
-   deltaNewton[j].setZero();
-   RgProd[j].setZero();
- }
- {
-   Index nextVar = originalnVars;
-   BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, newTheta) {
-     ordering.insert(key_value.key, nextVar);
-     if(debug) cout << "Adding variable " << keyFormatter(key_value.key) << " with order " << nextVar << endl;
-     ++ nextVar;
-   }
-   assert(ordering.size() == delta.size());
- }
- replacedKeys.resize(ordering.size(), false);
+ // Add zeros into the VectorValues
+ delta.insert(newTheta.zeroVectors());
+ deltaNewton.insert(newTheta.zeroVectors());
+ RgProd.insert(newTheta.zeroVectors());
}

/* ************************************************************************* */
-void ISAM2::Impl::RemoveVariables(const FastSet<Key>& unusedKeys, const ISAM2Clique::shared_ptr& root,
+void ISAM2::Impl::RemoveVariables(const FastSet<Key>& unusedKeys, const std::vector<ISAM2::sharedClique>& roots,
    Values& theta, VariableIndex& variableIndex,
    VectorValues& delta, VectorValues& deltaNewton, VectorValues& RgProd,
-   std::vector<bool>& replacedKeys, Ordering& ordering, Base::Nodes& nodes,
-   GaussianFactorGraph& linearFactors, FastSet<Key>& fixedVariables) {
-
- // Get indices of unused keys
- vector<Index> unusedIndices; unusedIndices.reserve(unusedKeys.size());
- BOOST_FOREACH(Key key, unusedKeys) { unusedIndices.push_back(ordering[key]); }
-
- // Create a permutation that shifts the unused variables to the end
- Permutation unusedToEnd = Permutation::PushToBack(unusedIndices, delta.size());
- Permutation unusedToEndInverse = *unusedToEnd.inverse();
-
- // Use the permutation to remove unused variables while shifting all the others to take up the space
- variableIndex.permuteInPlace(unusedToEnd);
- variableIndex.removeUnusedAtEnd(unusedIndices.size());
+   FastSet<Key>& replacedKeys, Base::Nodes& nodes,
+   FastSet<Key>& fixedVariables)
+{
- // Create a vector of variable dimensions with the unused ones removed
- // by applying the unusedToEnd permutation to the original vector of
- // variable dimensions. We only allocate space in the shifted dims
- // vector for the used variables, so that the unused ones are dropped
- // when the permutation is applied.
- vector<size_t> originalDims = delta.dims();
- vector<size_t> dims(delta.size() - unusedIndices.size());
- unusedToEnd.applyToCollection(dims, originalDims);
-
- // Copy from the old data structures to new ones, only iterating up to
- // the number of used variables, and applying the unusedToEnd permutation
- // in order to remove the unused variables.
- VectorValues newDelta(dims);
- VectorValues newDeltaNewton(dims);
- VectorValues newDeltaGradSearch(dims);
- std::vector<bool> newReplacedKeys(replacedKeys.size() - unusedIndices.size());
- Base::Nodes newNodes(replacedKeys.size() - unusedIndices.size());
-
- for(size_t j = 0; j < dims.size(); ++j) {
-   newDelta[j] = delta[unusedToEnd[j]];
-   newDeltaNewton[j] = deltaNewton[unusedToEnd[j]];
-   newDeltaGradSearch[j] = RgProd[unusedToEnd[j]];
-   newReplacedKeys[j] = replacedKeys[unusedToEnd[j]];
-   newNodes[j] = nodes[unusedToEnd[j]];
- }
-
- // Swap the new data structures with the outputs of this function
- delta.swap(newDelta);
- deltaNewton.swap(newDeltaNewton);
- RgProd.swap(newDeltaGradSearch);
- replacedKeys.swap(newReplacedKeys);
- nodes.swap(newNodes);
- }
-
- // Reorder and remove from ordering, solution, and fixed keys
- ordering.permuteInPlace(unusedToEnd);
- BOOST_REVERSE_FOREACH(Key key, unusedKeys) {
-   Ordering::value_type removed = ordering.pop_back();
-   assert(removed.first == key);
+ variableIndex.removeUnusedVariables(unusedKeys.begin(), unusedKeys.end());
+ BOOST_FOREACH(Key key, unusedKeys) {
+   delta.erase(key);
+   deltaNewton.erase(key);
+   RgProd.erase(key);
+   replacedKeys.erase(key);
+   nodes.erase(key);
+   theta.erase(key);
+   fixedVariables.erase(key);
  }

- // Finally, permute references to variables
- if(root)
-   root->permuteWithInverse(unusedToEndInverse);
- linearFactors.permuteWithInverse(unusedToEndInverse);
}

/* ************************************************************************* */
-FastSet<Index> ISAM2::Impl::CheckRelinearizationFull(const VectorValues& delta, const Ordering& ordering,
-   const ISAM2Params::RelinearizationThreshold& relinearizeThreshold, const KeyFormatter& keyFormatter) {
+FastSet<Index> ISAM2::Impl::CheckRelinearizationFull(const VectorValues& delta,
+   const ISAM2Params::RelinearizationThreshold& relinearizeThreshold)
+{
  FastSet<Index> relinKeys;

- if(relinearizeThreshold.type() == typeid(double)) {
-   double threshold = boost::get<double>(relinearizeThreshold);
-   for(Index var=0; var<delta.size(); ++var) {
-     double maxDelta = delta[var].lpNorm<Eigen::Infinity>();
-     if(maxDelta >= threshold) {
-       relinKeys.insert(var);
+ if(const double* threshold = boost::get<const double*>(relinearizeThreshold))
+ {
+   BOOST_FOREACH(const VectorValues::KeyValuePair& key_delta, delta) {
+     double maxDelta = key_delta.second.lpNorm<Eigen::Infinity>();
+     if(maxDelta >= *threshold)
+       relinKeys.insert(key_delta.first);
+   }
+ }
- } else if(relinearizeThreshold.type() == typeid(FastMap<char,Vector>)) {
-   const FastMap<char,Vector>& thresholds = boost::get<FastMap<char,Vector> >(relinearizeThreshold);
-   BOOST_FOREACH(const Ordering::value_type& key_index, ordering) {
-     const Vector& threshold = thresholds.find(Symbol(key_index.first).chr())->second;
-     Index j = key_index.second;
-     if(threshold.rows() != delta[j].rows())
-       throw std::invalid_argument("Relinearization threshold vector dimensionality passed into iSAM2 parameters does not match actual variable dimensionality");
-     if((delta[j].array().abs() > threshold.array()).any())
-       relinKeys.insert(j);
+ else if(const FastMap<char,Vector>* thresholds = boost::get<const FastMap<char,Vector>*>(relinearizeThreshold))
+ {
+   BOOST_FOREACH(const VectorValues::KeyValuePair& key_delta, delta) {
+     const Vector& threshold = thresholds->find(Symbol(key_delta.first).chr())->second;
+     if(threshold.rows() != key_delta.second.rows())
+       throw std::invalid_argument("Relinearization threshold vector dimensionality for '" + std::string(1, Symbol(key_delta.first).chr()) + "' passed into iSAM2 parameters does not match actual variable dimensionality.");
+     if((key_delta.second.array().abs() > threshold.array()).any())
+       relinKeys.insert(key_delta.first);
+   }
+ }
@@ -157,11 +91,12 @@ FastSet<Index> ISAM2::Impl::CheckRelinearizationFull(const VectorValues& delta,
}

/* ************************************************************************* */
-void CheckRelinearizationRecursiveDouble(FastSet<Index>& relinKeys, double threshold, const VectorValues& delta, const ISAM2Clique::shared_ptr& clique) {
+void CheckRelinearizationRecursiveDouble(FastSet<Index>& relinKeys, double threshold,
+   const VectorValues& delta, const ISAM2Clique::shared_ptr& clique)
+{
  // Check the current clique for relinearization
  bool relinearize = false;
- BOOST_FOREACH(Index var, clique->conditional()->keys()) {
+ BOOST_FOREACH(Key var, *clique->conditional()) {
    double maxDelta = delta[var].lpNorm<Eigen::Infinity>();
    if(maxDelta >= threshold) {
      relinKeys.insert(var);
@@ -171,31 +106,31 @@ void CheckRelinearizationRecursiveDouble(FastSet<Index>& relinKeys, double thres

  // If this node was relinearized, also check its children
  if(relinearize) {
-   BOOST_FOREACH(const ISAM2Clique::shared_ptr& child, clique->children()) {
+   BOOST_FOREACH(const ISAM2Clique::shared_ptr& child, clique->children) {
      CheckRelinearizationRecursiveDouble(relinKeys, threshold, delta, child);
    }
  }
}

/* ************************************************************************* */
-void CheckRelinearizationRecursiveMap(FastSet<Index>& relinKeys, const FastMap<char,Vector>& thresholds, const VectorValues& delta, const Ordering& ordering, const ISAM2Clique::shared_ptr& clique) {
+void CheckRelinearizationRecursiveMap(FastSet<Index>& relinKeys, const FastMap<char,Vector>& thresholds,
+   const VectorValues& delta,
+   const ISAM2Clique::shared_ptr& clique)
+{
  // Check the current clique for relinearization
  bool relinearize = false;
- BOOST_FOREACH(Index var, clique->conditional()->keys()) {
-
-   // Lookup the key associated with this index
-   Key key = ordering.key(var);
-
+ BOOST_FOREACH(Key var, *clique->conditional()) {
    // Find the threshold for this variable type
-   const Vector& threshold = thresholds.find(Symbol(key).chr())->second;
+   const Vector& threshold = thresholds.find(Symbol(var).chr())->second;
+
+   const Vector& deltaVar = delta[var];

    // Verify the threshold vector matches the actual variable size
-   if(threshold.rows() != delta[var].rows())
-     throw std::invalid_argument("Relinearization threshold vector dimensionality passed into iSAM2 parameters does not match actual variable dimensionality");
+   if(threshold.rows() != deltaVar.rows())
+     throw std::invalid_argument("Relinearization threshold vector dimensionality for '" + std::string(1, Symbol(var).chr()) + "' passed into iSAM2 parameters does not match actual variable dimensionality.");

    // Check for relinearization
-   if((delta[var].array().abs() > threshold.array()).any()) {
+   if((deltaVar.array().abs() > threshold.array()).any()) {
      relinKeys.insert(var);
      relinearize = true;
    }
@@ -203,52 +138,54 @@ void CheckRelinearizationRecursiveMap(FastSet<Index>& relinKeys, const FastMap<c

  // If this node was relinearized, also check its children
  if(relinearize) {
-   BOOST_FOREACH(const ISAM2Clique::shared_ptr& child, clique->children()) {
-     CheckRelinearizationRecursiveMap(relinKeys, thresholds, delta, ordering, child);
+   BOOST_FOREACH(const ISAM2Clique::shared_ptr& child, clique->children) {
+     CheckRelinearizationRecursiveMap(relinKeys, thresholds, delta, child);
    }
  }
}

/* ************************************************************************* */
-FastSet<Index> ISAM2::Impl::CheckRelinearizationPartial(const ISAM2Clique::shared_ptr& root, const VectorValues& delta, const Ordering& ordering,
-   const ISAM2Params::RelinearizationThreshold& relinearizeThreshold, const KeyFormatter& keyFormatter) {
+FastSet<Index> ISAM2::Impl::CheckRelinearizationPartial(const std::vector<ISAM2::sharedClique>& roots,
+   const VectorValues& delta,
+   const ISAM2Params::RelinearizationThreshold& relinearizeThreshold)
+{
  FastSet<Index> relinKeys;

- if(root) {
-   if(relinearizeThreshold.type() == typeid(double)) {
+ BOOST_FOREACH(const ISAM2::sharedClique& root, roots) {
+   if(relinearizeThreshold.type() == typeid(double))
      CheckRelinearizationRecursiveDouble(relinKeys, boost::get<double>(relinearizeThreshold), delta, root);
-   } else if(relinearizeThreshold.type() == typeid(FastMap<char,Vector>)) {
-     CheckRelinearizationRecursiveMap(relinKeys, boost::get<FastMap<char,Vector> >(relinearizeThreshold), delta, ordering, root);
+   else if(relinearizeThreshold.type() == typeid(FastMap<char,Vector>))
+     CheckRelinearizationRecursiveMap(relinKeys, boost::get<FastMap<char,Vector> >(relinearizeThreshold), delta, root);
-   }
  }

  return relinKeys;
}

/* ************************************************************************* */
-void ISAM2::Impl::FindAll(ISAM2Clique::shared_ptr clique, FastSet<Index>& keys, const vector<bool>& markedMask) {
+void ISAM2::Impl::FindAll(ISAM2Clique::shared_ptr clique, FastSet<Index>& keys, const FastSet<Key>& markedMask)
+{
  static const bool debug = false;
  // does the separator contain any of the variables?
  bool found = false;
- BOOST_FOREACH(const Index& key, (*clique)->parents()) {
-   if (markedMask[key])
+ BOOST_FOREACH(Key key, clique->conditional()->parents()) {
+   if (markedMask.exists(key)) {
      found = true;
+     break;
+   }
  }
  if (found) {
    // then add this clique
-   keys.insert((*clique)->beginFrontals(), (*clique)->endFrontals());
+   keys.insert(clique->conditional()->beginFrontals(), clique->conditional()->endFrontals());
    if(debug) clique->print("Key(s) marked in clique ");
-   if(debug) cout << "so marking key " << (*clique)->keys().front() << endl;
+   if(debug) cout << "so marking key " << clique->conditional()->front() << endl;
  }
- BOOST_FOREACH(const ISAM2Clique::shared_ptr& child, clique->children_) {
+ BOOST_FOREACH(const ISAM2Clique::shared_ptr& child, clique->children) {
    FindAll(child, keys, markedMask);
  }
}

/* ************************************************************************* */
-void ISAM2::Impl::ExpmapMasked(Values& values, const VectorValues& delta, const Ordering& ordering,
-   const vector<bool>& mask, boost::optional<VectorValues&> invalidateIfDebug, const KeyFormatter& keyFormatter) {
+void ISAM2::Impl::ExpmapMasked(Values& values, const VectorValues& delta,
+   const FastSet<Key>& mask, boost::optional<VectorValues&> invalidateIfDebug, const KeyFormatter& keyFormatter)
+{
  // If debugging, invalidate if requested, otherwise do not invalidate.
  // Invalidating means setting expmapped entries to Inf, to trigger assertions
  // if we try to re-use them.
@@ -256,23 +193,16 @@ void ISAM2::Impl::ExpmapMasked(Values& values, const VectorValues& delta, const
  invalidateIfDebug = boost::none;
#endif

- assert(values.size() == ordering.size());
- assert(delta.size() == ordering.size());
+ assert(values.size() == delta.size());
  Values::iterator key_value;
- Ordering::const_iterator key_index;
- for(key_value = values.begin(), key_index = ordering.begin();
-     key_value != values.end() && key_index != ordering.end(); ++key_value, ++key_index) {
-   assert(key_value->key == key_index->first);
-   const Index var = key_index->second;
-   if(ISDEBUG("ISAM2 update verbose")) {
-     if(mask[var])
-       cout << "expmap " << keyFormatter(key_value->key) << " (j = " << var << "), delta = " << delta[var].transpose() << endl;
-     else
-       cout << "       " << keyFormatter(key_value->key) << " (j = " << var << "), delta = " << delta[var].transpose() << endl;
-   }
+ VectorValues::const_iterator key_delta;
+ for(key_value = values.begin(), key_delta = delta.begin(); key_value != values.end(); ++key_value, ++key_delta)
+ {
+   assert(key_value->key == key_delta->first);
+   Key var = key_value->key;
    assert(delta[var].size() == (int)key_value->value.dim());
    assert(delta[var].unaryExpr(ptr_fun(isfinite<double>)).all());
-   if(mask[var]) {
+   if(mask.exists(var)) {
      Value* retracted = key_value->value.retract_(delta[var]);
      key_value->value = *retracted;
      retracted->deallocate_();
@@ -282,134 +212,35 @@ void ISAM2::Impl::ExpmapMasked(Values& values, const VectorValues& delta, const
  }
}

/* ************************************************************************* */
-ISAM2::Impl::PartialSolveResult
-ISAM2::Impl::PartialSolve(GaussianFactorGraph& factors,
-   const FastSet<Index>& keys, const ReorderingMode& reorderingMode, bool useQR) {
-
- const bool debug = ISDEBUG("ISAM2 recalculate");
-
- PartialSolveResult result;
-
- gttic(select_affected_variables);
-#ifdef GTSAM_EXTRA_CONSISTENCY_CHECKS
- // Debug check that all variables involved in the factors to be re-eliminated
- // are in affectedKeys, since we will use it to select a subset of variables.
- BOOST_FOREACH(const GaussianFactor::shared_ptr& factor, factors) {
-   BOOST_FOREACH(Index key, factor->keys()) {
-     assert(find(keys.begin(), keys.end(), key) != keys.end());
-   }
- }
-#endif
- Permutation affectedKeysSelector(keys.size()); // Create a permutation that pulls the affected keys to the front
- Permutation affectedKeysSelectorInverse(keys.size() > 0 ? *keys.rbegin()+1 : 0 /*ordering_.nVars()*/); // And its inverse
-#ifndef NDEBUG
- // If debugging, fill with invalid values that will trip asserts if dereferenced
- std::fill(affectedKeysSelectorInverse.begin(), affectedKeysSelectorInverse.end(), numeric_limits<Index>::max());
-#endif
- { Index position=0; BOOST_FOREACH(Index var, keys) {
-   affectedKeysSelector[position] = var;
-   affectedKeysSelectorInverse[var] = position;
-   ++ position; } }
- if(debug) affectedKeysSelector.print("affectedKeysSelector: ");
- if(debug) affectedKeysSelectorInverse.print("affectedKeysSelectorInverse: ");
- factors.permuteWithInverse(affectedKeysSelectorInverse);
- if(debug) factors.print("Factors to reorder/re-eliminate: ");
- gttoc(select_affected_variables);
- gttic(variable_index);
- VariableIndex affectedFactorsIndex(factors); // Create a variable index for the factors to be re-eliminated
- if(debug) affectedFactorsIndex.print("affectedFactorsIndex: ");
- gttoc(variable_index);
- gttic(ccolamd);
- vector<int> cmember(affectedKeysSelector.size(), 0);
- if(reorderingMode.constrain == ReorderingMode::CONSTRAIN_LAST) {
-   assert(reorderingMode.constrainedKeys);
-   if(!reorderingMode.constrainedKeys->empty()) {
-     typedef std::pair<const Index,int> Index_Group;
-     if(keys.size() > reorderingMode.constrainedKeys->size()) { // Only if some variables are unconstrained
-       BOOST_FOREACH(const Index_Group& index_group, *reorderingMode.constrainedKeys) {
-         cmember[affectedKeysSelectorInverse[index_group.first]] = index_group.second; }
-     } else {
-       int minGroup = *boost::range::min_element(boost::adaptors::values(*reorderingMode.constrainedKeys));
-       BOOST_FOREACH(const Index_Group& index_group, *reorderingMode.constrainedKeys) {
-         cmember[affectedKeysSelectorInverse[index_group.first]] = index_group.second - minGroup; }
-     }
-   }
- }
- Permutation::shared_ptr affectedColamd(inference::PermutationCOLAMD_(affectedFactorsIndex, cmember));
- gttoc(ccolamd);
- gttic(ccolamd_permutations);
- Permutation::shared_ptr affectedColamdInverse(affectedColamd->inverse());
- if(debug) affectedColamd->print("affectedColamd: ");
- if(debug) affectedColamdInverse->print("affectedColamdInverse: ");
- result.reorderingSelector = affectedKeysSelector;
- result.reorderingPermutation = *affectedColamd;
- result.reorderingInverse = internal::Reduction::CreateFromPartialPermutation(affectedKeysSelector, *affectedColamdInverse);
- gttoc(ccolamd_permutations);
-
- gttic(permute_affected_variable_index);
- affectedFactorsIndex.permuteInPlace(*affectedColamd);
- gttoc(permute_affected_variable_index);
-
- gttic(permute_affected_factors);
- factors.permuteWithInverse(*affectedColamdInverse);
- gttoc(permute_affected_factors);
-
- if(debug) factors.print("Colamd-ordered affected factors: ");
-
-#ifdef GTSAM_EXTRA_CONSISTENCY_CHECKS
- VariableIndex fromScratchIndex(factors);
- assert(assert_equal(fromScratchIndex, affectedFactorsIndex));
-#endif
-
- // eliminate into a Bayes net
- gttic(eliminate);
- JunctionTree<GaussianFactorGraph, ISAM2::Clique> jt(factors, affectedFactorsIndex);
- if(!useQR)
-   result.bayesTree = jt.eliminate(EliminatePreferCholesky);
- else
-   result.bayesTree = jt.eliminate(EliminateQR);
- gttoc(eliminate);
-
- gttic(permute_eliminated);
- if(result.bayesTree) result.bayesTree->permuteWithInverse(affectedKeysSelector);
- if(debug && result.bayesTree) {
-   cout << "Full var-ordered eliminated BT:\n";
-   result.bayesTree->printTree("");
- }
- // Undo permutation on our subset of cached factors, we must later permute *all* of the cached factors
- factors.permuteWithInverse(*affectedColamd);
- factors.permuteWithInverse(affectedKeysSelector);
- gttoc(permute_eliminated);
-
- return result;
-}
-
/* ************************************************************************* */
namespace internal {
inline static void optimizeInPlace(const boost::shared_ptr<ISAM2Clique>& clique, VectorValues& result) {
  // parents are assumed to already be solved and available in result
- clique->conditional()->solveInPlace(result);
+ result.update(clique->conditional()->solve(result));

  // starting from the root, call optimize on each conditional
- BOOST_FOREACH(const boost::shared_ptr<ISAM2Clique>& child, clique->children_)
+ BOOST_FOREACH(const boost::shared_ptr<ISAM2Clique>& child, clique->children)
    optimizeInPlace(child, result);
}
}

/* ************************************************************************* */
-size_t ISAM2::Impl::UpdateDelta(const boost::shared_ptr<ISAM2Clique>& root, std::vector<bool>& replacedKeys, VectorValues& delta, double wildfireThreshold) {
+size_t ISAM2::Impl::UpdateDelta(const std::vector<ISAM2::sharedClique>& roots, FastSet<Key>& replacedKeys, VectorValues& delta, double wildfireThreshold) {

  size_t lastBacksubVariableCount;

  if (wildfireThreshold <= 0.0) {
    // Threshold is zero or less, so do a full recalculation
+   BOOST_FOREACH(const ISAM2::sharedClique& root, roots)
      internal::optimizeInPlace(root, delta);
    lastBacksubVariableCount = delta.size();

  } else {
    // Optimize with wildfire
-   lastBacksubVariableCount = optimizeWildfireNonRecursive(root, wildfireThreshold, replacedKeys, delta); // modifies delta_
+   lastBacksubVariableCount = 0;
+   BOOST_FOREACH(const ISAM2::sharedClique& root, roots)
+     lastBacksubVariableCount += optimizeWildfireNonRecursive(
+         root, wildfireThreshold, replacedKeys, delta); // modifies delta

#ifdef GTSAM_EXTRA_CONSISTENCY_CHECKS
    for(size_t j=0; j<delta.size(); ++j)
@@ -418,22 +249,22 @@ size_t ISAM2::Impl::UpdateDelta(const boost::shared_ptr<ISAM2Clique>& root, std:
  }

  // Clear replacedKeys
- replacedKeys.assign(replacedKeys.size(), false);
+ replacedKeys.clear();

  return lastBacksubVariableCount;
}

/* ************************************************************************* */
namespace internal {
-void updateDoglegDeltas(const boost::shared_ptr<ISAM2Clique>& clique, const std::vector<bool>& replacedKeys,
-   const VectorValues& grad, VectorValues& deltaNewton, VectorValues& RgProd, size_t& varsUpdated) {
+void updateDoglegDeltas(const boost::shared_ptr<ISAM2Clique>& clique, const FastSet<Key>& replacedKeys,
+   const VectorValues& grad, VectorValues& RgProd, size_t& varsUpdated) {

  // Check if any frontal or separator keys were recalculated, if so, we need
  // update deltas and recurse to children, but if not, we do not need to
  // recurse further because of the running separator property.
  bool anyReplaced = false;
- BOOST_FOREACH(Index j, *clique->conditional()) {
-   if(replacedKeys[j]) {
+ BOOST_FOREACH(Key j, *clique->conditional()) {
+   if(replacedKeys.exists(j)) {
      anyReplaced = true;
      break;
    }
@@ -442,41 +273,49 @@ void updateDoglegDeltas(const boost::shared_ptr<ISAM2Clique>& clique, const std:
  if(anyReplaced) {
    // Update the current variable
    // Get VectorValues slice corresponding to current variables
-   Vector gR = internal::extractVectorValuesSlices(grad, (*clique)->beginFrontals(), (*clique)->endFrontals());
-   Vector gS = internal::extractVectorValuesSlices(grad, (*clique)->beginParents(), (*clique)->endParents());
+   Vector gR = grad.vector(std::vector<Key>(clique->conditional()->beginFrontals(), clique->conditional()->endFrontals()));
+   Vector gS = grad.vector(std::vector<Key>(clique->conditional()->beginParents(), clique->conditional()->endParents()));

    // Compute R*g and S*g for this clique
-   Vector RSgProd = (*clique)->get_R() * gR + (*clique)->get_S() * gS;
+   Vector RSgProd = clique->conditional()->get_R() * gR + clique->conditional()->get_S() * gS;

    // Write into RgProd vector
-   internal::writeVectorValuesSlices(RSgProd, RgProd, (*clique)->beginFrontals(), (*clique)->endFrontals());
+   DenseIndex vectorPosition = 0;
+   BOOST_FOREACH(Key frontal, clique->conditional()->frontals()) {
+     Vector& RgProdValue = RgProd[frontal];
+     RgProdValue = RSgProd.segment(vectorPosition, RgProdValue.size());
+     vectorPosition += RgProdValue.size();
+   }

    // Now solve the part of the Newton's method point for this clique (back-substitution)
    //(*clique)->solveInPlace(deltaNewton);

-   varsUpdated += (*clique)->nrFrontals();
+   varsUpdated += clique->conditional()->nrFrontals();

    // Recurse to children
-   BOOST_FOREACH(const ISAM2Clique::shared_ptr& child, clique->children()) {
-     updateDoglegDeltas(child, replacedKeys, grad, deltaNewton, RgProd, varsUpdated); }
+   BOOST_FOREACH(const ISAM2Clique::shared_ptr& child, clique->children) {
+     updateDoglegDeltas(child, replacedKeys, grad, RgProd, varsUpdated); }
  }
}
}

/* ************************************************************************* */
-size_t ISAM2::Impl::UpdateDoglegDeltas(const ISAM2& isam, double wildfireThreshold, std::vector<bool>& replacedKeys,
+size_t ISAM2::Impl::UpdateDoglegDeltas(const ISAM2& isam, double wildfireThreshold, FastSet<Key>& replacedKeys,
    VectorValues& deltaNewton, VectorValues& RgProd) {

  // Get gradient
- VectorValues grad = *allocateVectorValues(isam);
+ VectorValues grad;
  gradientAtZero(isam, grad);

  // Update variables
  size_t varsUpdated = 0;
- internal::updateDoglegDeltas(isam.root(), replacedKeys, grad, deltaNewton, RgProd, varsUpdated);
- optimizeWildfireNonRecursive(isam.root(), wildfireThreshold, replacedKeys, deltaNewton);
+ BOOST_FOREACH(const ISAM2::sharedClique& root, isam.roots())
+ {
+   internal::updateDoglegDeltas(root, replacedKeys, grad, RgProd, varsUpdated);
+   optimizeWildfireNonRecursive(root, wildfireThreshold, replacedKeys, deltaNewton);
+ }

- replacedKeys.assign(replacedKeys.size(), false);
+ replacedKeys.clear();

  return varsUpdated;
}

@@ -26,9 +26,6 @@ struct GTSAM_EXPORT ISAM2::Impl {

  struct GTSAM_EXPORT PartialSolveResult {
    ISAM2::sharedClique bayesTree;
-   Permutation reorderingSelector;
-   Permutation reorderingPermutation;
-   internal::Reduction reorderingInverse;
  };

  struct GTSAM_EXPORT ReorderingMode {
@@ -48,16 +45,16 @@ struct GTSAM_EXPORT ISAM2::Impl {
   * @param keyFormatter Formatter for printing nonlinear keys during debugging
   */
  static void AddVariables(const Values& newTheta, Values& theta, VectorValues& delta,
-     VectorValues& deltaNewton, VectorValues& RgProd, std::vector<bool>& replacedKeys,
-     Ordering& ordering, const KeyFormatter& keyFormatter = DefaultKeyFormatter);
+     VectorValues& deltaNewton, VectorValues& RgProd,
+     const KeyFormatter& keyFormatter = DefaultKeyFormatter);

  /**
   * Remove variables from the ISAM2 system.
   */
- static void RemoveVariables(const FastSet<Key>& unusedKeys, const ISAM2Clique::shared_ptr& root,
+ static void RemoveVariables(const FastSet<Key>& unusedKeys, const std::vector<ISAM2::sharedClique>& roots,
      Values& theta, VariableIndex& variableIndex, VectorValues& delta, VectorValues& deltaNewton,
-     VectorValues& RgProd, std::vector<bool>& replacedKeys, Ordering& ordering, Base::Nodes& nodes,
-     GaussianFactorGraph& linearFactors, FastSet<Key>& fixedVariables);
+     VectorValues& RgProd, FastSet<Key>& replacedKeys, Base::Nodes& nodes,
+     FastSet<Key>& fixedVariables);

  /**
   * Find the set of variables to be relinearized according to relinearizeThreshold.
@@ -68,8 +65,8 @@ struct GTSAM_EXPORT ISAM2::Impl {
   * @return The set of variable indices in delta whose magnitude is greater than or
   * equal to relinearizeThreshold
   */
- static FastSet<Index> CheckRelinearizationFull(const VectorValues& delta, const Ordering& ordering,
-     const ISAM2Params::RelinearizationThreshold& relinearizeThreshold, const KeyFormatter& keyFormatter = DefaultKeyFormatter);
+ static FastSet<Index> CheckRelinearizationFull(const VectorValues& delta,
+     const ISAM2Params::RelinearizationThreshold& relinearizeThreshold);

  /**
   * Find the set of variables to be relinearized according to relinearizeThreshold.
@@ -82,8 +79,8 @@ struct GTSAM_EXPORT ISAM2::Impl {
   * @return The set of variable indices in delta whose magnitude is greater than or
   * equal to relinearizeThreshold
   */
- static FastSet<Index> CheckRelinearizationPartial(const ISAM2Clique::shared_ptr& root, const VectorValues& delta, const Ordering& ordering,
-     const ISAM2Params::RelinearizationThreshold& relinearizeThreshold, const KeyFormatter& keyFormatter = DefaultKeyFormatter);
+ static FastSet<Index> CheckRelinearizationPartial(const std::vector<ISAM2::sharedClique>& roots,
+     const VectorValues& delta, const ISAM2Params::RelinearizationThreshold& relinearizeThreshold);

  /**
   * Recursively search this clique and its children for marked keys appearing
@@ -116,30 +113,14 @@ struct GTSAM_EXPORT ISAM2::Impl {
   * @param keyFormatter Formatter for printing nonlinear keys during debugging
   */
  static void ExpmapMasked(Values& values, const VectorValues& delta,
-     const Ordering& ordering, const std::vector<bool>& mask,
+     const FastSet<Key>& mask,
      boost::optional<VectorValues&> invalidateIfDebug = boost::none,
      const KeyFormatter& keyFormatter = DefaultKeyFormatter);

- /**
-  * Reorder and eliminate factors. These factors form a subset of the full
-  * problem, so along with the BayesTree we get a partial reordering of the
-  * problem that needs to be applied to the other data in ISAM2, which is the
-  * VariableIndex, the delta, the ordering, and the orphans (including cached
-  * factors).
-  * \param factors The factors to eliminate, representing part of the full
-  * problem. This is permuted during use and so is cleared when this function
-  * returns in order to invalidate it.
-  * \param keys The set of indices used in \c factors.
-  * \param useQR Whether to use QR (if true), or Cholesky (if false).
-  * \return The eliminated BayesTree and the permutation to be applied to the
-  * rest of the ISAM2 data.
-  */
- static PartialSolveResult PartialSolve(GaussianFactorGraph& factors, const FastSet<Index>& keys,
-     const ReorderingMode& reorderingMode, bool useQR);
+ static size_t UpdateDelta(const std::vector<ISAM2::sharedClique>& roots,
+     FastSet<Key>& replacedKeys, VectorValues& delta, double wildfireThreshold);

- static size_t UpdateDelta(const boost::shared_ptr<ISAM2Clique>& root, std::vector<bool>& replacedKeys, VectorValues& delta, double wildfireThreshold);

- static size_t UpdateDoglegDeltas(const ISAM2& isam, double wildfireThreshold, std::vector<bool>& replacedKeys,
+ static size_t UpdateDoglegDeltas(const ISAM2& isam, double wildfireThreshold, FastSet<Key>& replacedKeys,
      VectorValues& deltaNewton, VectorValues& RgProd);

};

@@ -28,8 +28,7 @@ namespace gtsam {
/* ************************************************************************* */
template<class VALUE>
VALUE ISAM2::calculateEstimate(Key key) const {
- const Index index = getOrdering()[key];
- const Vector& delta = getDelta()[index];
+ const Vector& delta = getDelta()[key];
  return theta_.at<VALUE>(key).retract(delta);
}

@@ -37,24 +36,25 @@ VALUE ISAM2::calculateEstimate(Key key) const {
namespace internal {
template<class CLIQUE>
void optimizeWildfire(const boost::shared_ptr<CLIQUE>& clique, double threshold,
-   std::vector<bool>& changed, const std::vector<bool>& replaced, VectorValues& delta, int& count) {
+   FastSet<Key>& changed, const FastSet<Key>& replaced, VectorValues& delta, size_t& count)
+{
  // if none of the variables in this clique (frontal and separator!) changed
  // significantly, then by the running intersection property, none of the
  // cliques in the children need to be processed

  // Are any clique variables part of the tree that has been redone?
- bool cliqueReplaced = replaced[(*clique)->frontals().front()];
+ bool cliqueReplaced = replaced.exists((*clique)->frontals().front());
#ifdef GTSAM_EXTRA_CONSISTENCY_CHECKS
- BOOST_FOREACH(Index frontal, (*clique)->frontals()) {
-   assert(cliqueReplaced == replaced[frontal]);
+ BOOST_FOREACH(Key frontal, clique->conditional()->frontals()) {
+   assert(cliqueReplaced == replaced.exists(frontal));
  }
#endif

  // If not redone, then has one of the separator variables changed significantly?
  bool recalculate = cliqueReplaced;
  if(!recalculate) {
-   BOOST_FOREACH(Index parent, (*clique)->parents()) {
-     if(changed[parent]) {
+   BOOST_FOREACH(Key parent, clique->conditional()->parents()) {
+     if(changed.exists(parent)) {
        recalculate = true;
        break;
      }
@@ -66,22 +66,22 @@ void optimizeWildfire(const boost::shared_ptr<CLIQUE>& clique, double threshold,
  if(recalculate) {

    // Temporary copy of the original values, to check how much they change
-   std::vector<Vector> originalValues((*clique)->nrFrontals());
+   std::vector<Vector> originalValues(clique->conditional()->nrFrontals());
    GaussianConditional::const_iterator it;
-   for(it = (*clique)->beginFrontals(); it!=(*clique)->endFrontals(); it++) {
-     originalValues[it - (*clique)->beginFrontals()] = delta[*it];
+   for(it = clique->conditional()->beginFrontals(); it!=clique->conditional()->endFrontals(); it++) {
+     originalValues[it - clique->conditional()->beginFrontals()] = delta[*it];
    }

    // Back-substitute
-   (*clique)->solveInPlace(delta);
-   count += (*clique)->nrFrontals();
+   delta.update(clique->conditional()->solve(delta));
+   count += clique->conditional()->nrFrontals();

    // Whether the values changed above a threshold, or always true if the
    // clique was replaced.
    bool valuesChanged = cliqueReplaced;
-   for(it = (*clique)->beginFrontals(); it!=(*clique)->endFrontals(); it++) {
+   for(it = clique->conditional()->beginFrontals(); it != clique->conditional()->endFrontals(); it++) {
      if(!valuesChanged) {
-       const Vector& oldValue(originalValues[it - (*clique)->beginFrontals()]);
+       const Vector& oldValue(originalValues[it - clique->conditional()->beginFrontals()]);
        const Vector& newValue(delta[*it]);
        if((oldValue - newValue).lpNorm<Eigen::Infinity>() >= threshold) {
          valuesChanged = true;
@@ -94,18 +94,18 @@ void optimizeWildfire(const boost::shared_ptr<CLIQUE>& clique, double threshold,
    // If the values were above the threshold or this clique was replaced
    if(valuesChanged) {
      // Set changed flag for each frontal variable and leave the new values
-     BOOST_FOREACH(Index frontal, (*clique)->frontals()) {
-       changed[frontal] = true;
+     BOOST_FOREACH(Key frontal, clique->conditional()->frontals()) {
+       changed.insert(frontal);
      }
    } else {
      // Replace with the old values
-     for(it = (*clique)->beginFrontals(); it!=(*clique)->endFrontals(); it++) {
-       delta[*it] = originalValues[it - (*clique)->beginFrontals()];
+     for(it = clique->conditional()->beginFrontals(); it != clique->conditional()->endFrontals(); it++) {
+       delta[*it] = originalValues[it - clique->conditional()->beginFrontals()];
      }
    }

    // Recurse to children
-   BOOST_FOREACH(const typename CLIQUE::shared_ptr& child, clique->children_) {
+   BOOST_FOREACH(const typename CLIQUE::shared_ptr& child, clique->children) {
      optimizeWildfire(child, threshold, changed, replaced, delta, count);
    }
  }
@@ -113,24 +113,25 @@ void optimizeWildfire(const boost::shared_ptr<CLIQUE>& clique, double threshold,

template<class CLIQUE>
bool optimizeWildfireNode(const boost::shared_ptr<CLIQUE>& clique, double threshold,
-   std::vector<bool>& changed, const std::vector<bool>& replaced, VectorValues& delta, int& count) {
+   FastSet<Key>& changed, const FastSet<Key>& replaced, VectorValues& delta, size_t& count)
+{
  // if none of the variables in this clique (frontal and separator!) changed
  // significantly, then by the running intersection property, none of the
  // cliques in the children need to be processed

  // Are any clique variables part of the tree that has been redone?
- bool cliqueReplaced = replaced[(*clique)->frontals().front()];
+ bool cliqueReplaced = replaced.exists(clique->conditional()->frontals().front());
#ifdef GTSAM_EXTRA_CONSISTENCY_CHECKS
- BOOST_FOREACH(Index frontal, (*clique)->frontals()) {
-   assert(cliqueReplaced == replaced[frontal]);
+ BOOST_FOREACH(Key frontal, clique->conditional()->frontals()) {
+   assert(cliqueReplaced == replaced.exists(frontal));
  }
#endif

  // If not redone, then has one of the separator variables changed significantly?
  bool recalculate = cliqueReplaced;
  if(!recalculate) {
-   BOOST_FOREACH(Index parent, (*clique)->parents()) {
-     if(changed[parent]) {
+   BOOST_FOREACH(Key parent, clique->conditional()->parents()) {
+     if(changed.exists(parent)) {
        recalculate = true;
        break;
      }
@@ -139,25 +140,25 @@ bool optimizeWildfireNode(const boost::shared_ptr<CLIQUE>& clique, double thresh

  // Solve clique if it was replaced, or if any parents were changed above the
  // threshold or themselves replaced.
- if(recalculate) {

+ if(recalculate)
+ {
    // Temporary copy of the original values, to check how much they change
-   std::vector<Vector> originalValues((*clique)->nrFrontals());
+   std::vector<Vector> originalValues(clique->conditional()->nrFrontals());
    GaussianConditional::const_iterator it;
-   for(it = (*clique)->beginFrontals(); it!=(*clique)->endFrontals(); it++) {
-     originalValues[it - (*clique)->beginFrontals()] = delta[*it];
+   for(it = clique->conditional()->beginFrontals(); it != clique->conditional()->endFrontals(); it++) {
+     originalValues[it - clique->conditional()->beginFrontals()] = delta[*it];
    }

    // Back-substitute
-   (*clique)->solveInPlace(delta);
-   count += (*clique)->nrFrontals();
+   delta.update(clique->conditional()->solve(delta));
+   count += clique->conditional()->nrFrontals();

    // Whether the values changed above a threshold, or always true if the
    // clique was replaced.
    bool valuesChanged = cliqueReplaced;
-   for(it = (*clique)->beginFrontals(); it!=(*clique)->endFrontals(); it++) {
+   for(it = clique->conditional()->beginFrontals(); it != clique->conditional()->endFrontals(); it++) {
      if(!valuesChanged) {
-       const Vector& oldValue(originalValues[it - (*clique)->beginFrontals()]);
+       const Vector& oldValue(originalValues[it - clique->conditional()->beginFrontals()]);
        const Vector& newValue(delta[*it]);
        if((oldValue - newValue).lpNorm<Eigen::Infinity>() >= threshold) {
          valuesChanged = true;
@@ -170,16 +171,15 @@ bool optimizeWildfireNode(const boost::shared_ptr<CLIQUE>& clique, double thresh
    // If the values were above the threshold or this clique was replaced
    if(valuesChanged) {
      // Set changed flag for each frontal variable and leave the new values
-     BOOST_FOREACH(Index frontal, (*clique)->frontals()) {
-       changed[frontal] = true;
+     BOOST_FOREACH(Key frontal, clique->conditional()->frontals()) {
+       changed.insert(frontal);
      }
    } else {
      // Replace with the old values
-     for(it = (*clique)->beginFrontals(); it!=(*clique)->endFrontals(); it++) {
-       delta[*it] = originalValues[it - (*clique)->beginFrontals()];
+     for(it = clique->conditional()->beginFrontals(); it != clique->conditional()->endFrontals(); it++) {
+       delta[*it] = originalValues[it - clique->conditional()->beginFrontals()];
      }
    }

  }

  return recalculate;
@@ -189,8 +189,8 @@ bool optimizeWildfireNode(const boost::shared_ptr<CLIQUE>& clique, double thresh

/* ************************************************************************* */
template<class CLIQUE>
-int optimizeWildfire(const boost::shared_ptr<CLIQUE>& root, double threshold, const std::vector<bool>& keys, VectorValues& delta) {
- std::vector<bool> changed(keys.size(), false);
+size_t optimizeWildfire(const boost::shared_ptr<CLIQUE>& root, double threshold, const FastSet<Key>& keys, VectorValues& delta) {
+ FastSet<Key> changed;
  int count = 0;
  // starting from the root, call optimize on each conditional
  if(root)
@@ -200,9 +200,10 @@ int optimizeWildfire(const boost::shared_ptr<CLIQUE>& root, double threshold, co

/* ************************************************************************* */
template<class CLIQUE>
-int optimizeWildfireNonRecursive(const boost::shared_ptr<CLIQUE>& root, double threshold, const std::vector<bool>& keys, VectorValues& delta) {
- std::vector<bool> changed(keys.size(), false);
- int count = 0;
+size_t optimizeWildfireNonRecursive(const boost::shared_ptr<CLIQUE>& root, double threshold, const FastSet<Key>& keys, VectorValues& delta)
+{
+ FastSet<Key> changed;
+ size_t count = 0;

  if (root) {
    std::stack<boost::shared_ptr<CLIQUE> > travStack;
@@ -213,7 +214,7 @@ int optimizeWildfireNonRecursive(const boost::shared_ptr<CLIQUE>& root, double t
      travStack.pop();
      bool recalculate = internal::optimizeWildfireNode(currentNode, threshold, changed, keys, delta, count);
      if (recalculate) {
-       BOOST_FOREACH(const typename CLIQUE::shared_ptr& child, currentNode->children_) {
+       BOOST_FOREACH(const typename CLIQUE::shared_ptr& child, currentNode->children) {
          travStack.push(child);
        }
      }

@@ -26,7 +26,9 @@ namespace br { using namespace boost::range; using namespace boost::adaptors; }
#include <gtsam/base/timing.h>
#include <gtsam/base/debug.h>
#include <gtsam/inference/BayesTree-inst.h>
#include <gtsam/inference/BayesTreeCliqueBase-inst.h>
#include <gtsam/inference/JunctionTree-inst.h> // We need the inst file because we'll make a special JT templated on ISAM2
#include <gtsam/linear/linearAlgorithms-inst.h>
#include <gtsam/linear/HessianFactor.h>
#include <gtsam/linear/GaussianFactorGraph.h>
#include <gtsam/linear/GaussianEliminationTree.h>
@@ -36,9 +38,13 @@ namespace br { using namespace boost::range; using namespace boost::adaptors; }
#include <gtsam/nonlinear/nonlinearExceptions.h>
#include <gtsam/nonlinear/LinearContainerFactor.h>

-using namespace std;

namespace gtsam {

+using namespace std;
+// Instantiate base classes
+template class BayesTreeCliqueBase<ISAM2Clique, GaussianFactorGraph>;
+template class BayesTree<ISAM2Clique>;

static const bool disableReordering = false;
static const double batchThreshold = 0.65;
@@ -113,14 +119,14 @@ std::string ISAM2Params::factorizationTranslator(const ISAM2Params::Factorizatio
}

/* ************************************************************************* */
-void ISAM2Clique::setEliminationResult(const typename FactorGraphType::EliminationResult& eliminationResult)
+void ISAM2Clique::setEliminationResult(const FactorGraphType::EliminationResult& eliminationResult)
{
  conditional_ = eliminationResult.first;
  cachedFactor_ = eliminationResult.second;
  // Compute gradient contribution
  gradientContribution_.resize(conditional_->cols() - 1);
  // Rewrite -(R * P')'*d as -(d' * R * P')' for computational speed reasons
- gradientContribution_ = -conditional_->get_R().transpose() * conditional_->get_d(),
+ gradientContribution_ << -conditional_->get_R().transpose() * conditional_->get_d(),
    -conditional_->get_S().transpose() * conditional_->get_d();
}

@@ -159,37 +165,11 @@ ISAM2::ISAM2():
}

/* ************************************************************************* */
-ISAM2::ISAM2(const ISAM2& other) {
- *this = other;
-}
-
-/* ************************************************************************* */
-ISAM2& ISAM2::operator=(const ISAM2& rhs)
-{
- // Copy BayesTree
- this->Base::operator=(rhs);
-
- // Copy our variables
- theta_ = rhs.theta_;
- variableIndex_ = rhs.variableIndex_;
- delta_ = rhs.delta_;
- deltaNewton_ = rhs.deltaNewton_;
- RgProd_ = rhs.RgProd_;
- deltaDoglegUptodate_ = rhs.deltaDoglegUptodate_;
- deltaUptodate_ = rhs.deltaUptodate_;
- deltaReplacedMask_ = rhs.deltaReplacedMask_;
- nonlinearFactors_ = rhs.nonlinearFactors_;
- linearFactors_ = rhs.linearFactors_;
- params_ = rhs.params_;
- doglegDelta_ = rhs.doglegDelta_;
- lastAffectedVariableCount = rhs.lastAffectedVariableCount;
- lastAffectedFactorCount = rhs.lastAffectedFactorCount;
- lastAffectedCliqueCount = rhs.lastAffectedCliqueCount;
- lastAffectedMarkedCount = rhs.lastAffectedMarkedCount;
- lastBacksubVariableCount = rhs.lastBacksubVariableCount;
- lastNnzTop = rhs.lastNnzTop;
-
- return *this;
+bool ISAM2::equals(const ISAM2& other, double tol) const {
+ return Base::equals(other, tol)
+   && theta_.equals(other.theta_, tol) && variableIndex_.equals(other.variableIndex_, tol)
+   && nonlinearFactors_.equals(other.nonlinearFactors_, tol)
+   && fixedVariables_ == other.fixedVariables_;
}

/* ************************************************************************* */
@@ -337,12 +317,9 @@ boost::shared_ptr<FastSet<Key> > ISAM2::recalculate(const FastSet<Key>& markedKe
  gttic(removetop);
  Cliques orphans;
  GaussianBayesNet affectedBayesNet;
- this->removeTop(markedKeys, affectedBayesNet, orphans);
+ this->removeTop(vector<Key>(markedKeys.begin(), markedKeys.end()), affectedBayesNet, orphans);
  gttoc(removetop);

  if(debug) affectedBayesNet.print("Removed top: ");
  if(debug) orphans.print("Orphans: ");

  // FactorGraph<GaussianFactor> factors(affectedBayesNet);
  // bug was here: we cannot reuse the original factors, because then the cached factors get messed up
  // [all the necessary data is actually contained in the affectedBayesNet, including what was passed in from the boundaries,
@@ -401,14 +378,14 @@ boost::shared_ptr<FastSet<Key> > ISAM2::recalculate(const FastSet<Key>& markedKe
    gttoc(linearize);

    gttic(eliminate);
-   ISAM2BayesTree bayesTree = *ISAM2JunctionTree(GaussianEliminationTree(linearized, variableIndex_, order))
+   ISAM2BayesTree::shared_ptr bayesTree = ISAM2JunctionTree(GaussianEliminationTree(linearized, variableIndex_, order))
      .eliminate(params_.getEliminationFunction()).first;
    gttoc(eliminate);

    gttic(insert);
    this->clear();
-   BOOST_FOREACH(const sharedNode& root, bayesTree.roots())
-     this->insertRoot(root);
+   this->roots_.insert(this->roots_.end(), bayesTree->roots().begin(), bayesTree->roots().end());
+   this->nodes_.insert(bayesTree->nodes().begin(), bayesTree->nodes().end());
    gttoc(insert);

    result.variablesReeliminated = affectedKeysSet->size();
@@ -469,6 +446,13 @@ boost::shared_ptr<FastSet<Key> > ISAM2::recalculate(const FastSet<Key>& markedKe
    factors.push_back(cachedBoundary);
    gttoc(cached);

+   gttic(orphans);
+   // Add the orphaned subtrees
+   BOOST_FOREACH(const sharedClique& orphan, orphans)
+     factors += boost::make_shared<BayesTreeOrphanWrapper<Clique> >(orphan);
+   gttoc(orphans);

    // END OF COPIED CODE

    // 3. Re-order and eliminate the factor graph into a Bayes net (Algorithm [alg:eliminate]), and re-assemble into a new Bayes tree (Algorithm [alg:BayesTree])
@ -482,38 +466,41 @@ boost::shared_ptr<FastSet<Key> > ISAM2::recalculate(const FastSet<Key>& markedKe
|
|||
affectedKeysSet->insert(affectedKeys.begin(), affectedKeys.end());
|
||||
gttoc(list_to_set);
|
||||
|
||||
gttic(PartialSolve);
|
||||
Impl::ReorderingMode reorderingMode;
|
||||
reorderingMode.nFullSystemVars = variableIndex_.size();
|
||||
reorderingMode.algorithm = Impl::ReorderingMode::COLAMD;
|
||||
reorderingMode.constrain = Impl::ReorderingMode::CONSTRAIN_LAST;
|
||||
VariableIndex affectedFactorsVarIndex(factors);
|
||||
|
||||
gttic(ordering_constraints);
|
||||
// Create ordering constraints
|
||||
FastMap<Key,int> constraintGroups;
|
||||
if(constrainKeys) {
|
||||
reorderingMode.constrainedKeys = *constrainKeys;
|
||||
constraintGroups = *constrainKeys;
|
||||
} else {
|
||||
reorderingMode.constrainedKeys = FastMap<Key,int>();
|
||||
constraintGroups = FastMap<Key,int>();
|
||||
const int group = observedKeys.size() < affectedFactorsVarIndex.size()
|
||||
? 1 : 0;
|
||||
BOOST_FOREACH(Key var, observedKeys)
|
||||
reorderingMode.constrainedKeys->insert(make_pair(var, 1));
|
||||
constraintGroups.insert(make_pair(var, group));
|
||||
}
|
||||
FastSet<Key> affectedUsedKeys = *affectedKeysSet; // Remove unused keys from the set we pass to PartialSolve
|
||||
BOOST_FOREACH(Key unused, unusedIndices)
|
||||
affectedUsedKeys.erase(unused);
|
||||
|
||||
// Remove unaffected keys from the constraints
|
||||
FastMap<Key,int>::iterator iter = reorderingMode.constrainedKeys->begin();
|
||||
while(iter != reorderingMode.constrainedKeys->end())
|
||||
if(affectedUsedKeys.find(iter->first) == affectedUsedKeys.end())
|
||||
reorderingMode.constrainedKeys->erase(iter++);
|
||||
else
|
||||
++iter;
|
||||
Impl::PartialSolveResult partialSolveResult =
|
||||
Impl::PartialSolve(factors, affectedUsedKeys, reorderingMode, (params_.factorization == ISAM2Params::QR));
|
||||
for(FastMap<Key,int>::iterator iter = constraintGroups.begin(); iter != constraintGroups.end(); ++iter) {
|
||||
if(unusedIndices.exists(iter->first) || !affectedKeysSet->exists(iter->first))
|
||||
constraintGroups.erase(iter);
|
||||
}
|
||||
gttoc(ordering_constraints);

// Generate ordering
gttic(Ordering);
Ordering ordering = Ordering::COLAMDConstrained(affectedFactorsVarIndex, constraintGroups);

ISAM2BayesTree::shared_ptr bayesTree = ISAM2JunctionTree(GaussianEliminationTree(
factors, affectedFactorsVarIndex, ordering)).eliminate(params_.getEliminationFunction()).first;
gttoc(PartialSolve);

gttoc(reorder_and_eliminate);

gttic(reassemble);
if(partialSolveResult.bayesTree)
BOOST_FOREACH(const sharedNode& root, *partialSolveResult.bayesTree.roots())
this->insertRoot(root);
this->roots_.insert(this->roots_.end(), bayesTree->roots().begin(), bayesTree->roots().end());
this->nodes_.insert(bayesTree->nodes().begin(), bayesTree->nodes().end());
gttoc(reassemble);

// 4. The orphans have already been inserted during elimination

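// A minimal sketch of step 3 above, assuming the GTSAM 3.x types used in this hunk
// (Ordering::COLAMDConstrained, GaussianEliminationTree, ISAM2JunctionTree); the names
// affectedFactors, affectedIndex, groups and observedKey are illustrative, not the actual members.
GaussianFactorGraph affectedFactors;             // relinearized + cached + orphan-wrapper factors
VariableIndex affectedIndex(affectedFactors);    // column structure of the affected sub-problem
FastMap<Key,int> groups;                         // keys in group 1 are forced to the end of the ordering
Key observedKey = 0;
groups[observedKey] = 1;
Ordering ordering = Ordering::COLAMDConstrained(affectedIndex, groups);
ISAM2BayesTree::shared_ptr subTree = ISAM2JunctionTree(
GaussianEliminationTree(affectedFactors, affectedIndex, ordering))
.eliminate(EliminatePreferCholesky).first;     // the resulting cliques are spliced back into the tree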
@ -524,7 +511,7 @@ boost::shared_ptr<FastSet<Key> > ISAM2::recalculate(const FastSet<Key>& markedKe
// Root clique variables for detailed results
if(params_.enableDetailedResults)
BOOST_FOREACH(const sharedNode& root, this->roots())
BOOST_FOREACH(Key var, *root)
BOOST_FOREACH(Key var, *root->conditional())
result.detail->variableStatus[var].inRootClique = true;

return affectedKeysSet;

@ -585,7 +572,7 @@ ISAM2Result ISAM2::update(
}

// Remove removed factors from the variable index so we do not attempt to relinearize them
variableIndex_.remove(removeFactorIndices, removeFactors);
variableIndex_.remove(removeFactorIndices.begin(), removeFactorIndices.end(), removeFactors);

// Compute unused keys and indices
FastSet<Key> unusedKeys;

@ -611,7 +598,7 @@ ISAM2Result ISAM2::update(

gttic(add_new_variables);
// 2. Initialize any new variables \Theta_{new} and add \Theta:=\Theta\cup\Theta_{new}.
Impl::AddVariables(newTheta, theta_, delta_, deltaNewton_, RgProd_, deltaReplacedMask_);
Impl::AddVariables(newTheta, theta_, delta_, deltaNewton_, RgProd_);
// New keys for detailed results
if(params_.enableDetailedResults) {
BOOST_FOREACH(Key key, newTheta.keys()) { result.detail->variableStatus[key].isNew = true; } }

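// What the slimmed-down AddVariables amounts to (hypothetical helper, not the actual Impl code):
// the new values go into theta and matching zero blocks into each delta vector, keyed directly by
// Key, so no ordering or replaced-keys bookkeeping is required any more.
void addVariablesSketch(const Values& newTheta, Values& theta, VectorValues& delta) {
theta.insert(newTheta);
BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, newTheta)
delta.insert(key_value.key, Vector::Zero(key_value.value.dim()));
}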
@ -743,9 +730,8 @@ ISAM2Result ISAM2::update(
replacedKeys = recalculate(markedKeys, relinKeys, observedKeys, unusedIndices, constrainedKeys, result);

// Update replaced keys mask (accumulates until back-substitution takes place)
if(replacedKeys) {
BOOST_FOREACH(const Key var, *replacedKeys) {
deltaReplacedMask_[var] = true; } }
if(replacedKeys)
deltaReplacedMask_.insert(replacedKeys->begin(), replacedKeys->end());
gttoc(recalculate);

// After the top of the tree has been redone and may have index gaps from

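// With deltaReplacedMask_ now a FastSet<Key>, the mask's lifecycle is: accumulate the keys returned
// by recalculate() on every update, test membership during partial back-substitution, then clear it
// once the delta has been refreshed. Hedged sketch with illustrative local names:
FastSet<Key> replacedMask;                                       // stands in for deltaReplacedMask_
FastSet<Key> recalculated; recalculated.insert(1); recalculated.insert(2);
replacedMask.insert(recalculated.begin(), recalculated.end());  // accumulate across updates
if(replacedMask.count(1))
{ /* this variable's delta must be recomputed during back-substitution */ }
replacedMask.clear();                                            // once back-substitution has consumed it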
@ -753,8 +739,8 @@ ISAM2Result ISAM2::update(
// in all data structures.
if(!unusedKeys.empty()) {
gttic(remove_variables);
Impl::RemoveVariables(unusedKeys, root_, theta_, variableIndex_, delta_, deltaNewton_, RgProd_,
deltaReplacedMask_, Base::nodes_, linearFactors_, fixedVariables_);
Impl::RemoveVariables(unusedKeys, roots_, theta_, variableIndex_, delta_, deltaNewton_, RgProd_,
deltaReplacedMask_, Base::nodes_, fixedVariables_);
gttoc(remove_variables);
}
result.cliques = this->nodes().size();

@ -890,7 +876,8 @@ void ISAM2::marginalizeLeaves(const FastList<Key>& leafKeysList)
graph3.push_back(eliminationResult2.second);
GaussianFactorGraph::EliminationResult eliminationResult3 =
params_.getEliminationFunction()(graph3, Ordering(jPosition, clique->conditional()->endFrontals()));
sharedClique newClique = boost::make_shared<Clique>(make_pair(eliminationResult3.first, clique->cachedFactor()));
sharedClique newClique = boost::make_shared<Clique>();
newClique->setEliminationResult(make_pair(eliminationResult3.first, clique->cachedFactor()));

// Add the marginalized clique to the BayesTree
this->addClique(newClique, parent);

@ -911,32 +898,32 @@ void ISAM2::marginalizeLeaves(const FastList<Key>& leafKeysList)
if(clique_factor.second)
factorsToAdd.push_back(clique_factor.second);
nonlinearFactors_.push_back(boost::make_shared<LinearContainerFactor>(
clique_factor.second, ordering_));
clique_factor.second));
if(params_.cacheLinearizedFactors)
linearFactors_.push_back(clique_factor.second);
BOOST_FOREACH(Index factorIndex, *clique_factor.second) {
fixedVariables_.insert(ordering_.key(factorIndex)); }
BOOST_FOREACH(Key factorKey, *clique_factor.second) {
fixedVariables_.insert(factorKey); }
}
variableIndex_.augment(factorsToAdd); // Augment the variable index

// Remove the factors to remove that have been summarized in the newly-added marginal factors
FastSet<size_t> factorIndicesToRemove;
BOOST_FOREACH(Index j, indices) {
BOOST_FOREACH(Key j, leafKeys) {
factorIndicesToRemove.insert(variableIndex_[j].begin(), variableIndex_[j].end()); }
vector<size_t> removedFactorIndices;
SymbolicFactorGraph removedFactors;
NonlinearFactorGraph removedFactors;
BOOST_FOREACH(size_t i, factorIndicesToRemove) {
removedFactorIndices.push_back(i);
removedFactors.push_back(nonlinearFactors_[i]->symbolic(ordering_));
removedFactors.push_back(nonlinearFactors_[i]);
nonlinearFactors_.remove(i);
if(params_.cacheLinearizedFactors)
linearFactors_.remove(i);
}
variableIndex_.remove(removedFactorIndices, removedFactors);
variableIndex_.remove(removedFactorIndices.begin(), removedFactorIndices.end(), removedFactors);

// Remove the marginalized variables
Impl::RemoveVariables(FastSet<Key>(leafKeys.begin(), leafKeys.end()), root_, theta_, variableIndex_, delta_, deltaNewton_, RgProd_,
deltaReplacedMask_, ordering_, nodes_, linearFactors_, fixedVariables_);
Impl::RemoveVariables(FastSet<Key>(leafKeys.begin(), leafKeys.end()), roots_, theta_, variableIndex_, delta_, deltaNewton_, RgProd_,
deltaReplacedMask_, nodes_, fixedVariables_);
}

/* ************************************************************************* */

@ -948,7 +935,7 @@ void ISAM2::updateDelta(bool forceFullSolve) const {
boost::get<ISAM2GaussNewtonParams>(params_.optimizationParams);
const double effectiveWildfireThreshold = forceFullSolve ? 0.0 : gaussNewtonParams.wildfireThreshold;
gttic(Wildfire_update);
lastBacksubVariableCount = Impl::UpdateDelta(this->root(), deltaReplacedMask_, delta_, effectiveWildfireThreshold);
lastBacksubVariableCount = Impl::UpdateDelta(roots_, deltaReplacedMask_, delta_, effectiveWildfireThreshold);
gttoc(Wildfire_update);

} else if(params_.optimizationParams.type() == typeid(ISAM2DoglegParams)) {

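// The wildfire threshold is what makes the Gauss-Newton branch above a *partial* back-substitution:
// a clique's variables are re-solved, and its children visited, only while the change in delta is
// larger than the threshold; forceFullSolve simply drives the threshold to 0.0 so every clique is
// recomputed. Hedged sketch of the per-clique test used while descending the tree (the real
// traversal lives in Impl::UpdateDelta / optimizeWildfire):
inline bool wildfireContinues(const Vector& newDelta, const Vector& oldDelta, double threshold) {
return (newDelta - oldDelta).lpNorm<Eigen::Infinity>() >= threshold;  // big change -> keep recursing
}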
@ -958,8 +945,11 @@ void ISAM2::updateDelta(bool forceFullSolve) const {

// Do one Dogleg iteration
gttic(Dogleg_Iterate);
VectorValues dx_u = gtsam::optimizeGradientSearch(*this);
VectorValues dx_n = gtsam::optimize(*this);
DoglegOptimizerImpl::IterationResult doglegResult(DoglegOptimizerImpl::Iterate(
*doglegDelta_, doglegParams.adaptationMode, *this, nonlinearFactors_, theta_, ordering_, nonlinearFactors_.error(theta_), doglegParams.verbose));
*doglegDelta_, doglegParams.adaptationMode, dx_u, dx_n, *this, nonlinearFactors_,
theta_, nonlinearFactors_.error(theta_), doglegParams.verbose));
gttoc(Dogleg_Iterate);

gttic(Copy_dx_d);

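// The Dogleg branch now receives the steepest-descent step dx_u and the Gauss-Newton step dx_n
// explicitly and blends them inside the current trust-region radius Delta. Hedged sketch of the
// blend rule (the real computation, including the adaptation of Delta, lives in
// DoglegOptimizerImpl::ComputeDoglegPoint / Iterate):
//   if |dx_n| <= Delta        take dx_n                          (full Gauss-Newton step fits)
//   else if |dx_u| >= Delta   take (Delta / |dx_u|) * dx_u        (clipped gradient step)
//   else                      take dx_u + tau * (dx_n - dx_u), with tau chosen so the norm equals Delta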
@ -974,8 +964,6 @@ void ISAM2::updateDelta(bool forceFullSolve) const {

/* ************************************************************************* */
Values ISAM2::calculateEstimate() const {
// We use ExpmapMasked here instead of regular expmap because the former
// handles Permuted<VectorValues>
gttic(Copy_Values);
Values ret(theta_);
gttoc(Copy_Values);

@ -983,31 +971,25 @@ Values ISAM2::calculateEstimate() const {
const VectorValues& delta(getDelta());
gttoc(getDelta);
gttic(Expmap);
vector<bool> mask(ordering_.size(), true);
Impl::ExpmapMasked(ret, delta, ordering_, mask);
ret = ret.retract(delta);
gttoc(Expmap);
return ret;
}

/* ************************************************************************* */
const Value& ISAM2::calculateEstimate(Key key) const {
const Index index = getOrdering()[key];
const Vector& delta = getDelta()[index];
const Vector& delta = getDelta()[key];
return *theta_.at(key).retract_(delta);
}

/* ************************************************************************* */
Values ISAM2::calculateBestEstimate() const {
VectorValues delta(theta_.dims(ordering_));
internal::optimizeInPlace<Base>(this->root(), delta);
return theta_.retract(delta, ordering_);
return theta_.retract(internal::linearAlgorithms::optimizeBayesTree(*this));
}

/* ************************************************************************* */
Matrix ISAM2::marginalCovariance(Index key) const {
return marginalFactor(ordering_[key],
params_.factorization == ISAM2Params::QR ? EliminateQR : EliminatePreferCholesky)
->information().inverse();
Matrix ISAM2::marginalCovariance(Key key) const {
return marginalFactor(key, params_.getEliminationFunction())->information().inverse();
}

/* ************************************************************************* */

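// With the ordering gone, the per-variable accessors are keyed directly by Key. Hedged usage
// sketch (isam and poseKey are illustrative, not taken from the tests):
ISAM2 isam;
Key poseKey = 0;
Values estimate = isam.calculateEstimate();              // retract of the (possibly partial) delta
Pose2 pose = isam.calculateEstimate<Pose2>(poseKey);     // single-variable version, cheaper
Matrix cov = isam.marginalCovariance(poseKey);           // marginal covariance of that variable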
@ -1018,11 +1000,14 @@ const VectorValues& ISAM2::getDelta() const {
}

/* ************************************************************************* */
double ISAM2::error(const VectorValues& x) const
{
return GaussianFactorGraph(*this).error(x);
}

/* ************************************************************************* */
VectorValues optimize(const ISAM2& isam) {
gttic(allocateVectorValues);
VectorValues delta = *allocateVectorValues(isam);
gttoc(allocateVectorValues);
VectorValues delta;
optimizeInPlace(isam, delta);
return delta;
}

@ -1052,12 +1037,8 @@ void optimizeInPlace(const ISAM2& isam, VectorValues& delta) {
/* ************************************************************************* */

VectorValues optimizeGradientSearch(const ISAM2& isam) {
gttic(Allocate_VectorValues);
VectorValues grad = *allocateVectorValues(isam);
gttoc(Allocate_VectorValues);

VectorValues grad;
optimizeGradientSearchInPlace(isam, grad);

return grad;
}

@ -1086,35 +1067,38 @@ void optimizeGradientSearchInPlace(const ISAM2& isam, VectorValues& grad) {

gttic(Compute_minimizing_step_size);
// Compute minimizing step size
double RgNormSq = isam.RgProd_.asVector().squaredNorm();
double RgNormSq = isam.RgProd_.vector().squaredNorm();
double step = -gradientSqNorm / RgNormSq;
gttoc(Compute_minimizing_step_size);

gttic(Compute_point);
// Compute steepest descent point
scal(step, grad);
grad *= step;
gttoc(Compute_point);
}

/* ************************************************************************* */
VectorValues gradient(const ISAM2& bayesTree, const VectorValues& x0) {
return gradient(FactorGraph<JacobianFactor>(bayesTree), x0);
return GaussianFactorGraph(bayesTree).gradient(x0);
}

/* ************************************************************************* */
static void gradientAtZeroTreeAdder(const boost::shared_ptr<ISAM2Clique>& root, VectorValues& g) {
// Loop through variables in each clique, adding contributions
int variablePosition = 0;
DenseIndex variablePosition = 0;
for(GaussianConditional::const_iterator jit = root->conditional()->begin(); jit != root->conditional()->end(); ++jit) {
const int dim = root->conditional()->dim(jit);
g[*jit] += root->gradientContribution().segment(variablePosition, dim);
const DenseIndex dim = root->conditional()->getDim(jit);
pair<VectorValues::iterator, bool> pos_ins =
g.tryInsert(*jit, root->gradientContribution().segment(variablePosition, dim));
if(!pos_ins.second)
pos_ins.first->second += root->gradientContribution().segment(variablePosition, dim);
variablePosition += dim;
}

// Recursively add contributions from children
typedef boost::shared_ptr<ISAM2Clique> sharedClique;
BOOST_FOREACH(const sharedClique& child, root->children()) {
BOOST_FOREACH(const sharedClique& child, root->children) {
gradientAtZeroTreeAdder(child, g);
}
}

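// The minimizing step size in optimizeGradientSearchInPlace above comes from a closed-form line
// search along the gradient g of the quadratic error 0.5*|R*x - d|^2 evaluated at x = 0: setting
// d/dalpha of 0.5*|alpha*R*g - d|^2 to zero gives alpha = -|g|^2 / |R*g|^2, which is exactly
// step = -gradientSqNorm / RgNormSq. Worked example: |g|^2 = 4 and |R*g|^2 = 8 give alpha = -0.5,
// so the steepest-descent point is grad scaled by -0.5.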
@ -1125,8 +1109,8 @@ void gradientAtZero(const ISAM2& bayesTree, VectorValues& g) {
g.setZero();

// Sum up contributions for each clique
if(bayesTree.root())
gradientAtZeroTreeAdder(bayesTree.root(), g);
BOOST_FOREACH(const ISAM2::sharedClique& root, bayesTree.roots())
gradientAtZeroTreeAdder(root, g);
}

}

@ -362,7 +362,7 @@ public:
Vector gradientContribution_;

/// Overridden to also store the remaining factor and gradient contribution
void setEliminationResult(const typename FactorGraphType::EliminationResult& eliminationResult);
void setEliminationResult(const FactorGraphType::EliminationResult& eliminationResult);

/** Access the cached factor */
Base::FactorType::shared_ptr& cachedFactor() { return cachedFactor_; }

@ -397,7 +397,7 @@ private:
* estimate of all variables.
*
*/
class ISAM2: public BayesTree<ISAM2Clique> {
class GTSAM_EXPORT ISAM2: public BayesTree<ISAM2Clique> {

protected:

@ -440,7 +440,7 @@ protected:
* This is \c mutable because it is used internally to not update delta_
* until it is needed.
*/
mutable FastMap<Key,bool> deltaReplacedMask_; // TODO: Make sure accessed in the right way
mutable FastSet<Key> deltaReplacedMask_; // TODO: Make sure accessed in the right way

/** All original nonlinear factors are stored here to use during relinearization */
NonlinearFactorGraph nonlinearFactors_;

@ -462,23 +462,19 @@ public:

typedef ISAM2 This; ///< This class
typedef BayesTree<ISAM2Clique> Base; ///< The BayesTree base class

/** Create an empty ISAM2 instance */
GTSAM_EXPORT ISAM2(const ISAM2Params& params);

/** Create an empty ISAM2 instance using the default set of parameters (see ISAM2Params) */
GTSAM_EXPORT ISAM2();

/** Copy constructor */
GTSAM_EXPORT ISAM2(const ISAM2& other);

/** Assignment operator */
GTSAM_EXPORT ISAM2& operator=(const ISAM2& rhs);

typedef Base::Clique Clique; ///< A clique
typedef Base::sharedClique sharedClique; ///< Shared pointer to a clique
typedef Base::Cliques Cliques; ///< List of Clique typedef from base class

/** Create an empty ISAM2 instance */
ISAM2(const ISAM2Params& params);

/** Create an empty ISAM2 instance using the default set of parameters (see ISAM2Params) */
ISAM2();

/** Compare equality */
bool equals(const ISAM2& other, double tol = 1e-9) const;

/**
* Add new factors, updating the solution and relinearizing as needed.
*

@ -507,7 +503,7 @@ public:
* of the size of the linear delta. This allows the provided keys to be reordered.
* @return An ISAM2Result struct containing information about the update
*/
GTSAM_EXPORT ISAM2Result update(const NonlinearFactorGraph& newFactors = NonlinearFactorGraph(), const Values& newTheta = Values(),
ISAM2Result update(const NonlinearFactorGraph& newFactors = NonlinearFactorGraph(), const Values& newTheta = Values(),
const FastVector<size_t>& removeFactorIndices = FastVector<size_t>(),
const boost::optional<FastMap<Key,int> >& constrainedKeys = boost::none,
const boost::optional<FastList<Key> >& noRelinKeys = boost::none,

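// Hedged usage sketch for update(): all arguments after the new factors and values are optional,
// so a plain incremental step and a step that only removes an existing factor look like this
// (newGraph, newValues and oldFactorIndex are illustrative names):
ISAM2 isam;
NonlinearFactorGraph newGraph;
Values newValues;
ISAM2Result res = isam.update(newGraph, newValues);          // ordinary incremental update
FastVector<size_t> toRemove;
size_t oldFactorIndex = 0;
toRemove.push_back(oldFactorIndex);
isam.update(NonlinearFactorGraph(), Values(), toRemove);     // remove a factor by its index only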
@ -522,16 +518,24 @@ public:
* fixed variables will include any involved with the marginalized variables
* in the original factors, and possibly additional ones due to fill-in.
*/
GTSAM_EXPORT void marginalizeLeaves(const FastList<Key>& leafKeys);
void marginalizeLeaves(const FastList<Key>& leafKeys);

/** Access the current linearization point */
const Values& getLinearizationPoint() const { return theta_; }

/// Compute the current solution. This is the "standard" way to obtain the estimate; it uses:
/// - Partial relinearization and backsubstitution using the thresholds provided in ISAM2Params.
/// - A Dogleg trust-region step, if enabled in ISAM2Params.
/// It is equivalent to getLinearizationPoint().retract(getDelta()); the solution returned is in
/// general not the same as the linearization point itself.
Values optimize() const;

/** Compute an estimate from the incomplete linear delta computed during the last update.
* This delta is incomplete because variables whose change fell below wildfire_threshold were not
* updated. If only a single variable is needed, it is faster to call calculateEstimate(const KEY&).
*/
GTSAM_EXPORT Values calculateEstimate() const;
Values calculateEstimate() const;

/** Compute an estimate for a single variable using its incomplete linear delta computed
* during the last update. This is faster than calling the no-argument version of

@ -549,10 +553,10 @@ public:
* @param key
* @return
*/
GTSAM_EXPORT const Value& calculateEstimate(Key key) const;
const Value& calculateEstimate(Key key) const;

/** Return marginal on any variable as a covariance matrix */
GTSAM_EXPORT Matrix marginalCovariance(Index key) const;
Matrix marginalCovariance(Index key) const;

/// @name Public members for non-typical usage
/// @{

@ -562,16 +566,19 @@ public:

/** Compute an estimate using a complete delta computed by a full back-substitution.
*/
GTSAM_EXPORT Values calculateBestEstimate() const;
Values calculateBestEstimate() const;

/** Access the current delta, computed during the last call to update */
GTSAM_EXPORT const VectorValues& getDelta() const;
const VectorValues& getDelta() const;

/** Compute the linear error */
double error(const VectorValues& x) const;

/** Access the set of nonlinear factors */
GTSAM_EXPORT const NonlinearFactorGraph& getFactorsUnsafe() const { return nonlinearFactors_; }
const NonlinearFactorGraph& getFactorsUnsafe() const { return nonlinearFactors_; }

/** Access the nonlinear variable index */
GTSAM_EXPORT const VariableIndex& getVariableIndex() const { return variableIndex_; }
const VariableIndex& getVariableIndex() const { return variableIndex_; }

size_t lastAffectedVariableCount;
size_t lastAffectedFactorCount;

@ -580,26 +587,26 @@ public:
mutable size_t lastBacksubVariableCount;
size_t lastNnzTop;

GTSAM_EXPORT const ISAM2Params& params() const { return params_; }
const ISAM2Params& params() const { return params_; }

/** prints out clique statistics */
GTSAM_EXPORT void printStats() const { getCliqueData().getStats().print(); }
void printStats() const { getCliqueData().getStats().print(); }

/// @}

private:

GTSAM_EXPORT FastList<size_t> getAffectedFactors(const FastList<Key>& keys) const;
GTSAM_EXPORT GaussianFactorGraph::shared_ptr relinearizeAffectedFactors(const FastList<Key>& affectedKeys, const FastSet<Key>& relinKeys) const;
GTSAM_EXPORT GaussianFactorGraph getCachedBoundaryFactors(Cliques& orphans);
FastList<size_t> getAffectedFactors(const FastList<Key>& keys) const;
GaussianFactorGraph::shared_ptr relinearizeAffectedFactors(const FastList<Key>& affectedKeys, const FastSet<Key>& relinKeys) const;
GaussianFactorGraph getCachedBoundaryFactors(Cliques& orphans);

GTSAM_EXPORT boost::shared_ptr<FastSet<Key> > recalculate(const FastSet<Key>& markedKeys, const FastSet<Key>& relinKeys,
boost::shared_ptr<FastSet<Key> > recalculate(const FastSet<Key>& markedKeys, const FastSet<Key>& relinKeys,
const FastVector<Key>& observedKeys, const FastSet<Key>& unusedIndices, const boost::optional<FastMap<Key,int> >& constrainKeys, ISAM2Result& result);
// void linear_update(const GaussianFactorGraph& newFactors);
GTSAM_EXPORT void updateDelta(bool forceFullSolve = false) const;
void updateDelta(bool forceFullSolve = false) const;

GTSAM_EXPORT friend void optimizeInPlace(const ISAM2&, VectorValues&);
GTSAM_EXPORT friend void optimizeGradientSearchInPlace(const ISAM2&, VectorValues&);
friend GTSAM_EXPORT void optimizeInPlace(const ISAM2&, VectorValues&);
friend GTSAM_EXPORT void optimizeGradientSearchInPlace(const ISAM2&, VectorValues&);

}; // ISAM2

@ -621,12 +628,12 @@ GTSAM_EXPORT void optimizeInPlace(const ISAM2& isam, VectorValues& delta);
/// variables are contained in the subtree.
/// @return The number of variables that were solved for
template<class CLIQUE>
int optimizeWildfire(const boost::shared_ptr<CLIQUE>& root,
double threshold, const std::vector<bool>& replaced, VectorValues& delta);
size_t optimizeWildfire(const boost::shared_ptr<CLIQUE>& root,
double threshold, const FastSet<Key>& replaced, VectorValues& delta);

template<class CLIQUE>
int optimizeWildfireNonRecursive(const boost::shared_ptr<CLIQUE>& root,
double threshold, const std::vector<bool>& replaced, VectorValues& delta);
size_t optimizeWildfireNonRecursive(const boost::shared_ptr<CLIQUE>& root,
double threshold, const FastSet<Key>& replaced, VectorValues& delta);

/**
* Optimize along the gradient direction, with a closed-form computation to

@ -6,11 +6,10 @@

#include <CppUnitLite/TestHarness.h>

#if 0

#include <gtsam/base/debug.h>
#include <gtsam/base/TestableAssertions.h>
#include <gtsam/base/LieVector.h>
#include <gtsam/base/treeTraversal-inst.h>
#include <gtsam/geometry/Point2.h>
#include <gtsam/geometry/Pose2.h>
#include <gtsam/inference/Ordering.h>

@ -27,9 +26,10 @@
#include <tests/smallExample.h>

#include <boost/foreach.hpp>
#include <boost/assign/std/list.hpp> // for operator +=
#include <boost/assign.hpp>
#include <boost/assign/list_of.hpp>
using namespace boost::assign;
#include <boost/range/adaptor/map.hpp>
namespace br { using namespace boost::adaptors; using namespace boost::range; }

using namespace std;
using namespace gtsam;

@ -37,6 +37,8 @@ using boost::shared_ptr;

const double tol = 1e-4;

#define TEST TEST_UNSAFE

// SETDEBUG("ISAM2 update", true);
// SETDEBUG("ISAM2 update verbose", true);
// SETDEBUG("ISAM2 recalculate", true);

@ -48,7 +50,8 @@ SharedDiagonal brNoise = noiseModel::Diagonal::Sigmas(Vector_(2, M_PI/100.0, 0.1
ISAM2 createSlamlikeISAM2(
boost::optional<Values&> init_values = boost::none,
boost::optional<NonlinearFactorGraph&> full_graph = boost::none,
const ISAM2Params& params = ISAM2Params(ISAM2GaussNewtonParams(0.001), 0.0, 0, false, true)) {
const ISAM2Params& params = ISAM2Params(ISAM2GaussNewtonParams(0.001), 0.0, 0, false, true),
size_t maxPoses = 10) {

// These variables will be reused and accumulate factors and values
ISAM2 isam(params);

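// The new maxPoses argument lets a test stop the simulated trajectory early; hedged usage sketch
// (fullinit and fullgraph are the caller-side accumulators, 5 is an arbitrary cut-off):
Values fullinit;
NonlinearFactorGraph fullgraph;
ISAM2 isamShort = createSlamlikeISAM2(fullinit, fullgraph,
ISAM2Params(ISAM2GaussNewtonParams(0.001), 0.0, 0, false, true), 5);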
@ -61,7 +64,7 @@ ISAM2 createSlamlikeISAM2(
|
|||
// Add a prior at time 0 and update isam
|
||||
{
|
||||
NonlinearFactorGraph newfactors;
|
||||
newfactors.add(PriorFactor<Pose2>(0, Pose2(0.0, 0.0, 0.0), odoNoise));
|
||||
newfactors += PriorFactor<Pose2>(0, Pose2(0.0, 0.0, 0.0), odoNoise);
|
||||
fullgraph.push_back(newfactors);
|
||||
|
||||
Values init;
|
||||
|
|
@ -71,10 +74,13 @@ ISAM2 createSlamlikeISAM2(
|
|||
isam.update(newfactors, init);
|
||||
}
|
||||
|
||||
if(i > maxPoses)
|
||||
goto done;
|
||||
|
||||
// Add odometry from time 0 to time 5
|
||||
for( ; i<5; ++i) {
|
||||
NonlinearFactorGraph newfactors;
|
||||
newfactors.add(BetweenFactor<Pose2>(i, i+1, Pose2(1.0, 0.0, 0.0), odoNoise));
|
||||
newfactors += BetweenFactor<Pose2>(i, i+1, Pose2(1.0, 0.0, 0.0), odoNoise);
|
||||
fullgraph.push_back(newfactors);
|
||||
|
||||
Values init;
|
||||
|
|
@ -82,14 +88,17 @@ ISAM2 createSlamlikeISAM2(
|
|||
fullinit.insert((i+1), Pose2(double(i+1)+0.1, -0.1, 0.01));
|
||||
|
||||
isam.update(newfactors, init);
|
||||
|
||||
if(i > maxPoses)
|
||||
goto done;
|
||||
}
|
||||
|
||||
// Add odometry from time 5 to 6 and landmark measurement at time 5
|
||||
{
|
||||
NonlinearFactorGraph newfactors;
|
||||
newfactors.add(BetweenFactor<Pose2>(i, i+1, Pose2(1.0, 0.0, 0.0), odoNoise));
|
||||
newfactors.add(BearingRangeFactor<Pose2,Point2>(i, 100, Rot2::fromAngle(M_PI/4.0), 5.0, brNoise));
|
||||
newfactors.add(BearingRangeFactor<Pose2,Point2>(i, 101, Rot2::fromAngle(-M_PI/4.0), 5.0, brNoise));
|
||||
newfactors += BetweenFactor<Pose2>(i, i+1, Pose2(1.0, 0.0, 0.0), odoNoise);
|
||||
newfactors += BearingRangeFactor<Pose2,Point2>(i, 100, Rot2::fromAngle(M_PI/4.0), 5.0, brNoise);
|
||||
newfactors += BearingRangeFactor<Pose2,Point2>(i, 101, Rot2::fromAngle(-M_PI/4.0), 5.0, brNoise);
|
||||
fullgraph.push_back(newfactors);
|
||||
|
||||
Values init;
|
||||
|
|
@ -104,10 +113,13 @@ ISAM2 createSlamlikeISAM2(
|
|||
++ i;
|
||||
}
|
||||
|
||||
if(i > maxPoses)
|
||||
goto done;
|
||||
|
||||
// Add odometry from time 6 to time 10
|
||||
for( ; i<10; ++i) {
|
||||
NonlinearFactorGraph newfactors;
|
||||
newfactors.add(BetweenFactor<Pose2>(i, i+1, Pose2(1.0, 0.0, 0.0), odoNoise));
|
||||
newfactors += BetweenFactor<Pose2>(i, i+1, Pose2(1.0, 0.0, 0.0), odoNoise);
|
||||
fullgraph.push_back(newfactors);
|
||||
|
||||
Values init;
|
||||
|
|
@ -115,14 +127,17 @@ ISAM2 createSlamlikeISAM2(
|
|||
fullinit.insert((i+1), Pose2(double(i+1)+0.1, -0.1, 0.01));
|
||||
|
||||
isam.update(newfactors, init);
|
||||
|
||||
if(i > maxPoses)
|
||||
goto done;
|
||||
}
|
||||
|
||||
// Add odometry from time 10 to 11 and landmark measurement at time 10
|
||||
{
|
||||
NonlinearFactorGraph newfactors;
|
||||
newfactors.add(BetweenFactor<Pose2>(i, i+1, Pose2(1.0, 0.0, 0.0), odoNoise));
|
||||
newfactors.add(BearingRangeFactor<Pose2,Point2>(i, 100, Rot2::fromAngle(M_PI/4.0 + M_PI/16.0), 4.5, brNoise));
|
||||
newfactors.add(BearingRangeFactor<Pose2,Point2>(i, 101, Rot2::fromAngle(-M_PI/4.0 + M_PI/16.0), 4.5, brNoise));
|
||||
newfactors += BetweenFactor<Pose2>(i, i+1, Pose2(1.0, 0.0, 0.0), odoNoise);
|
||||
newfactors += BearingRangeFactor<Pose2,Point2>(i, 100, Rot2::fromAngle(M_PI/4.0 + M_PI/16.0), 4.5, brNoise);
|
||||
newfactors += BearingRangeFactor<Pose2,Point2>(i, 101, Rot2::fromAngle(-M_PI/4.0 + M_PI/16.0), 4.5, brNoise);
|
||||
fullgraph.push_back(newfactors);
|
||||
|
||||
Values init;
|
||||
|
|
@ -133,6 +148,8 @@ ISAM2 createSlamlikeISAM2(
|
|||
++ i;
|
||||
}
|
||||
|
||||
done:
|
||||
|
||||
if (full_graph)
|
||||
*full_graph = fullgraph;
|
||||
|
||||
|
|
@ -142,185 +159,6 @@ ISAM2 createSlamlikeISAM2(
|
|||
return isam;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
TEST(ISAM2, ImplAddVariables) {
|
||||
|
||||
// Create initial state
|
||||
Values theta;
|
||||
theta.insert(0, Pose2(.1, .2, .3));
|
||||
theta.insert(100, Point2(.4, .5));
|
||||
Values newTheta;
|
||||
newTheta.insert(1, Pose2(.6, .7, .8));
|
||||
|
||||
VectorValues delta;
|
||||
delta.insert(0, Vector_(3, .1, .2, .3));
|
||||
delta.insert(1, Vector_(2, .4, .5));
|
||||
|
||||
VectorValues deltaNewton;
|
||||
deltaNewton.insert(0, Vector_(3, .1, .2, .3));
|
||||
deltaNewton.insert(1, Vector_(2, .4, .5));
|
||||
|
||||
VectorValues deltaRg;
|
||||
deltaRg.insert(0, Vector_(3, .1, .2, .3));
|
||||
deltaRg.insert(1, Vector_(2, .4, .5));
|
||||
|
||||
vector<bool> replacedKeys(2, false);
|
||||
|
||||
Ordering ordering; ordering += 100, 0;
|
||||
|
||||
// Verify initial state
|
||||
LONGS_EQUAL(0, ordering[100]);
|
||||
LONGS_EQUAL(1, ordering[0]);
|
||||
EXPECT(assert_equal(delta[0], delta[ordering[100]]));
|
||||
EXPECT(assert_equal(delta[1], delta[ordering[0]]));
|
||||
|
||||
// Create expected state
|
||||
Values thetaExpected;
|
||||
thetaExpected.insert(0, Pose2(.1, .2, .3));
|
||||
thetaExpected.insert(100, Point2(.4, .5));
|
||||
thetaExpected.insert(1, Pose2(.6, .7, .8));
|
||||
|
||||
VectorValues deltaExpected;
|
||||
deltaExpected.insert(0, Vector_(3, .1, .2, .3));
|
||||
deltaExpected.insert(1, Vector_(2, .4, .5));
|
||||
deltaExpected.insert(2, Vector_(3, 0.0, 0.0, 0.0));
|
||||
|
||||
VectorValues deltaNewtonExpected;
|
||||
deltaNewtonExpected.insert(0, Vector_(3, .1, .2, .3));
|
||||
deltaNewtonExpected.insert(1, Vector_(2, .4, .5));
|
||||
deltaNewtonExpected.insert(2, Vector_(3, 0.0, 0.0, 0.0));
|
||||
|
||||
VectorValues deltaRgExpected;
|
||||
deltaRgExpected.insert(0, Vector_(3, .1, .2, .3));
|
||||
deltaRgExpected.insert(1, Vector_(2, .4, .5));
|
||||
deltaRgExpected.insert(2, Vector_(3, 0.0, 0.0, 0.0));
|
||||
|
||||
vector<bool> replacedKeysExpected(3, false);
|
||||
|
||||
Ordering orderingExpected; orderingExpected += 100, 0, 1;
|
||||
|
||||
// Expand initial state
|
||||
ISAM2::Impl::AddVariables(newTheta, theta, delta, deltaNewton, deltaRg, replacedKeys, ordering);
|
||||
|
||||
EXPECT(assert_equal(thetaExpected, theta));
|
||||
EXPECT(assert_equal(deltaExpected, delta));
|
||||
EXPECT(assert_equal(deltaNewtonExpected, deltaNewton));
|
||||
EXPECT(assert_equal(deltaRgExpected, deltaRg));
|
||||
EXPECT(assert_container_equality(replacedKeysExpected, replacedKeys));
|
||||
EXPECT(assert_equal(orderingExpected, ordering));
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
TEST(ISAM2, ImplRemoveVariables) {
|
||||
|
||||
// Create initial state
|
||||
Values theta;
|
||||
theta.insert(0, Pose2(.1, .2, .3));
|
||||
theta.insert(1, Pose2(.6, .7, .8));
|
||||
theta.insert(100, Point2(.4, .5));
|
||||
|
||||
SymbolicFactorGraph sfg;
|
||||
sfg.push_back(boost::make_shared<IndexFactor>(Index(0), Index(2)));
|
||||
sfg.push_back(boost::make_shared<IndexFactor>(Index(0), Index(1)));
|
||||
VariableIndex variableIndex(sfg);
|
||||
|
||||
VectorValues delta;
|
||||
delta.insert(0, Vector_(3, .1, .2, .3));
|
||||
delta.insert(1, Vector_(3, .4, .5, .6));
|
||||
delta.insert(2, Vector_(2, .7, .8));
|
||||
|
||||
VectorValues deltaNewton;
|
||||
deltaNewton.insert(0, Vector_(3, .1, .2, .3));
|
||||
deltaNewton.insert(1, Vector_(3, .4, .5, .6));
|
||||
deltaNewton.insert(2, Vector_(2, .7, .8));
|
||||
|
||||
VectorValues deltaRg;
|
||||
deltaRg.insert(0, Vector_(3, .1, .2, .3));
|
||||
deltaRg.insert(1, Vector_(3, .4, .5, .6));
|
||||
deltaRg.insert(2, Vector_(2, .7, .8));
|
||||
|
||||
vector<bool> replacedKeys(3, false);
|
||||
replacedKeys[0] = true;
|
||||
replacedKeys[1] = false;
|
||||
replacedKeys[2] = true;
|
||||
|
||||
Ordering ordering; ordering += 100, 1, 0;
|
||||
|
||||
ISAM2::Nodes nodes(3);
|
||||
|
||||
// Verify initial state
|
||||
LONGS_EQUAL(0, ordering[100]);
|
||||
LONGS_EQUAL(1, ordering[1]);
|
||||
LONGS_EQUAL(2, ordering[0]);
|
||||
|
||||
// Create expected state
|
||||
Values thetaExpected;
|
||||
thetaExpected.insert(0, Pose2(.1, .2, .3));
|
||||
thetaExpected.insert(100, Point2(.4, .5));
|
||||
|
||||
SymbolicFactorGraph sfgRemoved;
|
||||
sfgRemoved.push_back(boost::make_shared<IndexFactor>(Index(0), Index(1)));
|
||||
sfgRemoved.push_back(SymbolicFactorGraph::sharedFactor()); // Add empty factor to keep factor indices consistent
|
||||
VariableIndex variableIndexExpected(sfgRemoved);
|
||||
|
||||
VectorValues deltaExpected;
|
||||
deltaExpected.insert(0, Vector_(3, .1, .2, .3));
|
||||
deltaExpected.insert(1, Vector_(2, .7, .8));
|
||||
|
||||
VectorValues deltaNewtonExpected;
|
||||
deltaNewtonExpected.insert(0, Vector_(3, .1, .2, .3));
|
||||
deltaNewtonExpected.insert(1, Vector_(2, .7, .8));
|
||||
|
||||
VectorValues deltaRgExpected;
|
||||
deltaRgExpected.insert(0, Vector_(3, .1, .2, .3));
|
||||
deltaRgExpected.insert(1, Vector_(2, .7, .8));
|
||||
|
||||
vector<bool> replacedKeysExpected(2, true);
|
||||
|
||||
Ordering orderingExpected; orderingExpected += 100, 0;
|
||||
|
||||
ISAM2::Nodes nodesExpected(2);
|
||||
|
||||
// Reduce initial state
|
||||
FastSet<Key> unusedKeys;
|
||||
unusedKeys.insert(1);
|
||||
vector<size_t> removedFactorsI; removedFactorsI.push_back(1);
|
||||
SymbolicFactorGraph removedFactors; removedFactors.push_back(sfg[1]);
|
||||
variableIndex.remove(removedFactorsI, removedFactors);
|
||||
GaussianFactorGraph linearFactors;
|
||||
FastSet<Key> fixedVariables;
|
||||
ISAM2::Impl::RemoveVariables(unusedKeys, ISAM2::sharedClique(), theta, variableIndex, delta, deltaNewton, deltaRg,
|
||||
replacedKeys, ordering, nodes, linearFactors, fixedVariables);
|
||||
|
||||
EXPECT(assert_equal(thetaExpected, theta));
|
||||
EXPECT(assert_equal(variableIndexExpected, variableIndex));
|
||||
EXPECT(assert_equal(deltaExpected, delta));
|
||||
EXPECT(assert_equal(deltaNewtonExpected, deltaNewton));
|
||||
EXPECT(assert_equal(deltaRgExpected, deltaRg));
|
||||
EXPECT(assert_container_equality(replacedKeysExpected, replacedKeys));
|
||||
EXPECT(assert_equal(orderingExpected, ordering));
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
//TEST(ISAM2, IndicesFromFactors) {
|
||||
//
|
||||
// using namespace gtsam::planarSLAM;
|
||||
// typedef GaussianISAM2<Values>::Impl Impl;
|
||||
//
|
||||
// Ordering ordering; ordering += (0), (0), (1);
|
||||
// NonlinearFactorGraph graph;
|
||||
// graph.add(PriorFactor<Pose2>((0), Pose2(), noiseModel::Unit::Create(Pose2::dimension));
|
||||
// graph.addRange((0), (0), 1.0, noiseModel::Unit::Create(1));
|
||||
//
|
||||
// FastSet<Index> expected;
|
||||
// expected.insert(0);
|
||||
// expected.insert(1);
|
||||
//
|
||||
// FastSet<Index> actual = Impl::IndicesFromFactors(ordering, graph);
|
||||
//
|
||||
// EXPECT(assert_equal(expected, actual));
|
||||
//}
|
||||
|
||||
/* ************************************************************************* */
|
||||
//TEST(ISAM2, CheckRelinearization) {
|
||||
//
|
||||
|
|
@ -355,37 +193,29 @@ TEST(ISAM2, ImplRemoveVariables) {
|
|||
//}
|
||||
|
||||
/* ************************************************************************* */
|
||||
TEST(ISAM2, optimize2) {
|
||||
|
||||
// Create initialization
|
||||
Values theta;
|
||||
theta.insert((0), Pose2(0.01, 0.01, 0.01));
|
||||
|
||||
// Create conditional
|
||||
Vector d(3); d << -0.1, -0.1, -0.31831;
|
||||
Matrix R(3,3); R <<
|
||||
10, 0.0, 0.0,
|
||||
0.0, 10, 0.0,
|
||||
0.0, 0.0, 31.8309886;
|
||||
GaussianConditional::shared_ptr conditional(new GaussianConditional(0, d, R, Vector::Ones(3)));
|
||||
|
||||
// Create ordering
|
||||
Ordering ordering; ordering += (0);
|
||||
|
||||
// Expected vector
|
||||
VectorValues expected(1, 3);
|
||||
conditional->solveInPlace(expected);
|
||||
|
||||
// Clique
|
||||
ISAM2::sharedClique clique(
|
||||
ISAM2::Clique::Create(make_pair(conditional,GaussianFactor::shared_ptr())));
|
||||
VectorValues actual(theta.dims(ordering));
|
||||
internal::optimizeInPlace<ISAM2::Base>(clique, actual);
|
||||
|
||||
// expected.print("expected: ");
|
||||
// actual.print("actual: ");
|
||||
EXPECT(assert_equal(expected, actual));
|
||||
struct ConsistencyVisitor
|
||||
{
|
||||
bool consistent;
|
||||
const ISAM2& isam;
|
||||
ConsistencyVisitor(const ISAM2& isam) :
|
||||
consistent(true), isam(isam) {}
|
||||
int operator()(const ISAM2::sharedClique& node, int& parentData)
|
||||
{
|
||||
if(std::find(isam.roots().begin(), isam.roots().end(), node) == isam.roots().end())
|
||||
{
|
||||
if(node->parent_.expired())
|
||||
consistent = false;
|
||||
if(std::find(node->parent()->children.begin(), node->parent()->children.end(), node) == node->parent()->children.end())
|
||||
consistent = false;
|
||||
}
|
||||
BOOST_FOREACH(Key j, node->conditional()->frontals())
|
||||
{
|
||||
if(isam.nodes().at(j).get() != node.get())
|
||||
consistent = false;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
};
|
||||
|
||||
/* ************************************************************************* */
|
||||
bool isam_check(const NonlinearFactorGraph& fullgraph, const Values& fullinit, const ISAM2& isam, Test& test, TestResult& result) {
|
||||
|
|
@ -394,31 +224,35 @@ bool isam_check(const NonlinearFactorGraph& fullgraph, const Values& fullinit, c
|
|||
const std::string name_ = test.getName();
|
||||
|
||||
Values actual = isam.calculateEstimate();
|
||||
Ordering ordering = isam.getOrdering(); // *fullgraph.orderingCOLAMD(fullinit).first;
|
||||
GaussianFactorGraph linearized = *fullgraph.linearize(fullinit, ordering);
|
||||
// linearized.print("Expected linearized: ");
|
||||
GaussianBayesNet gbn = *GaussianSequentialSolver(linearized).eliminate();
|
||||
// gbn.print("Expected bayesnet: ");
|
||||
VectorValues delta = optimize(gbn);
|
||||
Values expected = fullinit.retract(delta, ordering);
|
||||
Values expected = fullinit.retract(fullgraph.linearize(fullinit)->optimize());
|
||||
|
||||
bool isamEqual = assert_equal(expected, actual);
|
||||
|
||||
// Check information
|
||||
ISAM2 expectedisam(isam.params());
|
||||
expectedisam.update(fullgraph, fullinit);
|
||||
bool isamTreeEqual = assert_equal(GaussianFactorGraph(expectedisam).augmentedHessian(), GaussianFactorGraph(isam).augmentedHessian());
|
||||
|
||||
// Check consistency
|
||||
ConsistencyVisitor visitor(isam);
|
||||
int data; // Unused
|
||||
treeTraversal::DepthFirstForest(isam, data, visitor);
|
||||
bool consistent = visitor.consistent;
|
||||
|
||||
// The following two checks make sure that the cached gradients are maintained and used correctly
|
||||
|
||||
// Check gradient at each node
|
||||
bool nodeGradientsOk = true;
|
||||
typedef ISAM2::sharedClique sharedClique;
|
||||
BOOST_FOREACH(const sharedClique& clique, isam.nodes()) {
|
||||
BOOST_FOREACH(const sharedClique& clique, isam.nodes() | br::map_values) {
|
||||
// Compute expected gradient
|
||||
FactorGraph<JacobianFactor> jfg;
|
||||
jfg.push_back(JacobianFactor::shared_ptr(new JacobianFactor(*clique->conditional())));
|
||||
VectorValues expectedGradient(*allocateVectorValues(isam));
|
||||
gradientAtZero(jfg, expectedGradient);
|
||||
GaussianFactorGraph jfg;
|
||||
jfg += clique->conditional();
|
||||
VectorValues expectedGradient = jfg.gradientAtZero();
|
||||
// Compare with actual gradients
|
||||
int variablePosition = 0;
|
||||
DenseIndex variablePosition = 0;
|
||||
for(GaussianConditional::const_iterator jit = clique->conditional()->begin(); jit != clique->conditional()->end(); ++jit) {
|
||||
const int dim = clique->conditional()->dim(jit);
|
||||
const DenseIndex dim = clique->conditional()->getDim(jit);
|
||||
Vector actual = clique->gradientContribution().segment(variablePosition, dim);
|
||||
bool gradOk = assert_equal(expectedGradient[*jit], actual);
|
||||
EXPECT(gradOk);
|
||||
|
|
@ -431,17 +265,30 @@ bool isam_check(const NonlinearFactorGraph& fullgraph, const Values& fullinit, c
|
|||
}
|
||||
|
||||
// Check gradient
|
||||
VectorValues expectedGradient(*allocateVectorValues(isam));
|
||||
gradientAtZero(FactorGraph<JacobianFactor>(isam), expectedGradient);
|
||||
VectorValues expectedGradient2(gradient(FactorGraph<JacobianFactor>(isam), VectorValues::Zero(expectedGradient)));
|
||||
VectorValues actualGradient(*allocateVectorValues(isam));
|
||||
VectorValues expectedGradient = GaussianFactorGraph(isam).gradientAtZero();
|
||||
VectorValues expectedGradient2 = GaussianFactorGraph(isam).gradient(VectorValues::Zero(expectedGradient));
|
||||
VectorValues actualGradient;
|
||||
gradientAtZero(isam, actualGradient);
|
||||
bool expectedGradOk = assert_equal(expectedGradient2, expectedGradient);
|
||||
EXPECT(expectedGradOk);
|
||||
bool totalGradOk = assert_equal(expectedGradient, actualGradient);
|
||||
EXPECT(totalGradOk);
|
||||
|
||||
return nodeGradientsOk && expectedGradOk && totalGradOk && isamEqual;
|
||||
return nodeGradientsOk && expectedGradOk && totalGradOk && isamEqual && isamTreeEqual && consistent;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
TEST(ISAM2, simple)
|
||||
{
|
||||
for(size_t i = 0; i < 10; ++i) {
|
||||
// These variables will be reused and accumulate factors and values
|
||||
Values fullinit;
|
||||
NonlinearFactorGraph fullgraph;
|
||||
ISAM2 isam = createSlamlikeISAM2(fullinit, fullgraph, ISAM2Params(ISAM2GaussNewtonParams(0.0), 0.0, 0, false), i);
|
||||
|
||||
// Compare solutions
|
||||
CHECK(isam_check(fullgraph, fullinit, isam, *this, result_));
|
||||
}
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
|
|
@ -505,8 +352,8 @@ TEST(ISAM2, clone) {
|
|||
|
||||
// Modify original isam
|
||||
NonlinearFactorGraph factors;
|
||||
factors.add(BetweenFactor<Pose2>(0, 10,
|
||||
isam.calculateEstimate<Pose2>(0).between(isam.calculateEstimate<Pose2>(10)), noiseModel::Unit::Create(3)));
|
||||
factors += BetweenFactor<Pose2>(0, 10,
|
||||
isam.calculateEstimate<Pose2>(0).between(isam.calculateEstimate<Pose2>(10)), noiseModel::Unit::Create(3));
|
||||
isam.update(factors);
|
||||
|
||||
CHECK(assert_equal(createSlamlikeISAM2(), clone2));
|
||||
|
|
@ -526,66 +373,6 @@ TEST(ISAM2, clone) {
|
|||
CHECK(assert_equal(ISAM2(), clone1));
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
TEST(ISAM2, permute_cached) {
|
||||
typedef boost::shared_ptr<ISAM2Clique> sharedISAM2Clique;
|
||||
|
||||
// Construct expected permuted BayesTree (variable 2 has been changed to 1)
|
||||
BayesTree<GaussianConditional, ISAM2Clique> expected;
|
||||
expected.insert(sharedISAM2Clique(new ISAM2Clique(make_pair(
|
||||
boost::make_shared<GaussianConditional>(pair_list_of
|
||||
(3, Matrix_(1,1,1.0))
|
||||
(4, Matrix_(1,1,2.0)),
|
||||
2, Vector_(1,1.0), Vector_(1,1.0)), // p(3,4)
|
||||
HessianFactor::shared_ptr())))); // Cached: empty
|
||||
expected.insert(sharedISAM2Clique(new ISAM2Clique(make_pair(
|
||||
boost::make_shared<GaussianConditional>(pair_list_of
|
||||
(2, Matrix_(1,1,1.0))
|
||||
(3, Matrix_(1,1,2.0)),
|
||||
1, Vector_(1,1.0), Vector_(1,1.0)), // p(2|3)
|
||||
boost::make_shared<HessianFactor>(3, Matrix_(1,1,1.0), Vector_(1,1.0), 0.0))))); // Cached: p(3)
|
||||
expected.insert(sharedISAM2Clique(new ISAM2Clique(make_pair(
|
||||
boost::make_shared<GaussianConditional>(pair_list_of
|
||||
(0, Matrix_(1,1,1.0))
|
||||
(2, Matrix_(1,1,2.0)),
|
||||
1, Vector_(1,1.0), Vector_(1,1.0)), // p(0|2)
|
||||
boost::make_shared<HessianFactor>(1, Matrix_(1,1,1.0), Vector_(1,1.0), 0.0))))); // Cached: p(1)
|
||||
// Change variable 2 to 1
|
||||
expected.root()->children().front()->conditional()->keys()[0] = 1;
|
||||
expected.root()->children().front()->children().front()->conditional()->keys()[1] = 1;
|
||||
|
||||
// Construct unpermuted BayesTree
|
||||
BayesTree<GaussianConditional, ISAM2Clique> actual;
|
||||
actual.insert(sharedISAM2Clique(new ISAM2Clique(make_pair(
|
||||
boost::make_shared<GaussianConditional>(pair_list_of
|
||||
(3, Matrix_(1,1,1.0))
|
||||
(4, Matrix_(1,1,2.0)),
|
||||
2, Vector_(1,1.0), Vector_(1,1.0)), // p(3,4)
|
||||
HessianFactor::shared_ptr())))); // Cached: empty
|
||||
actual.insert(sharedISAM2Clique(new ISAM2Clique(make_pair(
|
||||
boost::make_shared<GaussianConditional>(pair_list_of
|
||||
(2, Matrix_(1,1,1.0))
|
||||
(3, Matrix_(1,1,2.0)),
|
||||
1, Vector_(1,1.0), Vector_(1,1.0)), // p(2|3)
|
||||
boost::make_shared<HessianFactor>(3, Matrix_(1,1,1.0), Vector_(1,1.0), 0.0))))); // Cached: p(3)
|
||||
actual.insert(sharedISAM2Clique(new ISAM2Clique(make_pair(
|
||||
boost::make_shared<GaussianConditional>(pair_list_of
|
||||
(0, Matrix_(1,1,1.0))
|
||||
(2, Matrix_(1,1,2.0)),
|
||||
1, Vector_(1,1.0), Vector_(1,1.0)), // p(0|2)
|
||||
boost::make_shared<HessianFactor>(2, Matrix_(1,1,1.0), Vector_(1,1.0), 0.0))))); // Cached: p(2)
|
||||
|
||||
// Create permutation that changes variable 2 -> 0
|
||||
Permutation permutation = Permutation::Identity(5);
|
||||
permutation[2] = 1;
|
||||
|
||||
// Permute BayesTree
|
||||
actual.root()->permuteWithInverse(permutation);
|
||||
|
||||
// Check
|
||||
EXPECT(assert_equal(expected, actual));
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
TEST(ISAM2, removeFactors)
|
||||
{
|
||||
|
|
@ -650,8 +437,8 @@ TEST(ISAM2, swapFactors)
|
|||
fullgraph.remove(swap_idx);
|
||||
|
||||
NonlinearFactorGraph swapfactors;
|
||||
// swapfactors.add(BearingRange<Pose2,Point2>(10, 100, Rot2::fromAngle(M_PI/4.0 + M_PI/16.0), 4.5, brNoise); // original factor
|
||||
swapfactors.add(BearingRangeFactor<Pose2,Point2>(10, 100, Rot2::fromAngle(M_PI/4.0 + M_PI/16.0), 5.0, brNoise));
|
||||
// swapfactors += BearingRange<Pose2,Point2>(10, 100, Rot2::fromAngle(M_PI/4.0 + M_PI/16.0), 4.5, brNoise; // original factor
|
||||
swapfactors += BearingRangeFactor<Pose2,Point2>(10, 100, Rot2::fromAngle(M_PI/4.0 + M_PI/16.0), 5.0, brNoise);
|
||||
fullgraph.push_back(swapfactors);
|
||||
isam.update(swapfactors, Values(), toRemove);
|
||||
}
|
||||
|
|
@ -662,28 +449,26 @@ TEST(ISAM2, swapFactors)
|
|||
|
||||
// Check gradient at each node
|
||||
typedef ISAM2::sharedClique sharedClique;
|
||||
BOOST_FOREACH(const sharedClique& clique, isam.nodes()) {
|
||||
BOOST_FOREACH(const sharedClique& clique, isam.nodes() | br::map_values) {
|
||||
// Compute expected gradient
|
||||
FactorGraph<JacobianFactor> jfg;
|
||||
jfg.push_back(JacobianFactor::shared_ptr(new JacobianFactor(*clique->conditional())));
|
||||
VectorValues expectedGradient(*allocateVectorValues(isam));
|
||||
gradientAtZero(jfg, expectedGradient);
|
||||
GaussianFactorGraph jfg;
|
||||
jfg += clique->conditional();
|
||||
VectorValues expectedGradient = jfg.gradientAtZero();
|
||||
// Compare with actual gradients
|
||||
int variablePosition = 0;
|
||||
DenseIndex variablePosition = 0;
|
||||
for(GaussianConditional::const_iterator jit = clique->conditional()->begin(); jit != clique->conditional()->end(); ++jit) {
|
||||
const int dim = clique->conditional()->dim(jit);
|
||||
const DenseIndex dim = clique->conditional()->getDim(jit);
|
||||
Vector actual = clique->gradientContribution().segment(variablePosition, dim);
|
||||
EXPECT(assert_equal(expectedGradient[*jit], actual));
|
||||
variablePosition += dim;
|
||||
}
|
||||
EXPECT_LONGS_EQUAL(clique->gradientContribution().rows(), variablePosition);
|
||||
EXPECT_LONGS_EQUAL((long)clique->gradientContribution().rows(), (long)variablePosition);
|
||||
}
|
||||
|
||||
// Check gradient
|
||||
VectorValues expectedGradient(*allocateVectorValues(isam));
|
||||
gradientAtZero(FactorGraph<JacobianFactor>(isam), expectedGradient);
|
||||
VectorValues expectedGradient2(gradient(FactorGraph<JacobianFactor>(isam), VectorValues::Zero(expectedGradient)));
|
||||
VectorValues actualGradient(*allocateVectorValues(isam));
|
||||
VectorValues expectedGradient = GaussianFactorGraph(isam).gradientAtZero();
|
||||
VectorValues expectedGradient2 = GaussianFactorGraph(isam).gradient(VectorValues::Zero(expectedGradient));
|
||||
VectorValues actualGradient;
|
||||
gradientAtZero(isam, actualGradient);
|
||||
EXPECT(assert_equal(expectedGradient2, expectedGradient));
|
||||
EXPECT(assert_equal(expectedGradient, actualGradient));
|
||||
|
|
@ -708,7 +493,7 @@ TEST(ISAM2, constrained_ordering)
|
|||
// Add a prior at time 0 and update isam
|
||||
{
|
||||
NonlinearFactorGraph newfactors;
|
||||
newfactors.add(PriorFactor<Pose2>(0, Pose2(0.0, 0.0, 0.0), odoNoise));
|
||||
newfactors += PriorFactor<Pose2>(0, Pose2(0.0, 0.0, 0.0), odoNoise);
|
||||
fullgraph.push_back(newfactors);
|
||||
|
||||
Values init;
|
||||
|
|
@ -723,7 +508,7 @@ TEST(ISAM2, constrained_ordering)
|
|||
// Add odometry from time 0 to time 5
|
||||
for( ; i<5; ++i) {
|
||||
NonlinearFactorGraph newfactors;
|
||||
newfactors.add(BetweenFactor<Pose2>(i, i+1, Pose2(1.0, 0.0, 0.0), odoNoise));
|
||||
newfactors += BetweenFactor<Pose2>(i, i+1, Pose2(1.0, 0.0, 0.0), odoNoise);
|
||||
fullgraph.push_back(newfactors);
|
||||
|
||||
Values init;
|
||||
|
|
@ -739,9 +524,9 @@ TEST(ISAM2, constrained_ordering)
|
|||
// Add odometry from time 5 to 6 and landmark measurement at time 5
|
||||
{
|
||||
NonlinearFactorGraph newfactors;
|
||||
newfactors.add(BetweenFactor<Pose2>(i, i+1, Pose2(1.0, 0.0, 0.0), odoNoise));
|
||||
newfactors.add(BearingRangeFactor<Pose2,Point2>(i, 100, Rot2::fromAngle(M_PI/4.0), 5.0, brNoise));
|
||||
newfactors.add(BearingRangeFactor<Pose2,Point2>(i, 101, Rot2::fromAngle(-M_PI/4.0), 5.0, brNoise));
|
||||
newfactors += BetweenFactor<Pose2>(i, i+1, Pose2(1.0, 0.0, 0.0), odoNoise);
|
||||
newfactors += BearingRangeFactor<Pose2,Point2>(i, 100, Rot2::fromAngle(M_PI/4.0), 5.0, brNoise);
|
||||
newfactors += BearingRangeFactor<Pose2,Point2>(i, 101, Rot2::fromAngle(-M_PI/4.0), 5.0, brNoise);
|
||||
fullgraph.push_back(newfactors);
|
||||
|
||||
Values init;
|
||||
|
|
@ -759,7 +544,7 @@ TEST(ISAM2, constrained_ordering)
|
|||
// Add odometry from time 6 to time 10
|
||||
for( ; i<10; ++i) {
|
||||
NonlinearFactorGraph newfactors;
|
||||
newfactors.add(BetweenFactor<Pose2>(i, i+1, Pose2(1.0, 0.0, 0.0), odoNoise));
|
||||
newfactors += BetweenFactor<Pose2>(i, i+1, Pose2(1.0, 0.0, 0.0), odoNoise);
|
||||
fullgraph.push_back(newfactors);
|
||||
|
||||
Values init;
|
||||
|
|
@ -772,9 +557,9 @@ TEST(ISAM2, constrained_ordering)
|
|||
// Add odometry from time 10 to 11 and landmark measurement at time 10
|
||||
{
|
||||
NonlinearFactorGraph newfactors;
|
||||
newfactors.add(BetweenFactor<Pose2>(i, i+1, Pose2(1.0, 0.0, 0.0), odoNoise));
|
||||
newfactors.add(BearingRangeFactor<Pose2,Point2>(i, 100, Rot2::fromAngle(M_PI/4.0 + M_PI/16.0), 4.5, brNoise));
|
||||
newfactors.add(BearingRangeFactor<Pose2,Point2>(i, 101, Rot2::fromAngle(-M_PI/4.0 + M_PI/16.0), 4.5, brNoise));
|
||||
newfactors += BetweenFactor<Pose2>(i, i+1, Pose2(1.0, 0.0, 0.0), odoNoise);
|
||||
newfactors += BearingRangeFactor<Pose2,Point2>(i, 100, Rot2::fromAngle(M_PI/4.0 + M_PI/16.0), 4.5, brNoise);
|
||||
newfactors += BearingRangeFactor<Pose2,Point2>(i, 101, Rot2::fromAngle(-M_PI/4.0 + M_PI/16.0), 4.5, brNoise);
|
||||
fullgraph.push_back(newfactors);
|
||||
|
||||
Values init;
|
||||
|
|
@ -788,33 +573,28 @@ TEST(ISAM2, constrained_ordering)
|
|||
// Compare solutions
|
||||
EXPECT(isam_check(fullgraph, fullinit, isam, *this, result_));
|
||||
|
||||
// Check that x3 and x4 are last, but either can come before the other
|
||||
EXPECT(isam.getOrdering()[(3)] == 12 && isam.getOrdering()[(4)] == 13);
|
||||
|
||||
// Check gradient at each node
|
||||
typedef ISAM2::sharedClique sharedClique;
|
||||
BOOST_FOREACH(const sharedClique& clique, isam.nodes()) {
|
||||
BOOST_FOREACH(const sharedClique& clique, isam.nodes() | br::map_values) {
|
||||
// Compute expected gradient
|
||||
FactorGraph<JacobianFactor> jfg;
|
||||
jfg.push_back(JacobianFactor::shared_ptr(new JacobianFactor(*clique->conditional())));
|
||||
VectorValues expectedGradient(*allocateVectorValues(isam));
|
||||
gradientAtZero(jfg, expectedGradient);
|
||||
GaussianFactorGraph jfg;
|
||||
jfg += clique->conditional();
|
||||
VectorValues expectedGradient = jfg.gradientAtZero();
|
||||
// Compare with actual gradients
|
||||
int variablePosition = 0;
|
||||
DenseIndex variablePosition = 0;
|
||||
for(GaussianConditional::const_iterator jit = clique->conditional()->begin(); jit != clique->conditional()->end(); ++jit) {
|
||||
const int dim = clique->conditional()->dim(jit);
|
||||
const DenseIndex dim = clique->conditional()->getDim(jit);
|
||||
Vector actual = clique->gradientContribution().segment(variablePosition, dim);
|
||||
EXPECT(assert_equal(expectedGradient[*jit], actual));
|
||||
variablePosition += dim;
|
||||
}
|
||||
LONGS_EQUAL(clique->gradientContribution().rows(), variablePosition);
|
||||
LONGS_EQUAL((long)clique->gradientContribution().rows(), (long)variablePosition);
|
||||
}
|
||||
|
||||
// Check gradient
|
||||
VectorValues expectedGradient(*allocateVectorValues(isam));
|
||||
gradientAtZero(FactorGraph<JacobianFactor>(isam), expectedGradient);
|
||||
VectorValues expectedGradient2(gradient(FactorGraph<JacobianFactor>(isam), VectorValues::Zero(expectedGradient)));
|
||||
VectorValues actualGradient(*allocateVectorValues(isam));
|
||||
VectorValues expectedGradient = GaussianFactorGraph(isam).gradientAtZero();
|
||||
VectorValues expectedGradient2 = GaussianFactorGraph(isam).gradient(VectorValues::Zero(expectedGradient));
|
||||
VectorValues actualGradient;
|
||||
gradientAtZero(isam, actualGradient);
|
||||
EXPECT(assert_equal(expectedGradient2, expectedGradient));
|
||||
EXPECT(assert_equal(expectedGradient, actualGradient));
|
||||
|
|
@ -838,16 +618,12 @@ namespace {
|
|||
bool checkMarginalizeLeaves(ISAM2& isam, const FastList<Key>& leafKeys) {
|
||||
Matrix expectedAugmentedHessian, expected3AugmentedHessian;
|
||||
vector<Index> toKeep;
|
||||
const Index lastVar = isam.getOrdering().size() - 1;
|
||||
for(Index i=0; i<=lastVar; ++i)
|
||||
if(find(leafKeys.begin(), leafKeys.end(), isam.getOrdering().key(i)) == leafKeys.end())
|
||||
toKeep.push_back(i);
|
||||
BOOST_FOREACH(Key j, isam.getDelta() | br::map_keys)
|
||||
if(find(leafKeys.begin(), leafKeys.end(), j) == leafKeys.end())
|
||||
toKeep.push_back(j);
|
||||
|
||||
// Calculate expected marginal from iSAM2 tree
|
||||
GaussianFactorGraph isamAsGraph(isam);
|
||||
GaussianSequentialSolver solver(isamAsGraph);
|
||||
GaussianFactorGraph marginalgfg = *solver.jointFactorGraph(toKeep);
|
||||
expectedAugmentedHessian = marginalgfg.augmentedHessian();
|
||||
expectedAugmentedHessian = GaussianFactorGraph(isam).marginal(toKeep)->augmentedHessian();
|
||||
|
||||
//// Calculate expected marginal from cached linear factors
|
||||
//assert(isam.params().cacheLinearizedFactors);
|
||||
|
|
@ -855,10 +631,8 @@ namespace {
|
|||
//expected2AugmentedHessian = solver2.jointFactorGraph(toKeep)->augmentedHessian();
|
||||
|
||||
// Calculate expected marginal from original nonlinear factors
|
||||
GaussianSequentialSolver solver3(
|
||||
*isam.getFactorsUnsafe().linearize(isam.getLinearizationPoint(), isam.getOrdering()),
|
||||
isam.params().factorization == ISAM2Params::QR);
|
||||
expected3AugmentedHessian = solver3.jointFactorGraph(toKeep)->augmentedHessian();
|
||||
expected3AugmentedHessian = isam.getFactorsUnsafe().linearize(isam.getLinearizationPoint())
|
||||
->marginal(toKeep)->augmentedHessian();
|
||||
|
||||
// Do marginalization
|
||||
isam.marginalizeLeaves(leafKeys);
|
||||
|
|
@ -868,7 +642,7 @@ namespace {
|
|||
Matrix actualAugmentedHessian = actualMarginalGraph.augmentedHessian();
|
||||
//Matrix actual2AugmentedHessian = linearFactors_.augmentedHessian();
|
||||
Matrix actual3AugmentedHessian = isam.getFactorsUnsafe().linearize(
|
||||
isam.getLinearizationPoint(), isam.getOrdering())->augmentedHessian();
|
||||
isam.getLinearizationPoint())->augmentedHessian();
|
||||
assert(actualAugmentedHessian.unaryExpr(std::ptr_fun(&std::isfinite<double>)).all());
|
||||
|
||||
// Check full marginalization
|
||||
|
|
@@ -893,11 +667,11 @@ TEST(ISAM2, marginalizeLeaves1)
   ISAM2 isam;

   NonlinearFactorGraph factors;
-  factors.add(PriorFactor<LieVector>(0, LieVector(0.0), noiseModel::Unit::Create(1)));
+  factors += PriorFactor<LieVector>(0, LieVector(0.0), noiseModel::Unit::Create(1));

-  factors.add(BetweenFactor<LieVector>(0, 1, LieVector(0.0), noiseModel::Unit::Create(1)));
-  factors.add(BetweenFactor<LieVector>(1, 2, LieVector(0.0), noiseModel::Unit::Create(1)));
-  factors.add(BetweenFactor<LieVector>(0, 2, LieVector(0.0), noiseModel::Unit::Create(1)));
+  factors += BetweenFactor<LieVector>(0, 1, LieVector(0.0), noiseModel::Unit::Create(1));
+  factors += BetweenFactor<LieVector>(1, 2, LieVector(0.0), noiseModel::Unit::Create(1));
+  factors += BetweenFactor<LieVector>(0, 2, LieVector(0.0), noiseModel::Unit::Create(1));

   Values values;
   values.insert(0, LieVector(0.0));
@@ -911,8 +685,7 @@ TEST(ISAM2, marginalizeLeaves1)

   isam.update(factors, values, FastVector<size_t>(), constrainedKeys);

-  FastList<Key> leafKeys;
-  leafKeys.push_back(isam.getOrdering().key(0));
+  FastList<Key> leafKeys = list_of(0);
   EXPECT(checkMarginalizeLeaves(isam, leafKeys));
 }

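Review note: each marginalizeLeaves test now follows the same compact pattern, appending factors with operator+= and building the key list with boost::assign's list_of. A sketch of that pattern, using only the pieces visible in the two hunks above (the constrainedKeys setup and the remaining values are elided here just as they are in the diff):

    // Sketch of the updated test idiom:
    NonlinearFactorGraph factors;
    factors += PriorFactor<LieVector>(0, LieVector(0.0), noiseModel::Unit::Create(1));
    factors += BetweenFactor<LieVector>(0, 1, LieVector(0.0), noiseModel::Unit::Create(1));
    // ... remaining factors, values and constrainedKeys exactly as in the test body ...
    isam.update(factors, values, FastVector<size_t>(), constrainedKeys);
    FastList<Key> leafKeys = list_of(0);  // boost::assign::list_of builds the key list inline
    EXPECT(checkMarginalizeLeaves(isam, leafKeys));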
@@ -922,12 +695,12 @@ TEST(ISAM2, marginalizeLeaves2)
   ISAM2 isam;

   NonlinearFactorGraph factors;
-  factors.add(PriorFactor<LieVector>(0, LieVector(0.0), noiseModel::Unit::Create(1)));
+  factors += PriorFactor<LieVector>(0, LieVector(0.0), noiseModel::Unit::Create(1));

-  factors.add(BetweenFactor<LieVector>(0, 1, LieVector(0.0), noiseModel::Unit::Create(1)));
-  factors.add(BetweenFactor<LieVector>(1, 2, LieVector(0.0), noiseModel::Unit::Create(1)));
-  factors.add(BetweenFactor<LieVector>(0, 2, LieVector(0.0), noiseModel::Unit::Create(1)));
-  factors.add(BetweenFactor<LieVector>(2, 3, LieVector(0.0), noiseModel::Unit::Create(1)));
+  factors += BetweenFactor<LieVector>(0, 1, LieVector(0.0), noiseModel::Unit::Create(1));
+  factors += BetweenFactor<LieVector>(1, 2, LieVector(0.0), noiseModel::Unit::Create(1));
+  factors += BetweenFactor<LieVector>(0, 2, LieVector(0.0), noiseModel::Unit::Create(1));
+  factors += BetweenFactor<LieVector>(2, 3, LieVector(0.0), noiseModel::Unit::Create(1));

   Values values;
   values.insert(0, LieVector(0.0));
@@ -943,8 +716,7 @@ TEST(ISAM2, marginalizeLeaves2)

   isam.update(factors, values, FastVector<size_t>(), constrainedKeys);

-  FastList<Key> leafKeys;
-  leafKeys.push_back(isam.getOrdering().key(0));
+  FastList<Key> leafKeys = list_of(0);
   EXPECT(checkMarginalizeLeaves(isam, leafKeys));
 }

@@ -954,17 +726,17 @@ TEST(ISAM2, marginalizeLeaves3)
   ISAM2 isam;

   NonlinearFactorGraph factors;
-  factors.add(PriorFactor<LieVector>(0, LieVector(0.0), noiseModel::Unit::Create(1)));
+  factors += PriorFactor<LieVector>(0, LieVector(0.0), noiseModel::Unit::Create(1));

-  factors.add(BetweenFactor<LieVector>(0, 1, LieVector(0.0), noiseModel::Unit::Create(1)));
-  factors.add(BetweenFactor<LieVector>(1, 2, LieVector(0.0), noiseModel::Unit::Create(1)));
-  factors.add(BetweenFactor<LieVector>(0, 2, LieVector(0.0), noiseModel::Unit::Create(1)));
+  factors += BetweenFactor<LieVector>(0, 1, LieVector(0.0), noiseModel::Unit::Create(1));
+  factors += BetweenFactor<LieVector>(1, 2, LieVector(0.0), noiseModel::Unit::Create(1));
+  factors += BetweenFactor<LieVector>(0, 2, LieVector(0.0), noiseModel::Unit::Create(1));

-  factors.add(BetweenFactor<LieVector>(2, 3, LieVector(0.0), noiseModel::Unit::Create(1)));
+  factors += BetweenFactor<LieVector>(2, 3, LieVector(0.0), noiseModel::Unit::Create(1));

-  factors.add(BetweenFactor<LieVector>(3, 4, LieVector(0.0), noiseModel::Unit::Create(1)));
-  factors.add(BetweenFactor<LieVector>(4, 5, LieVector(0.0), noiseModel::Unit::Create(1)));
-  factors.add(BetweenFactor<LieVector>(3, 5, LieVector(0.0), noiseModel::Unit::Create(1)));
+  factors += BetweenFactor<LieVector>(3, 4, LieVector(0.0), noiseModel::Unit::Create(1));
+  factors += BetweenFactor<LieVector>(4, 5, LieVector(0.0), noiseModel::Unit::Create(1));
+  factors += BetweenFactor<LieVector>(3, 5, LieVector(0.0), noiseModel::Unit::Create(1));

   Values values;
   values.insert(0, LieVector(0.0));
@@ -984,8 +756,7 @@ TEST(ISAM2, marginalizeLeaves3)

   isam.update(factors, values, FastVector<size_t>(), constrainedKeys);

-  FastList<Key> leafKeys;
-  leafKeys.push_back(isam.getOrdering().key(0));
+  FastList<Key> leafKeys = list_of(0);
   EXPECT(checkMarginalizeLeaves(isam, leafKeys));
 }

@@ -995,9 +766,9 @@ TEST(ISAM2, marginalizeLeaves4)
   ISAM2 isam;

   NonlinearFactorGraph factors;
-  factors.add(PriorFactor<LieVector>(0, LieVector(0.0), noiseModel::Unit::Create(1)));
-  factors.add(BetweenFactor<LieVector>(0, 2, LieVector(0.0), noiseModel::Unit::Create(1)));
-  factors.add(BetweenFactor<LieVector>(1, 2, LieVector(0.0), noiseModel::Unit::Create(1)));
+  factors += PriorFactor<LieVector>(0, LieVector(0.0), noiseModel::Unit::Create(1));
+  factors += BetweenFactor<LieVector>(0, 2, LieVector(0.0), noiseModel::Unit::Create(1));
+  factors += BetweenFactor<LieVector>(1, 2, LieVector(0.0), noiseModel::Unit::Create(1));

   Values values;
   values.insert(0, LieVector(0.0));
@@ -1011,8 +782,7 @@ TEST(ISAM2, marginalizeLeaves4)

   isam.update(factors, values, FastVector<size_t>(), constrainedKeys);

-  FastList<Key> leafKeys;
-  leafKeys.push_back(isam.getOrdering().key(1));
+  FastList<Key> leafKeys = list_of(1);
   EXPECT(checkMarginalizeLeaves(isam, leafKeys));
 }

@@ -1023,8 +793,7 @@ TEST(ISAM2, marginalizeLeaves5)
   ISAM2 isam = createSlamlikeISAM2();

   // Marginalize
-  FastList<Key> marginalizeKeys;
-  marginalizeKeys.push_back(isam.getOrdering().key(0));
+  FastList<Key> marginalizeKeys = list_of(0);
   EXPECT(checkMarginalizeLeaves(isam, marginalizeKeys));
 }

@@ -1040,8 +809,6 @@ TEST(ISAM2, marginalCovariance)
   EXPECT(assert_equal(expected, actual));
 }

 #endif

 /* ************************************************************************* */
 int main() { TestResult tr; return TestRegistry::runAllTests(tr);}
 /* ************************************************************************* */