Merged simplelinear branch into trunk
parent
0fb6c1320e
commit
1d52ff90a8
4
.project
4
.project
|
|
@ -29,6 +29,10 @@
|
|||
<key>org.eclipse.cdt.make.core.buildCommand</key>
|
||||
<value>make</value>
|
||||
</dictionary>
|
||||
<dictionary>
|
||||
<key>org.eclipse.cdt.make.core.buildLocation</key>
|
||||
<value>${workspace_loc:/gtsam/build}</value>
|
||||
</dictionary>
|
||||
<dictionary>
|
||||
<key>org.eclipse.cdt.make.core.cleanBuildTarget</key>
|
||||
<value>clean</value>
|
||||
|
|
|
|||
|
|
@ -1,4 +1,6 @@
|
|||
#Wed Aug 18 18:00:45 EDT 2010
|
||||
#Wed Oct 06 11:57:41 EDT 2010
|
||||
eclipse.preferences.version=1
|
||||
environment/project/cdt.managedbuild.toolchain.gnu.macosx.base.1359703544.1441575890/append=true
|
||||
environment/project/cdt.managedbuild.toolchain.gnu.macosx.base.1359703544.1441575890/appendContributed=true
|
||||
environment/project/cdt.managedbuild.toolchain.gnu.macosx.base.1359703544/append=true
|
||||
environment/project/cdt.managedbuild.toolchain.gnu.macosx.base.1359703544/appendContributed=true
|
||||
|
|
|
|||
|
|
@ -31,12 +31,15 @@ TestRegistry& TestRegistry::instance ()
|
|||
void TestRegistry::add (Test *test)
|
||||
{
|
||||
if (tests == 0) {
|
||||
test->setNext(0);
|
||||
tests = test;
|
||||
lastTest = test;
|
||||
return;
|
||||
}
|
||||
|
||||
test->setNext (tests);
|
||||
tests = test;
|
||||
test->setNext (0);
|
||||
lastTest->setNext(test);
|
||||
lastTest = test;
|
||||
}
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -29,6 +29,7 @@ private:
|
|||
|
||||
|
||||
Test *tests;
|
||||
Test *lastTest;
|
||||
|
||||
};
|
||||
|
||||
|
|
|
|||
|
|
@ -29,9 +29,14 @@ endif
|
|||
|
||||
# The following lines specify the actual shared library to be built with libtool
|
||||
lib_LTLIBRARIES = libgtsam.la
|
||||
libgtsam_la_SOURCES =
|
||||
libgtsam_la_SOURCES =
|
||||
nodist_EXTRA_libgtsam_la_SOURCES = dummy.cxx
|
||||
libgtsam_la_LIBADD = $(SUBLIBS)
|
||||
libgtsam_la_LDFLAGS = -version-info 0:0:0
|
||||
libgtsam_la_LDFLAGS = -no-undefined -version-info 0:0:0
|
||||
|
||||
if USE_ACCELERATE_MACOS
|
||||
libgtsam_la_LDFLAGS += -Wl,/System/Library/Frameworks/Accelerate.framework/Accelerate
|
||||
endif
|
||||
|
||||
# Add these files to make sure they're in the distribution
|
||||
noinst_HEADERS = gtsam.h
|
||||
|
|
|
|||
|
|
@ -23,7 +23,8 @@ check_PROGRAMS += tests/testSPQRUtil
|
|||
endif
|
||||
|
||||
# Testing
|
||||
headers += Testable.h TestableAssertions.h numericalDerivative.h
|
||||
headers += Testable.h TestableAssertions.h numericalDerivative.h
|
||||
sources += timing.cpp
|
||||
|
||||
# Lie Groups
|
||||
headers += Lie.h Lie-inl.h lieProxies.h LieScalar.h
|
||||
|
|
@ -36,7 +37,7 @@ sources += DSFVector.cpp
|
|||
check_PROGRAMS += tests/testBTree tests/testDSF tests/testDSFVector
|
||||
|
||||
# Timing tests
|
||||
noinst_PROGRAMS = tests/timeMatrix
|
||||
noinst_PROGRAMS = tests/timeMatrix tests/timeublas
|
||||
|
||||
#----------------------------------------------------------------------------------------------------
|
||||
# Create a libtool library that is not installed
|
||||
|
|
@ -85,7 +86,7 @@ AM_LDFLAGS += -llapack
|
|||
endif
|
||||
|
||||
if USE_ACCELERATE_MACOS
|
||||
AM_LDFLAGS += -framework Accelerate
|
||||
AM_LDFLAGS += -Wl,/System/Library/Frameworks/Accelerate.framework/Accelerate
|
||||
endif
|
||||
|
||||
# rule to run an executable
|
||||
|
|
|
|||
|
|
@ -0,0 +1,98 @@
|
|||
/**
|
||||
* @file Matrix-inl.h
|
||||
* @brief
|
||||
* @author Richard Roberts
|
||||
* @created Sep 26, 2010
|
||||
*/
|
||||
|
||||
#pragma once
|
||||
|
||||
#include <gtsam/base/Matrix.h>
|
||||
|
||||
#include <iostream>
|
||||
|
||||
namespace gtsam {
|
||||
|
||||
using namespace std;
|
||||
|
||||
/* ************************************************************************* */
|
||||
/**
|
||||
* backSubstitute U*x=b
|
||||
* @param U an upper triangular matrix
|
||||
* @param b an RHS vector
|
||||
* @return the solution x of U*x=b
|
||||
*/
|
||||
template<class MatrixAE, class VectorAE>
|
||||
Vector backSubstituteUpper(const boost::numeric::ublas::matrix_expression<MatrixAE>& _U,
|
||||
const boost::numeric::ublas::vector_expression<VectorAE>& _b, bool unit=false) {
|
||||
const MatrixAE& U((const MatrixAE&)_U);
|
||||
const VectorAE& b((const VectorAE&)_b);
|
||||
size_t n = U.size2();
|
||||
#ifndef NDEBUG
|
||||
size_t m = U.size1();
|
||||
if (m!=n)
|
||||
throw invalid_argument("backSubstituteUpper: U must be square");
|
||||
#endif
|
||||
|
||||
Vector result(n);
|
||||
for (size_t i = n; i > 0; i--) {
|
||||
double zi = b(i-1);
|
||||
for (size_t j = i+1; j <= n; j++)
|
||||
zi -= U(i-1,j-1) * result(j-1);
|
||||
#ifndef NDEBUG
|
||||
if(!unit && fabs(U(i-1,i-1)) <= numeric_limits<double>::epsilon()) {
|
||||
stringstream ss;
|
||||
ss << "backSubstituteUpper: U is singular,\n";
|
||||
print(U, "U: ", ss);
|
||||
throw invalid_argument(ss.str());
|
||||
}
|
||||
#endif
|
||||
if (!unit) zi /= U(i-1,i-1);
|
||||
result(i-1) = zi;
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
/**
|
||||
* backSubstitute x'*U=b'
|
||||
* @param U an upper triangular matrix
|
||||
* @param b an RHS vector
|
||||
* @param unit, set true if unit triangular
|
||||
* @return the solution x of x'*U=b'
|
||||
* TODO: use boost
|
||||
*/
|
||||
template<class VectorAE, class MatrixAE>
|
||||
Vector backSubstituteUpper(const boost::numeric::ublas::vector_expression<VectorAE>& _b,
|
||||
const boost::numeric::ublas::matrix_expression<MatrixAE>& _U, bool unit=false) {
|
||||
const VectorAE& b((const VectorAE&)_b);
|
||||
const MatrixAE& U((const MatrixAE&)_U);
|
||||
size_t n = U.size2();
|
||||
#ifndef NDEBUG
|
||||
size_t m = U.size1();
|
||||
if (m!=n)
|
||||
throw invalid_argument("backSubstituteUpper: U must be square");
|
||||
#endif
|
||||
|
||||
Vector result(n);
|
||||
for (size_t i = 1; i <= n; i++) {
|
||||
double zi = b(i-1);
|
||||
for (size_t j = 1; j < i; j++)
|
||||
zi -= U(j-1,i-1) * result(j-1);
|
||||
#ifndef NDEBUG
|
||||
if(!unit && fabs(U(i-1,i-1)) <= numeric_limits<double>::epsilon()) {
|
||||
stringstream ss;
|
||||
ss << "backSubstituteUpper: U is singular,\n";
|
||||
print(U, "U: ", ss);
|
||||
throw invalid_argument(ss.str());
|
||||
}
|
||||
#endif
|
||||
if (!unit) zi /= U(i-1,i-1);
|
||||
result(i-1) = zi;
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
}
|
||||
151
base/Matrix.cpp
151
base/Matrix.cpp
|
|
@ -38,7 +38,7 @@ extern "C" {
|
|||
#include <suitesparse/ldl.h>
|
||||
#endif
|
||||
|
||||
#include <gtsam/base/Matrix.h>
|
||||
#include <gtsam/base/Matrix-inl.h>
|
||||
#include <gtsam/base/Vector.h>
|
||||
#include <gtsam/base/svdcmp.h>
|
||||
|
||||
|
|
@ -47,6 +47,12 @@ namespace ublas = boost::numeric::ublas;
|
|||
|
||||
namespace gtsam {
|
||||
|
||||
/** Explicit instantiations of template functions for standard types */
|
||||
template Vector backSubstituteUpper<Matrix,Vector>(const boost::numeric::ublas::matrix_expression<Matrix>& U,
|
||||
const boost::numeric::ublas::vector_expression<Vector>& b, bool unit);
|
||||
template Vector backSubstituteUpper<Vector,Matrix>(const boost::numeric::ublas::vector_expression<Vector>& b,
|
||||
const boost::numeric::ublas::matrix_expression<Matrix>& U, bool unit);
|
||||
|
||||
/* ************************************************************************* */
|
||||
Matrix Matrix_( size_t m, size_t n, const double* const data) {
|
||||
Matrix A(m,n);
|
||||
|
|
@ -115,17 +121,25 @@ bool equal_with_abs_tol(const Matrix& A, const Matrix& B, double tol) {
|
|||
size_t n1 = A.size2(), m1 = A.size1();
|
||||
size_t n2 = B.size2(), m2 = B.size1();
|
||||
|
||||
if(m1!=m2 || n1!=n2) return false;
|
||||
bool equal = true;
|
||||
|
||||
for(size_t i=0; i<m1; i++)
|
||||
for(size_t j=0; j<n1; j++) {
|
||||
if(m1!=m2 || n1!=n2) equal = false;
|
||||
|
||||
for(size_t i=0; i<m1 && equal; i++)
|
||||
for(size_t j=0; j<n1 && equal; j++) {
|
||||
if(isnan(A(i,j)) xor isnan(B(i,j)))
|
||||
return false;
|
||||
if(fabs(A(i,j) - B(i,j)) > tol)
|
||||
return false;
|
||||
equal = false;
|
||||
else if(fabs(A(i,j) - B(i,j)) > tol)
|
||||
equal = false;
|
||||
}
|
||||
|
||||
return true;
|
||||
// if(!equal) {
|
||||
// cout << "not equal:" << endl;
|
||||
// print(A,"expected = ");
|
||||
// print(B,"actual = ");
|
||||
// }
|
||||
|
||||
return equal;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
|
|
@ -709,63 +723,63 @@ void householder(Matrix &A) {
|
|||
#endif
|
||||
#endif
|
||||
|
||||
/* ************************************************************************* */
|
||||
Vector backSubstituteUpper(const Matrix& U, const Vector& b, bool unit) {
|
||||
size_t n = U.size2();
|
||||
#ifndef NDEBUG
|
||||
size_t m = U.size1();
|
||||
if (m!=n)
|
||||
throw invalid_argument("backSubstituteUpper: U must be square");
|
||||
#endif
|
||||
///* ************************************************************************* */
|
||||
//Vector backSubstituteUpper(const Matrix& U, const Vector& b, bool unit) {
|
||||
// size_t n = U.size2();
|
||||
//#ifndef NDEBUG
|
||||
// size_t m = U.size1();
|
||||
// if (m!=n)
|
||||
// throw invalid_argument("backSubstituteUpper: U must be square");
|
||||
//#endif
|
||||
//
|
||||
// Vector result(n);
|
||||
// for (size_t i = n; i > 0; i--) {
|
||||
// double zi = b(i-1);
|
||||
// for (size_t j = i+1; j <= n; j++)
|
||||
// zi -= U(i-1,j-1) * result(j-1);
|
||||
//#ifndef NDEBUG
|
||||
// if(!unit && fabs(U(i-1,i-1)) <= numeric_limits<double>::epsilon()) {
|
||||
// stringstream ss;
|
||||
// ss << "backSubstituteUpper: U is singular,\n";
|
||||
// print(U, "U: ", ss);
|
||||
// throw invalid_argument(ss.str());
|
||||
// }
|
||||
//#endif
|
||||
// if (!unit) zi /= U(i-1,i-1);
|
||||
// result(i-1) = zi;
|
||||
// }
|
||||
//
|
||||
// return result;
|
||||
//}
|
||||
|
||||
Vector result(n);
|
||||
for (size_t i = n; i > 0; i--) {
|
||||
double zi = b(i-1);
|
||||
for (size_t j = i+1; j <= n; j++)
|
||||
zi -= U(i-1,j-1) * result(j-1);
|
||||
#ifndef NDEBUG
|
||||
if(!unit && fabs(U(i-1,i-1)) <= numeric_limits<double>::epsilon()) {
|
||||
stringstream ss;
|
||||
ss << "backSubstituteUpper: U is singular,\n";
|
||||
print(U, "U: ", ss);
|
||||
throw invalid_argument(ss.str());
|
||||
}
|
||||
#endif
|
||||
if (!unit) zi /= U(i-1,i-1);
|
||||
result(i-1) = zi;
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
Vector backSubstituteUpper(const Vector& b, const Matrix& U, bool unit) {
|
||||
size_t n = U.size2();
|
||||
#ifndef NDEBUG
|
||||
size_t m = U.size1();
|
||||
if (m!=n)
|
||||
throw invalid_argument("backSubstituteUpper: U must be square");
|
||||
#endif
|
||||
|
||||
Vector result(n);
|
||||
for (size_t i = 1; i <= n; i++) {
|
||||
double zi = b(i-1);
|
||||
for (size_t j = 1; j < i; j++)
|
||||
zi -= U(j-1,i-1) * result(j-1);
|
||||
#ifndef NDEBUG
|
||||
if(!unit && fabs(U(i-1,i-1)) <= numeric_limits<double>::epsilon()) {
|
||||
stringstream ss;
|
||||
ss << "backSubstituteUpper: U is singular,\n";
|
||||
print(U, "U: ", ss);
|
||||
throw invalid_argument(ss.str());
|
||||
}
|
||||
#endif
|
||||
if (!unit) zi /= U(i-1,i-1);
|
||||
result(i-1) = zi;
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
///* ************************************************************************* */
|
||||
//Vector backSubstituteUpper(const Vector& b, const Matrix& U, bool unit) {
|
||||
// size_t n = U.size2();
|
||||
//#ifndef NDEBUG
|
||||
// size_t m = U.size1();
|
||||
// if (m!=n)
|
||||
// throw invalid_argument("backSubstituteUpper: U must be square");
|
||||
//#endif
|
||||
//
|
||||
// Vector result(n);
|
||||
// for (size_t i = 1; i <= n; i++) {
|
||||
// double zi = b(i-1);
|
||||
// for (size_t j = 1; j < i; j++)
|
||||
// zi -= U(j-1,i-1) * result(j-1);
|
||||
//#ifndef NDEBUG
|
||||
// if(!unit && fabs(U(i-1,i-1)) <= numeric_limits<double>::epsilon()) {
|
||||
// stringstream ss;
|
||||
// ss << "backSubstituteUpper: U is singular,\n";
|
||||
// print(U, "U: ", ss);
|
||||
// throw invalid_argument(ss.str());
|
||||
// }
|
||||
//#endif
|
||||
// if (!unit) zi /= U(i-1,i-1);
|
||||
// result(i-1) = zi;
|
||||
// }
|
||||
//
|
||||
// return result;
|
||||
//}
|
||||
|
||||
/* ************************************************************************* */
|
||||
Vector backSubstituteLower(const Matrix& L, const Vector& b, bool unit) {
|
||||
|
|
@ -885,6 +899,15 @@ void vector_scale_inplace(const Vector& v, Matrix& A) {
|
|||
}
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
// row scaling, in-place
|
||||
void vector_scale_inplace(const Vector& v, MatrixColMajor& A) {
|
||||
size_t m = A.size1(); size_t n = A.size2();
|
||||
double *Aij = A.data().begin();
|
||||
for (size_t j=0; j<n; ++j) // loop over rows
|
||||
for (size_t i=0; i<m; ++i, ++Aij) (*Aij) *= v(i);
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
// row scaling
|
||||
Matrix vector_scale(const Vector& v, const Matrix& A) {
|
||||
|
|
|
|||
|
|
@ -24,6 +24,7 @@
|
|||
|
||||
#if ! defined (MEX_H)
|
||||
typedef boost::numeric::ublas::matrix<double> Matrix;
|
||||
typedef boost::numeric::ublas::matrix<double, boost::numeric::ublas::column_major> MatrixColMajor;
|
||||
#endif
|
||||
|
||||
namespace gtsam {
|
||||
|
|
@ -274,7 +275,9 @@ void householder(Matrix &A);
|
|||
* @return the solution x of U*x=b
|
||||
* TODO: use boost
|
||||
*/
|
||||
Vector backSubstituteUpper(const Matrix& U, const Vector& b, bool unit=false);
|
||||
template<class MatrixAE, class VectorAE>
|
||||
Vector backSubstituteUpper(const boost::numeric::ublas::matrix_expression<MatrixAE>& U,
|
||||
const boost::numeric::ublas::vector_expression<VectorAE>& b, bool unit=false);
|
||||
|
||||
/**
|
||||
* backSubstitute x'*U=b'
|
||||
|
|
@ -284,7 +287,9 @@ Vector backSubstituteUpper(const Matrix& U, const Vector& b, bool unit=false);
|
|||
* @return the solution x of x'*U=b'
|
||||
* TODO: use boost
|
||||
*/
|
||||
Vector backSubstituteUpper(const Vector& b, const Matrix& U, bool unit=false);
|
||||
template<class VectorAE, class MatrixAE>
|
||||
Vector backSubstituteUpper(const boost::numeric::ublas::vector_expression<VectorAE>& b,
|
||||
const boost::numeric::ublas::matrix_expression<MatrixAE>& U, bool unit=false);
|
||||
|
||||
/**
|
||||
* backSubstitute L*x=b
|
||||
|
|
@ -321,6 +326,7 @@ Matrix collect(size_t nrMatrices, ...);
|
|||
* Arguments (Matrix, Vector) scales the columns,
|
||||
* (Vector, Matrix) scales the rows
|
||||
*/
|
||||
void vector_scale_inplace(const Vector& v, MatrixColMajor& A); // row
|
||||
void vector_scale_inplace(const Vector& v, Matrix& A); // row
|
||||
Matrix vector_scale(const Vector& v, const Matrix& A); // row
|
||||
Matrix vector_scale(const Matrix& A, const Vector& v); // column
|
||||
|
|
|
|||
|
|
@ -6,10 +6,14 @@
|
|||
* Description: the utility functions for SPQR
|
||||
*/
|
||||
|
||||
#include <map>
|
||||
#include <gtsam/base/timing.h>
|
||||
#include <gtsam/base/SPQRUtil.h>
|
||||
#include <boost/numeric/ublas/matrix.hpp>
|
||||
#include <boost/numeric/ublas/matrix_proxy.hpp>
|
||||
#include <boost/numeric/ublas/triangular.hpp>
|
||||
|
||||
using namespace std;
|
||||
namespace ublas = boost::numeric::ublas;
|
||||
|
||||
#ifdef GT_USE_LAPACK
|
||||
namespace gtsam {
|
||||
|
|
@ -85,34 +89,31 @@ namespace gtsam {
|
|||
|
||||
/* ************************************************************************* */
|
||||
void householder_spqr(Matrix &A, long* Stair) {
|
||||
|
||||
tic("householder_spqr");
|
||||
|
||||
long m = A.size1();
|
||||
long n = A.size2();
|
||||
|
||||
bool allocedStair = false;
|
||||
if (Stair == NULL) {
|
||||
allocedStair = true;
|
||||
Stair = new long[n];
|
||||
for(int j=0; j<n; j++)
|
||||
Stair[j] = m;
|
||||
}
|
||||
|
||||
|
||||
// YDJ: allocate buffer if m*n grows more than 2M bytes
|
||||
const int sz = 250000; // the stack is limited to 2M, o
|
||||
const int mn = m*n ;
|
||||
double buf[sz] ;
|
||||
double *a ;
|
||||
if ( mn > sz ) a = new double [mn] ;
|
||||
else a = buf ;
|
||||
|
||||
tic("householder_spqr: row->col");
|
||||
// convert from row major to column major
|
||||
int k = 0;
|
||||
for(int j=0; j<n; j++)
|
||||
for(int i=0; i<m; i++, k++)
|
||||
a[k] = A(i,j);
|
||||
ublas::matrix<double, ublas::column_major> Acolwise(A);
|
||||
double *a = Acolwise.data().begin();
|
||||
toc("householder_spqr: row->col");
|
||||
|
||||
tic("householder_spqr: spqr_front");
|
||||
long npiv = min(m,n);
|
||||
double tol = -1; long ntol = -1; // no tolerance is used
|
||||
long fchunk = m < 32 ? m : 32;
|
||||
char Rdead[npiv];
|
||||
char Rdead[npiv]; memset(Rdead, 0, sizeof(char)*npiv);
|
||||
double Tau[n];
|
||||
long b = min(fchunk, min(n, m));
|
||||
double W[b*(n+b)];
|
||||
|
|
@ -122,11 +123,32 @@ namespace gtsam {
|
|||
cholmod_common cc;
|
||||
cholmod_l_start(&cc);
|
||||
|
||||
// todo: do something with the rank
|
||||
long rank = spqr_front<double>(m, n, npiv, tol, ntol, fchunk,
|
||||
a, Stair, Rdead, Tau, W, &wscale, &wssq, &cc);
|
||||
toc("householder_spqr: spqr_front");
|
||||
|
||||
//#ifndef NDEBUG
|
||||
for(long j=0; j<npiv; ++j)
|
||||
if(Rdead[j]) {
|
||||
cout << "In householder_spqr, aborting because some columns were found to be\n"
|
||||
"numerically linearly-dependent and we cannot handle this case yet." << endl;
|
||||
print(A, "The matrix being factored was\n");
|
||||
ublas::matrix_range<ublas::matrix<double,ublas::column_major> > Acolsub(
|
||||
ublas::project(Acolwise, ublas::range(0, min(m,n)), ublas::range(0,n)));
|
||||
print(Matrix(ublas::triangular_adaptor<typeof(Acolsub), ublas::upper>(Acolsub)), "and the result was\n");
|
||||
cout << "The following columns are \"dead\":";
|
||||
for(long k=0; k<npiv; ++k)
|
||||
if(Rdead[k]) cout << " " << k;
|
||||
cout << endl;
|
||||
exit(1);
|
||||
}
|
||||
//#endif
|
||||
|
||||
tic("householder_spqr: col->row");
|
||||
long k0 = 0;
|
||||
long j0;
|
||||
int k;
|
||||
memset(A.data().begin(), 0, m*n*sizeof(double));
|
||||
for(long j=0; j<n; j++, k0+=m) {
|
||||
k = k0;
|
||||
|
|
@ -135,10 +157,68 @@ namespace gtsam {
|
|||
A(i,j) = a[k];
|
||||
}
|
||||
|
||||
if ( mn > sz ) delete [] a;
|
||||
// ublas::matrix_range<ublas::matrix<double,ublas::column_major> > Acolsub(
|
||||
// ublas::project(Acolwise, ublas::range(0, min(m,n)), ublas::range(0,n)));
|
||||
// ublas::matrix_range<Matrix> Asub(ublas::project(A, ublas::range(0, min(m,n)), ublas::range(0,n)));
|
||||
// ublas::noalias(Asub) = ublas::triangular_adaptor<typeof(Acolsub), ublas::upper>(Acolsub);
|
||||
|
||||
toc("householder_spqr: col->row");
|
||||
|
||||
delete []Stair;
|
||||
cholmod_l_finish(&cc);
|
||||
|
||||
if(allocedStair) delete[] Stair;
|
||||
|
||||
toc("householder_spqr");
|
||||
}
|
||||
|
||||
void householder_spqr_colmajor(ublas::matrix<double, ublas::column_major>& A, long *Stair) {
|
||||
tic("householder_spqr");
|
||||
|
||||
long m = A.size1();
|
||||
long n = A.size2();
|
||||
|
||||
assert(Stair != NULL);
|
||||
|
||||
tic("householder_spqr: spqr_front");
|
||||
long npiv = min(m,n);
|
||||
double tol = -1; long ntol = -1; // no tolerance is used
|
||||
long fchunk = m < 32 ? m : 32;
|
||||
char Rdead[npiv]; memset(Rdead, 0, sizeof(char)*npiv);
|
||||
double Tau[n];
|
||||
long b = min(fchunk, min(n, m));
|
||||
double W[b*(n+b)];
|
||||
double wscale = 0;
|
||||
double wssq = 0;
|
||||
|
||||
cholmod_common cc;
|
||||
cholmod_l_start(&cc);
|
||||
|
||||
// todo: do something with the rank
|
||||
long rank = spqr_front<double>(m, n, npiv, tol, ntol, fchunk,
|
||||
A.data().begin(), Stair, Rdead, Tau, W, &wscale, &wssq, &cc);
|
||||
toc("householder_spqr: spqr_front");
|
||||
|
||||
//#ifndef NDEBUG
|
||||
for(long j=0; j<npiv; ++j)
|
||||
if(Rdead[j]) {
|
||||
cout << "In householder_spqr, aborting because some columns were found to be\n"
|
||||
"numerically linearly-dependent and we cannot handle this case yet." << endl;
|
||||
print(A, "The matrix being factored was\n");
|
||||
ublas::matrix_range<ublas::matrix<double,ublas::column_major> > Acolsub(
|
||||
ublas::project(A, ublas::range(0, min(m,n)), ublas::range(0,n)));
|
||||
print(Matrix(ublas::triangular_adaptor<typeof(Acolsub), ublas::upper>(Acolsub)), "and the result was\n");
|
||||
cout << "The following columns are \"dead\":";
|
||||
for(long k=0; k<npiv; ++k)
|
||||
if(Rdead[k]) cout << " " << k;
|
||||
cout << endl;
|
||||
exit(1);
|
||||
}
|
||||
//#endif
|
||||
|
||||
cholmod_l_finish(&cc);
|
||||
|
||||
toc("householder_spqr");
|
||||
|
||||
}
|
||||
|
||||
} // namespace gtsam
|
||||
|
|
|
|||
|
|
@ -21,5 +21,6 @@ namespace gtsam {
|
|||
/** Householder tranformation, zeros below diagonal */
|
||||
void householder_spqr(Matrix &A, long* Stair = NULL);
|
||||
|
||||
void householder_spqr_colmajor(boost::numeric::ublas::matrix<double, boost::numeric::ublas::column_major>& A, long *Stair);
|
||||
}
|
||||
#endif
|
||||
|
|
|
|||
|
|
@ -24,20 +24,30 @@ namespace gtsam {
|
|||
class Testable {
|
||||
|
||||
public:
|
||||
virtual ~Testable() {}
|
||||
// virtual ~Testable() {}
|
||||
|
||||
/**
|
||||
* print
|
||||
* @param s optional string naming the object
|
||||
*/
|
||||
virtual void print(const std::string& name) const = 0;
|
||||
// virtual void print(const std::string& name) const = 0;
|
||||
|
||||
/**
|
||||
* equality up to tolerance
|
||||
* tricky to implement, see NonlinearFactor1 for an example
|
||||
* equals is not supposed to print out *anything*, just return true|false
|
||||
*/
|
||||
virtual bool equals(const Derived& expected, double tol) const = 0;
|
||||
// virtual bool equals(const Derived& expected, double tol) const = 0;
|
||||
|
||||
private:
|
||||
static void concept(const Derived& d) {
|
||||
const Derived *const_derived = static_cast<Derived*>(&d);
|
||||
const_derived->print(std::string());
|
||||
const_derived->print();
|
||||
bool r;
|
||||
r = const_derived->equals(*const_derived, 1.0);
|
||||
r = const_derived->equals(*const_derived);
|
||||
}
|
||||
|
||||
}; // Testable class
|
||||
|
||||
|
|
|
|||
|
|
@ -7,26 +7,48 @@
|
|||
#pragma once
|
||||
|
||||
#include <vector>
|
||||
#include <iostream>
|
||||
#include <boost/foreach.hpp>
|
||||
#include <gtsam/base/types.h>
|
||||
#include <gtsam/base/Testable.h>
|
||||
|
||||
namespace gtsam {
|
||||
|
||||
/**
|
||||
* Equals testing for basic types
|
||||
*/
|
||||
bool assert_equal(const varid_t& expected, const varid_t& actual, double tol = 0.0) {
|
||||
if(expected != actual) {
|
||||
std::cout << "Not equal:\nexpected: " << expected << "\nactual: " << actual << std::endl;
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Version of assert_equals to work with vectors
|
||||
*/
|
||||
template<class V>
|
||||
bool assert_equal(const std::vector<V>& expected, const std::vector<V>& actual, double tol = 1e-9) {
|
||||
if (expected.size() != actual.size()) {
|
||||
printf("Sizes not equal:\n");
|
||||
printf("expected size: %lu\n", expected.size());
|
||||
printf("actual size: %lu\n", actual.size());
|
||||
return false;
|
||||
}
|
||||
size_t i = 0;
|
||||
BOOST_FOREACH(const V& a, expected) {
|
||||
if (!assert_equal(a, expected[i++], tol))
|
||||
return false;
|
||||
bool match = true;
|
||||
if (expected.size() != actual.size())
|
||||
match = false;
|
||||
if(match) {
|
||||
size_t i = 0;
|
||||
BOOST_FOREACH(const V& a, expected) {
|
||||
if (!assert_equal(a, expected[i++], tol)) {
|
||||
match = false;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
if(!match) {
|
||||
std::cout << "expected: ";
|
||||
BOOST_FOREACH(const V& a, expected) { std::cout << a << " "; }
|
||||
std::cout << "\nactual: ";
|
||||
BOOST_FOREACH(const V& a, actual) { std::cout << a << " "; }
|
||||
std::cout << std::endl;
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
|
|
|||
|
|
@ -0,0 +1,272 @@
|
|||
/**
|
||||
* @file blockMatrices.h
|
||||
* @brief Access to matrices via blocks of pre-defined sizes. Used in GaussianFactor and GaussianConditional.
|
||||
* @author Richard Roberts
|
||||
* @created Sep 18, 2010
|
||||
*/
|
||||
#pragma once
|
||||
|
||||
#include <vector>
|
||||
#include <boost/numeric/ublas/matrix_proxy.hpp>
|
||||
|
||||
namespace gtsam {
|
||||
|
||||
/** This is a wrapper around ublas::matrix_column that stores a copy of a
|
||||
* ublas::matrix_range. This does not copy the matrix data itself. The
|
||||
* purpose of this class is to be allow a column-of-a-range to be returned
|
||||
* from a function, given that a standard column-of-a-range just stores a
|
||||
* reference to the range. The stored range stores a reference to the original
|
||||
* matrix.
|
||||
*/
|
||||
template<class Matrix>
|
||||
class BlockColumn : public boost::numeric::ublas::vector_expression<BlockColumn<Matrix> > {
|
||||
protected:
|
||||
typedef boost::numeric::ublas::matrix_range<Matrix> Range;
|
||||
typedef boost::numeric::ublas::matrix_column<Range> Base;
|
||||
Range range_;
|
||||
Base base_;
|
||||
public:
|
||||
typedef BlockColumn<Matrix> Self;
|
||||
typedef typename Base::matrix_type matrix_type;
|
||||
typedef typename Base::size_type size_type;
|
||||
typedef typename Base::difference_type difference_type;
|
||||
typedef typename Base::value_type value_type;
|
||||
typedef typename Base::const_reference const_reference;
|
||||
typedef typename Base::reference reference;
|
||||
typedef typename Base::storage_category storage_category;
|
||||
typedef Self closure_type;
|
||||
typedef const Self const_closure_type;
|
||||
typedef typename Base::iterator iterator;
|
||||
typedef typename Base::const_iterator const_iterator;
|
||||
|
||||
BlockColumn(const boost::numeric::ublas::matrix_range<Matrix>& block, size_t column) :
|
||||
range_(block), base_(range_, column) {}
|
||||
BlockColumn(const BlockColumn& rhs) :
|
||||
range_(rhs.range_), base_(rhs.base_) {}
|
||||
BlockColumn& operator=(const BlockColumn& rhs) { base_.operator=(rhs.base_); return *this; }
|
||||
template<class AE> BlockColumn& operator=(const boost::numeric::ublas::vector_expression<AE>& rhs) { base_.operator=(rhs); return *this; }
|
||||
typename Base::size_type size() const { return base_.size(); }
|
||||
const typename Base::matrix_closure_type& data() const { return base_.data(); }
|
||||
typename Base::matrix_closure_type& data() { return base_.data(); }
|
||||
typename Base::const_reference operator()(typename Base::size_type i) const { return base_(i); }
|
||||
typename Base::reference operator()(typename Base::size_type i) { return base_(i); }
|
||||
BlockColumn& assign_temporary(BlockColumn& rhs) { base_.assign_temporary(rhs.base_); return *this; }
|
||||
BlockColumn& assign_temporary(Base& rhs) { base_.assign_temporary(rhs); return *this; }
|
||||
bool same_closure(const BlockColumn& rhs) { return base_.same_closure(rhs.base_); }
|
||||
bool same_closure(const Base& rhs) { return base_.same_closure(rhs); }
|
||||
template<class AE> BlockColumn& assign(const boost::numeric::ublas::vector_expression<AE>& rhs) { base_.assign(rhs); return *this; }
|
||||
iterator begin() { return base_.begin(); }
|
||||
const_iterator begin() const { return base_.begin(); }
|
||||
iterator end() { return base_.end(); }
|
||||
const_iterator end() const { return base_.end(); }
|
||||
};
|
||||
|
||||
/**
|
||||
* This class stores a *reference* to a matrix and allows it to be accessed as
|
||||
* a collection of vertical blocks. It also provides for accessing individual
|
||||
* columns from those blocks. When constructed or resized, the caller must
|
||||
* provide the dimensions of the blocks, as well as an underlying matrix
|
||||
* storage object. This class will resize the underlying matrix such that it
|
||||
* is consistent with the given block dimensions.
|
||||
*
|
||||
* This class also has three parameters that can be changed after construction
|
||||
* that change the
|
||||
*/
|
||||
template<class Matrix>
|
||||
class VerticalBlockView {
|
||||
public:
|
||||
typedef Matrix matrix_type;
|
||||
typedef typename boost::numeric::ublas::matrix_range<Matrix> block_type;
|
||||
typedef typename boost::numeric::ublas::matrix_range<const Matrix> const_block_type;
|
||||
typedef BlockColumn<Matrix> column_type;
|
||||
typedef BlockColumn<const Matrix> const_column_type;
|
||||
|
||||
protected:
|
||||
matrix_type& matrix_;
|
||||
std::vector<size_t> variableColOffsets_;
|
||||
size_t rowStart_;
|
||||
size_t rowEnd_;
|
||||
size_t blockStart_;
|
||||
|
||||
public:
|
||||
/** Construct from an empty matrix (asserts that the matrix is empty) */
|
||||
VerticalBlockView(matrix_type& matrix);
|
||||
|
||||
/** Construct from iterators over the sizes of each vertical block */
|
||||
template<typename Iterator>
|
||||
VerticalBlockView(matrix_type& matrix, Iterator firstBlockDim, Iterator lastBlockDim);
|
||||
|
||||
/** Construct from a vector of the sizes of each vertical block, resize the
|
||||
* matrix so that its height is matrixNewHeight and its width fits the given
|
||||
* block dimensions.
|
||||
*/
|
||||
template<typename Iterator>
|
||||
VerticalBlockView(matrix_type& matrix, Iterator firstBlockDim, Iterator lastBlockDim, size_t matrixNewHeight);
|
||||
|
||||
size_t size1() const { checkInvariants(); return rowEnd_ - rowStart_; }
|
||||
size_t size2() const { checkInvariants(); return variableColOffsets_.back() - variableColOffsets_[blockStart_]; }
|
||||
size_t nBlocks() const { checkInvariants(); return variableColOffsets_.size() - 1 - blockStart_; }
|
||||
|
||||
block_type operator()(size_t block) {
|
||||
return range(block, block+1);
|
||||
}
|
||||
|
||||
const_block_type operator()(size_t block) const {
|
||||
return range(block, block+1);
|
||||
}
|
||||
|
||||
block_type range(size_t startBlock, size_t endBlock) {
|
||||
checkInvariants();
|
||||
size_t actualStartBlock = startBlock + blockStart_;
|
||||
size_t actualEndBlock = endBlock + blockStart_;
|
||||
checkBlock(actualStartBlock);
|
||||
assert(actualEndBlock < variableColOffsets_.size());
|
||||
return block_type(matrix_,
|
||||
boost::numeric::ublas::range(rowStart_, rowEnd_),
|
||||
boost::numeric::ublas::range(variableColOffsets_[actualStartBlock], variableColOffsets_[actualEndBlock]));
|
||||
}
|
||||
|
||||
const_block_type range(size_t startBlock, size_t endBlock) const {
|
||||
checkInvariants();
|
||||
size_t actualStartBlock = startBlock + blockStart_;
|
||||
size_t actualEndBlock = endBlock + blockStart_;
|
||||
checkBlock(actualStartBlock);
|
||||
assert(actualEndBlock < variableColOffsets_.size());
|
||||
return const_block_type(matrix_,
|
||||
boost::numeric::ublas::range(rowStart_, rowEnd_),
|
||||
boost::numeric::ublas::range(variableColOffsets_[actualStartBlock], variableColOffsets_[actualEndBlock]));
|
||||
}
|
||||
|
||||
column_type column(size_t block, size_t columnOffset) {
|
||||
checkInvariants();
|
||||
size_t actualBlock = block + blockStart_;
|
||||
checkBlock(actualBlock);
|
||||
assert(variableColOffsets_[actualBlock] + columnOffset < matrix_.size2());
|
||||
block_type blockMat(operator()(block));
|
||||
return column_type(blockMat, columnOffset);
|
||||
}
|
||||
|
||||
const_column_type column(size_t block, size_t columnOffset) const {
|
||||
checkInvariants();
|
||||
size_t actualBlock = block + blockStart_;
|
||||
checkBlock(actualBlock);
|
||||
assert(variableColOffsets_[actualBlock] + columnOffset < matrix_.size2());
|
||||
const_block_type blockMat(operator()(block));
|
||||
return const_column_type(blockMat, columnOffset);
|
||||
}
|
||||
|
||||
size_t offset(size_t block) const {
|
||||
checkInvariants();
|
||||
size_t actualBlock = block + blockStart_;
|
||||
checkBlock(actualBlock);
|
||||
return variableColOffsets_[actualBlock] - variableColOffsets_[blockStart_];
|
||||
}
|
||||
|
||||
size_t& rowStart() { return rowStart_; }
|
||||
size_t& rowEnd() { return rowEnd_; }
|
||||
size_t& firstBlock() { return blockStart_; }
|
||||
size_t rowStart() const { return rowStart_; }
|
||||
size_t rowEnd() const { return rowEnd_; }
|
||||
size_t firstBlock() const { return blockStart_; }
|
||||
|
||||
/** Copy the block structure and resize the underlying matrix, but do not
|
||||
* copy the matrix data.
|
||||
*/
|
||||
template<class RhsMatrix>
|
||||
void copyStructureFrom(const VerticalBlockView<RhsMatrix>& rhs);
|
||||
|
||||
/** Copy the block structure and matrix data, resizing the underlying matrix
|
||||
* in the process. This can deal with assigning between different types of
|
||||
* underlying matrices, as long as the matrices themselves are assignable.
|
||||
* To avoid creating a temporary matrix this assumes no aliasing, i.e. that
|
||||
* no part of the underlying matrices refer to the same memory!
|
||||
*/
|
||||
template<class RhsMatrix>
|
||||
VerticalBlockView<Matrix>& assignNoalias(const VerticalBlockView<RhsMatrix>& rhs);
|
||||
|
||||
protected:
|
||||
/// Sanity-check the view's internal consistency (effective in debug builds only).
void checkInvariants() const {
  // The last entry of the offset table must equal the matrix's column count.
  assert(matrix_.size2() == variableColOffsets_.back());
  assert(blockStart_ < variableColOffsets_.size());
  assert(rowStart_ <= matrix_.size1());
  assert(rowEnd_ <= matrix_.size1());
  assert(rowStart_ <= rowEnd_);
}
|
||||
|
||||
/// Verify that an ABSOLUTE block index is valid (effective in debug builds only).
void checkBlock(size_t block) const {
  assert(matrix_.size2() == variableColOffsets_.back());
  // The offset table has nBlocks+1 entries, so the last valid block is size()-2.
  assert(block < variableColOffsets_.size()-1);
  assert(variableColOffsets_[block] < matrix_.size2() && variableColOffsets_[block+1] <= matrix_.size2());
}
|
||||
|
||||
template<typename Iterator>
|
||||
void fillOffsets(Iterator firstBlockDim, Iterator lastBlockDim) {
|
||||
variableColOffsets_.resize((lastBlockDim-firstBlockDim)+1);
|
||||
variableColOffsets_[0] = 0;
|
||||
size_t j=0;
|
||||
for(Iterator dim=firstBlockDim; dim!=lastBlockDim; ++dim) {
|
||||
variableColOffsets_[j+1] = variableColOffsets_[j] + *dim;
|
||||
++ j;
|
||||
}
|
||||
}
|
||||
|
||||
template<class RhsMatrix>
|
||||
friend class VerticalBlockView<Matrix>;
|
||||
};
|
||||
|
||||
/** Construct a block view over an existing matrix with no blocks defined;
 * rows span the full matrix and the offset table holds only the leading 0.
 * NOTE(review): checkInvariants() requires matrix.size2() == 0 here -- confirm
 * callers pass a zero-column matrix. */
template<class Matrix>
VerticalBlockView<Matrix>::VerticalBlockView(matrix_type& matrix) :
matrix_(matrix), rowStart_(0), rowEnd_(matrix_.size1()), blockStart_(0) {
  // Empty iterator range: fillOffsets only writes the initial 0 offset.
  fillOffsets((size_t*)0, (size_t*)0);
  checkInvariants();
}
|
||||
|
||||
/** Construct a block view over an existing matrix, building the column-offset
 * table from the sequence of block widths [firstBlockDim, lastBlockDim).
 * The view initially spans all rows of the matrix. */
template<class Matrix>
template<typename Iterator>
VerticalBlockView<Matrix>::VerticalBlockView(matrix_type& matrix, Iterator firstBlockDim, Iterator lastBlockDim) :
matrix_(matrix), rowStart_(0), rowEnd_(matrix_.size1()), blockStart_(0) {
  fillOffsets(firstBlockDim, lastBlockDim);
  checkInvariants();
}
|
||||
|
||||
/** Construct a block view, building the offset table from block widths and
 * resizing the underlying matrix to matrixNewHeight rows by the total block
 * width. Existing matrix data is NOT preserved (resize(..., false)). */
template<class Matrix>
template<typename Iterator>
VerticalBlockView<Matrix>::VerticalBlockView(matrix_type& matrix, Iterator firstBlockDim, Iterator lastBlockDim, size_t matrixNewHeight) :
matrix_(matrix), rowStart_(0), rowEnd_(matrixNewHeight), blockStart_(0) {
  fillOffsets(firstBlockDim, lastBlockDim);
  matrix_.resize(matrixNewHeight, variableColOffsets_.back(), false);
  checkInvariants();
}
|
||||
|
||||
/** Copy the block structure from rhs (taking rhs's firstBlock() offset into
 * account) and resize the underlying matrix to match; matrix DATA is not
 * copied. Afterwards this view spans the whole resized matrix. */
template<class Matrix>
template<class RhsMatrix>
void VerticalBlockView<Matrix>::copyStructureFrom(const VerticalBlockView<RhsMatrix>& rhs) {
  // Resize to rhs's active row count and total active column width; old data
  // is discarded (third argument false).
  matrix_.resize(rhs.rowEnd() - rhs.rowStart(), rhs.range(0, rhs.nBlocks()).size2(), false);
  if(rhs.blockStart_ == 0)
    // No offset in rhs: the offset table can be copied verbatim.
    variableColOffsets_ = rhs.variableColOffsets_;
  else {
    // Rebase the offsets so rhs's first active block becomes block 0 here.
    variableColOffsets_.resize(rhs.nBlocks() + 1);
    variableColOffsets_[0] = 0;
    size_t j=0;
    assert(rhs.variableColOffsets_.begin()+rhs.blockStart_ < rhs.variableColOffsets_.end()-1);
    // Accumulate the widths (differences of consecutive offsets) of rhs's
    // active blocks into a fresh zero-based offset table.
    for(std::vector<size_t>::const_iterator off=rhs.variableColOffsets_.begin()+rhs.blockStart_; off!=rhs.variableColOffsets_.end()-1; ++off) {
      variableColOffsets_[j+1] = variableColOffsets_[j] + (*(off+1) - *off);
      ++ j;
    }
  }
  // Reset the view to cover the entire resized matrix.
  rowStart_ = 0;
  rowEnd_ = matrix_.size1();
  blockStart_ = 0;
  checkInvariants();
}
|
||||
|
||||
/** Copy both block structure and matrix data from rhs, resizing this view's
 * underlying matrix in the process. Assumes the two underlying matrices do
 * not alias (see class-level documentation of this precondition). */
template<class Matrix>
template<class RhsMatrix>
VerticalBlockView<Matrix>& VerticalBlockView<Matrix>::assignNoalias(const VerticalBlockView<RhsMatrix>& rhs) {
  copyStructureFrom(rhs);
  // noalias() suppresses the temporary ublas would otherwise create; safe
  // only because the caller guarantees the matrices share no memory.
  boost::numeric::ublas::noalias(matrix_) = rhs.range(0, rhs.nBlocks());
  return *this;
}
|
||||
|
||||
|
||||
}
|
||||
|
|
@ -101,6 +101,7 @@ TEST(SPQRUtil, houseHolder_spqr2)
|
|||
Matrix A1 = Matrix_(4, 7, data);
|
||||
long* Stair = MakeStairs(A1);
|
||||
householder_spqr(A1, Stair);
|
||||
delete[] Stair;
|
||||
CHECK(assert_equal(expected1, A1, 1e-3));
|
||||
}
|
||||
|
||||
|
|
@ -180,6 +181,7 @@ TEST(SPQRUtil, houseHolder_spqr4)
|
|||
|
||||
Matrix actualRstair = A;
|
||||
householder_spqr(actualRstair, Stair);
|
||||
delete[] Stair;
|
||||
|
||||
CHECK(assert_equal(expectedR, actualR, 1e-3));
|
||||
CHECK(assert_equal(expectedR, actualRstair, 1e-3));
|
||||
|
|
|
|||
|
|
@ -0,0 +1,272 @@
|
|||
/**
|
||||
* @file timeublas.cpp
|
||||
* @brief Tests to help determine which way of accomplishing something in ublas is faster
|
||||
* @author Richard Roberts
|
||||
* @created Sep 18, 2010
|
||||
*/
|
||||
|
||||
#include <boost/numeric/ublas/matrix.hpp>
|
||||
#include <boost/numeric/ublas/matrix_proxy.hpp>
|
||||
#include <boost/numeric/ublas/triangular.hpp>
|
||||
#include <boost/numeric/ublas/io.hpp>
|
||||
#include <boost/random.hpp>
|
||||
#include <boost/timer.hpp>
|
||||
#include <boost/format.hpp>
|
||||
#include <boost/lambda/lambda.hpp>
|
||||
#include <boost/foreach.hpp>
|
||||
|
||||
#include <iostream>
|
||||
#include <vector>
|
||||
#include <utility>
|
||||
|
||||
using namespace std;
|
||||
namespace ublas = boost::numeric::ublas;
|
||||
using boost::timer;
|
||||
using boost::format;
|
||||
using namespace boost::lambda;
|
||||
|
||||
static boost::variate_generator<boost::mt19937, boost::uniform_real<> > rng(boost::mt19937(), boost::uniform_real<>(-1.0, 0.0));
|
||||
typedef ublas::matrix<double> matrix;
|
||||
typedef ublas::matrix_range<matrix> matrix_range;
|
||||
using ublas::range;
|
||||
using ublas::triangular_matrix;
|
||||
|
||||
int main(int argc, char* argv[]) {
|
||||
|
||||
if(false) {
|
||||
cout << "\nTiming matrix_range:" << endl;
|
||||
|
||||
// We use volatile here to make these appear to the optimizing compiler as
|
||||
// if their values are only known at run-time.
|
||||
volatile size_t m=500;
|
||||
volatile size_t n=300;
|
||||
volatile size_t nReps = 1000;
|
||||
assert(m > n);
|
||||
boost::variate_generator<boost::mt19937, boost::uniform_int<size_t> > rni(boost::mt19937(), boost::uniform_int<size_t>(0,m-1));
|
||||
boost::variate_generator<boost::mt19937, boost::uniform_int<size_t> > rnj(boost::mt19937(), boost::uniform_int<size_t>(0,n-1));
|
||||
matrix mat(m,n);
|
||||
matrix_range full(mat, range(0,m), range(0,n));
|
||||
matrix_range top(mat, range(0,n), range(0,n));
|
||||
matrix_range block(mat, range(m/4, m-m/4), range(n/4, n-n/4));
|
||||
|
||||
cout << format(" Basic: %1%x%2%\n") % m % n;
|
||||
cout << format(" Full: mat(%1%:%2%, %3%:%4%)\n") % 0 % m % 0 % n;
|
||||
cout << format(" Top: mat(%1%:%2%, %3%:%4%)\n") % 0 % n % 0 % n;
|
||||
cout << format(" Block: mat(%1%:%2%, %3%:%4%)\n") % size_t(m/4) % size_t(m-m/4) % size_t(n/4) % size_t(n-n/4);
|
||||
cout << endl;
|
||||
|
||||
{
|
||||
timer tim;
|
||||
double basicTime, fullTime, topTime, blockTime;
|
||||
|
||||
cout << "Row-major matrix, row-major assignment:" << endl;
|
||||
|
||||
// Do a few initial assignments to let any cache effects stabilize
|
||||
for(size_t rep=0; rep<1000; ++rep)
|
||||
for(size_t i=0; i<mat.size1(); ++i)
|
||||
for(size_t j=0; j<mat.size2(); ++j)
|
||||
mat(i,j) = rng();
|
||||
|
||||
tim.restart();
|
||||
for(size_t rep=0; rep<nReps; ++rep)
|
||||
for(size_t i=0; i<mat.size1(); ++i)
|
||||
for(size_t j=0; j<mat.size2(); ++j)
|
||||
mat(i,j) = rng();
|
||||
basicTime = tim.elapsed();
|
||||
cout << format(" Basic: %1% mus/element") % double(1000000 * basicTime / double(mat.size1()*mat.size2()*nReps)) << endl;
|
||||
|
||||
tim.restart();
|
||||
for(size_t rep=0; rep<nReps; ++rep)
|
||||
for(size_t i=0; i<full.size1(); ++i)
|
||||
for(size_t j=0; j<full.size2(); ++j)
|
||||
full(i,j) = rng();
|
||||
fullTime = tim.elapsed();
|
||||
cout << format(" Full: %1% mus/element") % double(1000000 * fullTime / double(full.size1()*full.size2()*nReps)) << endl;
|
||||
|
||||
tim.restart();
|
||||
for(size_t rep=0; rep<nReps; ++rep)
|
||||
for(size_t i=0; i<top.size1(); ++i)
|
||||
for(size_t j=0; j<top.size2(); ++j)
|
||||
top(i,j) = rng();
|
||||
topTime = tim.elapsed();
|
||||
cout << format(" Top: %1% mus/element") % double(1000000 * topTime / double(top.size1()*top.size2()*nReps)) << endl;
|
||||
|
||||
tim.restart();
|
||||
for(size_t rep=0; rep<nReps; ++rep)
|
||||
for(size_t i=0; i<block.size1(); ++i)
|
||||
for(size_t j=0; j<block.size2(); ++j)
|
||||
block(i,j) = rng();
|
||||
blockTime = tim.elapsed();
|
||||
cout << format(" Block: %1% mus/element") % double(1000000 * blockTime / double(block.size1()*block.size2()*nReps)) << endl;
|
||||
|
||||
cout << endl;
|
||||
}
|
||||
|
||||
{
|
||||
timer tim;
|
||||
double basicTime, fullTime, topTime, blockTime;
|
||||
|
||||
cout << "Row-major matrix, column-major assignment:" << endl;
|
||||
|
||||
// Do a few initial assignments to let any cache effects stabilize
|
||||
for(size_t rep=0; rep<1000; ++rep)
|
||||
for(size_t j=0; j<mat.size2(); ++j)
|
||||
for(size_t i=0; i<mat.size1(); ++i)
|
||||
mat(i,j) = rng();
|
||||
|
||||
tim.restart();
|
||||
for(size_t rep=0; rep<nReps; ++rep)
|
||||
for(size_t j=0; j<mat.size2(); ++j)
|
||||
for(size_t i=0; i<mat.size1(); ++i)
|
||||
mat(i,j) = rng();
|
||||
basicTime = tim.elapsed();
|
||||
cout << format(" Basic: %1% mus/element") % double(1000000 * basicTime / double(mat.size1()*mat.size2()*nReps)) << endl;
|
||||
|
||||
tim.restart();
|
||||
for(size_t rep=0; rep<nReps; ++rep)
|
||||
for(size_t j=0; j<full.size2(); ++j)
|
||||
for(size_t i=0; i<full.size1(); ++i)
|
||||
full(i,j) = rng();
|
||||
fullTime = tim.elapsed();
|
||||
cout << format(" Full: %1% mus/element") % double(1000000 * fullTime / double(full.size1()*full.size2()*nReps)) << endl;
|
||||
|
||||
tim.restart();
|
||||
for(size_t rep=0; rep<nReps; ++rep)
|
||||
for(size_t j=0; j<top.size2(); ++j)
|
||||
for(size_t i=0; i<top.size1(); ++i)
|
||||
top(i,j) = rng();
|
||||
topTime = tim.elapsed();
|
||||
cout << format(" Top: %1% mus/element") % double(1000000 * topTime / double(top.size1()*top.size2()*nReps)) << endl;
|
||||
|
||||
tim.restart();
|
||||
for(size_t rep=0; rep<nReps; ++rep)
|
||||
for(size_t j=0; j<block.size2(); ++j)
|
||||
for(size_t i=0; i<block.size1(); ++i)
|
||||
block(i,j) = rng();
|
||||
blockTime = tim.elapsed();
|
||||
cout << format(" Block: %1% mus/element") % double(1000000 * blockTime / double(block.size1()*block.size2()*nReps)) << endl;
|
||||
|
||||
cout << endl;
|
||||
}
|
||||
|
||||
{
|
||||
timer tim;
|
||||
double basicTime, fullTime, topTime, blockTime;
|
||||
typedef pair<size_t,size_t> ij_t;
|
||||
vector<ij_t> ijs(100000);
|
||||
|
||||
cout << "Row-major matrix, random assignment:" << endl;
|
||||
|
||||
// Do a few initial assignments to let any cache effects stabilize
|
||||
for_each(ijs.begin(), ijs.end(), _1 = make_pair(rni(),rnj()));
|
||||
for(size_t rep=0; rep<1000; ++rep)
|
||||
BOOST_FOREACH(const ij_t& ij, ijs) { mat(ij.first, ij.second) = rng(); }
|
||||
|
||||
for_each(ijs.begin(), ijs.end(), _1 = make_pair(rni(),rnj()));
|
||||
for(size_t rep=0; rep<1000; ++rep)
|
||||
BOOST_FOREACH(const ij_t& ij, ijs) { mat(ij.first, ij.second) = rng(); }
|
||||
basicTime = tim.elapsed();
|
||||
cout << format(" Basic: %1% mus/element") % double(1000000 * basicTime / double(ijs.size()*nReps)) << endl;
|
||||
|
||||
for_each(ijs.begin(), ijs.end(), _1 = make_pair(rni(),rnj()));
|
||||
for(size_t rep=0; rep<1000; ++rep)
|
||||
BOOST_FOREACH(const ij_t& ij, ijs) { full(ij.first, ij.second) = rng(); }
|
||||
fullTime = tim.elapsed();
|
||||
cout << format(" Full: %1% mus/element") % double(1000000 * fullTime / double(ijs.size()*nReps)) << endl;
|
||||
|
||||
for_each(ijs.begin(), ijs.end(), _1 = make_pair(rni()%top.size1(),rnj()));
|
||||
for(size_t rep=0; rep<1000; ++rep)
|
||||
BOOST_FOREACH(const ij_t& ij, ijs) { top(ij.first, ij.second) = rng(); }
|
||||
topTime = tim.elapsed();
|
||||
cout << format(" Top: %1% mus/element") % double(1000000 * topTime / double(ijs.size()*nReps)) << endl;
|
||||
|
||||
for_each(ijs.begin(), ijs.end(), _1 = make_pair(rni()%block.size1(),rnj()%block.size2()));
|
||||
for(size_t rep=0; rep<1000; ++rep)
|
||||
BOOST_FOREACH(const ij_t& ij, ijs) { block(ij.first, ij.second) = rng(); }
|
||||
blockTime = tim.elapsed();
|
||||
cout << format(" Block: %1% mus/element") % double(1000000 * blockTime / double(ijs.size()*nReps)) << endl;
|
||||
|
||||
cout << endl;
|
||||
}
|
||||
}
|
||||
|
||||
if(true) {
|
||||
cout << "\nTesting square triangular matrices:" << endl;
|
||||
|
||||
typedef triangular_matrix<double, ublas::upper, ublas::column_major> triangular;
|
||||
typedef ublas::matrix<double, ublas::column_major> matrix;
|
||||
|
||||
triangular tri(5,5);
|
||||
|
||||
matrix mat(5,5);
|
||||
for(size_t j=0; j<mat.size2(); ++j)
|
||||
for(size_t i=0; i<mat.size1(); ++i)
|
||||
mat(i,j) = rng();
|
||||
|
||||
tri = ublas::triangular_adaptor<matrix, ublas::upper>(mat);
|
||||
cout << " Assigned from triangular adapter: " << tri << endl;
|
||||
|
||||
cout << " Triangular adapter of mat: " << ublas::triangular_adaptor<matrix, ublas::upper>(mat) << endl;
|
||||
|
||||
for(size_t j=0; j<mat.size2(); ++j)
|
||||
for(size_t i=0; i<mat.size1(); ++i)
|
||||
mat(i,j) = rng();
|
||||
mat = tri;
|
||||
cout << " Assign matrix from triangular: " << mat << endl;
|
||||
|
||||
for(size_t j=0; j<mat.size2(); ++j)
|
||||
for(size_t i=0; i<mat.size1(); ++i)
|
||||
mat(i,j) = rng();
|
||||
(ublas::triangular_adaptor<matrix, ublas::upper>(mat)) = tri;
|
||||
cout << " Assign triangular adaptor from triangular: " << mat << endl;
|
||||
}
|
||||
|
||||
{
|
||||
cout << "\nTesting wide triangular matrices:" << endl;
|
||||
|
||||
typedef triangular_matrix<double, ublas::upper, ublas::column_major> triangular;
|
||||
typedef ublas::matrix<double, ublas::column_major> matrix;
|
||||
|
||||
triangular tri(5,7);
|
||||
|
||||
matrix mat(5,7);
|
||||
for(size_t j=0; j<mat.size2(); ++j)
|
||||
for(size_t i=0; i<mat.size1(); ++i)
|
||||
mat(i,j) = rng();
|
||||
|
||||
tri = ublas::triangular_adaptor<matrix, ublas::upper>(mat);
|
||||
cout << " Assigned from triangular adapter: " << tri << endl;
|
||||
|
||||
cout << " Triangular adapter of mat: " << ublas::triangular_adaptor<matrix, ublas::upper>(mat) << endl;
|
||||
|
||||
for(size_t j=0; j<mat.size2(); ++j)
|
||||
for(size_t i=0; i<mat.size1(); ++i)
|
||||
mat(i,j) = rng();
|
||||
mat = tri;
|
||||
cout << " Assign matrix from triangular: " << mat << endl;
|
||||
|
||||
for(size_t j=0; j<mat.size2(); ++j)
|
||||
for(size_t i=0; i<mat.size1(); ++i)
|
||||
mat(i,j) = rng();
|
||||
mat = ublas::triangular_adaptor<matrix, ublas::upper>(mat);
|
||||
cout << " Assign matrix from triangular adaptor of self: " << mat << endl;
|
||||
}
|
||||
|
||||
{
|
||||
cout << "\nTesting subvectors:" << endl;
|
||||
|
||||
typedef ublas::matrix<double, ublas::column_major> matrix;
|
||||
matrix mat(4,4);
|
||||
|
||||
for(size_t j=0; j<mat.size2(); ++j)
|
||||
for(size_t i=0; i<mat.size1(); ++i)
|
||||
mat(i,j) = i*mat.size1() + j;
|
||||
cout << " mat = " << mat;
|
||||
|
||||
cout << " vec(1:4, 2:2) = " << ublas::matrix_vector_range<matrix>(mat, ublas::range(1,4), ublas::range(2,2));
|
||||
|
||||
}
|
||||
|
||||
return 0;
|
||||
|
||||
}
|
||||
|
|
@ -0,0 +1,11 @@
|
|||
/**
|
||||
* @file timing.cpp
|
||||
* @brief
|
||||
* @author Richard Roberts (extracted from Michael Kaess' timing functions)
|
||||
* @created Oct 5, 2010
|
||||
*/
|
||||
|
||||
#include <gtsam/base/timing.h>
|
||||
|
||||
// Single global Timing instance used by the tic()/toc() free functions
// declared in timing.h.
Timing timing;
|
||||
|
||||
|
|
@ -0,0 +1,107 @@
|
|||
/**
|
||||
* @file timing.h
|
||||
* @brief
|
||||
* @author Richard Roberts (extracted from Michael Kaess' timing functions)
|
||||
* @created Oct 5, 2010
|
||||
*/
|
||||
#pragma once
|
||||
|
||||
#include <string>
|
||||
|
||||
// simple class for accumulating execution timing information by name
|
||||
class Timing;
|
||||
extern Timing timing;
|
||||
|
||||
double tic();
|
||||
double toc(double t);
|
||||
double tic(const std::string& id);
|
||||
double toc(const std::string& id);
|
||||
void tictoc_print();
|
||||
|
||||
/** These underscore versions work even when ENABLE_TIMING is not defined */
|
||||
double tic_();
|
||||
double toc_(double t);
|
||||
double tic_(const std::string& id);
|
||||
double toc_(const std::string& id);
|
||||
void tictoc_print_();
|
||||
|
||||
|
||||
|
||||
#include <sys/time.h>
|
||||
#include <map>
|
||||
#include <string>
|
||||
// simple class for accumulating execution timing information by name
|
||||
/**
 * Accumulates execution timing statistics keyed by name: total time, call
 * count, and the min/max single-interval durations per name.
 */
class Timing {
  /// Per-id accumulator; explicitly zero-initialized so a Stats constructed
  /// anywhere (not only via the map's value-initialization) starts clean.
  class Stats {
  public:
    double t0;     // last tic() timestamp recorded for this id
    double t;      // accumulated total time
    double t_max;  // largest single interval seen
    double t_min;  // smallest single interval seen
    int n;         // number of accumulated intervals
    Stats() : t0(0.0), t(0.0), t_max(0.0), t_min(0.0), n(0) {}
  };
  std::map<std::string, Stats> stats;
public:
  /// Record the start timestamp for id (overwrites any previous one).
  void add_t0(const std::string& id, double t0) {
    stats[id].t0 = t0;
  }
  /// Retrieve the last start timestamp recorded for id (0 if none).
  double get_t0(const std::string& id) {
    return stats[id].t0;
  }
  /// Accumulate one measured interval dt for id, updating count and extremes.
  void add_dt(const std::string& id, double dt) {
    Stats& s = stats[id];
    s.t += dt;
    s.n++;
    // On the first sample both extremes are simply dt.
    if (s.n==1 || s.t_max < dt) s.t_max = dt;
    if (s.n==1 || s.t_min > dt) s.t_min = dt;
  }
  /// Print one line per id: total time, count, min and max interval.
  void print() const {
    std::map<std::string, Stats>::const_iterator it;
    for(it = stats.begin(); it!=stats.end(); it++) {
      const Stats& s = it->second;
      printf("%s: %g (%i times, min: %g, max: %g)\n",
          it->first.c_str(), s.t, s.n, s.t_min, s.t_max);
    }
  }
  /// Total accumulated time for id; returns 0 for an unknown id without
  /// inserting a map entry (the original operator[] lookup default-inserted).
  double time(const std::string& id) const {
    std::map<std::string, Stats>::const_iterator it = stats.find(id);
    return it == stats.end() ? 0.0 : it->second.t;
  }
};
|
||||
|
||||
/** Current wall-clock time in seconds, with microsecond resolution. */
inline double tic_() {
  struct timeval t;
  gettimeofday(&t, NULL);
  return ((double)t.tv_sec + ((double)t.tv_usec)/1000000.);
}
/** Elapsed wall-clock seconds since a timestamp t obtained from tic_().
 * BUGFIX: must call tic_() rather than tic(); when ENABLE_TIMING is not
 * defined, tic() is a no-op returning 0, which made toc_ return 0 and
 * defeated the documented purpose of the underscore versions. */
inline double toc_(double t) {
  double s = tic_();
  return (std::max(0., s-t));
}
|
||||
inline double tic_(const std::string& id) {
|
||||
double t0 = tic();
|
||||
timing.add_t0(id, t0);
|
||||
return t0;
|
||||
}
|
||||
inline double toc_(const std::string& id) {
|
||||
double dt = toc(timing.get_t0(id));
|
||||
timing.add_dt(id, dt);
|
||||
return dt;
|
||||
}
|
||||
inline void tictoc_print_() {
|
||||
timing.print();
|
||||
}
|
||||
|
||||
#ifdef ENABLE_TIMING
// Timing enabled: the public tic()/toc() API forwards to the always-active
// underscore implementations above.
inline double tic() { return tic_(); }
inline double toc(double t) { return toc_(t); }
inline double tic(const std::string& id) { return tic_(id); }
inline double toc(const std::string& id) { return toc_(id); }
inline void tictoc_print() { tictoc_print_(); }
#else
// Timing disabled: the public timing calls compile to no-ops returning 0, so
// instrumentation can stay in place with essentially zero overhead.
inline double tic() {return 0.;}
inline double toc(double t) {return 0.;}
inline double tic(const std::string& id) {return 0.;}
inline double toc(const std::string& id) {return 0.;}
inline void tictoc_print() {}
#endif
|
||||
|
|
@ -0,0 +1,47 @@
|
|||
/**
|
||||
* @file types.h
|
||||
* @brief Typedefs for easier changing of types
|
||||
* @author Richard Roberts
|
||||
* @created Aug 21, 2010
|
||||
*/
|
||||
|
||||
#pragma once
|
||||
|
||||
#include <unistd.h>
|
||||
|
||||
namespace gtsam {

/// Index type used to identify variables throughout the library.
typedef size_t varid_t;

/** Helper class that uses templates to select between two types based on
 * whether TEST_TYPE is const or not.
 */
template<typename TEST_TYPE, typename BASIC_TYPE, typename AS_NON_CONST, typename AS_CONST>
struct const_selector {};

/** Specialization for the non-const version */
template<typename BASIC_TYPE, typename AS_NON_CONST, typename AS_CONST>
struct const_selector<BASIC_TYPE, BASIC_TYPE, AS_NON_CONST, AS_CONST> {
  typedef AS_NON_CONST type;
};

/** Specialization for the const version */
template<typename BASIC_TYPE, typename AS_NON_CONST, typename AS_CONST>
struct const_selector<const BASIC_TYPE, BASIC_TYPE, AS_NON_CONST, AS_CONST> {
  typedef AS_CONST type;
};

/**
 * Helper struct that encapsulates a value with a default, this is just used
 * as a member object so you don't have to specify defaults in the class
 * constructor.
 */
template<typename T, T defaultValue>
struct ValueWithDefault {
  T value;  // the wrapped value; starts at defaultValue

  /// Construct with the compile-time default value.
  ValueWithDefault() : value(defaultValue) {}

  /// Construct with a caller-supplied value (implicit by design).
  ValueWithDefault(const T& _value) : value(_value) {}

  /// Mutable access to the wrapped value.
  T& operator*() { return value; }

  /// Const access to the wrapped value (added so the wrapper is usable from
  /// const members and const references; backward-compatible overload).
  const T& operator*() const { return value; }
};

}
|
||||
|
||||
|
|
@ -603,12 +603,12 @@
|
|||
/* ========================================================================== */
|
||||
|
||||
/* Ensure that debugging is turned off: */
|
||||
#ifndef NDEBUG
|
||||
#define NDEBUG
|
||||
#ifndef SPQR_NDEBUG
|
||||
#define SPQR_NDEBUG
|
||||
#endif
|
||||
|
||||
/* turn on debugging by uncommenting the following line
|
||||
#undef NDEBUG
|
||||
#undef SPQR_NDEBUG
|
||||
*/
|
||||
|
||||
/* ========================================================================== */
|
||||
|
|
@ -626,7 +626,7 @@
|
|||
#include "matrix.h"
|
||||
#endif
|
||||
|
||||
#if !defined (NPRINT) || !defined (NDEBUG)
|
||||
#if !defined (NPRINT) || !defined (SPQR_NDEBUG)
|
||||
#include <stdio.h>
|
||||
#endif
|
||||
|
||||
|
|
@ -818,7 +818,7 @@ typedef struct CColamd_Row_struct
|
|||
/* === Debugging prototypes and definitions ================================= */
|
||||
/* ========================================================================== */
|
||||
|
||||
#ifndef NDEBUG
|
||||
#ifndef SPQR_NDEBUG
|
||||
|
||||
#include <assert.h>
|
||||
|
||||
|
|
@ -952,14 +952,14 @@ PRIVATE Int find_ordering
|
|||
CColamd_Col Col [ ],
|
||||
Int A [ ],
|
||||
Int head [ ],
|
||||
#ifndef NDEBUG
|
||||
#ifndef SPQR_NDEBUG
|
||||
Int n_col2,
|
||||
#endif
|
||||
Int max_deg,
|
||||
Int pfree,
|
||||
Int cset [ ],
|
||||
Int cset_start [ ],
|
||||
#ifndef NDEBUG
|
||||
#ifndef SPQR_NDEBUG
|
||||
Int n_cset,
|
||||
#endif
|
||||
Int cmember [ ],
|
||||
|
|
@ -976,7 +976,7 @@ PRIVATE Int find_ordering
|
|||
|
||||
PRIVATE void detect_super_cols
|
||||
(
|
||||
#ifndef NDEBUG
|
||||
#ifndef SPQR_NDEBUG
|
||||
Int n_col,
|
||||
CColamd_Row Row [ ],
|
||||
#endif
|
||||
|
|
@ -1212,7 +1212,7 @@ PUBLIC Int CSYMAMD_MAIN /* return TRUE if OK, FALSE otherwise */
|
|||
Int upper ; /* TRUE if ordering triu(A)+triu(A)' */
|
||||
Int lower ; /* TRUE if ordering tril(A)+tril(A)' */
|
||||
|
||||
#ifndef NDEBUG
|
||||
#ifndef SPQR_NDEBUG
|
||||
ccolamd_get_debug ("csymamd") ;
|
||||
#endif
|
||||
|
||||
|
|
@ -1595,7 +1595,7 @@ PUBLIC Int CCOLAMD_2 /* returns TRUE if successful, FALSE otherwise */
|
|||
|
||||
int ok ;
|
||||
|
||||
#ifndef NDEBUG
|
||||
#ifndef SPQR_NDEBUG
|
||||
ccolamd_get_debug ("ccolamd") ;
|
||||
#endif
|
||||
|
||||
|
|
@ -1849,11 +1849,11 @@ PUBLIC Int CCOLAMD_2 /* returns TRUE if successful, FALSE otherwise */
|
|||
/* === Order the supercolumns =========================================== */
|
||||
|
||||
ngarbage = find_ordering (n_row, n_col, Alen, Row, Col, A, p,
|
||||
#ifndef NDEBUG
|
||||
#ifndef SPQR_NDEBUG
|
||||
n_col2,
|
||||
#endif
|
||||
max_deg, 2*nnz, cset, cset_start,
|
||||
#ifndef NDEBUG
|
||||
#ifndef SPQR_NDEBUG
|
||||
n_cset,
|
||||
#endif
|
||||
cmember, Front_npivcol, Front_nrows, Front_ncols, Front_parent,
|
||||
|
|
@ -1958,7 +1958,7 @@ PUBLIC Int CCOLAMD_2 /* returns TRUE if successful, FALSE otherwise */
|
|||
{
|
||||
k = Col [col].shared2.order ;
|
||||
cs = CMEMBER (col) ;
|
||||
#ifndef NDEBUG
|
||||
#ifndef SPQR_NDEBUG
|
||||
dead_cols [cs]-- ;
|
||||
#endif
|
||||
ASSERT (k >= cset_start [cs] && k < cset_start [cs+1]) ;
|
||||
|
|
@ -1970,7 +1970,7 @@ PUBLIC Int CCOLAMD_2 /* returns TRUE if successful, FALSE otherwise */
|
|||
}
|
||||
}
|
||||
|
||||
#ifndef NDEBUG
|
||||
#ifndef SPQR_NDEBUG
|
||||
for (i = 0 ; i < n_cset ; i++)
|
||||
{
|
||||
ASSERT (dead_cols [i] == 0) ;
|
||||
|
|
@ -2220,7 +2220,7 @@ PRIVATE Int init_rows_cols /* returns TRUE if OK, or FALSE otherwise */
|
|||
{
|
||||
DEBUG1 (("ccolamd: reconstructing column form, matrix jumbled\n")) ;
|
||||
|
||||
#ifndef NDEBUG
|
||||
#ifndef SPQR_NDEBUG
|
||||
/* make sure column lengths are correct */
|
||||
for (col = 0 ; col < n_col ; col++)
|
||||
{
|
||||
|
|
@ -2337,7 +2337,7 @@ PRIVATE void init_scoring
|
|||
Int nnewlyempty_col ; /* number of newly empty cols */
|
||||
Int ne ;
|
||||
|
||||
#ifndef NDEBUG
|
||||
#ifndef SPQR_NDEBUG
|
||||
Int debug_count ; /* debug only. */
|
||||
#endif
|
||||
|
||||
|
|
@ -2535,7 +2535,7 @@ PRIVATE void init_scoring
|
|||
/* yet). Rows may contain dead columns, but all live rows contain at */
|
||||
/* least one live column. */
|
||||
|
||||
#ifndef NDEBUG
|
||||
#ifndef SPQR_NDEBUG
|
||||
debug_count = 0 ;
|
||||
#endif
|
||||
|
||||
|
|
@ -2545,7 +2545,7 @@ PRIVATE void init_scoring
|
|||
head [c] = EMPTY ;
|
||||
}
|
||||
|
||||
#ifndef NDEBUG
|
||||
#ifndef SPQR_NDEBUG
|
||||
debug_structures (n_row, n_col, Row, Col, A, cmember, cset_start) ;
|
||||
#endif
|
||||
|
||||
|
|
@ -2584,14 +2584,14 @@ PRIVATE Int find_ordering /* return the number of garbage collections */
|
|||
CColamd_Col Col [ ], /* of size n_col+1 */
|
||||
Int A [ ], /* column form and row form of A */
|
||||
Int head [ ], /* of size n_col+1 */
|
||||
#ifndef NDEBUG
|
||||
#ifndef SPQR_NDEBUG
|
||||
Int n_col2, /* Remaining columns to order */
|
||||
#endif
|
||||
Int max_deg, /* Maximum row degree */
|
||||
Int pfree, /* index of first free slot (2*nnz on entry) */
|
||||
Int cset [ ], /* constraint set of A */
|
||||
Int cset_start [ ], /* pointer to the start of every cset */
|
||||
#ifndef NDEBUG
|
||||
#ifndef SPQR_NDEBUG
|
||||
Int n_cset, /* number of csets */
|
||||
#endif
|
||||
Int cmember [ ], /* col -> cset mapping */
|
||||
|
|
@ -2645,7 +2645,7 @@ PRIVATE Int find_ordering /* return the number of garbage collections */
|
|||
Int colend ; /* pointer to last column in current cset */
|
||||
Int deadcol ; /* number of dense & null columns in a cset */
|
||||
|
||||
#ifndef NDEBUG
|
||||
#ifndef SPQR_NDEBUG
|
||||
Int debug_d ; /* debug loop counter */
|
||||
Int debug_step = 0 ; /* debug loop counter */
|
||||
Int cols_thickness = 0 ; /* the thickness of the columns in current */
|
||||
|
|
@ -2681,7 +2681,7 @@ PRIVATE Int find_ordering /* return the number of garbage collections */
|
|||
ASSERT (min_score <= n_col) ;
|
||||
ASSERT (head [min_score] >= EMPTY) ;
|
||||
|
||||
#ifndef NDEBUG
|
||||
#ifndef SPQR_NDEBUG
|
||||
for (debug_d = 0 ; debug_d < min_score ; debug_d++)
|
||||
{
|
||||
ASSERT (head [debug_d] == EMPTY) ;
|
||||
|
|
@ -2706,7 +2706,7 @@ PRIVATE Int find_ordering /* return the number of garbage collections */
|
|||
return (ngarbage) ;
|
||||
}
|
||||
|
||||
#ifndef NDEBUG
|
||||
#ifndef SPQR_NDEBUG
|
||||
for (col = 0 ; col <= n_col ; col++)
|
||||
{
|
||||
ASSERT (head [col] == EMPTY) ;
|
||||
|
|
@ -2761,7 +2761,7 @@ PRIVATE Int find_ordering /* return the number of garbage collections */
|
|||
min_score = MIN (min_score, score) ;
|
||||
}
|
||||
|
||||
#ifndef NDEBUG
|
||||
#ifndef SPQR_NDEBUG
|
||||
DEBUG1 (("degree lists initialized \n")) ;
|
||||
debug_deg_lists (n_row, n_col, Row, Col, head, min_score,
|
||||
((cset_start [current_set+1]-cset_start [current_set])-deadcol),
|
||||
|
|
@ -2769,7 +2769,7 @@ PRIVATE Int find_ordering /* return the number of garbage collections */
|
|||
#endif
|
||||
}
|
||||
|
||||
#ifndef NDEBUG
|
||||
#ifndef SPQR_NDEBUG
|
||||
if (debug_step % 100 == 0)
|
||||
{
|
||||
DEBUG2 (("\n... Step k: "ID" out of n_col2: "ID"\n", k, n_col2)) ;
|
||||
|
|
@ -2829,7 +2829,7 @@ PRIVATE Int find_ordering /* return the number of garbage collections */
|
|||
/* garbage collection has wiped out Row [ ].shared2.mark array */
|
||||
tag_mark = clear_mark (0, max_mark, n_row, Row) ;
|
||||
|
||||
#ifndef NDEBUG
|
||||
#ifndef SPQR_NDEBUG
|
||||
debug_matrix (n_row, n_col, Row, Col, A) ;
|
||||
#endif
|
||||
}
|
||||
|
|
@ -2880,7 +2880,7 @@ PRIVATE Int find_ordering /* return the number of garbage collections */
|
|||
pivot_row_degree += col_thickness ;
|
||||
DEBUG4 (("\t\t\tNew live col in pivrow: "ID"\n",col)) ;
|
||||
}
|
||||
#ifndef NDEBUG
|
||||
#ifndef SPQR_NDEBUG
|
||||
if (col_thickness < 0 && COL_IS_ALIVE (col))
|
||||
{
|
||||
DEBUG4 (("\t\t\tOld live col in pivrow: "ID"\n",col)) ;
|
||||
|
|
@ -2897,7 +2897,7 @@ PRIVATE Int find_ordering /* return the number of garbage collections */
|
|||
Col [pivot_col].shared1.thickness = pivot_col_thickness ;
|
||||
max_deg = MAX (max_deg, pivot_row_degree) ;
|
||||
|
||||
#ifndef NDEBUG
|
||||
#ifndef SPQR_NDEBUG
|
||||
DEBUG3 (("check2\n")) ;
|
||||
debug_mark (n_row, Row, tag_mark, max_mark) ;
|
||||
#endif
|
||||
|
|
@ -2998,7 +2998,7 @@ PRIVATE Int find_ordering /* return the number of garbage collections */
|
|||
/* only columns in current_set will be in degree list */
|
||||
if (CMEMBER (col) == current_set)
|
||||
{
|
||||
#ifndef NDEBUG
|
||||
#ifndef SPQR_NDEBUG
|
||||
cols_thickness += col_thickness ;
|
||||
#endif
|
||||
cur_score = Col [col].shared2.score ;
|
||||
|
|
@ -3084,7 +3084,7 @@ PRIVATE Int find_ordering /* return the number of garbage collections */
|
|||
}
|
||||
}
|
||||
|
||||
#ifndef NDEBUG
|
||||
#ifndef SPQR_NDEBUG
|
||||
debug_deg_lists (n_row, n_col, Row, Col, head, min_score,
|
||||
cset_start [current_set+1]-(k+deadcol)-(cols_thickness),
|
||||
max_deg) ;
|
||||
|
|
@ -3154,7 +3154,7 @@ PRIVATE Int find_ordering /* return the number of garbage collections */
|
|||
k += Col [col].shared1.thickness ;
|
||||
pivot_col_thickness += Col [col].shared1.thickness ;
|
||||
/* add to column list of front */
|
||||
#ifndef NDEBUG
|
||||
#ifndef SPQR_NDEBUG
|
||||
DEBUG1 (("Mass")) ;
|
||||
dump_super (col, Col, n_col) ;
|
||||
#endif
|
||||
|
|
@ -3205,7 +3205,7 @@ PRIVATE Int find_ordering /* return the number of garbage collections */
|
|||
DEBUG3 (("** Supercolumn detection phase. **\n")) ;
|
||||
|
||||
detect_super_cols (
|
||||
#ifndef NDEBUG
|
||||
#ifndef SPQR_NDEBUG
|
||||
n_col, Row,
|
||||
#endif
|
||||
Col, A, head, pivot_row_start, pivot_row_length, cmember) ;
|
||||
|
|
@ -3216,7 +3216,7 @@ PRIVATE Int find_ordering /* return the number of garbage collections */
|
|||
KILL_PRINCIPAL_COL (pivot_col) ;
|
||||
|
||||
/* add columns to column list of front */
|
||||
#ifndef NDEBUG
|
||||
#ifndef SPQR_NDEBUG
|
||||
DEBUG1 (("Pivot")) ;
|
||||
dump_super (pivot_col, Col, n_col) ;
|
||||
#endif
|
||||
|
|
@ -3227,7 +3227,7 @@ PRIVATE Int find_ordering /* return the number of garbage collections */
|
|||
|
||||
tag_mark = clear_mark (tag_mark+max_deg+1, max_mark, n_row, Row) ;
|
||||
|
||||
#ifndef NDEBUG
|
||||
#ifndef SPQR_NDEBUG
|
||||
DEBUG3 (("check3\n")) ;
|
||||
debug_mark (n_row, Row, tag_mark, max_mark) ;
|
||||
#endif
|
||||
|
|
@ -3300,7 +3300,7 @@ PRIVATE Int find_ordering /* return the number of garbage collections */
|
|||
}
|
||||
}
|
||||
|
||||
#ifndef NDEBUG
|
||||
#ifndef SPQR_NDEBUG
|
||||
debug_deg_lists (n_row, n_col, Row, Col, head,
|
||||
min_score, cset_start [current_set+1]-(k+deadcol), max_deg) ;
|
||||
#endif
|
||||
|
|
@ -3342,7 +3342,7 @@ PRIVATE Int find_ordering /* return the number of garbage collections */
|
|||
pivot_row, pivot_row_degree)) ;
|
||||
}
|
||||
|
||||
#ifndef NDEBUG
|
||||
#ifndef SPQR_NDEBUG
|
||||
DEBUG1 (("Front "ID" : "ID" "ID" "ID" ", nfr,
|
||||
Front_npivcol [nfr], Front_nrows [nfr], Front_ncols [nfr])) ;
|
||||
DEBUG1 ((" cols:[ ")) ;
|
||||
|
|
@ -3405,7 +3405,7 @@ PRIVATE void detect_super_cols
|
|||
(
|
||||
/* === Parameters ======================================================= */
|
||||
|
||||
#ifndef NDEBUG
|
||||
#ifndef SPQR_NDEBUG
|
||||
/* these two parameters are only needed when debugging is enabled: */
|
||||
Int n_col, /* number of columns of A */
|
||||
CColamd_Row Row [ ], /* of size n_row+1 */
|
||||
|
|
@ -3536,7 +3536,7 @@ PRIVATE void detect_super_cols
|
|||
ASSERT (Col [super_c].lastcol < n_col) ;
|
||||
Col [Col [super_c].lastcol].nextcol = c ;
|
||||
Col [super_c].lastcol = Col [c].lastcol ;
|
||||
#ifndef NDEBUG
|
||||
#ifndef SPQR_NDEBUG
|
||||
/* dump the supercolumn */
|
||||
DEBUG1 (("Super")) ;
|
||||
dump_super (super_c, Col, n_col) ;
|
||||
|
|
@ -3594,7 +3594,7 @@ PRIVATE Int garbage_collection /* returns the new value of pfree */
|
|||
Int c ; /* a column index */
|
||||
Int length ; /* length of a row or column */
|
||||
|
||||
#ifndef NDEBUG
|
||||
#ifndef SPQR_NDEBUG
|
||||
Int debug_rows ;
|
||||
DEBUG2 (("Defrag..\n")) ;
|
||||
for (psrc = &A[0] ; psrc < pfree ; psrc++) ASSERT (*psrc >= 0) ;
|
||||
|
|
@ -3646,7 +3646,7 @@ PRIVATE Int garbage_collection /* returns the new value of pfree */
|
|||
ASSERT (ROW_IS_ALIVE (r)) ;
|
||||
/* flag the start of the row with the one's complement of row */
|
||||
*psrc = ONES_COMPLEMENT (r) ;
|
||||
#ifndef NDEBUG
|
||||
#ifndef SPQR_NDEBUG
|
||||
debug_rows++ ;
|
||||
#endif
|
||||
}
|
||||
|
|
@ -3681,7 +3681,7 @@ PRIVATE Int garbage_collection /* returns the new value of pfree */
|
|||
}
|
||||
}
|
||||
Row [r].length = (Int) (pdest - &A [Row [r].start]) ;
|
||||
#ifndef NDEBUG
|
||||
#ifndef SPQR_NDEBUG
|
||||
debug_rows-- ;
|
||||
#endif
|
||||
}
|
||||
|
|
@ -4036,7 +4036,7 @@ GLOBAL void CCOLAMD_postorder
|
|||
}
|
||||
}
|
||||
|
||||
#ifndef NDEBUG
|
||||
#ifndef SPQR_NDEBUG
|
||||
{
|
||||
Int nels, ff, nchild ;
|
||||
DEBUG1 (("\n\n================================ ccolamd_postorder:\n"));
|
||||
|
|
@ -4075,7 +4075,7 @@ GLOBAL void CCOLAMD_postorder
|
|||
if (Nv [i] > 0 && Child [i] != EMPTY)
|
||||
{
|
||||
|
||||
#ifndef NDEBUG
|
||||
#ifndef SPQR_NDEBUG
|
||||
Int nchild ;
|
||||
DEBUG1 (("Before partial sort, element "ID"\n", i)) ;
|
||||
nchild = 0 ;
|
||||
|
|
@ -4129,7 +4129,7 @@ GLOBAL void CCOLAMD_postorder
|
|||
Sibling [fprev] = bigf ;
|
||||
}
|
||||
|
||||
#ifndef NDEBUG
|
||||
#ifndef SPQR_NDEBUG
|
||||
DEBUG1 (("After partial sort, element "ID"\n", i)) ;
|
||||
for (f = Child [i] ; f != EMPTY ; f = Sibling [f])
|
||||
{
|
||||
|
|
@ -4251,7 +4251,7 @@ GLOBAL Int CCOLAMD_post_tree
|
|||
Order [i] = k++ ;
|
||||
}
|
||||
|
||||
#ifndef NDEBUG
|
||||
#ifndef SPQR_NDEBUG
|
||||
DEBUG1 (("\nStack:")) ;
|
||||
for (h = head ; h >= 0 ; h--)
|
||||
{
|
||||
|
|
@ -4273,7 +4273,7 @@ GLOBAL Int CCOLAMD_post_tree
|
|||
|
||||
/* When debugging is disabled, the remainder of this file is ignored. */
|
||||
|
||||
#ifndef NDEBUG
|
||||
#ifndef SPQR_NDEBUG
|
||||
|
||||
|
||||
/* ========================================================================== */
|
||||
|
|
|
|||
106
colamd/colamd.c
106
colamd/colamd.c
|
|
@ -622,12 +622,12 @@
|
|||
/* ========================================================================== */
|
||||
|
||||
/* Ensure that debugging is turned off: */
|
||||
#ifndef NDEBUG
|
||||
#define NDEBUG
|
||||
#ifndef SPQR_NDEBUG
|
||||
#define SPQR_NDEBUG
|
||||
#endif
|
||||
|
||||
/* turn on debugging by uncommenting the following line
|
||||
#undef NDEBUG
|
||||
#undef SPQR_NDEBUG
|
||||
*/
|
||||
|
||||
/*
|
||||
|
|
@ -672,7 +672,7 @@
|
|||
#include "matrix.h"
|
||||
#endif /* MATLAB_MEX_FILE */
|
||||
|
||||
#if !defined (NPRINT) || !defined (NDEBUG)
|
||||
#if !defined (NPRINT) || !defined (SPQR_NDEBUG)
|
||||
#include <stdio.h>
|
||||
#endif
|
||||
|
||||
|
|
@ -892,10 +892,10 @@ PRIVATE void order_children
|
|||
PRIVATE void detect_super_cols
|
||||
(
|
||||
|
||||
#ifndef NDEBUG
|
||||
#ifndef SPQR_NDEBUG
|
||||
Int n_col,
|
||||
Colamd_Row Row [],
|
||||
#endif /* NDEBUG */
|
||||
#endif /* SPQR_NDEBUG */
|
||||
|
||||
Colamd_Col Col [],
|
||||
Int A [],
|
||||
|
|
@ -932,7 +932,7 @@ PRIVATE void print_report
|
|||
/* === Debugging prototypes and definitions ================================= */
|
||||
/* ========================================================================== */
|
||||
|
||||
#ifndef NDEBUG
|
||||
#ifndef SPQR_NDEBUG
|
||||
|
||||
#include <assert.h>
|
||||
|
||||
|
|
@ -997,7 +997,7 @@ PRIVATE void debug_structures
|
|||
Int n_col2
|
||||
) ;
|
||||
|
||||
#else /* NDEBUG */
|
||||
#else /* SPQR_NDEBUG */
|
||||
|
||||
/* === No debugging ========================================================= */
|
||||
|
||||
|
|
@ -1009,7 +1009,7 @@ PRIVATE void debug_structures
|
|||
|
||||
#define ASSERT(expression)
|
||||
|
||||
#endif /* NDEBUG */
|
||||
#endif /* SPQR_NDEBUG */
|
||||
|
||||
/* ========================================================================== */
|
||||
/* === USER-CALLABLE ROUTINES: ============================================== */
|
||||
|
|
@ -1182,9 +1182,9 @@ PUBLIC Int SYMAMD_MAIN /* return TRUE if OK, FALSE otherwise */
|
|||
double cknobs [COLAMD_KNOBS] ; /* knobs for colamd */
|
||||
double default_knobs [COLAMD_KNOBS] ; /* default knobs for colamd */
|
||||
|
||||
#ifndef NDEBUG
|
||||
#ifndef SPQR_NDEBUG
|
||||
colamd_get_debug ("symamd") ;
|
||||
#endif /* NDEBUG */
|
||||
#endif /* SPQR_NDEBUG */
|
||||
|
||||
/* === Check the input arguments ======================================== */
|
||||
|
||||
|
|
@ -1495,9 +1495,9 @@ PUBLIC Int COLAMD_MAIN /* returns TRUE if successful, FALSE otherwise*/
|
|||
Int aggressive ; /* do aggressive absorption */
|
||||
int ok ;
|
||||
|
||||
#ifndef NDEBUG
|
||||
#ifndef SPQR_NDEBUG
|
||||
colamd_get_debug ("colamd") ;
|
||||
#endif /* NDEBUG */
|
||||
#endif /* SPQR_NDEBUG */
|
||||
|
||||
/* === Check the input arguments ======================================== */
|
||||
|
||||
|
|
@ -1848,7 +1848,7 @@ PRIVATE Int init_rows_cols /* returns TRUE if OK, or FALSE otherwise */
|
|||
{
|
||||
DEBUG0 (("colamd: reconstructing column form, matrix jumbled\n")) ;
|
||||
|
||||
#ifndef NDEBUG
|
||||
#ifndef SPQR_NDEBUG
|
||||
/* make sure column lengths are correct */
|
||||
for (col = 0 ; col < n_col ; col++)
|
||||
{
|
||||
|
|
@ -1868,7 +1868,7 @@ PRIVATE Int init_rows_cols /* returns TRUE if OK, or FALSE otherwise */
|
|||
ASSERT (p [col] == 0) ;
|
||||
}
|
||||
/* now p is all zero (different than when debugging is turned off) */
|
||||
#endif /* NDEBUG */
|
||||
#endif /* SPQR_NDEBUG */
|
||||
|
||||
/* === Compute col pointers ========================================= */
|
||||
|
||||
|
|
@ -1948,9 +1948,9 @@ PRIVATE void init_scoring
|
|||
Int max_deg ; /* maximum row degree */
|
||||
Int next_col ; /* Used to add to degree list.*/
|
||||
|
||||
#ifndef NDEBUG
|
||||
#ifndef SPQR_NDEBUG
|
||||
Int debug_count ; /* debug only. */
|
||||
#endif /* NDEBUG */
|
||||
#endif /* SPQR_NDEBUG */
|
||||
|
||||
/* === Extract knobs ==================================================== */
|
||||
|
||||
|
|
@ -2105,15 +2105,15 @@ PRIVATE void init_scoring
|
|||
/* yet). Rows may contain dead columns, but all live rows contain at */
|
||||
/* least one live column. */
|
||||
|
||||
#ifndef NDEBUG
|
||||
#ifndef SPQR_NDEBUG
|
||||
debug_structures (n_row, n_col, Row, Col, A, n_col2) ;
|
||||
#endif /* NDEBUG */
|
||||
#endif /* SPQR_NDEBUG */
|
||||
|
||||
/* === Initialize degree lists ========================================== */
|
||||
|
||||
#ifndef NDEBUG
|
||||
#ifndef SPQR_NDEBUG
|
||||
debug_count = 0 ;
|
||||
#endif /* NDEBUG */
|
||||
#endif /* SPQR_NDEBUG */
|
||||
|
||||
/* clear the hash buckets */
|
||||
for (c = 0 ; c <= n_col ; c++)
|
||||
|
|
@ -2157,19 +2157,19 @@ PRIVATE void init_scoring
|
|||
/* see if this score is less than current min */
|
||||
min_score = MIN (min_score, score) ;
|
||||
|
||||
#ifndef NDEBUG
|
||||
#ifndef SPQR_NDEBUG
|
||||
debug_count++ ;
|
||||
#endif /* NDEBUG */
|
||||
#endif /* SPQR_NDEBUG */
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
#ifndef NDEBUG
|
||||
#ifndef SPQR_NDEBUG
|
||||
DEBUG1 (("colamd: Live cols %d out of %d, non-princ: %d\n",
|
||||
debug_count, n_col, n_col-debug_count)) ;
|
||||
ASSERT (debug_count == n_col2) ;
|
||||
debug_deg_lists (n_row, n_col, Row, Col, head, min_score, n_col2, max_deg) ;
|
||||
#endif /* NDEBUG */
|
||||
#endif /* SPQR_NDEBUG */
|
||||
|
||||
/* === Return number of remaining columns, and max row degree =========== */
|
||||
|
||||
|
|
@ -2240,10 +2240,10 @@ PRIVATE Int find_ordering /* return the number of garbage collections */
|
|||
Int next_col ; /* Used by Dlist operations. */
|
||||
Int ngarbage ; /* number of garbage collections performed */
|
||||
|
||||
#ifndef NDEBUG
|
||||
#ifndef SPQR_NDEBUG
|
||||
Int debug_d ; /* debug loop counter */
|
||||
Int debug_step = 0 ; /* debug loop counter */
|
||||
#endif /* NDEBUG */
|
||||
#endif /* SPQR_NDEBUG */
|
||||
|
||||
/* === Initialization and clear mark ==================================== */
|
||||
|
||||
|
|
@ -2258,7 +2258,7 @@ PRIVATE Int find_ordering /* return the number of garbage collections */
|
|||
for (k = 0 ; k < n_col2 ; /* 'k' is incremented below */)
|
||||
{
|
||||
|
||||
#ifndef NDEBUG
|
||||
#ifndef SPQR_NDEBUG
|
||||
if (debug_step % 100 == 0)
|
||||
{
|
||||
DEBUG2 (("\n... Step k: %d out of n_col2: %d\n", k, n_col2)) ;
|
||||
|
|
@ -2271,7 +2271,7 @@ PRIVATE Int find_ordering /* return the number of garbage collections */
|
|||
debug_deg_lists (n_row, n_col, Row, Col, head,
|
||||
min_score, n_col2-k, max_deg) ;
|
||||
debug_matrix (n_row, n_col, Row, Col, A) ;
|
||||
#endif /* NDEBUG */
|
||||
#endif /* SPQR_NDEBUG */
|
||||
|
||||
/* === Select pivot column, and order it ============================ */
|
||||
|
||||
|
|
@ -2280,12 +2280,12 @@ PRIVATE Int find_ordering /* return the number of garbage collections */
|
|||
ASSERT (min_score <= n_col) ;
|
||||
ASSERT (head [min_score] >= EMPTY) ;
|
||||
|
||||
#ifndef NDEBUG
|
||||
#ifndef SPQR_NDEBUG
|
||||
for (debug_d = 0 ; debug_d < min_score ; debug_d++)
|
||||
{
|
||||
ASSERT (head [debug_d] == EMPTY) ;
|
||||
}
|
||||
#endif /* NDEBUG */
|
||||
#endif /* SPQR_NDEBUG */
|
||||
|
||||
/* get pivot column from head of minimum degree list */
|
||||
while (head [min_score] == EMPTY && min_score < n_col)
|
||||
|
|
@ -2327,9 +2327,9 @@ PRIVATE Int find_ordering /* return the number of garbage collections */
|
|||
/* garbage collection has wiped out the Row[].shared2.mark array */
|
||||
tag_mark = clear_mark (0, max_mark, n_row, Row) ;
|
||||
|
||||
#ifndef NDEBUG
|
||||
#ifndef SPQR_NDEBUG
|
||||
debug_matrix (n_row, n_col, Row, Col, A) ;
|
||||
#endif /* NDEBUG */
|
||||
#endif /* SPQR_NDEBUG */
|
||||
}
|
||||
|
||||
/* === Compute pivot row pattern ==================================== */
|
||||
|
|
@ -2380,10 +2380,10 @@ PRIVATE Int find_ordering /* return the number of garbage collections */
|
|||
Col [pivot_col].shared1.thickness = pivot_col_thickness ;
|
||||
max_deg = MAX (max_deg, pivot_row_degree) ;
|
||||
|
||||
#ifndef NDEBUG
|
||||
#ifndef SPQR_NDEBUG
|
||||
DEBUG3 (("check2\n")) ;
|
||||
debug_mark (n_row, Row, tag_mark, max_mark) ;
|
||||
#endif /* NDEBUG */
|
||||
#endif /* SPQR_NDEBUG */
|
||||
|
||||
/* === Kill all rows used to construct pivot row ==================== */
|
||||
|
||||
|
|
@ -2515,10 +2515,10 @@ PRIVATE Int find_ordering /* return the number of garbage collections */
|
|||
}
|
||||
}
|
||||
|
||||
#ifndef NDEBUG
|
||||
#ifndef SPQR_NDEBUG
|
||||
debug_deg_lists (n_row, n_col, Row, Col, head,
|
||||
min_score, n_col2-k-pivot_row_degree, max_deg) ;
|
||||
#endif /* NDEBUG */
|
||||
#endif /* SPQR_NDEBUG */
|
||||
|
||||
/* === Add up set differences for each column ======================= */
|
||||
|
||||
|
|
@ -2627,9 +2627,9 @@ PRIVATE Int find_ordering /* return the number of garbage collections */
|
|||
|
||||
detect_super_cols (
|
||||
|
||||
#ifndef NDEBUG
|
||||
#ifndef SPQR_NDEBUG
|
||||
n_col, Row,
|
||||
#endif /* NDEBUG */
|
||||
#endif /* SPQR_NDEBUG */
|
||||
|
||||
Col, A, head, pivot_row_start, pivot_row_length) ;
|
||||
|
||||
|
|
@ -2641,10 +2641,10 @@ PRIVATE Int find_ordering /* return the number of garbage collections */
|
|||
|
||||
tag_mark = clear_mark (tag_mark+max_deg+1, max_mark, n_row, Row) ;
|
||||
|
||||
#ifndef NDEBUG
|
||||
#ifndef SPQR_NDEBUG
|
||||
DEBUG3 (("check3\n")) ;
|
||||
debug_mark (n_row, Row, tag_mark, max_mark) ;
|
||||
#endif /* NDEBUG */
|
||||
#endif /* SPQR_NDEBUG */
|
||||
|
||||
/* === Finalize the new pivot row, and column scores ================ */
|
||||
|
||||
|
|
@ -2708,10 +2708,10 @@ PRIVATE Int find_ordering /* return the number of garbage collections */
|
|||
|
||||
}
|
||||
|
||||
#ifndef NDEBUG
|
||||
#ifndef SPQR_NDEBUG
|
||||
debug_deg_lists (n_row, n_col, Row, Col, head,
|
||||
min_score, n_col2-k, max_deg) ;
|
||||
#endif /* NDEBUG */
|
||||
#endif /* SPQR_NDEBUG */
|
||||
|
||||
/* === Resurrect the new pivot row ================================== */
|
||||
|
||||
|
|
@ -2859,11 +2859,11 @@ PRIVATE void detect_super_cols
|
|||
(
|
||||
/* === Parameters ======================================================= */
|
||||
|
||||
#ifndef NDEBUG
|
||||
#ifndef SPQR_NDEBUG
|
||||
/* these two parameters are only needed when debugging is enabled: */
|
||||
Int n_col, /* number of columns of A */
|
||||
Colamd_Row Row [], /* of size n_row+1 */
|
||||
#endif /* NDEBUG */
|
||||
#endif /* SPQR_NDEBUG */
|
||||
|
||||
Colamd_Col Col [], /* of size n_col+1 */
|
||||
Int A [], /* row indices of A */
|
||||
|
|
@ -3033,12 +3033,12 @@ PRIVATE Int garbage_collection /* returns the new value of pfree */
|
|||
Int c ; /* a column index */
|
||||
Int length ; /* length of a row or column */
|
||||
|
||||
#ifndef NDEBUG
|
||||
#ifndef SPQR_NDEBUG
|
||||
Int debug_rows ;
|
||||
DEBUG2 (("Defrag..\n")) ;
|
||||
for (psrc = &A[0] ; psrc < pfree ; psrc++) ASSERT (*psrc >= 0) ;
|
||||
debug_rows = 0 ;
|
||||
#endif /* NDEBUG */
|
||||
#endif /* SPQR_NDEBUG */
|
||||
|
||||
/* === Defragment the columns =========================================== */
|
||||
|
||||
|
|
@ -3085,9 +3085,9 @@ PRIVATE Int garbage_collection /* returns the new value of pfree */
|
|||
ASSERT (ROW_IS_ALIVE (r)) ;
|
||||
/* flag the start of the row with the one's complement of row */
|
||||
*psrc = ONES_COMPLEMENT (r) ;
|
||||
#ifndef NDEBUG
|
||||
#ifndef SPQR_NDEBUG
|
||||
debug_rows++ ;
|
||||
#endif /* NDEBUG */
|
||||
#endif /* SPQR_NDEBUG */
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -3121,9 +3121,9 @@ PRIVATE Int garbage_collection /* returns the new value of pfree */
|
|||
}
|
||||
Row [r].length = (Int) (pdest - &A [Row [r].start]) ;
|
||||
ASSERT (Row [r].length > 0) ;
|
||||
#ifndef NDEBUG
|
||||
#ifndef SPQR_NDEBUG
|
||||
debug_rows-- ;
|
||||
#endif /* NDEBUG */
|
||||
#endif /* SPQR_NDEBUG */
|
||||
}
|
||||
}
|
||||
/* ensure we found all the rows */
|
||||
|
|
@ -3311,7 +3311,7 @@ PRIVATE void print_report
|
|||
|
||||
/* When debugging is disabled, the remainder of this file is ignored. */
|
||||
|
||||
#ifndef NDEBUG
|
||||
#ifndef SPQR_NDEBUG
|
||||
|
||||
|
||||
/* ========================================================================== */
|
||||
|
|
@ -3608,4 +3608,4 @@ PRIVATE void colamd_get_debug
|
|||
method, colamd_debug)) ;
|
||||
}
|
||||
|
||||
#endif /* NDEBUG */
|
||||
#endif /* SPQR_NDEBUG */
|
||||
|
|
|
|||
|
|
@ -12,7 +12,7 @@ AC_CONFIG_SRCDIR([colamd/colamd.c])
|
|||
AC_CONFIG_SRCDIR([spqr_mini/spqr_front.cpp])
|
||||
AC_CONFIG_SRCDIR([base/DSFVector.cpp])
|
||||
AC_CONFIG_SRCDIR([geometry/Cal3_S2.cpp])
|
||||
AC_CONFIG_SRCDIR([inference/SymbolicFactor.cpp])
|
||||
AC_CONFIG_SRCDIR([inference/SymbolicFactorGraph.cpp])
|
||||
AC_CONFIG_SRCDIR([linear/GaussianFactor.cpp])
|
||||
AC_CONFIG_SRCDIR([nonlinear/NonlinearOptimizer.cpp])
|
||||
AC_CONFIG_SRCDIR([slam/pose2SLAM.cpp])
|
||||
|
|
|
|||
|
|
@ -9,7 +9,7 @@
|
|||
#include <iostream>
|
||||
|
||||
// for all nonlinear keys
|
||||
#include <gtsam/inference/Key.h>
|
||||
#include <gtsam/nonlinear/Key.h>
|
||||
|
||||
// for points and poses
|
||||
#include <gtsam/geometry/Point2.h>
|
||||
|
|
|
|||
|
|
@ -12,10 +12,10 @@
|
|||
|
||||
#include <iostream>
|
||||
#include <math.h>
|
||||
#include <gtsam/inference/Key.h>
|
||||
#include <gtsam/slam/PriorFactor.h>
|
||||
#include <gtsam/geometry/Rot2.h>
|
||||
#include <gtsam/linear/NoiseModel.h>
|
||||
#include <gtsam/nonlinear/Key.h>
|
||||
#include <gtsam/nonlinear/LieConfig-inl.h>
|
||||
#include <gtsam/nonlinear/NonlinearFactorGraph-inl.h>
|
||||
#include <gtsam/nonlinear/NonlinearOptimizer-inl.h>
|
||||
|
|
|
|||
|
|
@ -51,6 +51,10 @@ AM_LDFLAGS = $(BOOST_LDFLAGS) $(boost_serialization)
|
|||
LDADD = libgeometry.la ../base/libbase.la ../CppUnitLite/libCppUnitLite.a
|
||||
AM_DEFAULT_SOURCE_EXT = .cpp
|
||||
|
||||
if USE_ACCELERATE_MACOS
|
||||
AM_LDFLAGS += -Wl,/System/Library/Frameworks/Accelerate.framework/Accelerate
|
||||
endif
|
||||
|
||||
# rule to run an executable
|
||||
%.run: % $(LDADD)
|
||||
./$^
|
||||
|
|
|
|||
|
|
@ -30,7 +30,7 @@ namespace gtsam {
|
|||
Point2(): x_(0), y_(0) {}
|
||||
Point2(const Point2 &p) : x_(p.x_), y_(p.y_) {}
|
||||
Point2(double x, double y): x_(x), y_(y) {}
|
||||
Point2(const Vector& v) : x_(v(0)), y_(v(1)) {}
|
||||
Point2(const Vector& v) : x_(v(0)), y_(v(1)) { assert(v.size() == 2); }
|
||||
|
||||
/** dimension of the variable - used to autodetect sizes */
|
||||
inline static size_t Dim() { return dimension; }
|
||||
|
|
|
|||
|
|
@ -40,6 +40,7 @@ namespace gtsam {
|
|||
#ifdef SLOW_BUT_CORRECT_EXPMAP
|
||||
|
||||
Pose2 Pose2::Expmap(const Vector& xi) {
|
||||
assert(xi.size() == 3);
|
||||
Point2 v(xi(0),xi(1));
|
||||
double w = xi(2);
|
||||
if (fabs(w) < 1e-5)
|
||||
|
|
@ -71,6 +72,7 @@ namespace gtsam {
|
|||
|
||||
/* ************************************************************************* */
|
||||
Pose2 Pose2::Expmap(const Vector& v) {
|
||||
assert(v.size() == 3);
|
||||
return Pose2(v[0], v[1], v[2]);
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -55,7 +55,7 @@ namespace gtsam {
|
|||
|
||||
/** Constructor from 3*3 matrix */
|
||||
Pose2(const Matrix &T) :
|
||||
r_(Rot2::atan2(T(1, 0), T(0, 0))), t_(T(0, 2), T(1, 2)) {}
|
||||
r_(Rot2::atan2(T(1, 0), T(0, 0))), t_(T(0, 2), T(1, 2)) { assert(T.size1()==3 && T.size2()==3); }
|
||||
|
||||
/** print with optional string */
|
||||
void print(const std::string& s = "") const;
|
||||
|
|
@ -154,6 +154,7 @@ namespace gtsam {
|
|||
*/
|
||||
Matrix AdjointMap() const;
|
||||
inline Vector Adjoint(const Vector& xi) const {
|
||||
assert(xi.size() == 3);
|
||||
return AdjointMap()*xi;
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -143,7 +143,7 @@ namespace gtsam {
|
|||
|
||||
/* ************************************************************************* */
|
||||
// Log map at identity - return the canonical coordinates of this rotation
|
||||
inline Vector Rot3::Logmap(const Rot3& R) {
|
||||
Vector Rot3::Logmap(const Rot3& R) {
|
||||
double tr = R.r1().x()+R.r2().y()+R.r3().z();
|
||||
if (fabs(tr-3.0) < 1e-10) { // when theta = 0, +-2pi, +-4pi, etc.
|
||||
return zero(3);
|
||||
|
|
|
|||
|
|
@ -10,14 +10,16 @@
|
|||
#include <fstream>
|
||||
#include <boost/foreach.hpp>
|
||||
#include <boost/tuple/tuple.hpp>
|
||||
#include <boost/format.hpp>
|
||||
#include <boost/lambda/bind.hpp>
|
||||
#include <boost/lambda/lambda.hpp>
|
||||
|
||||
#include <boost/assign/std/vector.hpp> // for +=
|
||||
using namespace boost::assign;
|
||||
|
||||
#include <gtsam/inference/Ordering.h>
|
||||
#include <gtsam/inference/BayesNet.h>
|
||||
//#include "FactorGraph-inl.h"
|
||||
//#include "Conditional.h"
|
||||
#include "Conditional.h"
|
||||
|
||||
using namespace std;
|
||||
|
||||
|
|
@ -27,9 +29,8 @@ namespace gtsam {
|
|||
template<class Conditional>
|
||||
void BayesNet<Conditional>::print(const string& s) const {
|
||||
cout << s << ":\n";
|
||||
Symbol key;
|
||||
BOOST_REVERSE_FOREACH(sharedConditional conditional,conditionals_)
|
||||
conditional->print("Node[" + (string)(conditional->key()) + "]");
|
||||
conditional->print((boost::format("Node[%1%]") % conditional->key()).str());
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
|
|
@ -39,6 +40,25 @@ namespace gtsam {
|
|||
return equal(conditionals_.begin(),conditionals_.end(),cbn.conditionals_.begin(),equals_star<Conditional>(tol));
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class Conditional>
|
||||
void BayesNet<Conditional>::permuteWithInverse(const Permutation& inversePermutation) {
|
||||
BOOST_FOREACH(sharedConditional conditional, conditionals_) {
|
||||
conditional->permuteWithInverse(inversePermutation);
|
||||
}
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class Conditional>
|
||||
bool BayesNet<Conditional>::permuteSeparatorWithInverse(const Permutation& inversePermutation) {
|
||||
bool separatorChanged = false;
|
||||
BOOST_FOREACH(sharedConditional conditional, conditionals_) {
|
||||
if(conditional->permuteSeparatorWithInverse(inversePermutation))
|
||||
separatorChanged = true;
|
||||
}
|
||||
return separatorChanged;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class Conditional>
|
||||
void BayesNet<Conditional>::push_back(const BayesNet<Conditional> bn) {
|
||||
|
|
@ -55,8 +75,8 @@ namespace gtsam {
|
|||
|
||||
/* ************************************************************************* */
|
||||
template<class Conditional>
|
||||
Ordering BayesNet<Conditional>::ordering() const {
|
||||
Ordering ord;
|
||||
list<varid_t> BayesNet<Conditional>::ordering() const {
|
||||
list<varid_t> ord;
|
||||
BOOST_FOREACH(sharedConditional conditional,conditionals_)
|
||||
ord.push_back(conditional->key());
|
||||
return ord;
|
||||
|
|
@ -67,10 +87,10 @@ namespace gtsam {
|
|||
void BayesNet<Conditional>::saveGraph(const std::string &s) const {
|
||||
ofstream of(s.c_str());
|
||||
of<< "digraph G{\n";
|
||||
BOOST_FOREACH(sharedConditional conditional,conditionals_) {
|
||||
Symbol child = conditional->key();
|
||||
BOOST_FOREACH(const Symbol& parent,conditional->parents()) {
|
||||
of << (string)parent << "->" << (string)child << endl;
|
||||
BOOST_FOREACH(const_sharedConditional conditional,conditionals_) {
|
||||
varid_t child = conditional->key();
|
||||
BOOST_FOREACH(varid_t parent, conditional->parents()) {
|
||||
of << parent << "->" << child << endl;
|
||||
}
|
||||
}
|
||||
of<<"}";
|
||||
|
|
@ -81,10 +101,10 @@ namespace gtsam {
|
|||
|
||||
template<class Conditional>
|
||||
typename BayesNet<Conditional>::sharedConditional
|
||||
BayesNet<Conditional>::operator[](const Symbol& key) const {
|
||||
const_iterator it = find_if(conditionals_.begin(),conditionals_.end(),onKey<Conditional>(key));
|
||||
if (it == conditionals_.end()) throw(invalid_argument(
|
||||
"BayesNet::operator['"+(std::string)key+"']: not found"));
|
||||
BayesNet<Conditional>::operator[](varid_t key) const {
|
||||
const_iterator it = find_if(conditionals_.begin(), conditionals_.end(), boost::lambda::bind(&Conditional::key, *boost::lambda::_1) == key);
|
||||
if (it == conditionals_.end()) throw(invalid_argument((boost::format(
|
||||
"BayesNet::operator['%1%']: not found") % key).str()));
|
||||
return *it;
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -10,16 +10,16 @@
|
|||
|
||||
#include <list>
|
||||
#include <boost/shared_ptr.hpp>
|
||||
#include <boost/serialization/nvp.hpp>
|
||||
//#include <boost/serialization/list.hpp>
|
||||
//#include <boost/serialization/shared_ptr.hpp>
|
||||
|
||||
#include <gtsam/base/types.h>
|
||||
#include <gtsam/base/Testable.h>
|
||||
#include <gtsam/inference/Key.h>
|
||||
#include <gtsam/inference/Permutation.h>
|
||||
|
||||
namespace gtsam {
|
||||
|
||||
class Ordering;
|
||||
|
||||
/**
|
||||
* Bayes network
|
||||
* This is the base class for SymbolicBayesNet, DiscreteBayesNet, and GaussianBayesNet
|
||||
|
|
@ -31,10 +31,15 @@ namespace gtsam {
|
|||
|
||||
public:
|
||||
|
||||
typedef typename boost::shared_ptr<BayesNet<Conditional> > shared_ptr;
|
||||
|
||||
/** We store shared pointers to Conditional densities */
|
||||
typedef typename boost::shared_ptr<Conditional> sharedConditional;
|
||||
typedef typename boost::shared_ptr<const Conditional> const_sharedConditional;
|
||||
typedef typename std::list<sharedConditional> Conditionals;
|
||||
|
||||
typedef typename Conditionals::const_iterator iterator;
|
||||
typedef typename Conditionals::const_reverse_iterator reverse_iterator;
|
||||
typedef typename Conditionals::const_iterator const_iterator;
|
||||
typedef typename Conditionals::const_reverse_iterator const_reverse_iterator;
|
||||
|
||||
|
|
@ -77,19 +82,31 @@ namespace gtsam {
|
|||
*/
|
||||
inline void pop_front() {conditionals_.pop_front();}
|
||||
|
||||
/** Permute the variables in the BayesNet */
|
||||
void permuteWithInverse(const Permutation& inversePermutation);
|
||||
|
||||
/** Permute the variables when only separator variables need to be permuted.
|
||||
* Returns true if any reordered variables appeared in the separator and
|
||||
* false if not.
|
||||
*/
|
||||
bool permuteSeparatorWithInverse(const Permutation& inversePermutation);
|
||||
|
||||
/** size is the number of nodes */
|
||||
inline size_t size() const {
|
||||
return conditionals_.size();
|
||||
}
|
||||
|
||||
/** return keys in reverse topological sort order, i.e., elimination order */
|
||||
Ordering ordering() const;
|
||||
std::list<varid_t> ordering() const;
|
||||
|
||||
/** SLOW O(n) random access to Conditional by key */
|
||||
sharedConditional operator[](const Symbol& key) const;
|
||||
sharedConditional operator[](varid_t key) const;
|
||||
|
||||
/** return last node in ordering */
|
||||
inline sharedConditional back() { return conditionals_.back(); }
|
||||
inline sharedConditional& back() { return conditionals_.back(); }
|
||||
|
||||
/** return last node in ordering */
|
||||
boost::shared_ptr<const Conditional> back() const { return conditionals_.back(); }
|
||||
|
||||
/** return iterators. FD: breaks encapsulation? */
|
||||
inline const_iterator const begin() const {return conditionals_.begin();}
|
||||
|
|
|
|||
|
|
@ -9,17 +9,20 @@
|
|||
#pragma once
|
||||
|
||||
#include <iostream>
|
||||
#include <algorithm>
|
||||
|
||||
#include <boost/foreach.hpp>
|
||||
#include <boost/assign/std/list.hpp> // for operator +=
|
||||
#include <boost/format.hpp>
|
||||
#include <boost/lambda/lambda.hpp>
|
||||
#include <boost/iterator/transform_iterator.hpp>
|
||||
#include <fstream>
|
||||
using namespace boost::assign;
|
||||
namespace lam = boost::lambda;
|
||||
|
||||
#include <gtsam/inference/Conditional.h>
|
||||
#include <gtsam/inference/BayesTree.h>
|
||||
#include <gtsam/inference/Ordering.h>
|
||||
#include <gtsam/inference/inference-inl.h>
|
||||
#include <gtsam/inference/Key.h>
|
||||
|
||||
namespace gtsam {
|
||||
|
||||
|
|
@ -32,7 +35,7 @@ namespace gtsam {
|
|||
/* ************************************************************************* */
|
||||
template<class Conditional>
|
||||
BayesTree<Conditional>::Clique::Clique(const sharedConditional& conditional) {
|
||||
separator_ = conditional->parents();
|
||||
separator_.assign(conditional->parents().begin(), conditional->parents().end());
|
||||
this->push_back(conditional);
|
||||
}
|
||||
|
||||
|
|
@ -40,31 +43,66 @@ namespace gtsam {
|
|||
template<class Conditional>
|
||||
BayesTree<Conditional>::Clique::Clique(const BayesNet<Conditional>& bayesNet)
|
||||
: BayesNet<Conditional>(bayesNet) {
|
||||
separator_ = (*bayesNet.rbegin())->parents();
|
||||
if(bayesNet.size() > 0) {
|
||||
#ifndef NDEBUG
|
||||
// Debug check that each parent variable is either a frontal variable in
|
||||
// a later conditional in the Bayes net fragment, or that the parent is
|
||||
// also a parent of the last conditional. This checks that the fragment
|
||||
// is "enough like a clique". todo: this should really check that the
|
||||
// fragment *is* a clique.
|
||||
if(bayesNet.size() > 1) {
|
||||
typename BayesNet<Conditional>::const_reverse_iterator cond = bayesNet.rbegin();
|
||||
++ cond;
|
||||
typename Conditional::shared_ptr lastConditional = *cond;
|
||||
for( ; cond != bayesNet.rend(); ++cond)
|
||||
for(typename Conditional::const_iterator parent=(*cond)->beginParents(); parent!=(*cond)->endParents(); ++parent) {
|
||||
bool infragment = false;
|
||||
typename BayesNet<Conditional>::const_reverse_iterator parentCond = cond;
|
||||
do {
|
||||
if(*parent == (*parentCond)->key())
|
||||
infragment = true;
|
||||
--parentCond;
|
||||
} while(parentCond != bayesNet.rbegin());
|
||||
assert(infragment || find(lastConditional->beginParents(), lastConditional->endParents(), *parent) != lastConditional->endParents());
|
||||
}
|
||||
}
|
||||
#endif
|
||||
|
||||
// separator_.assign((*bayesNet.rbegin())->parents().begin(), (*bayesNet.rbegin())->parents().end());
|
||||
separator_.assign((*bayesNet.rbegin())->beginParents(), (*bayesNet.rbegin())->endParents());
|
||||
}
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class Conditional>
|
||||
Ordering BayesTree<Conditional>::Clique::keys() const {
|
||||
Ordering frontal_keys = this->ordering(), keys = separator_;
|
||||
keys.splice(keys.begin(),frontal_keys);
|
||||
vector<varid_t> BayesTree<Conditional>::Clique::keys() const {
|
||||
vector<varid_t> keys;
|
||||
keys.reserve(this->size() + separator_.size());
|
||||
BOOST_FOREACH(const sharedConditional conditional, *this) {
|
||||
keys.push_back(conditional->key());
|
||||
}
|
||||
keys.insert(keys.end(), separator_.begin(), separator_.end());
|
||||
return keys;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class Conditional>
|
||||
void BayesTree<Conditional>::Clique::print(const string& s) const {
|
||||
cout << s;
|
||||
cout << s << "Clique ";
|
||||
BOOST_FOREACH(const sharedConditional& conditional, this->conditionals_) { cout << conditional->key() << " "; }
|
||||
cout << "| ";
|
||||
BOOST_FOREACH(const varid_t sep, separator_) { cout << sep << " "; }
|
||||
cout << "\n";
|
||||
BOOST_FOREACH(const sharedConditional& conditional, this->conditionals_) {
|
||||
conditional->print("conditioanl");
|
||||
cout << " " << (string)(conditional->key());
|
||||
conditional->print(" " + s + "conditional");
|
||||
// cout << " " << conditional->key();
|
||||
}
|
||||
if (!separator_.empty()) {
|
||||
cout << " :";
|
||||
BOOST_FOREACH(const Symbol& key, separator_)
|
||||
cout << " " << (std::string)key;
|
||||
}
|
||||
cout << endl;
|
||||
// if (!separator_.empty()) {
|
||||
// cout << " :";
|
||||
// BOOST_FOREACH(varid_t key, separator_)
|
||||
// cout << " " << key;
|
||||
// }
|
||||
// cout << endl;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
|
|
@ -84,6 +122,39 @@ namespace gtsam {
|
|||
child->printTree(indent+" ");
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class Conditional>
|
||||
void BayesTree<Conditional>::Clique::permuteWithInverse(const Permutation& inversePermutation) {
|
||||
BayesNet<Conditional>::permuteWithInverse(inversePermutation);
|
||||
BOOST_FOREACH(varid_t& separatorKey, separator_) { separatorKey = inversePermutation[separatorKey]; }
|
||||
if(cachedFactor_) cachedFactor_->permuteWithInverse(inversePermutation);
|
||||
BOOST_FOREACH(const shared_ptr& child, children_) {
|
||||
child->permuteWithInverse(inversePermutation);
|
||||
}
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class Conditional>
|
||||
bool BayesTree<Conditional>::Clique::permuteSeparatorWithInverse(const Permutation& inversePermutation) {
|
||||
bool changed = BayesNet<Conditional>::permuteSeparatorWithInverse(inversePermutation);
|
||||
#ifndef NDEBUG
|
||||
if(!changed) {
|
||||
BOOST_FOREACH(varid_t& separatorKey, separator_) { assert(separatorKey == inversePermutation[separatorKey]); }
|
||||
BOOST_FOREACH(const shared_ptr& child, children_) {
|
||||
assert(child->permuteSeparatorWithInverse(inversePermutation) == false);
|
||||
}
|
||||
}
|
||||
#endif
|
||||
if(changed) {
|
||||
BOOST_FOREACH(varid_t& separatorKey, separator_) { separatorKey = inversePermutation[separatorKey]; }
|
||||
if(cachedFactor_) cachedFactor_->permuteWithInverse(inversePermutation);
|
||||
BOOST_FOREACH(const shared_ptr& child, children_) {
|
||||
(void)child->permuteSeparatorWithInverse(inversePermutation);
|
||||
}
|
||||
}
|
||||
return changed;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class Conditional>
|
||||
typename BayesTree<Conditional>::CliqueData
|
||||
|
|
@ -136,9 +207,9 @@ namespace gtsam {
|
|||
}
|
||||
|
||||
first = true;
|
||||
BOOST_FOREACH(const Symbol& sep, clique->separator_) {
|
||||
BOOST_FOREACH(varid_t sep, clique->separator_) {
|
||||
if(!first) parent += ","; first = false;
|
||||
parent += ((string)sep).c_str();
|
||||
parent += (boost::format("%1%")%sep).str();
|
||||
}
|
||||
parent += "\"];\n";
|
||||
s << parent;
|
||||
|
|
@ -177,115 +248,115 @@ namespace gtsam {
|
|||
return stats;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
// The shortcut density is a conditional P(S|R) of the separator of this
|
||||
// clique on the root. We can compute it recursively from the parent shortcut
|
||||
// P(Sp|R) as \int P(Fp|Sp) P(Sp|R), where Fp are the frontal nodes in p
|
||||
// TODO, why do we actually return a shared pointer, why does eliminate?
|
||||
/* ************************************************************************* */
|
||||
template<class Conditional>
|
||||
template<class Factor>
|
||||
BayesNet<Conditional>
|
||||
BayesTree<Conditional>::Clique::shortcut(shared_ptr R) {
|
||||
// A first base case is when this clique or its parent is the root,
|
||||
// in which case we return an empty Bayes net.
|
||||
// /* ************************************************************************* */
|
||||
// // The shortcut density is a conditional P(S|R) of the separator of this
|
||||
// // clique on the root. We can compute it recursively from the parent shortcut
|
||||
// // P(Sp|R) as \int P(Fp|Sp) P(Sp|R), where Fp are the frontal nodes in p
|
||||
// // TODO, why do we actually return a shared pointer, why does eliminate?
|
||||
// /* ************************************************************************* */
|
||||
// template<class Conditional>
|
||||
// template<class Factor>
|
||||
// BayesNet<Conditional>
|
||||
// BayesTree<Conditional>::Clique::shortcut(shared_ptr R) {
|
||||
// // A first base case is when this clique or its parent is the root,
|
||||
// // in which case we return an empty Bayes net.
|
||||
//
|
||||
// if (R.get()==this || parent_==R) {
|
||||
// BayesNet<Conditional> empty;
|
||||
// return empty;
|
||||
// }
|
||||
//
|
||||
// // The parent clique has a Conditional for each frontal node in Fp
|
||||
// // so we can obtain P(Fp|Sp) in factor graph form
|
||||
// FactorGraph<Factor> p_Fp_Sp(*parent_);
|
||||
//
|
||||
// // If not the base case, obtain the parent shortcut P(Sp|R) as factors
|
||||
// FactorGraph<Factor> p_Sp_R(parent_->shortcut<Factor>(R));
|
||||
//
|
||||
// // now combine P(Cp|R) = P(Fp|Sp) * P(Sp|R)
|
||||
// FactorGraph<Factor> p_Cp_R = combine(p_Fp_Sp, p_Sp_R);
|
||||
//
|
||||
// // Eliminate into a Bayes net with ordering designed to integrate out
|
||||
// // any variables not in *our* separator. Variables to integrate out must be
|
||||
// // eliminated first hence the desired ordering is [Cp\S S].
|
||||
// // However, an added wrinkle is that Cp might overlap with the root.
|
||||
// // Keys corresponding to the root should not be added to the ordering at all.
|
||||
//
|
||||
// // Get the key list Cp=Fp+Sp, which will form the basis for the integrands
|
||||
// Ordering integrands = parent_->keys();
|
||||
//
|
||||
// // Start ordering with the separator
|
||||
// Ordering ordering = separator_;
|
||||
//
|
||||
// // remove any variables in the root, after this integrands = Cp\R, ordering = S\R
|
||||
// BOOST_FOREACH(varid_t key, R->ordering()) {
|
||||
// integrands.remove(key);
|
||||
// ordering.remove(key);
|
||||
// }
|
||||
//
|
||||
// // remove any variables in the separator, after this integrands = Cp\R\S
|
||||
// BOOST_FOREACH(varid_t key, separator_) integrands.remove(key);
|
||||
//
|
||||
// // form the ordering as [Cp\R\S S\R]
|
||||
// BOOST_REVERSE_FOREACH(varid_t key, integrands) ordering.push_front(key);
|
||||
//
|
||||
// // eliminate to get marginal
|
||||
// BayesNet<Conditional> p_S_R = eliminate<Factor,Conditional>(p_Cp_R,ordering);
|
||||
//
|
||||
// // remove all integrands
|
||||
// BOOST_FOREACH(varid_t key, integrands) p_S_R.pop_front();
|
||||
//
|
||||
// // return the parent shortcut P(Sp|R)
|
||||
// return p_S_R;
|
||||
// }
|
||||
|
||||
if (R.get()==this || parent_==R) {
|
||||
BayesNet<Conditional> empty;
|
||||
return empty;
|
||||
}
|
||||
// /* ************************************************************************* */
|
||||
// // P(C) = \int_R P(F|S) P(S|R) P(R)
|
||||
// // TODO: Maybe we should integrate given parent marginal P(Cp),
|
||||
// // \int(Cp\S) P(F|S)P(S|Cp)P(Cp)
|
||||
// // Because the root clique could be very big.
|
||||
// /* ************************************************************************* */
|
||||
// template<class Conditional>
|
||||
// template<class Factor>
|
||||
// FactorGraph<Factor>
|
||||
// BayesTree<Conditional>::Clique::marginal(shared_ptr R) {
|
||||
// // If we are the root, just return this root
|
||||
// if (R.get()==this) return *R;
|
||||
//
|
||||
// // Combine P(F|S), P(S|R), and P(R)
|
||||
// BayesNet<Conditional> p_FSR = this->shortcut<Factor>(R);
|
||||
// p_FSR.push_front(*this);
|
||||
// p_FSR.push_back(*R);
|
||||
//
|
||||
// // Find marginal on the keys we are interested in
|
||||
// return marginalize<Factor,Conditional>(p_FSR,keys());
|
||||
// }
|
||||
|
||||
// The parent clique has a Conditional for each frontal node in Fp
|
||||
// so we can obtain P(Fp|Sp) in factor graph form
|
||||
FactorGraph<Factor> p_Fp_Sp(*parent_);
|
||||
|
||||
// If not the base case, obtain the parent shortcut P(Sp|R) as factors
|
||||
FactorGraph<Factor> p_Sp_R(parent_->shortcut<Factor>(R));
|
||||
|
||||
// now combine P(Cp|R) = P(Fp|Sp) * P(Sp|R)
|
||||
FactorGraph<Factor> p_Cp_R = combine(p_Fp_Sp, p_Sp_R);
|
||||
|
||||
// Eliminate into a Bayes net with ordering designed to integrate out
|
||||
// any variables not in *our* separator. Variables to integrate out must be
|
||||
// eliminated first hence the desired ordering is [Cp\S S].
|
||||
// However, an added wrinkle is that Cp might overlap with the root.
|
||||
// Keys corresponding to the root should not be added to the ordering at all.
|
||||
|
||||
// Get the key list Cp=Fp+Sp, which will form the basis for the integrands
|
||||
Ordering integrands = parent_->keys();
|
||||
|
||||
// Start ordering with the separator
|
||||
Ordering ordering = separator_;
|
||||
|
||||
// remove any variables in the root, after this integrands = Cp\R, ordering = S\R
|
||||
BOOST_FOREACH(const Symbol& key, R->ordering()) {
|
||||
integrands.remove(key);
|
||||
ordering.remove(key);
|
||||
}
|
||||
|
||||
// remove any variables in the separator, after this integrands = Cp\R\S
|
||||
BOOST_FOREACH(const Symbol& key, separator_) integrands.remove(key);
|
||||
|
||||
// form the ordering as [Cp\R\S S\R]
|
||||
BOOST_REVERSE_FOREACH(const Symbol& key, integrands) ordering.push_front(key);
|
||||
|
||||
// eliminate to get marginal
|
||||
BayesNet<Conditional> p_S_R = eliminate<Factor,Conditional>(p_Cp_R,ordering);
|
||||
|
||||
// remove all integrands
|
||||
BOOST_FOREACH(const Symbol& key, integrands) p_S_R.pop_front();
|
||||
|
||||
// return the parent shortcut P(Sp|R)
|
||||
return p_S_R;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
// P(C) = \int_R P(F|S) P(S|R) P(R)
|
||||
// TODO: Maybe we should integrate given parent marginal P(Cp),
|
||||
// \int(Cp\S) P(F|S)P(S|Cp)P(Cp)
|
||||
// Because the root clique could be very big.
|
||||
/* ************************************************************************* */
|
||||
template<class Conditional>
|
||||
template<class Factor>
|
||||
FactorGraph<Factor>
|
||||
BayesTree<Conditional>::Clique::marginal(shared_ptr R) {
|
||||
// If we are the root, just return this root
|
||||
if (R.get()==this) return *R;
|
||||
|
||||
// Combine P(F|S), P(S|R), and P(R)
|
||||
BayesNet<Conditional> p_FSR = this->shortcut<Factor>(R);
|
||||
p_FSR.push_front(*this);
|
||||
p_FSR.push_back(*R);
|
||||
|
||||
// Find marginal on the keys we are interested in
|
||||
return marginalize<Factor,Conditional>(p_FSR,keys());
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
// P(C1,C2) = \int_R P(F1|S1) P(S1|R) P(F2|S1) P(S2|R) P(R)
|
||||
/* ************************************************************************* */
|
||||
template<class Conditional>
|
||||
template<class Factor>
|
||||
pair<FactorGraph<Factor>, Ordering>
|
||||
BayesTree<Conditional>::Clique::joint(shared_ptr C2, shared_ptr R) {
|
||||
// For now, assume neither is the root
|
||||
|
||||
// Combine P(F1|S1), P(S1|R), P(F2|S2), P(S2|R), and P(R)
|
||||
sharedBayesNet bn(new BayesNet<Conditional>);
|
||||
if (!isRoot()) bn->push_back(*this); // P(F1|S1)
|
||||
if (!isRoot()) bn->push_back(shortcut<Factor>(R)); // P(S1|R)
|
||||
if (!C2->isRoot()) bn->push_back(*C2); // P(F2|S2)
|
||||
if (!C2->isRoot()) bn->push_back(C2->shortcut<Factor>(R)); // P(S2|R)
|
||||
bn->push_back(*R); // P(R)
|
||||
|
||||
// Find the keys of both C1 and C2
|
||||
Ordering keys12 = keys();
|
||||
BOOST_FOREACH(const Symbol& key,C2->keys()) keys12.push_back(key);
|
||||
keys12.unique();
|
||||
|
||||
// Calculate the marginal
|
||||
return make_pair(marginalize<Factor,Conditional>(*bn,keys12), keys12);
|
||||
}
|
||||
// /* ************************************************************************* */
|
||||
// // P(C1,C2) = \int_R P(F1|S1) P(S1|R) P(F2|S1) P(S2|R) P(R)
|
||||
// /* ************************************************************************* */
|
||||
// template<class Conditional>
|
||||
// template<class Factor>
|
||||
// pair<FactorGraph<Factor>, Ordering>
|
||||
// BayesTree<Conditional>::Clique::joint(shared_ptr C2, shared_ptr R) {
|
||||
// // For now, assume neither is the root
|
||||
//
|
||||
// // Combine P(F1|S1), P(S1|R), P(F2|S2), P(S2|R), and P(R)
|
||||
// sharedBayesNet bn(new BayesNet<Conditional>);
|
||||
// if (!isRoot()) bn->push_back(*this); // P(F1|S1)
|
||||
// if (!isRoot()) bn->push_back(shortcut<Factor>(R)); // P(S1|R)
|
||||
// if (!C2->isRoot()) bn->push_back(*C2); // P(F2|S2)
|
||||
// if (!C2->isRoot()) bn->push_back(C2->shortcut<Factor>(R)); // P(S2|R)
|
||||
// bn->push_back(*R); // P(R)
|
||||
//
|
||||
// // Find the keys of both C1 and C2
|
||||
// Ordering keys12 = keys();
|
||||
// BOOST_FOREACH(varid_t key,C2->keys()) keys12.push_back(key);
|
||||
// keys12.unique();
|
||||
//
|
||||
// // Calculate the marginal
|
||||
// return make_pair(marginalize<Factor,Conditional>(*bn,keys12), keys12);
|
||||
// }
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class Conditional>
|
||||
|
|
@ -303,10 +374,12 @@ namespace gtsam {
|
|||
|
||||
/* ************************************************************************* */
|
||||
template<class Conditional>
|
||||
typename BayesTree<Conditional>::sharedClique BayesTree<Conditional>::addClique
|
||||
(const sharedConditional& conditional, sharedClique parent_clique) {
|
||||
typename BayesTree<Conditional>::sharedClique BayesTree<Conditional>::addClique(
|
||||
const sharedConditional& conditional, sharedClique parent_clique) {
|
||||
sharedClique new_clique(new Clique(conditional));
|
||||
nodes_.insert(make_pair(conditional->key(), new_clique));
|
||||
varid_t key = conditional->key();
|
||||
nodes_.resize(std::max(key+1, nodes_.size()));
|
||||
nodes_[key] = new_clique;
|
||||
if (parent_clique != NULL) {
|
||||
new_clique->parent_ = parent_clique;
|
||||
parent_clique->children_.push_back(new_clique);
|
||||
|
|
@ -316,16 +389,51 @@ namespace gtsam {
|
|||
|
||||
/* ************************************************************************* */
|
||||
template<class Conditional>
|
||||
typename BayesTree<Conditional>::sharedClique BayesTree<Conditional>::addClique
|
||||
(const sharedConditional& conditional, list<sharedClique>& child_cliques) {
|
||||
typename BayesTree<Conditional>::sharedClique BayesTree<Conditional>::addClique(
|
||||
const sharedConditional& conditional, list<sharedClique>& child_cliques) {
|
||||
sharedClique new_clique(new Clique(conditional));
|
||||
nodes_.insert(make_pair(conditional->key(), new_clique));
|
||||
varid_t key = conditional->key();
|
||||
nodes_.resize(max(key+1, nodes_.size()));
|
||||
nodes_[key] = new_clique;
|
||||
new_clique->children_ = child_cliques;
|
||||
BOOST_FOREACH(sharedClique& child, child_cliques)
|
||||
child->parent_ = new_clique;
|
||||
return new_clique;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class Conditional>
|
||||
inline void BayesTree<Conditional>::addToCliqueFront(const sharedConditional& conditional, const sharedClique& clique) {
|
||||
static const bool debug = false;
|
||||
#ifndef NDEBUG
|
||||
// Debug check to make sure the conditional variable is ordered lower than
|
||||
// its parents and that all of its parents are present either in this
|
||||
// clique or its separator.
|
||||
BOOST_FOREACH(varid_t parent, conditional->parents()) {
|
||||
assert(parent > conditional->key());
|
||||
bool hasParent = false;
|
||||
const Clique& cliquer(*clique);
|
||||
BOOST_FOREACH(const sharedConditional& child, cliquer) {
|
||||
if(child->key() == parent) {
|
||||
hasParent = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if(!hasParent)
|
||||
if(find(clique->separator_.begin(), clique->separator_.end(), parent) != clique->separator_.end())
|
||||
hasParent = true;
|
||||
assert(hasParent);
|
||||
}
|
||||
#endif
|
||||
if(debug) conditional->print("Adding conditional ");
|
||||
if(debug) clique->print("To clique ");
|
||||
varid_t key = conditional->key();
|
||||
nodes_.resize(std::max(key+1, nodes_.size()));
|
||||
nodes_[key] = clique;
|
||||
clique->push_front(conditional);
|
||||
if(debug) clique->print("Expanded clique is ");
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class Conditional>
|
||||
void BayesTree<Conditional>::removeClique(sharedClique clique) {
|
||||
|
|
@ -333,14 +441,18 @@ namespace gtsam {
|
|||
if (clique->isRoot())
|
||||
root_.reset();
|
||||
else // detach clique from parent
|
||||
clique->parent_->children_.remove(clique);
|
||||
clique->parent_.lock()->children_.remove(clique);
|
||||
|
||||
// orphan my children
|
||||
BOOST_FOREACH(sharedClique child, clique->children_)
|
||||
child->parent_.reset();
|
||||
child->parent_ = typename BayesTree<Conditional>::Clique::weak_ptr();
|
||||
|
||||
BOOST_FOREACH(const Symbol& key, clique->ordering()) {
|
||||
nodes_.erase(key);
|
||||
BOOST_FOREACH(varid_t key, clique->separator_) {
|
||||
nodes_[key].reset();
|
||||
}
|
||||
const Clique& cliquer(*clique);
|
||||
BOOST_FOREACH(const sharedConditional& conditional, cliquer) {
|
||||
nodes_[conditional->key()].reset();
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -352,10 +464,9 @@ namespace gtsam {
|
|||
/* ************************************************************************* */
|
||||
template<class Conditional>
|
||||
BayesTree<Conditional>::BayesTree(const BayesNet<Conditional>& bayesNet) {
|
||||
IndexTable<Symbol> index(bayesNet.ordering());
|
||||
typename BayesNet<Conditional>::const_reverse_iterator rit;
|
||||
for ( rit=bayesNet.rbegin(); rit != bayesNet.rend(); ++rit )
|
||||
insert(*rit, index);
|
||||
insert(*rit);
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
|
|
@ -375,12 +486,10 @@ namespace gtsam {
|
|||
sharedClique new_clique;
|
||||
typename BayesNet<Conditional>::sharedConditional conditional;
|
||||
BOOST_REVERSE_FOREACH(conditional, bayesNet) {
|
||||
if (!new_clique.get()) {
|
||||
if (!new_clique.get())
|
||||
new_clique = addClique(conditional,childRoots);
|
||||
} else {
|
||||
nodes_.insert(make_pair(conditional->key(), new_clique));
|
||||
new_clique->push_front(conditional);
|
||||
}
|
||||
else
|
||||
addToCliqueFront(conditional, new_clique);
|
||||
}
|
||||
|
||||
root_ = new_clique;
|
||||
|
|
@ -401,11 +510,11 @@ namespace gtsam {
|
|||
/* ************************************************************************* */
|
||||
// binary predicate to test equality of a pair for use in equals
|
||||
template<class Conditional>
|
||||
bool check_pair(
|
||||
const pair<Symbol,typename BayesTree<Conditional>::sharedClique >& v1,
|
||||
const pair<Symbol,typename BayesTree<Conditional>::sharedClique >& v2
|
||||
bool check_sharedCliques(
|
||||
const typename BayesTree<Conditional>::sharedClique& v1,
|
||||
const typename BayesTree<Conditional>::sharedClique& v2
|
||||
) {
|
||||
return v1.first == v2.first && v1.second->equals(*(v2.second));
|
||||
return v1->equals(*v2);
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
|
|
@ -413,56 +522,77 @@ namespace gtsam {
|
|||
bool BayesTree<Conditional>::equals(const BayesTree<Conditional>& other,
|
||||
double tol) const {
|
||||
return size()==other.size() &&
|
||||
equal(nodes_.begin(),nodes_.end(),other.nodes_.begin(),check_pair<Conditional>);
|
||||
std::equal(nodes_.begin(), nodes_.end(), other.nodes_.begin(), &check_sharedCliques<Conditional>);
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class Conditional>
|
||||
Symbol BayesTree<Conditional>::findParentClique(const list<Symbol>& parents,
|
||||
const IndexTable<Symbol>& index) const {
|
||||
boost::optional<Symbol> parentCliqueRepresentative;
|
||||
boost::optional<size_t> lowest;
|
||||
BOOST_FOREACH(const Symbol& p, parents) {
|
||||
size_t i = index(p);
|
||||
if (!lowest || i<*lowest) {
|
||||
lowest.reset(i);
|
||||
parentCliqueRepresentative.reset(p);
|
||||
}
|
||||
}
|
||||
if (!lowest) throw
|
||||
invalid_argument("BayesTree::findParentClique: no parents given or key not present in index");
|
||||
return *parentCliqueRepresentative;
|
||||
template<class Container>
|
||||
inline varid_t BayesTree<Conditional>::findParentClique(const Container& parents) const {
|
||||
typename Container::const_iterator lowestOrderedParent = min_element(parents.begin(), parents.end());
|
||||
assert(lowestOrderedParent != parents.end());
|
||||
return *lowestOrderedParent;
|
||||
|
||||
// boost::optional<varid_t> parentCliqueRepresentative;
|
||||
// boost::optional<size_t> lowest;
|
||||
// BOOST_FOREACH(varid_t p, parents) {
|
||||
// size_t i = index(p);
|
||||
// if (!lowest || i<*lowest) {
|
||||
// lowest.reset(i);
|
||||
// parentCliqueRepresentative.reset(p);
|
||||
// }
|
||||
// }
|
||||
// if (!lowest) throw
|
||||
// invalid_argument("BayesTree::findParentClique: no parents given or key not present in index");
|
||||
// return *parentCliqueRepresentative;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class Conditional>
|
||||
void BayesTree<Conditional>::insert(const sharedConditional& conditional,
|
||||
const IndexTable<Symbol>& index)
|
||||
void BayesTree<Conditional>::insert(const sharedConditional& conditional)
|
||||
{
|
||||
static const bool debug = false;
|
||||
|
||||
// get key and parents
|
||||
const Symbol& key = conditional->key();
|
||||
list<Symbol> parents = conditional->parents(); // todo: const reference?
|
||||
typename Conditional::Parents parents = conditional->parents(); // todo: const reference?
|
||||
|
||||
if(debug) conditional->print("Adding conditional ");
|
||||
|
||||
// if no parents, start a new root clique
|
||||
if (parents.empty()) {
|
||||
if(debug) cout << "No parents so making root" << endl;
|
||||
root_ = addClique(conditional);
|
||||
return;
|
||||
}
|
||||
|
||||
// otherwise, find the parent clique by using the index data structure
|
||||
// to find the lowest-ordered parent
|
||||
Symbol parentRepresentative = findParentClique(parents, index);
|
||||
varid_t parentRepresentative = findParentClique(parents);
|
||||
if(debug) cout << "First-eliminated parent is " << parentRepresentative << endl;
|
||||
sharedClique parent_clique = (*this)[parentRepresentative];
|
||||
if(debug) parent_clique->print("Parent clique is ");
|
||||
|
||||
// if the parents and parent clique have the same size, add to parent clique
|
||||
if (parent_clique->size() == parents.size()) {
|
||||
nodes_.insert(make_pair(key, parent_clique));
|
||||
parent_clique->push_front(conditional);
|
||||
return;
|
||||
if (parent_clique->size() == size_t(parents.size())) {
|
||||
if(debug) cout << "Adding to parent clique" << endl;
|
||||
#ifndef NDEBUG
|
||||
// Debug check that the parent keys of the new conditional match the keys
|
||||
// currently in the clique.
|
||||
// list<varid_t>::const_iterator parent = parents.begin();
|
||||
// typename Clique::const_iterator cond = parent_clique->begin();
|
||||
// while(parent != parents.end()) {
|
||||
// assert(cond != parent_clique->end());
|
||||
// assert(*parent == (*cond)->key());
|
||||
// ++ parent;
|
||||
// ++ cond;
|
||||
// }
|
||||
#endif
|
||||
addToCliqueFront(conditional, parent_clique);
|
||||
} else {
|
||||
if(debug) cout << "Starting new clique" << endl;
|
||||
// otherwise, start a new clique and add it to the tree
|
||||
addClique(conditional,parent_clique);
|
||||
}
|
||||
|
||||
// otherwise, start a new clique and add it to the tree
|
||||
addClique(conditional,parent_clique);
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
|
|
@ -471,6 +601,8 @@ namespace gtsam {
|
|||
typename BayesTree<Conditional>::sharedClique BayesTree<Conditional>::insert(
|
||||
const BayesNet<Conditional>& bayesNet, list<sharedClique>& children, bool isRootClique)
|
||||
{
|
||||
static const bool debug = false;
|
||||
|
||||
if (bayesNet.size() == 0)
|
||||
throw invalid_argument("BayesTree::insert: empty bayes net!");
|
||||
|
||||
|
|
@ -478,12 +610,11 @@ namespace gtsam {
|
|||
sharedClique new_clique;
|
||||
typename BayesNet<Conditional>::sharedConditional conditional;
|
||||
BOOST_REVERSE_FOREACH(conditional, bayesNet) {
|
||||
if (!new_clique.get()) {
|
||||
if(debug) conditional->print("Inserting conditional: ");
|
||||
if (!new_clique.get())
|
||||
new_clique = addClique(conditional,children);
|
||||
} else {
|
||||
nodes_.insert(make_pair(conditional->key(), new_clique));
|
||||
new_clique->push_front(conditional);
|
||||
}
|
||||
else
|
||||
addToCliqueFront(conditional, new_clique);
|
||||
}
|
||||
|
||||
if (isRootClique) root_ = new_clique;
|
||||
|
|
@ -491,82 +622,117 @@ namespace gtsam {
|
|||
return new_clique;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
// First finds clique marginal then marginalizes that
|
||||
/* ************************************************************************* */
|
||||
/* ************************************************************************* */
|
||||
template<class Conditional>
|
||||
template<class Factor>
|
||||
FactorGraph<Factor>
|
||||
BayesTree<Conditional>::marginal(const Symbol& key) const {
|
||||
|
||||
// get clique containing key
|
||||
sharedClique clique = (*this)[key];
|
||||
|
||||
// calculate or retrieve its marginal
|
||||
FactorGraph<Factor> cliqueMarginal = clique->marginal<Factor>(root_);
|
||||
|
||||
// create an ordering where only the requested key is not eliminated
|
||||
Ordering ord = clique->keys();
|
||||
ord.remove(key);
|
||||
|
||||
// partially eliminate, remaining factor graph is requested marginal
|
||||
eliminate<Factor,Conditional>(cliqueMarginal,ord);
|
||||
return cliqueMarginal;
|
||||
void BayesTree<Conditional>::fillNodesIndex(const sharedClique& subtree) {
|
||||
// Add each frontal variable of this root node
|
||||
BOOST_FOREACH(const typename Conditional::shared_ptr& cond, *subtree) { nodes_[cond->key()] = subtree; }
|
||||
// Fill index for each child
|
||||
BOOST_FOREACH(const typename BayesTree<Conditional>::sharedClique& child, subtree->children_) {
|
||||
fillNodesIndex(child); }
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
/* ************************************************************************* */
|
||||
template<class Conditional>
|
||||
template<class Factor>
|
||||
BayesNet<Conditional>
|
||||
BayesTree<Conditional>::marginalBayesNet(const Symbol& key) const {
|
||||
void BayesTree<Conditional>::insert(const sharedClique& subtree) {
|
||||
if(subtree) {
|
||||
// Find the parent clique of the new subtree. By the running intersection
|
||||
// property, those separator variables in the subtree that are ordered
|
||||
// lower than the highest frontal variable of the subtree root will all
|
||||
// appear in the separator of the subtree root.
|
||||
if(subtree->separator_.empty()) {
|
||||
assert(!root_);
|
||||
root_ = subtree;
|
||||
} else {
|
||||
varid_t parentRepresentative = findParentClique(subtree->separator_);
|
||||
sharedClique parent = (*this)[parentRepresentative];
|
||||
parent->children_ += subtree;
|
||||
subtree->parent_ = parent; // set new parent!
|
||||
}
|
||||
|
||||
// calculate marginal as a factor graph
|
||||
FactorGraph<Factor> fg = this->marginal<Factor>(key);
|
||||
|
||||
// eliminate further to Bayes net
|
||||
return eliminate<Factor,Conditional>(fg,Ordering(key));
|
||||
// Now fill in the nodes index
|
||||
if(subtree->back()->key() > (nodes_.size() - 1))
|
||||
nodes_.resize(subtree->back()->key() + 1);
|
||||
fillNodesIndex(subtree);
|
||||
}
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
// Find two cliques, their joint, then marginalizes
|
||||
/* ************************************************************************* */
|
||||
template<class Conditional>
|
||||
template<class Factor>
|
||||
FactorGraph<Factor>
|
||||
BayesTree<Conditional>::joint(const Symbol& key1, const Symbol& key2) const {
|
||||
// /* ************************************************************************* */
|
||||
// // First finds clique marginal then marginalizes that
|
||||
// /* ************************************************************************* */
|
||||
// template<class Conditional>
|
||||
// template<class Factor>
|
||||
// FactorGraph<Factor>
|
||||
// BayesTree<Conditional>::marginal(varid_t key) const {
|
||||
//
|
||||
// // get clique containing key
|
||||
// sharedClique clique = (*this)[key];
|
||||
//
|
||||
// // calculate or retrieve its marginal
|
||||
// FactorGraph<Factor> cliqueMarginal = clique->marginal<Factor>(root_);
|
||||
//
|
||||
// // create an ordering where only the requested key is not eliminated
|
||||
// vector<varid_t> ord = clique->keys();
|
||||
// ord.remove(key);
|
||||
//
|
||||
// // partially eliminate, remaining factor graph is requested marginal
|
||||
// eliminate<Factor,Conditional>(cliqueMarginal,ord);
|
||||
// return cliqueMarginal;
|
||||
// }
|
||||
|
||||
// get clique C1 and C2
|
||||
sharedClique C1 = (*this)[key1], C2 = (*this)[key2];
|
||||
// /* ************************************************************************* */
|
||||
// template<class Conditional>
|
||||
// template<class Factor>
|
||||
// BayesNet<Conditional>
|
||||
// BayesTree<Conditional>::marginalBayesNet(varid_t key) const {
|
||||
//
|
||||
// // calculate marginal as a factor graph
|
||||
// FactorGraph<Factor> fg = this->marginal<Factor>(key);
|
||||
//
|
||||
// // eliminate further to Bayes net
|
||||
// return eliminate<Factor,Conditional>(fg,Ordering(key));
|
||||
// }
|
||||
|
||||
// calculate joint
|
||||
Ordering ord;
|
||||
FactorGraph<Factor> p_C1C2;
|
||||
boost::tie(p_C1C2,ord) = C1->joint<Factor>(C2,root_);
|
||||
// /* ************************************************************************* */
|
||||
// // Find two cliques, their joint, then marginalizes
|
||||
// /* ************************************************************************* */
|
||||
// template<class Conditional>
|
||||
// template<class Factor>
|
||||
// FactorGraph<Factor>
|
||||
// BayesTree<Conditional>::joint(varid_t key1, varid_t key2) const {
|
||||
//
|
||||
// // get clique C1 and C2
|
||||
// sharedClique C1 = (*this)[key1], C2 = (*this)[key2];
|
||||
//
|
||||
// // calculate joint
|
||||
// Ordering ord;
|
||||
// FactorGraph<Factor> p_C1C2;
|
||||
// boost::tie(p_C1C2,ord) = C1->joint<Factor>(C2,root_);
|
||||
//
|
||||
// // create an ordering where both requested keys are not eliminated
|
||||
// ord.remove(key1);
|
||||
// ord.remove(key2);
|
||||
//
|
||||
// // partially eliminate, remaining factor graph is requested joint
|
||||
// // TODO, make eliminate functional
|
||||
// eliminate<Factor,Conditional>(p_C1C2,ord);
|
||||
// return p_C1C2;
|
||||
// }
|
||||
|
||||
// create an ordering where both requested keys are not eliminated
|
||||
ord.remove(key1);
|
||||
ord.remove(key2);
|
||||
|
||||
// partially eliminate, remaining factor graph is requested joint
|
||||
// TODO, make eliminate functional
|
||||
eliminate<Factor,Conditional>(p_C1C2,ord);
|
||||
return p_C1C2;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class Conditional>
|
||||
template<class Factor>
|
||||
BayesNet<Conditional>
|
||||
BayesTree<Conditional>::jointBayesNet(const Symbol& key1, const Symbol& key2) const {
|
||||
|
||||
// calculate marginal as a factor graph
|
||||
FactorGraph<Factor> fg = this->joint<Factor>(key1,key2);
|
||||
|
||||
// eliminate further to Bayes net
|
||||
Ordering ordering;
|
||||
ordering += key1, key2;
|
||||
return eliminate<Factor,Conditional>(fg,ordering);
|
||||
}
|
||||
// /* ************************************************************************* */
|
||||
// template<class Conditional>
|
||||
// template<class Factor>
|
||||
// BayesNet<Conditional>
|
||||
// BayesTree<Conditional>::jointBayesNet(varid_t key1, varid_t key2) const {
|
||||
//
|
||||
// // calculate marginal as a factor graph
|
||||
// FactorGraph<Factor> fg = this->joint<Factor>(key1,key2);
|
||||
//
|
||||
// // eliminate further to Bayes net
|
||||
// Ordering ordering;
|
||||
// ordering += key1, key2;
|
||||
// return eliminate<Factor,Conditional>(fg,ordering);
|
||||
// }
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class Conditional>
|
||||
|
|
@ -591,7 +757,7 @@ namespace gtsam {
|
|||
this->removeClique(clique);
|
||||
|
||||
// remove path above me
|
||||
this->removePath(clique->parent_, bn, orphans);
|
||||
this->removePath(clique->parent_.lock(), bn, orphans);
|
||||
|
||||
// add children to list of orphans (splice also removed them from clique->children_)
|
||||
orphans.splice (orphans.begin(), clique->children_);
|
||||
|
|
@ -603,17 +769,20 @@ namespace gtsam {
|
|||
|
||||
/* ************************************************************************* */
|
||||
template<class Conditional>
|
||||
void BayesTree<Conditional>::removeTop(const list<Symbol>& keys,
|
||||
template<class Container>
|
||||
void BayesTree<Conditional>::removeTop(const Container& keys,
|
||||
BayesNet<Conditional>& bn, typename BayesTree<Conditional>::Cliques& orphans) {
|
||||
|
||||
// process each key of the new factor
|
||||
BOOST_FOREACH(const Symbol& key, keys) {
|
||||
BOOST_FOREACH(const varid_t& key, keys) {
|
||||
|
||||
// get the clique
|
||||
typename Nodes::iterator clique(nodes_.find(key));
|
||||
if(clique != nodes_.end()) {
|
||||
// remove path from clique to root
|
||||
this->removePath(clique->second, bn, orphans);
|
||||
if(key < nodes_.size()) {
|
||||
const sharedClique& clique(nodes_[key]);
|
||||
if(clique) {
|
||||
// remove path from clique to root
|
||||
this->removePath(clique, bn, orphans);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -13,12 +13,12 @@
|
|||
//#include <boost/serialization/map.hpp>
|
||||
//#include <boost/serialization/list.hpp>
|
||||
#include <stdexcept>
|
||||
#include <deque>
|
||||
|
||||
#include <gtsam/base/types.h>
|
||||
#include <gtsam/base/Testable.h>
|
||||
#include <gtsam/inference/FactorGraph.h>
|
||||
#include <gtsam/inference/BayesNet.h>
|
||||
#include <gtsam/inference/Key.h>
|
||||
#include <gtsam/inference/IndexTable.h>
|
||||
|
||||
namespace gtsam {
|
||||
|
||||
|
|
@ -42,9 +42,11 @@ namespace gtsam {
|
|||
struct Clique: public BayesNet<Conditional> {
|
||||
|
||||
typedef typename boost::shared_ptr<Clique> shared_ptr;
|
||||
shared_ptr parent_;
|
||||
typedef typename boost::weak_ptr<Clique> weak_ptr;
|
||||
weak_ptr parent_;
|
||||
std::list<shared_ptr> children_;
|
||||
std::list<Symbol> separator_; /** separator keys */
|
||||
std::list<varid_t> separator_; /** separator keys */
|
||||
typename Conditional::FactorType::shared_ptr cachedFactor_;
|
||||
|
||||
friend class BayesTree<Conditional>;
|
||||
|
||||
|
|
@ -56,7 +58,7 @@ namespace gtsam {
|
|||
Clique(const BayesNet<Conditional>& bayesNet);
|
||||
|
||||
/** return keys in frontal:separator order */
|
||||
Ordering keys() const;
|
||||
std::vector<varid_t> keys() const;
|
||||
|
||||
/** print this node */
|
||||
void print(const std::string& s = "") const;
|
||||
|
|
@ -67,29 +69,43 @@ namespace gtsam {
|
|||
}
|
||||
|
||||
/** is this the root of a Bayes tree ? */
|
||||
inline bool isRoot() const { return parent_==NULL;}
|
||||
inline bool isRoot() const { return parent_.expired();}
|
||||
|
||||
/** return the const reference of children */
|
||||
std::list<shared_ptr>& children() { return children_; }
|
||||
const std::list<shared_ptr>& children() const { return children_; }
|
||||
|
||||
/** Reference the cached factor */
|
||||
typename Conditional::FactorType::shared_ptr& cachedFactor() { return cachedFactor_; }
|
||||
|
||||
/** The size of subtree rooted at this clique, i.e., nr of Cliques */
|
||||
size_t treeSize() const;
|
||||
|
||||
/** print this node and entire subtree below it */
|
||||
void printTree(const std::string& indent="") const;
|
||||
|
||||
/** return the conditional P(S|Root) on the separator given the root */
|
||||
// TODO: create a cached version
|
||||
template<class Factor>
|
||||
BayesNet<Conditional> shortcut(shared_ptr root);
|
||||
/** Permute the variables in the whole subtree rooted at this clique */
|
||||
void permuteWithInverse(const Permutation& inversePermutation);
|
||||
|
||||
/** return the marginal P(C) of the clique */
|
||||
template<class Factor>
|
||||
FactorGraph<Factor> marginal(shared_ptr root);
|
||||
/** Permute variables when they only appear in the separators. In this
|
||||
* case the running intersection property will be used to prevent always
|
||||
* traversing the whole tree. Returns whether any separator variables in
|
||||
* this subtree were reordered.
|
||||
*/
|
||||
bool permuteSeparatorWithInverse(const Permutation& inversePermutation);
|
||||
|
||||
/** return the joint P(C1,C2), where C1==this. TODO: not a method? */
|
||||
template<class Factor>
|
||||
std::pair<FactorGraph<Factor>,Ordering> joint(shared_ptr C2, shared_ptr root);
|
||||
// /** return the conditional P(S|Root) on the separator given the root */
|
||||
// // TODO: create a cached version
|
||||
// template<class Factor>
|
||||
// BayesNet<Conditional> shortcut(shared_ptr root);
|
||||
//
|
||||
// /** return the marginal P(C) of the clique */
|
||||
// template<class Factor>
|
||||
// FactorGraph<Factor> marginal(shared_ptr root);
|
||||
//
|
||||
// /** return the joint P(C1,C2), where C1==this. TODO: not a method? */
|
||||
// template<class Factor>
|
||||
// std::pair<FactorGraph<Factor>,Ordering> joint(shared_ptr C2, shared_ptr root);
|
||||
};
|
||||
|
||||
// typedef for shared pointers to cliques
|
||||
|
|
@ -116,10 +132,10 @@ namespace gtsam {
|
|||
CliqueStats getStats() const;
|
||||
};
|
||||
|
||||
private:
|
||||
protected:
|
||||
|
||||
/** Map from keys to Clique */
|
||||
typedef SymbolMap<sharedClique> Nodes;
|
||||
typedef std::deque<sharedClique> Nodes;
|
||||
Nodes nodes_;
|
||||
|
||||
/** private helper method for saving the Tree to a text file in GraphViz format */
|
||||
|
|
@ -145,6 +161,15 @@ namespace gtsam {
|
|||
sharedClique addClique(const sharedConditional& conditional,
|
||||
std::list<sharedClique>& child_cliques);
|
||||
|
||||
/** Add a conditional to the front of a clique, i.e. a conditional whose
|
||||
* parents are already in the clique or its separators. This function does
|
||||
* not check for this condition, it just updates the data structures.
|
||||
*/
|
||||
void addToCliqueFront(const sharedConditional& conditional, const sharedClique& clique);
|
||||
|
||||
/** Fill the nodes index for a subtree */
|
||||
void fillNodesIndex(const sharedClique& subtree);
|
||||
|
||||
public:
|
||||
|
||||
/** Create an empty Bayes Tree */
|
||||
|
|
@ -168,7 +193,7 @@ namespace gtsam {
|
|||
*/
|
||||
|
||||
/** Insert a new conditional */
|
||||
void insert(const sharedConditional& conditional, const IndexTable<Symbol>& index);
|
||||
void insert(const sharedConditional& conditional);
|
||||
|
||||
/** Insert a new clique corresponding to the given Bayes net.
|
||||
* It is the caller's responsibility to decide whether the given Bayes net is a valid clique,
|
||||
|
|
@ -177,6 +202,15 @@ namespace gtsam {
|
|||
sharedClique insert(const BayesNet<Conditional>& bayesNet,
|
||||
std::list<sharedClique>& children, bool isRootClique = false);
|
||||
|
||||
/**
|
||||
* Hang a new subtree off of the existing tree. This finds the appropriate
|
||||
* parent clique for the subtree (which may be the root), and updates the
|
||||
* nodes index with the new cliques in the subtree. None of the frontal
|
||||
* variables in the subtree may appear in the separators of the existing
|
||||
* BayesTree.
|
||||
*/
|
||||
void insert(const sharedClique& subtree);
|
||||
|
||||
/**
|
||||
* Querying Bayes trees
|
||||
*/
|
||||
|
|
@ -185,10 +219,11 @@ namespace gtsam {
|
|||
bool equals(const BayesTree<Conditional>& other, double tol = 1e-9) const;
|
||||
|
||||
/**
|
||||
* Find parent clique of a conditional, given an IndexTable constructed from an ordering.
|
||||
* It will look at all parents and return the one with the lowest index in the ordering.
|
||||
* Find parent clique of a conditional. It will look at all parents and
|
||||
* return the one with the lowest index in the ordering.
|
||||
*/
|
||||
Symbol findParentClique(const std::list<Symbol>& parents, const IndexTable<Symbol>& index) const;
|
||||
template<class Container>
|
||||
varid_t findParentClique(const Container& parents) const;
|
||||
|
||||
/** number of cliques */
|
||||
inline size_t size() const {
|
||||
|
|
@ -199,33 +234,32 @@ namespace gtsam {
|
|||
}
|
||||
|
||||
/** return root clique */
|
||||
sharedClique root() const {
|
||||
return root_;
|
||||
}
|
||||
const sharedClique& root() const { return root_; }
|
||||
sharedClique& root() { return root_; }
|
||||
|
||||
/** find the clique to which key belongs */
|
||||
sharedClique operator[](const Symbol& key) const {
|
||||
sharedClique operator[](varid_t key) const {
|
||||
return nodes_.at(key);
|
||||
}
|
||||
|
||||
/** Gather data on all cliques */
|
||||
CliqueData getCliqueData() const;
|
||||
|
||||
/** return marginal on any variable */
|
||||
template<class Factor>
|
||||
FactorGraph<Factor> marginal(const Symbol& key) const;
|
||||
|
||||
/** return marginal on any variable, as a Bayes Net */
|
||||
template<class Factor>
|
||||
BayesNet<Conditional> marginalBayesNet(const Symbol& key) const;
|
||||
|
||||
/** return joint on two variables */
|
||||
template<class Factor>
|
||||
FactorGraph<Factor> joint(const Symbol& key1, const Symbol& key2) const;
|
||||
|
||||
/** return joint on two variables as a BayesNet */
|
||||
template<class Factor>
|
||||
BayesNet<Conditional> jointBayesNet(const Symbol& key1, const Symbol& key2) const;
|
||||
// /** return marginal on any variable */
|
||||
// template<class Factor>
|
||||
// FactorGraph<Factor> marginal(varid_t key) const;
|
||||
//
|
||||
// /** return marginal on any variable, as a Bayes Net */
|
||||
// template<class Factor>
|
||||
// BayesNet<Conditional> marginalBayesNet(varid_t key) const;
|
||||
//
|
||||
// /** return joint on two variables */
|
||||
// template<class Factor>
|
||||
// FactorGraph<Factor> joint(varid_t key1, varid_t key2) const;
|
||||
//
|
||||
// /** return joint on two variables as a BayesNet */
|
||||
// template<class Factor>
|
||||
// BayesNet<Conditional> jointBayesNet(varid_t key1, varid_t key2) const;
|
||||
|
||||
/**
|
||||
* Read only with side effects
|
||||
|
|
@ -254,7 +288,8 @@ namespace gtsam {
|
|||
* Given a list of keys, turn "contaminated" part of the tree back into a factor graph.
|
||||
* Factors and orphans are added to the in/out arguments.
|
||||
*/
|
||||
void removeTop(const std::list<Symbol>& keys, BayesNet<Conditional>& bn, Cliques& orphans);
|
||||
template<class Container>
|
||||
void removeTop(const Container& keys, BayesNet<Conditional>& bn, Cliques& orphans);
|
||||
|
||||
}; // BayesTree
|
||||
|
||||
|
|
|
|||
|
|
@ -20,26 +20,39 @@ namespace gtsam {
|
|||
* Cluster
|
||||
* ************************************************************************* */
|
||||
template<class FG>
|
||||
ClusterTree<FG>::Cluster::Cluster(const FG& fg, const Symbol& key):FG(fg) {
|
||||
template<class Iterator>
|
||||
ClusterTree<FG>::Cluster::Cluster(const FG& fg, varid_t key, Iterator firstSeparator, Iterator lastSeparator) :
|
||||
FG(fg), frontal(1, key), separator(firstSeparator, lastSeparator) {}
|
||||
|
||||
// push the one key as frontal
|
||||
frontal_.push_back(key);
|
||||
/* ************************************************************************* */
|
||||
template<class FG>
|
||||
template<typename FrontalIt, typename SeparatorIt>
|
||||
ClusterTree<FG>::Cluster::Cluster(
|
||||
const FG& fg, FrontalIt firstFrontal, FrontalIt lastFrontal, SeparatorIt firstSeparator, SeparatorIt lastSeparator) :
|
||||
FG(fg), frontal(firstFrontal, lastFrontal), separator(firstSeparator, lastSeparator) {}
|
||||
|
||||
// the rest are separator keys...
|
||||
BOOST_FOREACH(const Symbol& graphKey, fg.keys())
|
||||
if (graphKey != key)
|
||||
separator_.insert(graphKey);
|
||||
/* ************************************************************************* */
|
||||
template<class FG>
|
||||
template<typename FrontalIt, typename SeparatorIt>
|
||||
ClusterTree<FG>::Cluster::Cluster(
|
||||
FrontalIt firstFrontal, FrontalIt lastFrontal, SeparatorIt firstSeparator, SeparatorIt lastSeparator) :
|
||||
frontal(firstFrontal, lastFrontal), separator(firstSeparator, lastSeparator) {}
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class FG>
|
||||
void ClusterTree<FG>::Cluster::addChild(typename ClusterTree<FG>::Cluster::shared_ptr child) {
|
||||
children_.push_back(child);
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class FG>
|
||||
bool ClusterTree<FG>::Cluster::equals(const ClusterTree<FG>::Cluster& other) const {
|
||||
if (!frontal_.equals(other.frontal_)) return false;
|
||||
if (!separator_.equals(other.separator_)) return false;
|
||||
if (frontal != other.frontal) return false;
|
||||
if (separator != other.separator) return false;
|
||||
if (children_.size() != other.children_.size()) return false;
|
||||
|
||||
typename vector<shared_ptr>::const_iterator it1 = children_.begin();
|
||||
typename vector<shared_ptr>::const_iterator it2 = other.children_.begin();
|
||||
typename list<shared_ptr>::const_iterator it1 = children_.begin();
|
||||
typename list<shared_ptr>::const_iterator it2 = other.children_.begin();
|
||||
for (; it1 != children_.end(); it1++, it2++)
|
||||
if (!(*it1)->equals(**it2)) return false;
|
||||
|
||||
|
|
@ -50,11 +63,11 @@ namespace gtsam {
|
|||
template<class FG>
|
||||
void ClusterTree<FG>::Cluster::print(const string& indent) const {
|
||||
cout << indent;
|
||||
BOOST_FOREACH(const Symbol& key, frontal_)
|
||||
cout << (string) key << " ";
|
||||
cout << ":";
|
||||
BOOST_FOREACH(const Symbol& key, separator_)
|
||||
cout << (string) key << " ";
|
||||
BOOST_FOREACH(const varid_t key, frontal)
|
||||
cout << key << " ";
|
||||
cout << ": ";
|
||||
BOOST_FOREACH(const varid_t key, separator)
|
||||
cout << key << " ";
|
||||
cout << endl;
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -8,8 +8,15 @@
|
|||
|
||||
#pragma once
|
||||
|
||||
#include <gtsam/base/types.h>
|
||||
#include <gtsam/base/Testable.h>
|
||||
|
||||
#include <boost/shared_ptr.hpp>
|
||||
#include <gtsam/inference/Ordering.h>
|
||||
#include <boost/weak_ptr.hpp>
|
||||
|
||||
#include <list>
|
||||
#include <vector>
|
||||
#include <iostream>
|
||||
|
||||
namespace gtsam {
|
||||
|
||||
|
|
@ -24,20 +31,36 @@ namespace gtsam {
|
|||
protected:
|
||||
|
||||
// the class for subgraphs that also include the pointers to the parents and two children
|
||||
struct Cluster : public FG {
|
||||
|
||||
class Cluster : public FG {
|
||||
public:
|
||||
typedef typename boost::shared_ptr<Cluster> shared_ptr;
|
||||
typedef typename boost::weak_ptr<Cluster> weak_ptr;
|
||||
|
||||
Ordering frontal_; // the frontal variables
|
||||
Unordered separator_; // the separator variables
|
||||
shared_ptr parent_; // the parent cluster
|
||||
std::vector<shared_ptr> children_; // the child clusters
|
||||
const std::vector<varid_t> frontal; // the frontal variables
|
||||
const std::vector<varid_t> separator; // the separator variables
|
||||
|
||||
protected:
|
||||
|
||||
weak_ptr parent_; // the parent cluster
|
||||
std::list<shared_ptr> children_; // the child clusters
|
||||
const typename FG::sharedFactor eliminated_; // the eliminated factor to pass on to the parent
|
||||
|
||||
public:
|
||||
|
||||
// Construct empty clique
|
||||
Cluster() {}
|
||||
|
||||
/* Create a node with a single frontal variable */
|
||||
Cluster(const FG& fg, const Symbol& key);
|
||||
template<typename Iterator>
|
||||
Cluster(const FG& fg, varid_t key, Iterator firstSeparator, Iterator lastSeparator);
|
||||
|
||||
/* Create a node with several frontal variables */
|
||||
template<typename FrontalIt, typename SeparatorIt>
|
||||
Cluster(const FG& fg, FrontalIt firstFrontal, FrontalIt lastFrontal, SeparatorIt firstSeparator, SeparatorIt lastSeparator);
|
||||
|
||||
/* Create a node with several frontal variables */
|
||||
template<typename FrontalIt, typename SeparatorIt>
|
||||
Cluster(FrontalIt firstFrontal, FrontalIt lastFrontal, SeparatorIt firstSeparator, SeparatorIt lastSeparator);
|
||||
|
||||
// print the object
|
||||
void print(const std::string& indent) const;
|
||||
|
|
@ -45,6 +68,15 @@ namespace gtsam {
|
|||
|
||||
// check equality
|
||||
bool equals(const Cluster& other) const;
|
||||
|
||||
// get or set the parent
|
||||
weak_ptr& parent() { return parent_; }
|
||||
|
||||
// get a reference to the children
|
||||
const std::list<shared_ptr>& children() const { return children_; }
|
||||
|
||||
// add a child
|
||||
void addChild(shared_ptr child);
|
||||
};
|
||||
|
||||
// typedef for shared pointers to clusters
|
||||
|
|
@ -63,7 +95,7 @@ namespace gtsam {
|
|||
// print the object
|
||||
void print(const std::string& str) const {
|
||||
std::cout << str << std::endl;
|
||||
if (root_.get()) root_->printTree("");
|
||||
if (root_) root_->printTree("");
|
||||
}
|
||||
|
||||
/** check equality */
|
||||
|
|
|
|||
|
|
@ -8,12 +8,15 @@
|
|||
|
||||
#pragma once
|
||||
|
||||
#include <iostream>
|
||||
#include <boost/utility.hpp> // for noncopyable
|
||||
//#include <boost/serialization/string.hpp>
|
||||
//#include <boost/serialization/access.hpp>
|
||||
//#include <boost/serialization/nvp.hpp>
|
||||
#include <boost/foreach.hpp>
|
||||
#include <boost/range/iterator_range.hpp>
|
||||
#include <boost/serialization/nvp.hpp>
|
||||
#include <gtsam/base/types.h>
|
||||
#include <gtsam/base/Testable.h>
|
||||
#include <gtsam/inference/Key.h>
|
||||
#include <gtsam/inference/Factor.h>
|
||||
#include <gtsam/inference/Permutation.h>
|
||||
|
||||
namespace gtsam {
|
||||
|
||||
|
|
@ -25,58 +28,144 @@ namespace gtsam {
|
|||
* immutable, i.e., practicing functional programming.
|
||||
*/
|
||||
class Conditional: boost::noncopyable, public Testable<Conditional> {
|
||||
protected:
|
||||
|
||||
/** key of random variable */
|
||||
Symbol key_;
|
||||
protected:
|
||||
/** Conditional just uses an internal Factor for storage (a conditional is
|
||||
* really just a special factor anyway, but we do this instead of inherit
|
||||
* to prevent "diamond" inheritance with GaussianFactor and
|
||||
* GaussianConditional.
|
||||
*/
|
||||
Factor factor_;
|
||||
|
||||
/** The first nFrontal variables are frontal and the rest are parents. */
|
||||
size_t nFrontal_;
|
||||
|
||||
ValueWithDefault<bool, true> permuted_;
|
||||
|
||||
public:
|
||||
|
||||
/** empty constructor for serialization */
|
||||
Conditional() {}
|
||||
/** convenience typename for a shared pointer to this class */
|
||||
typedef gtsam::Factor FactorType;
|
||||
typedef boost::shared_ptr<Conditional> shared_ptr;
|
||||
typedef Factor::iterator iterator;
|
||||
typedef Factor::const_iterator const_iterator;
|
||||
typedef boost::iterator_range<const_iterator> Frontals;
|
||||
typedef boost::iterator_range<const_iterator> Parents;
|
||||
|
||||
/** constructor */
|
||||
Conditional(const Symbol& key) : key_(key) {}
|
||||
/** Empty Constructor to make serialization possible */
|
||||
Conditional(){}
|
||||
|
||||
/* destructor */
|
||||
virtual ~Conditional() {
|
||||
}
|
||||
/** No parents */
|
||||
Conditional(varid_t key) : factor_(key), nFrontal_(1) {}
|
||||
|
||||
/** check equality */
|
||||
bool equals(const Conditional& c, double tol = 1e-9) const {
|
||||
return key_ == c.key_;
|
||||
}
|
||||
/** Single parent */
|
||||
Conditional(varid_t key, varid_t parent) : factor_(key, parent), nFrontal_(1) {}
|
||||
|
||||
/** return key */
|
||||
inline const Symbol& key() const {
|
||||
return key_;
|
||||
}
|
||||
/** Two parents */
|
||||
Conditional(varid_t key, varid_t parent1, varid_t parent2) : factor_(key, parent1, parent2), nFrontal_(1) {}
|
||||
|
||||
/** return parent keys */
|
||||
virtual std::list<Symbol> parents() const = 0;
|
||||
/** Three parents */
|
||||
Conditional(varid_t key, varid_t parent1, varid_t parent2, varid_t parent3) : factor_(key, parent1, parent2, parent3), nFrontal_(1) {}
|
||||
|
||||
/** Constructor from a frontal variable and a vector of parents */
|
||||
Conditional(varid_t key, const std::vector<varid_t>& parents) : nFrontal_(1) {
|
||||
factor_.keys_.resize(1+parents.size());
|
||||
*(beginFrontals()) = key;
|
||||
std::copy(parents.begin(), parents.end(), beginParents()); }
|
||||
|
||||
/** Constructor from any number of frontal variables and parents */
|
||||
template<typename Iterator>
|
||||
Conditional(Iterator firstKey, Iterator lastKey, size_t nFrontals) : factor_(firstKey, lastKey), nFrontal_(nFrontals) {}
|
||||
|
||||
/** Special accessor when there is only one frontal variable. */
|
||||
varid_t key() const { assert(nFrontal_==1); return factor_.keys_[0]; }
|
||||
|
||||
/**
|
||||
* Permutes the Conditional, but for efficiency requires the permutation
|
||||
* to already be inverted.
|
||||
*/
|
||||
void permuteWithInverse(const Permutation& inversePermutation) {
|
||||
factor_.permuteWithInverse(inversePermutation); }
|
||||
|
||||
/** Permute the variables when only separator variables need to be permuted.
|
||||
* Returns true if any reordered variables appeared in the separator and
|
||||
* false if not.
|
||||
*/
|
||||
bool permuteSeparatorWithInverse(const Permutation& inversePermutation) {
|
||||
#ifndef NDEBUG
|
||||
BOOST_FOREACH(varid_t key, frontals()) { assert(key == inversePermutation[key]); }
|
||||
#endif
|
||||
bool parentChanged = false;
|
||||
BOOST_FOREACH(varid_t& parent, parents()) {
|
||||
varid_t newParent = inversePermutation[parent];
|
||||
if(parent != newParent) {
|
||||
parentChanged = true;
|
||||
parent = newParent;
|
||||
}
|
||||
}
|
||||
return parentChanged;
|
||||
}
|
||||
|
||||
/** return a const reference to all keys */
|
||||
const std::vector<varid_t>& keys() const { return factor_.keys(); }
|
||||
|
||||
/** return a view of the frontal keys */
|
||||
Frontals frontals() const {
|
||||
return boost::make_iterator_range(beginFrontals(), endFrontals()); }
|
||||
|
||||
/** return a view of the parent keys */
|
||||
Parents parents() const {
|
||||
return boost::make_iterator_range(beginParents(), endParents()); }
|
||||
|
||||
/** return the number of frontals */
|
||||
size_t nrFrontals() const { return nFrontal_; }
|
||||
|
||||
/** return the number of parents */
|
||||
virtual std::size_t nrParents() const = 0;
|
||||
size_t nrParents() const { return factor_.keys_.size() - nFrontal_; }
|
||||
|
||||
/** print */
|
||||
void print(const std::string& s = "Conditional") const {
|
||||
std::cout << s << " P(";
|
||||
BOOST_FOREACH(varid_t key, frontals()) std::cout << " " << key;
|
||||
if (nrParents()>0) std::cout << " |";
|
||||
BOOST_FOREACH(varid_t parent, parents()) std::cout << " " << parent;
|
||||
std::cout << ")" << std::endl;
|
||||
}
|
||||
|
||||
/** check equality */
|
||||
bool equals(const Conditional& c, double tol = 1e-9) const {
|
||||
return nFrontal_ == c.nFrontal_ && factor_.equals(c.factor_, tol); }
|
||||
|
||||
/** Iterators over frontal and parent variables. */
|
||||
const_iterator beginFrontals() const { return factor_.keys_.begin(); }
|
||||
const_iterator endFrontals() const { return factor_.keys_.begin()+nFrontal_; }
|
||||
const_iterator beginParents() const { return factor_.keys_.begin()+nFrontal_; }
|
||||
const_iterator endParents() const { return factor_.keys_.end(); }
|
||||
|
||||
/** Mutable iterators and accessors */
|
||||
iterator beginFrontals() { return factor_.keys_.begin(); }
|
||||
iterator endFrontals() { return factor_.keys_.begin()+nFrontal_; }
|
||||
iterator beginParents() { return factor_.keys_.begin()+nFrontal_; }
|
||||
iterator endParents() { return factor_.keys_.end(); }
|
||||
boost::iterator_range<iterator> frontals() { return boost::make_iterator_range(beginFrontals(), endFrontals()); }
|
||||
boost::iterator_range<iterator> parents() { return boost::make_iterator_range(beginParents(), endParents()); }
|
||||
|
||||
protected:
|
||||
/** Debugging invariant that the keys should be in order, including that the
|
||||
* conditioned variable is numbered lower than the parents.
|
||||
*/
|
||||
void checkSorted() const;
|
||||
|
||||
friend class Factor;
|
||||
|
||||
private:
|
||||
/** Serialization function */
|
||||
friend class boost::serialization::access;
|
||||
template<class Archive>
|
||||
void serialize(Archive & ar, const unsigned int version) {
|
||||
ar & BOOST_SERIALIZATION_NVP(key_);
|
||||
ar & BOOST_SERIALIZATION_NVP(factor_);
|
||||
ar & BOOST_SERIALIZATION_NVP(nFrontal_);
|
||||
}
|
||||
};
|
||||
|
||||
// predicate to check whether a conditional has the sought key
|
||||
template<class Conditional>
|
||||
class onKey {
|
||||
const Symbol& key_;
|
||||
public:
|
||||
onKey(const Symbol& key) :
|
||||
key_(key) {
|
||||
}
|
||||
bool operator()(const typename Conditional::shared_ptr& conditional) {
|
||||
return (conditional->key() == key_);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
|
|
|||
|
|
@ -18,80 +18,192 @@ namespace gtsam {
|
|||
using namespace std;
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class FG>
|
||||
void EliminationTree<FG>::add(const FG& fg, const Symbol& key,
|
||||
const IndexTable<Symbol>& indexTable) {
|
||||
// template<class FG>
|
||||
// void EliminationTree<FG>::add(const FG& fg, varid_t j) {
|
||||
// sharedNode node(new Node(fg, j));
|
||||
// add(node);
|
||||
// }
|
||||
|
||||
// Make a node and put it in the nodes_ array:
|
||||
sharedNode node(new Node(fg, key));
|
||||
size_t j = indexTable(key);
|
||||
nodes_[j] = node;
|
||||
/* ************************************************************************* */
|
||||
template<class FG>
|
||||
void EliminationTree<FG>::add(const sharedNode& node) {
|
||||
|
||||
// if the separator is empty, this is the root
|
||||
if (node->separator_.empty()) {
|
||||
this->root_ = node;
|
||||
}
|
||||
else {
|
||||
// find parent by iterating over all separator keys, and taking the lowest
|
||||
// one in the ordering. That is the index of the parent clique.
|
||||
size_t parentIndex = nrVariables_;
|
||||
BOOST_FOREACH(const Symbol& j, node->separator_) {
|
||||
size_t index = indexTable(j);
|
||||
if (index<parentIndex) parentIndex = index;
|
||||
}
|
||||
// attach to parent
|
||||
sharedNode& parent = nodes_[parentIndex];
|
||||
if (!parent) throw
|
||||
invalid_argument("EliminationTree::add: parent clique does not exist");
|
||||
node->parent_ = parent;
|
||||
parent->children_.push_back(node);
|
||||
}
|
||||
}
|
||||
assert(node->frontal.size() == 1);
|
||||
varid_t j = node->frontal.front();
|
||||
|
||||
// Make a node and put it in the nodes_ array:
|
||||
nodes_[j] = node;
|
||||
|
||||
// if the separator is empty, this is the root
|
||||
if (node->separator.empty()) {
|
||||
this->root_ = node;
|
||||
}
|
||||
else {
|
||||
// find parent by iterating over all separator keys, and taking the lowest
|
||||
// one in the ordering. That is the index of the parent clique.
|
||||
vector<varid_t>::const_iterator parentIndex = min_element(node->separator.begin(), node->separator.end());
|
||||
assert(parentIndex != node->separator.end());
|
||||
// attach to parent
|
||||
sharedNode& parent = nodes_[*parentIndex];
|
||||
if (!parent) throw
|
||||
invalid_argument("EliminationTree::add: parent clique does not exist");
|
||||
node->parent() = parent;
|
||||
parent->addChild(node);
|
||||
}
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
// template<class FG>
|
||||
// EliminationTree<FG>::EliminationTree(const OrderedGraphs& graphs) :
|
||||
// nrVariables_(graphs.size()), nodes_(nrVariables_) {
|
||||
//
|
||||
// // Get ordering by (map first graphs)
|
||||
// Ordering ordering;
|
||||
// transform(graphs.begin(), graphs.end(), back_inserter(ordering),
|
||||
// _Select1st<typename OrderedGraphs::value_type> ());
|
||||
//
|
||||
// // Create a temporary map from key to ordering index
|
||||
// IndexTable<Symbol> indexTable(ordering);
|
||||
//
|
||||
// // Go over the collection in reverse elimination order
|
||||
// // and add one node for every of the n variables.
|
||||
// BOOST_REVERSE_FOREACH(const NamedGraph& namedGraph, graphs)
|
||||
// add(namedGraph.second, namedGraph.first, indexTable);
|
||||
// }
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class FG>
|
||||
EliminationTree<FG>::EliminationTree(const OrderedGraphs& graphs) :
|
||||
nrVariables_(graphs.size()), nodes_(nrVariables_) {
|
||||
EliminationTree<FG>::EliminationTree(FG& fg) {
|
||||
|
||||
// Get ordering by (map first graphs)
|
||||
Ordering ordering;
|
||||
transform(graphs.begin(), graphs.end(), back_inserter(ordering),
|
||||
_Select1st<typename OrderedGraphs::value_type> ());
|
||||
static const bool debug = false;
|
||||
|
||||
// Create a temporary map from key to ordering index
|
||||
IndexTable<Symbol> indexTable(ordering);
|
||||
// If the factor graph is empty, return an empty index because inside this
|
||||
// if block we assume at least one factor.
|
||||
if(fg.size() > 0) {
|
||||
|
||||
// Go over the collection in reverse elimination order
|
||||
// and add one node for every of the n variables.
|
||||
BOOST_REVERSE_FOREACH(const NamedGraph& namedGraph, graphs)
|
||||
add(namedGraph.second, namedGraph.first, indexTable);
|
||||
}
|
||||
vector<deque<size_t> > clusters;
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class FG>
|
||||
EliminationTree<FG>::EliminationTree(FG& fg, const Ordering& ordering) :
|
||||
nrVariables_(ordering.size()), nodes_(nrVariables_) {
|
||||
// Build clusters
|
||||
{
|
||||
// Find highest-numbered variable
|
||||
varid_t maxVar = 0;
|
||||
BOOST_FOREACH(const typename FG::sharedFactor& factor, fg) {
|
||||
if(factor) {
|
||||
typename FG::factor_type::const_iterator maxj = std::max_element(factor->begin(), factor->end());
|
||||
if(maxj != factor->end() && *maxj > maxVar) maxVar = *maxj;
|
||||
}
|
||||
}
|
||||
// Build index mapping from variable id to factor index - we only use
|
||||
// the first variable because after this variable is eliminated the
|
||||
// factor will no longer exist.
|
||||
clusters.resize(maxVar+1);
|
||||
for(size_t fi=0; fi<fg.size(); ++fi)
|
||||
if(fg[fi] && !fg[fi]->keys().empty()) {
|
||||
typename FG::factor_type::const_iterator firstvar = std::min_element(fg[fi]->begin(), fg[fi]->end());
|
||||
assert(firstvar != fg[fi]->end());
|
||||
clusters[*firstvar].push_back(fi);
|
||||
}
|
||||
}
|
||||
|
||||
// Loop over all variables and get factors that are connected
|
||||
OrderedGraphs graphs;
|
||||
BOOST_FOREACH(const Symbol& key, ordering) {
|
||||
// TODO: a collection of factors is a factor graph and this should be returned
|
||||
// below rather than having to copy. GaussianFactorGraphSet should go...
|
||||
vector<typename FG::sharedFactor> found = fg.findAndRemoveFactors(key);
|
||||
FG fragment;
|
||||
NamedGraph namedGraph(key,fragment);
|
||||
BOOST_FOREACH(const typename FG::sharedFactor& factor, found)
|
||||
namedGraph.second.push_back(factor);
|
||||
graphs.push_back(namedGraph);
|
||||
}
|
||||
// Create column index that will be modified during elimination - this is
|
||||
// not the most efficient way of doing this, a modified version of
|
||||
// Gilbert01bit would be more efficient.
|
||||
vector<deque<size_t> > columnIndex = clusters;
|
||||
|
||||
// Create a temporary map from key to ordering index
|
||||
IndexTable<Symbol> indexTable(ordering);
|
||||
nrVariables_ = columnIndex.size();
|
||||
nodes_.resize(nrVariables_);
|
||||
|
||||
// Go over the collection in reverse elimination order
|
||||
// and add one node for every of the n variables.
|
||||
BOOST_REVERSE_FOREACH(const NamedGraph& namedGraph, graphs)
|
||||
add(namedGraph.second, namedGraph.first, indexTable);
|
||||
// Loop over all variables and get factors that are connected
|
||||
OrderedGraphs graphs;
|
||||
Nodes nodesToAdd; nodesToAdd.reserve(columnIndex.size());
|
||||
for(varid_t j=0; j<columnIndex.size(); ++j) {
|
||||
|
||||
if(debug) cout << "Eliminating " << j << endl;
|
||||
|
||||
// The factor index of the new joint factor
|
||||
size_t jointFactorI = fg.size();
|
||||
|
||||
// Get all of the factors associated with the variable.
|
||||
// If the factor has not already been removed - I think this is
|
||||
// somehow equivalent to the "find root" computation in Gilbert01bit.
|
||||
vector<size_t> involvedFactors; involvedFactors.reserve(columnIndex[j].size());
|
||||
BOOST_FOREACH(const size_t factorI, columnIndex[j]) {
|
||||
if(fg[factorI]) involvedFactors.push_back(factorI);
|
||||
}
|
||||
|
||||
if(!involvedFactors.empty()) {
|
||||
// Compute a mapping (called variableSlots) *from* each involved
|
||||
// variable that will be in the new joint factor *to* the slot in each
|
||||
// removed factor in which that variable appears. For each variable,
|
||||
// this is stored as a vector of slot numbers, stored in order of the
|
||||
// removed factors. The slot number is the max integer value if the
|
||||
// factor does not involve that variable.
|
||||
typedef map<varid_t, vector<varid_t> > VariableSlots;
|
||||
map<varid_t, vector<varid_t> > variableSlots;
|
||||
FG removedFactors; removedFactors.reserve(involvedFactors.size());
|
||||
size_t jointFactorPos = 0;
|
||||
BOOST_FOREACH(const size_t factorI, involvedFactors) {
|
||||
// Remove the factor from the factor graph
|
||||
assert(fg[factorI]);
|
||||
const typename FG::factor_type& removedFactor(*fg[factorI]);
|
||||
assert(removedFactors.size() == jointFactorPos);
|
||||
removedFactors.push_back(fg[factorI]);
|
||||
fg.remove(factorI);
|
||||
|
||||
varid_t factorVarSlot = 0;
|
||||
BOOST_FOREACH(const varid_t involvedVariable, removedFactor.keys()) {
|
||||
|
||||
// Set the slot in this factor for this variable. If the
|
||||
// variable was not already discovered, create an array for it
|
||||
// that we'll fill with the slot indices for each factor that
|
||||
// we're combining. Initially we put the max integer value in
|
||||
// the array entry for each factor that will indicate the factor
|
||||
// does not involve the variable.
|
||||
static vector<varid_t> empty;
|
||||
VariableSlots::iterator thisVarSlots = variableSlots.insert(make_pair(involvedVariable,empty)).first;
|
||||
if(thisVarSlots->second.empty())
|
||||
thisVarSlots->second.resize(involvedFactors.size(), numeric_limits<varid_t>::max());
|
||||
thisVarSlots->second[jointFactorPos] = factorVarSlot;
|
||||
if(debug) cout << " var " << involvedVariable << " rowblock " << jointFactorPos << " comes from factor " << factorI << " slot " << factorVarSlot << endl;
|
||||
|
||||
++ factorVarSlot;
|
||||
}
|
||||
++ jointFactorPos;
|
||||
}
|
||||
assert(variableSlots.begin()->first == j);
|
||||
|
||||
// Now that we know which factors and variables, and where variables
|
||||
// come from and go to, create and eliminate the new joint factor.
|
||||
typename FG::sharedFactor jointFactor = FG::factor_type::Combine(removedFactors, variableSlots);
|
||||
assert(*jointFactor->begin() == j);
|
||||
typename FG::factor_type::Conditional::shared_ptr conditional = jointFactor->eliminateFirst();
|
||||
assert(conditional->key() == j);
|
||||
|
||||
// Add the eliminated joint factor to the partially-eliminated factor graph
|
||||
fg.push_back(jointFactor);
|
||||
assert(jointFactorI == fg.size()-1);
|
||||
|
||||
// Add the joint factor to the column index for this variable if
|
||||
// it's not already added and it's not the variable we're about to
|
||||
// eliminate.
|
||||
if(!jointFactor->keys().empty())
|
||||
columnIndex[jointFactor->front()].push_back(jointFactorI);
|
||||
|
||||
// Create the new node, although it's parent and children will not be
|
||||
// computed yet.
|
||||
// todo: use cluster factors instead of removedFactors here.
|
||||
nodesToAdd.push_back(typename Node::shared_ptr(new Node(removedFactors, conditional->key(),
|
||||
conditional->beginParents(), conditional->endParents())));
|
||||
}
|
||||
}
|
||||
|
||||
// Go over the collection in reverse elimination order
|
||||
// and add one node for every of the n variables.
|
||||
BOOST_REVERSE_FOREACH(const sharedNode& node, nodesToAdd) {
|
||||
add(node); }
|
||||
|
||||
if(debug) this->print("Completed elimination tree: ");
|
||||
}
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
|
|
|
|||
|
|
@ -14,6 +14,8 @@
|
|||
|
||||
namespace gtsam {
|
||||
|
||||
template<class> class _EliminationTreeTester;
|
||||
|
||||
/**
|
||||
* An elimination tree (see Gilbert01bit) associated with a factor graph and an ordering
|
||||
* is a cluster-tree where there is one node j for each variable, and the parent of each node
|
||||
|
|
@ -29,7 +31,7 @@ namespace gtsam {
|
|||
typedef typename Node::shared_ptr sharedNode;
|
||||
|
||||
// we typedef the following handy list of ordered factor graphs
|
||||
typedef std::pair<Symbol, FG> NamedGraph;
|
||||
typedef std::pair<varid_t, FG> NamedGraph;
|
||||
typedef std::list<NamedGraph> OrderedGraphs;
|
||||
|
||||
private:
|
||||
|
|
@ -45,10 +47,18 @@ namespace gtsam {
|
|||
* add a factor graph fragment with given frontal key into the tree. Assumes
|
||||
* parent node was already added (will throw exception if not).
|
||||
*/
|
||||
void add(const FG& fg, const Symbol& key, const IndexTable<Symbol>& indexTable);
|
||||
// void add(const FG& fg, varid_t key);
|
||||
|
||||
/**
|
||||
* Add a pre-created node by computing its parent and children.
|
||||
*/
|
||||
void add(const sharedNode& node);
|
||||
|
||||
public:
|
||||
|
||||
/** Default constructor creates an empty elimination tree. */
|
||||
EliminationTree() {}
|
||||
|
||||
/**
|
||||
* Constructor variant 1: from an ordered list of factor graphs
|
||||
* The list is supposed to be in elimination order, and for each
|
||||
|
|
@ -56,13 +66,28 @@ namespace gtsam {
|
|||
* This function assumes the input is correct (!) and will not check
|
||||
* whether the factors refer only to the correct set of variables.
|
||||
*/
|
||||
EliminationTree(const OrderedGraphs& orderedGraphs);
|
||||
// EliminationTree(const OrderedGraphs& orderedGraphs);
|
||||
|
||||
/**
|
||||
* Constructor variant 2: given a factor graph and the elimination ordering
|
||||
*/
|
||||
EliminationTree(FG& fg, const Ordering& ordering);
|
||||
EliminationTree(FG& fg);
|
||||
|
||||
friend class _EliminationTreeTester<FG>;
|
||||
|
||||
}; // EliminationTree
|
||||
|
||||
/** Class used to access private members for unit testing */
|
||||
template<class FG>
|
||||
struct _EliminationTreeTester {
|
||||
|
||||
EliminationTree<FG>& et_;
|
||||
|
||||
public:
|
||||
_EliminationTreeTester(EliminationTree<FG>& et) : et_(et) {}
|
||||
|
||||
typename EliminationTree<FG>::sharedNode& root() { return et_.ClusterTree<FG>::root_; }
|
||||
typename EliminationTree<FG>::Nodes& nodes() { return et_.nodes_; }
|
||||
};
|
||||
|
||||
} // namespace gtsam
|
||||
|
|
|
|||
|
|
@ -0,0 +1,63 @@
|
|||
/**
|
||||
* @file Factor-inl.h
|
||||
* @brief
|
||||
* @author Richard Roberts
|
||||
* @created Sep 1, 2010
|
||||
*/
|
||||
|
||||
#pragma once
|
||||
|
||||
#include <gtsam/inference/Factor.h>
|
||||
|
||||
#include <boost/foreach.hpp>
|
||||
#include <boost/make_shared.hpp>
|
||||
#include <boost/iterator/transform_iterator.hpp>
|
||||
#include <boost/lambda/bind.hpp>
|
||||
#include <boost/lambda/lambda.hpp>
|
||||
|
||||
namespace gtsam {
|
||||
|
||||
///* ************************************************************************* */
|
||||
//template<class ConditionalType>
|
||||
//Factor::Factor(const boost::shared_ptr<ConditionalType>& c) {
|
||||
// keys_.resize(c->parents().size()+1);
|
||||
// keys_[0] = c->key();
|
||||
// size_t j = 1;
|
||||
// BOOST_FOREACH(const varid_t parent, c->parents()) {
|
||||
// keys_[j++] = parent;
|
||||
// }
|
||||
// checkSorted();
|
||||
//}
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class Derived> Factor::Factor(const Derived& c) : keys_(c.keys()), permuted_(c.permuted_) {
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class KeyIterator> Factor::Factor(KeyIterator beginKey, KeyIterator endKey) :
|
||||
keys_(beginKey, endKey) { checkSorted(); }
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class FactorGraphType, class VariableIndexStorage>
|
||||
Factor::shared_ptr Factor::Combine(const FactorGraphType& factorGraph,
|
||||
const VariableIndex<VariableIndexStorage>& variableIndex, const std::vector<size_t>& factors,
|
||||
const std::vector<varid_t>& variables, const std::vector<std::vector<size_t> >& variablePositions) {
|
||||
|
||||
return shared_ptr(boost::make_shared<Factor>(variables.begin(), variables.end()));
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class MapAllocator>
|
||||
Factor::shared_ptr Factor::Combine(const FactorGraph<Factor>& factors, const std::map<varid_t, std::vector<varid_t>, std::less<varid_t>, MapAllocator>& variableSlots) {
|
||||
typedef const std::map<varid_t, std::vector<varid_t>, std::less<varid_t>, MapAllocator> VariableSlots;
|
||||
typedef typeof(boost::lambda::bind(&VariableSlots::value_type::first, boost::lambda::_1)) FirstGetter;
|
||||
typedef boost::transform_iterator<
|
||||
FirstGetter, typename VariableSlots::const_iterator,
|
||||
varid_t, varid_t> IndexIterator;
|
||||
FirstGetter firstGetter(boost::lambda::bind(&VariableSlots::value_type::first, boost::lambda::_1));
|
||||
IndexIterator keysBegin(variableSlots.begin(), firstGetter);
|
||||
IndexIterator keysEnd(variableSlots.end(), firstGetter);
|
||||
return shared_ptr(new Factor(keysBegin, keysEnd));
|
||||
}
|
||||
|
||||
}
|
||||
|
|
@ -0,0 +1,65 @@
|
|||
/**
|
||||
* @file Factor.cpp
|
||||
* @brief
|
||||
* @author Richard Roberts
|
||||
* @created Sep 1, 2010
|
||||
*/
|
||||
|
||||
#include <gtsam/inference/Factor-inl.h>
|
||||
#include <gtsam/inference/Conditional.h>
|
||||
|
||||
#include <iostream>
|
||||
#include <boost/foreach.hpp>
|
||||
#include <boost/iterator/transform_iterator.hpp>
|
||||
#include <boost/lambda/bind.hpp>
|
||||
#include <boost/lambda/lambda.hpp>
|
||||
|
||||
using namespace std;
|
||||
using namespace boost::lambda;
|
||||
|
||||
namespace gtsam {
|
||||
|
||||
/* ************************************************************************* */
|
||||
Factor::Factor(const Factor& f) : keys_(f.keys_), permuted_(f.permuted_) {}
|
||||
|
||||
/* ************************************************************************* */
|
||||
void Factor::print(const std::string& s) const {
|
||||
cout << s << " ";
|
||||
BOOST_FOREACH(varid_t key, keys_) cout << " " << key;
|
||||
cout << endl;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
bool Factor::equals(const Factor& other, double tol) const {
|
||||
return keys_ == other.keys_;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
Conditional::shared_ptr Factor::eliminateFirst() {
|
||||
assert(!keys_.empty());
|
||||
checkSorted();
|
||||
varid_t eliminated = keys_.front();
|
||||
keys_.erase(keys_.begin());
|
||||
return Conditional::shared_ptr(new Conditional(eliminated, keys_));
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
boost::shared_ptr<BayesNet<Conditional> > Factor::eliminate(varid_t nFrontals) {
|
||||
assert(keys_.size() >= nFrontals);
|
||||
checkSorted();
|
||||
typename BayesNet<Conditional>::shared_ptr fragment(new BayesNet<Conditional>());
|
||||
const_iterator nextFrontal = this->begin();
|
||||
for(varid_t n = 0; n < nFrontals; ++n, ++nextFrontal)
|
||||
fragment->push_back(Conditional::shared_ptr(new Conditional(nextFrontal, const_iterator(this->end()), 1)));
|
||||
if(nFrontals > 0)
|
||||
keys_.assign(fragment->back()->beginParents(), fragment->back()->endParents());
|
||||
return fragment;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
void Factor::permuteWithInverse(const Permutation& inversePermutation) {
|
||||
this->permuted_.value = true;
|
||||
BOOST_FOREACH(varid_t& key, keys_) { key = inversePermutation[key]; }
|
||||
}
|
||||
|
||||
}
|
||||
|
|
@ -10,48 +10,159 @@
|
|||
|
||||
#pragma once
|
||||
|
||||
#include <list>
|
||||
#include <vector>
|
||||
#include <map>
|
||||
#include <boost/utility.hpp> // for noncopyable
|
||||
#include <boost/serialization/nvp.hpp>
|
||||
#include <gtsam/base/types.h>
|
||||
#include <gtsam/base/Testable.h>
|
||||
#include <gtsam/inference/Key.h>
|
||||
#include <gtsam/inference/SymbolMap.h>
|
||||
#include <gtsam/inference/inference.h>
|
||||
|
||||
namespace gtsam {
|
||||
|
||||
/**
|
||||
* A simple factor class to use in a factor graph.
|
||||
*
|
||||
* We make it noncopyable so we enforce the fact that factors are
|
||||
* kept in pointer containers. To be safe, you should make them
|
||||
* immutable, i.e., practicing functional programming. However, this
|
||||
* conflicts with efficiency as well, esp. in the case of incomplete
|
||||
* QR factorization. A solution is still being sought.
|
||||
*
|
||||
* A Factor is templated on a Config, for example VectorConfig is a configuration of
|
||||
* labeled vectors. This way, we can have factors that might be defined on discrete
|
||||
* variables, continuous ones, or a combination of both. It is up to the config to
|
||||
* provide the appropriate values at the appropriate time.
|
||||
*/
|
||||
template <class Config>
|
||||
class Factor : public Testable< Factor<Config> >
|
||||
{
|
||||
public:
|
||||
class Conditional;
|
||||
|
||||
virtual ~Factor() {};
|
||||
|
||||
/**
|
||||
* negative log probability
|
||||
*/
|
||||
virtual double error(const Config& c) const = 0;
|
||||
/**
|
||||
* A simple factor class to use in a factor graph.
|
||||
*
|
||||
* We make it noncopyable so we enforce the fact that factors are
|
||||
* kept in pointer containers. To be safe, you should make them
|
||||
* immutable, i.e., practicing functional programming. However, this
|
||||
* conflicts with efficiency as well, esp. in the case of incomplete
|
||||
* QR factorization. A solution is still being sought.
|
||||
*
|
||||
* A Factor is templated on a Config, for example VectorConfig is a configuration of
|
||||
* labeled vectors. This way, we can have factors that might be defined on discrete
|
||||
* variables, continuous ones, or a combination of both. It is up to the config to
|
||||
* provide the appropriate values at the appropriate time.
|
||||
*/
|
||||
class Factor : public Testable<Factor> {
|
||||
protected:
|
||||
|
||||
std::vector<varid_t> keys_;
|
||||
ValueWithDefault<bool,true> permuted_;
|
||||
|
||||
/** Internal check to make sure keys are sorted (invariant during elimination).
|
||||
* If NDEBUG is defined, this is empty and optimized out. */
|
||||
void checkSorted() const;
|
||||
|
||||
public:
|
||||
|
||||
typedef gtsam::Conditional Conditional;
|
||||
typedef boost::shared_ptr<Factor> shared_ptr;
|
||||
typedef std::vector<varid_t>::iterator iterator;
|
||||
typedef std::vector<varid_t>::const_iterator const_iterator;
|
||||
|
||||
/** Copy constructor */
|
||||
Factor(const Factor& f);
|
||||
|
||||
/** Construct from derived type */
|
||||
template<class Derived> Factor(const Derived& c);
|
||||
|
||||
/** Constructor from a collection of keys */
|
||||
template<class KeyIterator> Factor(KeyIterator beginKey, KeyIterator endKey);
|
||||
|
||||
/** Default constructor for I/O */
|
||||
Factor() : permuted_(false) {}
|
||||
|
||||
/** Construct unary factor */
|
||||
Factor(varid_t key) : keys_(1), permuted_(false) {
|
||||
keys_[0] = key; checkSorted(); }
|
||||
|
||||
/** Construct binary factor */
|
||||
Factor(varid_t key1, varid_t key2) : keys_(2), permuted_(false) {
|
||||
keys_[0] = key1; keys_[1] = key2; checkSorted(); }
|
||||
|
||||
/** Construct ternary factor */
|
||||
Factor(varid_t key1, varid_t key2, varid_t key3) : keys_(3), permuted_(false) {
|
||||
keys_[0] = key1; keys_[1] = key2; keys_[2] = key3; checkSorted(); }
|
||||
|
||||
/** Construct 4-way factor */
|
||||
Factor(varid_t key1, varid_t key2, varid_t key3, varid_t key4) : keys_(4), permuted_(false) {
|
||||
keys_[0] = key1; keys_[1] = key2; keys_[2] = key3; keys_[3] = key4; checkSorted(); }
|
||||
|
||||
/** Named constructor for combining a set of factors with pre-computed set of
|
||||
* variables. (Old style - will be removed when scalar elimination is
|
||||
* removed in favor of the EliminationTree). */
|
||||
template<class FactorGraphType, class VariableIndexStorage>
|
||||
static shared_ptr Combine(const FactorGraphType& factorGraph,
|
||||
const VariableIndex<VariableIndexStorage>& variableIndex, const std::vector<size_t>& factors,
|
||||
const std::vector<varid_t>& variables, const std::vector<std::vector<size_t> >& variablePositions);
|
||||
|
||||
/** Create a combined joint factor (new style for EliminationTree). */
|
||||
template<class MapAllocator>
|
||||
static shared_ptr Combine(const FactorGraph<Factor>& factors, const std::map<varid_t, std::vector<varid_t>, std::less<varid_t>, MapAllocator>& variableSlots);
|
||||
|
||||
/**
|
||||
* eliminate the first variable involved in this factor
|
||||
* @return a conditional on the eliminated variable
|
||||
*/
|
||||
boost::shared_ptr<Conditional> eliminateFirst();
|
||||
|
||||
/**
|
||||
* eliminate the first nFrontals frontal variables.
|
||||
*/
|
||||
boost::shared_ptr<BayesNet<Conditional> > eliminate(varid_t nFrontals = 1);
|
||||
|
||||
/**
|
||||
* Permutes the GaussianFactor, but for efficiency requires the permutation
|
||||
* to already be inverted.
|
||||
*/
|
||||
void permuteWithInverse(const Permutation& inversePermutation);
|
||||
|
||||
/** iterators */
|
||||
const_iterator begin() const { return keys_.begin(); }
|
||||
const_iterator end() const { return keys_.end(); }
|
||||
|
||||
/** mutable iterators */
|
||||
iterator begin() { return keys_.begin(); }
|
||||
iterator end() { return keys_.end(); }
|
||||
|
||||
/** First key*/
|
||||
varid_t front() const { return keys_.front(); }
|
||||
|
||||
/** Last key */
|
||||
varid_t back() const { return keys_.back(); }
|
||||
|
||||
/** find */
|
||||
const_iterator find(varid_t key) const { return std::find(begin(), end(), key); }
|
||||
|
||||
/** print */
|
||||
void print(const std::string& s = "Factor") const;
|
||||
|
||||
/** check equality */
|
||||
bool equals(const Factor& other, double tol = 1e-9) const;
|
||||
|
||||
/**
|
||||
* return keys in order as created
|
||||
*/
|
||||
const std::vector<varid_t>& keys() const { return keys_; }
|
||||
|
||||
/**
|
||||
* @return the number of nodes the factor connects
|
||||
*/
|
||||
size_t size() const { return keys_.size(); }
|
||||
|
||||
protected:
|
||||
|
||||
/** Conditional makes internal use of a Factor for storage */
|
||||
friend class gtsam::Conditional;
|
||||
friend class GaussianConditional;
|
||||
|
||||
/** Serialization function */
|
||||
friend class boost::serialization::access;
|
||||
template<class Archive>
|
||||
void serialize(Archive & ar, const unsigned int version) {
|
||||
ar & BOOST_SERIALIZATION_NVP(keys_);
|
||||
}
|
||||
};
|
||||
|
||||
/* ************************************************************************* */
|
||||
inline void Factor::checkSorted() const {
|
||||
#ifndef NDEBUG
|
||||
for(size_t pos=1; pos<keys_.size(); ++pos)
|
||||
assert(keys_[pos-1] < keys_[pos]);
|
||||
#endif
|
||||
}
|
||||
|
||||
/**
|
||||
* return keys in preferred order
|
||||
*/
|
||||
virtual std::list<Symbol> keys() const = 0;
|
||||
|
||||
/**
|
||||
* @return the number of nodes the factor connects
|
||||
*/
|
||||
virtual std::size_t size() const = 0;
|
||||
};
|
||||
}
|
||||
|
|
|
|||
|
|
@ -21,7 +21,6 @@
|
|||
#include <boost/format.hpp>
|
||||
#include <boost/graph/prim_minimum_spanning_tree.hpp>
|
||||
#include <gtsam/colamd/ccolamd.h>
|
||||
#include <gtsam/inference/Ordering.h>
|
||||
#include <gtsam/inference/FactorGraph.h>
|
||||
#include <gtsam/inference/graph-inl.h>
|
||||
#include <gtsam/base/DSF.h>
|
||||
|
|
@ -35,38 +34,21 @@ using namespace std;
|
|||
|
||||
namespace gtsam {
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class Factor>
|
||||
void FactorGraph<Factor>::associateFactor(size_t index,
|
||||
const sharedFactor& factor) {
|
||||
// rtodo: Can optimize factor->keys to return a const reference
|
||||
const list<Symbol> keys = factor->keys(); // get keys for factor
|
||||
|
||||
// for each key push i onto list
|
||||
BOOST_FOREACH(const Symbol& key, keys)
|
||||
indices_[key].push_back(index);
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class Factor>
|
||||
template<class Conditional>
|
||||
FactorGraph<Factor>::FactorGraph(const BayesNet<Conditional>& bayesNet) {
|
||||
typename BayesNet<Conditional>::const_iterator it = bayesNet.begin();
|
||||
for (; it != bayesNet.end(); it++) {
|
||||
sharedFactor factor(new Factor(*it));
|
||||
sharedFactor factor(new Factor(**it));
|
||||
push_back(factor);
|
||||
}
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class Factor>
|
||||
void FactorGraph<Factor>::push_back(sharedFactor factor) {
|
||||
factors_.push_back(factor); // add the actual factor
|
||||
if (factor == NULL) return;
|
||||
|
||||
size_t i = factors_.size() - 1; // index of factor
|
||||
associateFactor(i, factor);
|
||||
}
|
||||
// /* ************************************************************************* */
|
||||
// template<class Conditional>
|
||||
// FactorGraph::FactorGraph(const BayesNet<Conditional>& bayesNet, const Inference::Permutation& permutation) {
|
||||
// }
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class Factor>
|
||||
|
|
@ -114,289 +96,220 @@ namespace gtsam {
|
|||
return size_;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class Factor>
|
||||
Ordering FactorGraph<Factor>::keys() const {
|
||||
Ordering keys;
|
||||
transform(indices_.begin(), indices_.end(), back_inserter(keys),
|
||||
_Select1st<Indices::value_type> ());
|
||||
return keys;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
/** O(1) */
|
||||
/* ************************************************************************* */
|
||||
template<class Factor>
|
||||
list<size_t> FactorGraph<Factor>::factors(const Symbol& key) const {
|
||||
return indices_.at(key);
|
||||
}
|
||||
// /* ************************************************************************* *
|
||||
// * Call colamd given a column-major symbolic matrix A
|
||||
// * @param n_col colamd arg 1: number of rows in A
|
||||
// * @param n_row colamd arg 2: number of columns in A
|
||||
// * @param nrNonZeros number of non-zero entries in A
|
||||
// * @param columns map from keys to a sparse column of non-zero row indices
|
||||
// * @param lastKeys set of keys that should appear last in the ordering
|
||||
// * ************************************************************************* */
|
||||
// static void colamd(int n_col, int n_row, int nrNonZeros, const map<varid_t, vector<int> >& columns,
|
||||
// Ordering& ordering, const set<varid_t>& lastKeys) {
|
||||
//
|
||||
// // Convert to compressed column major format colamd wants it in (== MATLAB format!)
|
||||
// int Alen = ccolamd_recommended(nrNonZeros, n_row, n_col); /* colamd arg 3: size of the array A */
|
||||
// int * A = new int[Alen]; /* colamd arg 4: row indices of A, of size Alen */
|
||||
// int * p = new int[n_col + 1]; /* colamd arg 5: column pointers of A, of size n_col+1 */
|
||||
// int * cmember = new int[n_col]; /* Constraint set of A, of size n_col */
|
||||
//
|
||||
// p[0] = 0;
|
||||
// int j = 1;
|
||||
// int count = 0;
|
||||
// typedef map<varid_t, vector<int> >::const_iterator iterator;
|
||||
// bool front_exists = false;
|
||||
// vector<varid_t> initialOrder;
|
||||
// for (iterator it = columns.begin(); it != columns.end(); it++) {
|
||||
// varid_t key = it->first;
|
||||
// const vector<int>& column = it->second;
|
||||
// initialOrder.push_back(key);
|
||||
// BOOST_FOREACH(int i, column)
|
||||
// A[count++] = i; // copy sparse column
|
||||
// p[j] = count; // column j (base 1) goes from A[j-1] to A[j]-1
|
||||
// if (lastKeys.find(key) == lastKeys.end()) {
|
||||
// cmember[j - 1] = 0;
|
||||
// front_exists = true;
|
||||
// } else {
|
||||
// cmember[j - 1] = 1; // force lastKeys to be at the end
|
||||
// }
|
||||
// j += 1;
|
||||
// }
|
||||
// if (!front_exists) { // if only 1 entries, set everything to 0...
|
||||
// for (int j = 0; j < n_col; j++)
|
||||
// cmember[j] = 0;
|
||||
// }
|
||||
//
|
||||
// double* knobs = NULL; /* colamd arg 6: parameters (uses defaults if NULL) */
|
||||
// int stats[CCOLAMD_STATS]; /* colamd arg 7: colamd output statistics and error codes */
|
||||
//
|
||||
// // call colamd, result will be in p *************************************************
|
||||
// /* TODO: returns (1) if successful, (0) otherwise*/
|
||||
// ::ccolamd(n_row, n_col, Alen, A, p, knobs, stats, cmember);
|
||||
// // **********************************************************************************
|
||||
// delete[] A; // delete symbolic A
|
||||
// delete[] cmember;
|
||||
//
|
||||
// // Convert elimination ordering in p to an ordering
|
||||
// for (int j = 0; j < n_col; j++)
|
||||
// ordering.push_back(initialOrder[p[j]]);
|
||||
// delete[] p; // delete colamd result vector
|
||||
// }
|
||||
//
|
||||
// /* ************************************************************************* */
|
||||
// template<class Factor>
|
||||
// void FactorGraph<Factor>::getOrdering(Ordering& ordering,
|
||||
// const set<varid_t>& lastKeys,
|
||||
// boost::optional<const set<varid_t>&> scope) const {
|
||||
//
|
||||
// // A factor graph is really laid out in row-major format, each factor a row
|
||||
// // Below, we compute a symbolic matrix stored in sparse columns.
|
||||
// map<varid_t, vector<int> > columns; // map from keys to a sparse column of non-zero row indices
|
||||
// int nrNonZeros = 0; // number of non-zero entries
|
||||
// int n_row = 0; /* colamd arg 1: number of rows in A */
|
||||
//
|
||||
// // loop over all factors = rows
|
||||
// bool inserted;
|
||||
// bool hasInterested = scope.is_initialized();
|
||||
// BOOST_FOREACH(const sharedFactor& factor, factors_) {
|
||||
// if (factor == NULL) continue;
|
||||
// const vector<varid_t>& keys(factor->keys());
|
||||
// inserted = false;
|
||||
// BOOST_FOREACH(varid_t key, keys) {
|
||||
// if (!hasInterested || scope->find(key) != scope->end()) {
|
||||
// columns[key].push_back(n_row);
|
||||
// nrNonZeros++;
|
||||
// inserted = true;
|
||||
// }
|
||||
// }
|
||||
// if (inserted) n_row++;
|
||||
// }
|
||||
// int n_col = (int) (columns.size()); /* colamd arg 2: number of columns in A */
|
||||
// if (n_col != 0) colamd(n_col, n_row, nrNonZeros, columns, ordering, lastKeys);
|
||||
// }
|
||||
//
|
||||
// /* ************************************************************************* */
|
||||
// template<class Factor>
|
||||
// Ordering FactorGraph<Factor>::getOrdering() const {
|
||||
// Ordering ordering;
|
||||
// set<varid_t> lastKeys;
|
||||
// getOrdering(ordering, lastKeys);
|
||||
// return ordering;
|
||||
// }
|
||||
//
|
||||
// /* ************************************************************************* */
|
||||
// template<class Factor>
|
||||
// boost::shared_ptr<Ordering> FactorGraph<Factor>::getOrdering_() const {
|
||||
// boost::shared_ptr<Ordering> ordering(new Ordering);
|
||||
// set<varid_t> lastKeys;
|
||||
// getOrdering(*ordering, lastKeys);
|
||||
// return ordering;
|
||||
// }
|
||||
//
|
||||
// /* ************************************************************************* */
|
||||
// template<class Factor>
|
||||
// Ordering FactorGraph<Factor>::getOrdering(const set<varid_t>& scope) const {
|
||||
// Ordering ordering;
|
||||
// set<varid_t> lastKeys;
|
||||
// getOrdering(ordering, lastKeys, scope);
|
||||
// return ordering;
|
||||
// }
|
||||
//
|
||||
// /* ************************************************************************* */
|
||||
// template<class Factor>
|
||||
// Ordering FactorGraph<Factor>::getConstrainedOrdering(
|
||||
// const set<varid_t>& lastKeys) const {
|
||||
// Ordering ordering;
|
||||
// getOrdering(ordering, lastKeys);
|
||||
// return ordering;
|
||||
// }
|
||||
|
||||
/* ************************************************************************* *
|
||||
* Call colamd given a column-major symbolic matrix A
|
||||
* @param n_col colamd arg 1: number of rows in A
|
||||
* @param n_row colamd arg 2: number of columns in A
|
||||
* @param nrNonZeros number of non-zero entries in A
|
||||
* @param columns map from keys to a sparse column of non-zero row indices
|
||||
* @param lastKeys set of keys that should appear last in the ordering
|
||||
* ************************************************************************* */
|
||||
template<class Key>
|
||||
void colamd(int n_col, int n_row, int nrNonZeros,
|
||||
const map<Key, vector<int> >& columns, Ordering& ordering, const set<
|
||||
Symbol>& lastKeys) {
|
||||
// /* ************************************************************************* */
|
||||
// template<class Factor> template<class Key, class Factor2>
|
||||
// PredecessorMap<Key> FactorGraph<Factor>::findMinimumSpanningTree() const {
|
||||
//
|
||||
// SDGraph<Key> g = gtsam::toBoostGraph<FactorGraph<Factor> , Factor2, Key>(
|
||||
// *this);
|
||||
//
|
||||
// // find minimum spanning tree
|
||||
// vector<typename SDGraph<Key>::Vertex> p_map(boost::num_vertices(g));
|
||||
// prim_minimum_spanning_tree(g, &p_map[0]);
|
||||
//
|
||||
// // convert edge to string pairs
|
||||
// PredecessorMap<Key> tree;
|
||||
// typename SDGraph<Key>::vertex_iterator itVertex = boost::vertices(g).first;
|
||||
// typename vector<typename SDGraph<Key>::Vertex>::iterator vi;
|
||||
// for (vi = p_map.begin(); vi != p_map.end(); itVertex++, vi++) {
|
||||
// Key key = boost::get(boost::vertex_name, g, *itVertex);
|
||||
// Key parent = boost::get(boost::vertex_name, g, *vi);
|
||||
// tree.insert(key, parent);
|
||||
// }
|
||||
//
|
||||
// return tree;
|
||||
// }
|
||||
//
|
||||
// /* ************************************************************************* */
|
||||
// template<class Factor> template<class Key, class Factor2>
|
||||
// void FactorGraph<Factor>::split(const PredecessorMap<Key>& tree, FactorGraph<
|
||||
// Factor>& Ab1, FactorGraph<Factor>& Ab2) const {
|
||||
//
|
||||
// BOOST_FOREACH(const sharedFactor& factor, factors_)
|
||||
// {
|
||||
// if (factor->keys().size() > 2) throw(invalid_argument(
|
||||
// "split: only support factors with at most two keys"));
|
||||
//
|
||||
// if (factor->keys().size() == 1) {
|
||||
// Ab1.push_back(factor);
|
||||
// continue;
|
||||
// }
|
||||
//
|
||||
// boost::shared_ptr<Factor2> factor2 = boost::dynamic_pointer_cast<
|
||||
// Factor2>(factor);
|
||||
// if (!factor2) continue;
|
||||
//
|
||||
// Key key1 = factor2->key1();
|
||||
// Key key2 = factor2->key2();
|
||||
// // if the tree contains the key
|
||||
// if ((tree.find(key1) != tree.end()
|
||||
// && tree.find(key1)->second.compare(key2) == 0) || (tree.find(
|
||||
// key2) != tree.end() && tree.find(key2)->second.compare(key1)
|
||||
// == 0))
|
||||
// Ab1.push_back(factor2);
|
||||
// else
|
||||
// Ab2.push_back(factor2);
|
||||
// }
|
||||
// }
|
||||
|
||||
// Convert to compressed column major format colamd wants it in (== MATLAB format!)
|
||||
int Alen = ccolamd_recommended(nrNonZeros, n_row, n_col); /* colamd arg 3: size of the array A */
|
||||
int * A = new int[Alen]; /* colamd arg 4: row indices of A, of size Alen */
|
||||
int * p = new int[n_col + 1]; /* colamd arg 5: column pointers of A, of size n_col+1 */
|
||||
int * cmember = new int[n_col]; /* Constraint set of A, of size n_col */
|
||||
|
||||
p[0] = 0;
|
||||
int j = 1;
|
||||
int count = 0;
|
||||
typedef typename map<Key, vector<int> >::const_iterator iterator;
|
||||
bool front_exists = false;
|
||||
vector<Key> initialOrder;
|
||||
for (iterator it = columns.begin(); it != columns.end(); it++) {
|
||||
const Key& key = it->first;
|
||||
const vector<int>& column = it->second;
|
||||
initialOrder.push_back(key);
|
||||
BOOST_FOREACH(int i, column)
|
||||
A[count++] = i; // copy sparse column
|
||||
p[j] = count; // column j (base 1) goes from A[j-1] to A[j]-1
|
||||
if (lastKeys.find(key) == lastKeys.end()) {
|
||||
cmember[j - 1] = 0;
|
||||
front_exists = true;
|
||||
} else {
|
||||
cmember[j - 1] = 1; // force lastKeys to be at the end
|
||||
}
|
||||
j += 1;
|
||||
}
|
||||
if (!front_exists) { // if only 1 entries, set everything to 0...
|
||||
for (int j = 0; j < n_col; j++)
|
||||
cmember[j] = 0;
|
||||
}
|
||||
|
||||
double* knobs = NULL; /* colamd arg 6: parameters (uses defaults if NULL) */
|
||||
int stats[CCOLAMD_STATS]; /* colamd arg 7: colamd output statistics and error codes */
|
||||
|
||||
// call colamd, result will be in p *************************************************
|
||||
/* TODO: returns (1) if successful, (0) otherwise*/
|
||||
::ccolamd(n_row, n_col, Alen, A, p, knobs, stats, cmember);
|
||||
// **********************************************************************************
|
||||
delete[] A; // delete symbolic A
|
||||
delete[] cmember;
|
||||
|
||||
// Convert elimination ordering in p to an ordering
|
||||
for (int j = 0; j < n_col; j++)
|
||||
ordering.push_back(initialOrder[p[j]]);
|
||||
delete[] p; // delete colamd result vector
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class Factor>
|
||||
void FactorGraph<Factor>::getOrdering(Ordering& ordering,
|
||||
const set<Symbol>& lastKeys,
|
||||
boost::optional<const set<Symbol>&> scope) const {
|
||||
|
||||
// A factor graph is really laid out in row-major format, each factor a row
|
||||
// Below, we compute a symbolic matrix stored in sparse columns.
|
||||
map<Symbol, vector<int> > columns; // map from keys to a sparse column of non-zero row indices
|
||||
int nrNonZeros = 0; // number of non-zero entries
|
||||
int n_row = 0; /* colamd arg 1: number of rows in A */
|
||||
|
||||
// loop over all factors = rows
|
||||
bool inserted;
|
||||
bool hasInterested = scope.is_initialized();
|
||||
BOOST_FOREACH(const sharedFactor& factor, factors_) {
|
||||
if (factor == NULL) continue;
|
||||
list<Symbol> keys = factor->keys();
|
||||
inserted = false;
|
||||
BOOST_FOREACH(const Symbol& key, keys) {
|
||||
if (!hasInterested || scope->find(key) != scope->end()) {
|
||||
columns[key].push_back(n_row);
|
||||
nrNonZeros++;
|
||||
inserted = true;
|
||||
}
|
||||
}
|
||||
if (inserted) n_row++;
|
||||
}
|
||||
int n_col = (int) (columns.size()); /* colamd arg 2: number of columns in A */
|
||||
if (n_col != 0) colamd(n_col, n_row, nrNonZeros, columns, ordering, lastKeys);
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class Factor>
|
||||
Ordering FactorGraph<Factor>::getOrdering() const {
|
||||
Ordering ordering;
|
||||
set<Symbol> lastKeys;
|
||||
getOrdering(ordering, lastKeys);
|
||||
return ordering;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class Factor>
|
||||
boost::shared_ptr<Ordering> FactorGraph<Factor>::getOrdering_() const {
|
||||
boost::shared_ptr<Ordering> ordering(new Ordering);
|
||||
set<Symbol> lastKeys;
|
||||
getOrdering(*ordering, lastKeys);
|
||||
return ordering;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class Factor>
|
||||
Ordering FactorGraph<Factor>::getOrdering(const set<Symbol>& scope) const {
|
||||
Ordering ordering;
|
||||
set<Symbol> lastKeys;
|
||||
getOrdering(ordering, lastKeys, scope);
|
||||
return ordering;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class Factor>
|
||||
Ordering FactorGraph<Factor>::getConstrainedOrdering(
|
||||
const set<Symbol>& lastKeys) const {
|
||||
Ordering ordering;
|
||||
getOrdering(ordering, lastKeys);
|
||||
return ordering;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class Factor> template<class Key, class Factor2>
|
||||
PredecessorMap<Key> FactorGraph<Factor>::findMinimumSpanningTree() const {
|
||||
|
||||
SDGraph<Key> g = gtsam::toBoostGraph<FactorGraph<Factor> , Factor2, Key>(
|
||||
*this);
|
||||
|
||||
// find minimum spanning tree
|
||||
vector<typename SDGraph<Key>::Vertex> p_map(boost::num_vertices(g));
|
||||
prim_minimum_spanning_tree(g, &p_map[0]);
|
||||
|
||||
// convert edge to string pairs
|
||||
PredecessorMap<Key> tree;
|
||||
typename SDGraph<Key>::vertex_iterator itVertex = boost::vertices(g).first;
|
||||
typename vector<typename SDGraph<Key>::Vertex>::iterator vi;
|
||||
for (vi = p_map.begin(); vi != p_map.end(); itVertex++, vi++) {
|
||||
Key key = boost::get(boost::vertex_name, g, *itVertex);
|
||||
Key parent = boost::get(boost::vertex_name, g, *vi);
|
||||
tree.insert(key, parent);
|
||||
}
|
||||
|
||||
return tree;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class Factor> template<class Key, class Factor2>
|
||||
void FactorGraph<Factor>::split(const PredecessorMap<Key>& tree, FactorGraph<
|
||||
Factor>& Ab1, FactorGraph<Factor>& Ab2) const {
|
||||
|
||||
BOOST_FOREACH(const sharedFactor& factor, factors_)
|
||||
{
|
||||
if (factor->keys().size() > 2) throw(invalid_argument(
|
||||
"split: only support factors with at most two keys"));
|
||||
|
||||
if (factor->keys().size() == 1) {
|
||||
Ab1.push_back(factor);
|
||||
continue;
|
||||
}
|
||||
|
||||
boost::shared_ptr<Factor2> factor2 = boost::dynamic_pointer_cast<
|
||||
Factor2>(factor);
|
||||
if (!factor2) continue;
|
||||
|
||||
Key key1 = factor2->key1();
|
||||
Key key2 = factor2->key2();
|
||||
// if the tree contains the key
|
||||
if ((tree.find(key1) != tree.end()
|
||||
&& tree.find(key1)->second.compare(key2) == 0) || (tree.find(
|
||||
key2) != tree.end() && tree.find(key2)->second.compare(key1)
|
||||
== 0))
|
||||
Ab1.push_back(factor2);
|
||||
else
|
||||
Ab2.push_back(factor2);
|
||||
}
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class Factor>
|
||||
std::pair<FactorGraph<Factor> , FactorGraph<Factor> > FactorGraph<Factor>::splitMinimumSpanningTree() const {
|
||||
// create an empty factor graph T (tree) and factor graph C (constraints)
|
||||
FactorGraph<Factor> T;
|
||||
FactorGraph<Factor> C;
|
||||
DSF<Symbol> dsf(keys());
|
||||
|
||||
// while G is nonempty and T is not yet spanning
|
||||
for (size_t i = 0; i < size(); i++) {
|
||||
const sharedFactor& f = factors_[i];
|
||||
|
||||
// retrieve the labels of all the keys
|
||||
set<Symbol> labels;
|
||||
BOOST_FOREACH(const Symbol& key, f->keys())
|
||||
labels.insert(dsf.findSet(key));
|
||||
|
||||
// if that factor connects two different trees, then add it to T
|
||||
if (labels.size() > 1) {
|
||||
T.push_back(f);
|
||||
set<Symbol>::const_iterator it = labels.begin();
|
||||
Symbol root = *it;
|
||||
for (it++; it != labels.end(); it++)
|
||||
dsf = dsf.makeUnion(root, *it);
|
||||
} else
|
||||
// otherwise add that factor to C
|
||||
C.push_back(f);
|
||||
}
|
||||
return make_pair(T, C);
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class Factor> void FactorGraph<Factor>::checkGraphConsistency() const {
|
||||
// Consistency check for debugging
|
||||
|
||||
// Make sure each factor is listed in its variables index lists
|
||||
for (size_t i = 0; i < factors_.size(); i++) {
|
||||
if (factors_[i] == NULL) {
|
||||
cout << "** Warning: factor " << i << " is NULL" << endl;
|
||||
} else {
|
||||
// Get involved variables
|
||||
list<Symbol> keys = factors_[i]->keys();
|
||||
|
||||
// Make sure each involved variable is listed as being associated with this factor
|
||||
BOOST_FOREACH(const Symbol& var, keys)
|
||||
{
|
||||
if (std::find(indices_.at(var).begin(), indices_.at(var).end(),
|
||||
i) == indices_.at(var).end()) cout
|
||||
<< "*** Factor graph inconsistency: " << (string) var
|
||||
<< " is not mapped to factor " << i << endl;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Make sure each factor listed for a variable actually involves that variable
|
||||
BOOST_FOREACH(const SymbolMap<list<size_t> >::value_type& var, indices_)
|
||||
{
|
||||
BOOST_FOREACH(size_t i, var.second)
|
||||
{
|
||||
if (i >= factors_.size()) {
|
||||
cout << "*** Factor graph inconsistency: "
|
||||
<< (string) var.first << " lists factor " << i
|
||||
<< " but the graph does not contain this many factors."
|
||||
<< endl;
|
||||
}
|
||||
if (factors_[i] == NULL) {
|
||||
cout << "*** Factor graph inconsistency: "
|
||||
<< (string) var.first << " lists factor " << i
|
||||
<< " but this factor is set to NULL." << endl;
|
||||
}
|
||||
list<Symbol> keys = factors_[i]->keys();
|
||||
if (std::find(keys.begin(), keys.end(), var.first)
|
||||
== keys.end()) {
|
||||
cout << "*** Factor graph inconsistency: "
|
||||
<< (string) var.first << " lists factor " << i
|
||||
<< " but this factor does not involve this variable."
|
||||
<< endl;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
// /* ************************************************************************* */
|
||||
// template<class Factor>
|
||||
// std::pair<FactorGraph<Factor> , FactorGraph<Factor> > FactorGraph<Factor>::splitMinimumSpanningTree() const {
|
||||
// // create an empty factor graph T (tree) and factor graph C (constraints)
|
||||
// FactorGraph<Factor> T;
|
||||
// FactorGraph<Factor> C;
|
||||
// DSF<Symbol> dsf(keys());
|
||||
//
|
||||
// // while G is nonempty and T is not yet spanning
|
||||
// for (size_t i = 0; i < size(); i++) {
|
||||
// const sharedFactor& f = factors_[i];
|
||||
//
|
||||
// // retrieve the labels of all the keys
|
||||
// set<Symbol> labels;
|
||||
// BOOST_FOREACH(const Symbol& key, f->keys())
|
||||
// labels.insert(dsf.findSet(key));
|
||||
//
|
||||
// // if that factor connects two different trees, then add it to T
|
||||
// if (labels.size() > 1) {
|
||||
// T.push_back(f);
|
||||
// set<Symbol>::const_iterator it = labels.begin();
|
||||
// Symbol root = *it;
|
||||
// for (it++; it != labels.end(); it++)
|
||||
// dsf = dsf.makeUnion(root, *it);
|
||||
// } else
|
||||
// // otherwise add that factor to C
|
||||
// C.push_back(f);
|
||||
// }
|
||||
// return make_pair(T, C);
|
||||
// }
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class Factor>
|
||||
|
|
@ -404,83 +317,46 @@ namespace gtsam {
|
|||
if (index >= factors_.size()) throw invalid_argument(boost::str(
|
||||
boost::format("Factor graph does not contain a factor with index %d.")
|
||||
% index));
|
||||
//if(factors_[index] == NULL)
|
||||
// throw invalid_argument(boost::str(boost::format(
|
||||
// "Factor with index %d is NULL." % index)));
|
||||
|
||||
if (factors_[index] != NULL) {
|
||||
// Remove this factor from its variables' index lists
|
||||
BOOST_FOREACH(const Symbol& key, factors_[index]->keys())
|
||||
{
|
||||
indices_.at(key).remove(index);
|
||||
}
|
||||
}
|
||||
|
||||
// Replace the factor
|
||||
factors_[index] = factor;
|
||||
associateFactor(index, factor);
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
/** find all non-NULL factors for a variable, then set factors to NULL */
|
||||
/* ************************************************************************* */
|
||||
template<class Factor>
|
||||
vector<boost::shared_ptr<Factor> > FactorGraph<Factor>::findAndRemoveFactors(
|
||||
const Symbol& key) {
|
||||
|
||||
// find all factor indices associated with the key
|
||||
Indices::const_iterator it = indices_.find(key);
|
||||
vector<sharedFactor> found;
|
||||
if (it != indices_.end()) {
|
||||
const list<size_t>& factorsAssociatedWithKey = it->second;
|
||||
BOOST_FOREACH(const size_t& i, factorsAssociatedWithKey) {
|
||||
sharedFactor& fi = factors_.at(i); // throws exception !
|
||||
if (fi == NULL) continue; // skip NULL factors
|
||||
found.push_back(fi); // add to found
|
||||
fi.reset(); // set factor to NULL == remove(i)
|
||||
}
|
||||
}
|
||||
indices_.erase(key);
|
||||
|
||||
return found;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class Factor>
|
||||
std::pair<FactorGraph<Factor> , set<Symbol> > FactorGraph<Factor>::removeSingletons() {
|
||||
FactorGraph<Factor> singletonGraph;
|
||||
set<Symbol> singletons;
|
||||
|
||||
while (true) {
|
||||
// find all the singleton variables
|
||||
Ordering new_singletons;
|
||||
Symbol key;
|
||||
list<size_t> indices;
|
||||
BOOST_FOREACH(boost::tie(key, indices), indices_)
|
||||
{
|
||||
// find out the number of factors associated with the current key
|
||||
size_t numValidFactors = 0;
|
||||
BOOST_FOREACH(const size_t& i, indices)
|
||||
if (factors_[i] != NULL) numValidFactors++;
|
||||
|
||||
if (numValidFactors == 1) {
|
||||
new_singletons.push_back(key);
|
||||
BOOST_FOREACH(const size_t& i, indices)
|
||||
if (factors_[i] != NULL) singletonGraph.push_back(
|
||||
factors_[i]);
|
||||
}
|
||||
}
|
||||
singletons.insert(new_singletons.begin(), new_singletons.end());
|
||||
|
||||
BOOST_FOREACH(const Symbol& singleton, new_singletons)
|
||||
findAndRemoveFactors(singleton);
|
||||
|
||||
// exit when there are no more singletons
|
||||
if (new_singletons.empty()) break;
|
||||
}
|
||||
|
||||
return make_pair(singletonGraph, singletons);
|
||||
}
|
||||
// /* ************************************************************************* */
|
||||
// template<class Factor>
|
||||
// std::pair<FactorGraph<Factor> , set<varid_t> > FactorGraph<Factor>::removeSingletons() {
|
||||
// FactorGraph<Factor> singletonGraph;
|
||||
// set<varid_t> singletons;
|
||||
//
|
||||
// while (true) {
|
||||
// // find all the singleton variables
|
||||
// Ordering new_singletons;
|
||||
// varid_t key;
|
||||
// list<size_t> indices;
|
||||
// BOOST_FOREACH(boost::tie(key, indices), indices_)
|
||||
// {
|
||||
// // find out the number of factors associated with the current key
|
||||
// size_t numValidFactors = 0;
|
||||
// BOOST_FOREACH(const size_t& i, indices)
|
||||
// if (factors_[i] != NULL) numValidFactors++;
|
||||
//
|
||||
// if (numValidFactors == 1) {
|
||||
// new_singletons.push_back(key);
|
||||
// BOOST_FOREACH(const size_t& i, indices)
|
||||
// if (factors_[i] != NULL) singletonGraph.push_back(
|
||||
// factors_[i]);
|
||||
// }
|
||||
// }
|
||||
// singletons.insert(new_singletons.begin(), new_singletons.end());
|
||||
//
|
||||
// BOOST_FOREACH(const varid_t& singleton, new_singletons)
|
||||
// findAndRemoveFactors(singleton);
|
||||
//
|
||||
// // exit when there are no more singletons
|
||||
// if (new_singletons.empty()) break;
|
||||
// }
|
||||
//
|
||||
// return make_pair(singletonGraph, singletons);
|
||||
// }
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class Factor>
|
||||
|
|
@ -495,19 +371,19 @@ namespace gtsam {
|
|||
return fg;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class Factor> boost::shared_ptr<Factor> removeAndCombineFactors(
|
||||
FactorGraph<Factor>& factorGraph, const Symbol& key) {
|
||||
|
||||
// find and remove the factors associated with key
|
||||
vector<boost::shared_ptr<Factor> > found = factorGraph.findAndRemoveFactors(key);
|
||||
|
||||
// make a vector out of them and create a new factor
|
||||
boost::shared_ptr<Factor> new_factor(new Factor(found));
|
||||
|
||||
// return it
|
||||
return new_factor;
|
||||
}
|
||||
// /* ************************************************************************* */
|
||||
// template<class Factor> boost::shared_ptr<Factor> removeAndCombineFactors(
|
||||
// FactorGraph<Factor>& factorGraph, const varid_t& key) {
|
||||
//
|
||||
// // find and remove the factors associated with key
|
||||
// vector<boost::shared_ptr<Factor> > found = factorGraph.findAndRemoveFactors(key);
|
||||
//
|
||||
// // make a vector out of them and create a new factor
|
||||
// boost::shared_ptr<Factor> new_factor(new Factor(found));
|
||||
//
|
||||
// // return it
|
||||
// return new_factor;
|
||||
// }
|
||||
|
||||
/* ************************************************************************* */
|
||||
} // namespace gtsam
|
||||
|
|
|
|||
|
|
@ -11,29 +11,31 @@
|
|||
#pragma once
|
||||
|
||||
#include <boost/shared_ptr.hpp>
|
||||
#include <boost/foreach.hpp>
|
||||
//#include <boost/serialization/map.hpp>
|
||||
//#include <boost/serialization/list.hpp>
|
||||
//#include <boost/serialization/vector.hpp>
|
||||
//#include <boost/serialization/shared_ptr.hpp>
|
||||
#include <boost/pool/pool_alloc.hpp>
|
||||
|
||||
#include <gtsam/base/types.h>
|
||||
#include <gtsam/base/Testable.h>
|
||||
#include <gtsam/inference/BayesNet.h>
|
||||
#include <gtsam/inference/graph.h>
|
||||
#include <gtsam/inference/Key.h>
|
||||
#include <gtsam/inference/SymbolMap.h>
|
||||
|
||||
namespace gtsam {
|
||||
|
||||
class Ordering;
|
||||
|
||||
/**
|
||||
* A factor graph is a bipartite graph with factor nodes connected to variable nodes.
|
||||
* In this class, however, only factor nodes are kept around.
|
||||
*
|
||||
* Templated on the type of factors and configuration.
|
||||
*/
|
||||
template<class Factor> class FactorGraph: public Testable<FactorGraph<Factor> > {
|
||||
template<class Factor>
|
||||
class FactorGraph: public Testable<FactorGraph<Factor> > {
|
||||
public:
|
||||
typedef Factor factor_type;
|
||||
typedef boost::shared_ptr<FactorGraph<Factor> > shared_ptr;
|
||||
typedef typename boost::shared_ptr<Factor> sharedFactor;
|
||||
typedef typename std::vector<sharedFactor>::iterator iterator;
|
||||
typedef typename std::vector<sharedFactor>::const_iterator const_iterator;
|
||||
|
|
@ -43,19 +45,12 @@ namespace gtsam {
|
|||
/** Collection of factors */
|
||||
std::vector<sharedFactor> factors_;
|
||||
|
||||
/** For each variable a list of factor indices connected to it */
|
||||
typedef SymbolMap<std::list<size_t> > Indices;
|
||||
Indices indices_;
|
||||
|
||||
/** Associate factor index with the variables connected to the factor */
|
||||
void associateFactor(size_t index, const sharedFactor& factor);
|
||||
|
||||
/**
|
||||
* Return an ordering in first argument, potentially using a set of
|
||||
* keys that need to appear last, and potentially restricting scope
|
||||
*/
|
||||
void getOrdering(Ordering& ordering, const std::set<Symbol>& lastKeys,
|
||||
boost::optional<const std::set<Symbol>&> scope = boost::none) const;
|
||||
// /**
|
||||
// * Return an ordering in first argument, potentially using a set of
|
||||
// * keys that need to appear last, and potentially restricting scope
|
||||
// */
|
||||
// void getOrdering(Ordering& ordering, const std::set<varid_t>& lastKeys,
|
||||
// boost::optional<const std::set<varid_t>&> scope = boost::none) const;
|
||||
|
||||
public:
|
||||
|
||||
|
|
@ -68,8 +63,17 @@ namespace gtsam {
|
|||
template<class Conditional>
|
||||
FactorGraph(const BayesNet<Conditional>& bayesNet);
|
||||
|
||||
// /** convert from Bayes net while permuting at the same time */
|
||||
// template<class Conditional>
|
||||
// FactorGraph(const BayesNet<Conditional>& bayesNet, const Inference::Permutation& permutation);
|
||||
|
||||
/** convert from a derived type */
|
||||
template<class Derived>
|
||||
FactorGraph(const Derived& factorGraph);
|
||||
|
||||
/** Add a factor */
|
||||
void push_back(sharedFactor factor);
|
||||
template<class DerivedFactor>
|
||||
void push_back(const boost::shared_ptr<DerivedFactor>& factor);
|
||||
|
||||
/** push back many factors */
|
||||
void push_back(const FactorGraph<Factor>& factors);
|
||||
|
|
@ -82,12 +86,21 @@ namespace gtsam {
|
|||
/** Check equality */
|
||||
bool equals(const FactorGraph& fg, double tol = 1e-9) const;
|
||||
|
||||
/** const cast to the underlying vector of factors */
|
||||
operator const std::vector<sharedFactor>&() const { return factors_; }
|
||||
|
||||
/** STL begin and end, so we can use BOOST_FOREACH */
|
||||
inline const_iterator begin() const { return factors_.begin();}
|
||||
inline const_iterator end() const { return factors_.end(); }
|
||||
|
||||
/** Get a specific factor by index */
|
||||
inline sharedFactor operator[](size_t i) const {return factors_[i];}
|
||||
inline sharedFactor operator[](size_t i) const { assert(i<factors_.size()); return factors_[i]; }
|
||||
|
||||
/** Get the first factor */
|
||||
inline sharedFactor front() const { return factors_.front(); }
|
||||
|
||||
/** Get the last factor */
|
||||
inline sharedFactor back() const { return factors_.back(); }
|
||||
|
||||
/** return the number of factors and NULLS */
|
||||
inline size_t size() const { return factors_.size();}
|
||||
|
|
@ -95,55 +108,41 @@ namespace gtsam {
|
|||
/** return the number valid factors */
|
||||
size_t nrFactors() const;
|
||||
|
||||
/** return keys in some random order */
|
||||
Ordering keys() const;
|
||||
// /** return keys in some random order */
|
||||
// Ordering keys() const;
|
||||
|
||||
/** return the number of the keys */
|
||||
inline size_t nrKeys() const {return indices_.size(); };
|
||||
// /**
|
||||
// * Compute colamd ordering, including I/O, constrained ordering,
|
||||
// * and shared pointer version.
|
||||
// */
|
||||
// Ordering getOrdering() const;
|
||||
// boost::shared_ptr<Ordering> getOrdering_() const;
|
||||
// Ordering getOrdering(const std::set<varid_t>& scope) const;
|
||||
// Ordering getConstrainedOrdering(const std::set<varid_t>& lastKeys) const;
|
||||
|
||||
/** Check whether a factor with this variable exists */
|
||||
bool involves(const Symbol& key) const {
|
||||
return !(indices_.find(key)==indices_.end());
|
||||
}
|
||||
// /**
|
||||
// * find the minimum spanning tree using boost graph library
|
||||
// */
|
||||
// template<class Key, class Factor2> PredecessorMap<Key>
|
||||
// SL-NEEDED? findMinimumSpanningTree() const;
|
||||
//
|
||||
// /**
|
||||
// * Split the graph into two parts: one corresponds to the given spanning tree,
|
||||
// * and the other corresponds to the rest of the factors
|
||||
// */
|
||||
// SL-NEEDED? template<class Key, class Factor2> void split(const PredecessorMap<Key>& tree,
|
||||
// FactorGraph<Factor>& Ab1, FactorGraph<Factor>& Ab2) const;
|
||||
|
||||
/**
|
||||
* Return indices for all factors that involve the given node
|
||||
* @param key the key for the given node
|
||||
*/
|
||||
std::list<size_t> factors(const Symbol& key) const;
|
||||
// /**
|
||||
// * find the minimum spanning tree using DSF
|
||||
// */
|
||||
// std::pair<FactorGraph<Factor> , FactorGraph<Factor> >
|
||||
// SL-NEEDED? splitMinimumSpanningTree() const;
|
||||
|
||||
/**
|
||||
* Compute colamd ordering, including I/O, constrained ordering,
|
||||
* and shared pointer version.
|
||||
*/
|
||||
Ordering getOrdering() const;
|
||||
boost::shared_ptr<Ordering> getOrdering_() const;
|
||||
Ordering getOrdering(const std::set<Symbol>& scope) const;
|
||||
Ordering getConstrainedOrdering(const std::set<Symbol>& lastKeys) const;
|
||||
|
||||
/**
|
||||
* find the minimum spanning tree using boost graph library
|
||||
*/
|
||||
template<class Key, class Factor2> PredecessorMap<Key>
|
||||
findMinimumSpanningTree() const;
|
||||
|
||||
/**
|
||||
* Split the graph into two parts: one corresponds to the given spanning tree,
|
||||
* and the other corresponds to the rest of the factors
|
||||
*/
|
||||
template<class Key, class Factor2> void split(const PredecessorMap<Key>& tree,
|
||||
FactorGraph<Factor>& Ab1, FactorGraph<Factor>& Ab2) const;
|
||||
|
||||
/**
|
||||
* find the minimum spanning tree using DSF
|
||||
*/
|
||||
std::pair<FactorGraph<Factor> , FactorGraph<Factor> >
|
||||
splitMinimumSpanningTree() const;
|
||||
|
||||
/**
|
||||
* Check consistency of the index map, useful for debugging
|
||||
*/
|
||||
void checkGraphConsistency() const;
|
||||
// /**
|
||||
// * Check consistency of the index map, useful for debugging
|
||||
// */
|
||||
// void checkGraphConsistency() const;
|
||||
|
||||
/** ----------------- Modifying Factor Graphs ---------------------------- */
|
||||
|
||||
|
|
@ -151,21 +150,27 @@ namespace gtsam {
|
|||
inline iterator begin() { return factors_.begin();}
|
||||
inline iterator end() { return factors_.end(); }
|
||||
|
||||
/**
|
||||
* Reserve space for the specified number of factors if you know in
|
||||
* advance how many there will be (works like std::vector::reserve).
|
||||
*/
|
||||
void reserve(size_t size) { factors_.reserve(size); }
|
||||
|
||||
/** delete factor without re-arranging indexes by inserting a NULL pointer */
|
||||
inline void remove(size_t i) { factors_[i].reset();}
|
||||
|
||||
/** replace a factor by index */
|
||||
void replace(size_t index, sharedFactor factor);
|
||||
|
||||
/**
|
||||
* Find all the factors that involve the given node and remove them
|
||||
* from the factor graph
|
||||
* @param key the key for the given node
|
||||
*/
|
||||
std::vector<sharedFactor> findAndRemoveFactors(const Symbol& key);
|
||||
|
||||
/** remove singleton variables and the related factors */
|
||||
std::pair<FactorGraph<Factor>, std::set<Symbol> > removeSingletons();
|
||||
// /**
|
||||
// * Find all the factors that involve the given node and remove them
|
||||
// * from the factor graph
|
||||
// * @param key the key for the given node
|
||||
// */
|
||||
// std::vector<sharedFactor> findAndRemoveFactors(varid_t key);
|
||||
//
|
||||
// /** remove singleton variables and the related factors */
|
||||
// std::pair<FactorGraph<Factor>, std::set<varid_t> > removeSingletons();
|
||||
|
||||
private:
|
||||
|
||||
|
|
@ -174,7 +179,6 @@ namespace gtsam {
|
|||
template<class Archive>
|
||||
void serialize(Archive & ar, const unsigned int version) {
|
||||
ar & BOOST_SERIALIZATION_NVP(factors_);
|
||||
ar & BOOST_SERIALIZATION_NVP(indices_);
|
||||
}
|
||||
}; // FactorGraph
|
||||
|
||||
|
|
@ -187,14 +191,44 @@ namespace gtsam {
|
|||
template<class Factor>
|
||||
FactorGraph<Factor> combine(const FactorGraph<Factor>& fg1, const FactorGraph<Factor>& fg2);
|
||||
|
||||
/**
|
||||
* Extract and combine all the factors that involve a given node
|
||||
* Put this here as not all Factors have a combine constructor
|
||||
* @param key the key for the given node
|
||||
* @return the combined linear factor
|
||||
*/
|
||||
template<class Factor> boost::shared_ptr<Factor>
|
||||
removeAndCombineFactors(FactorGraph<Factor>& factorGraph, const Symbol& key);
|
||||
// /**
|
||||
// * Extract and combine all the factors that involve a given node
|
||||
// * Put this here as not all Factors have a combine constructor
|
||||
// * @param key the key for the given node
|
||||
// * @return the combined linear factor
|
||||
// */
|
||||
// template<class Factor> boost::shared_ptr<Factor>
|
||||
// removeAndCombineFactors(FactorGraph<Factor>& factorGraph, const varid_t& key);
|
||||
|
||||
|
||||
/**
|
||||
* These functions are defined here because they are templated on an
|
||||
* additional parameter. Putting them in the -inl.h file would mean these
|
||||
* would have to be explicitly instantiated for any possible derived factor
|
||||
* type.
|
||||
*/
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class Factor>
|
||||
template<class Derived>
|
||||
FactorGraph<Factor>::FactorGraph(const Derived& factorGraph) {
|
||||
factors_.reserve(factorGraph.size());
|
||||
BOOST_FOREACH(const typename Derived::sharedFactor& factor, factorGraph) {
|
||||
this->push_back(factor);
|
||||
}
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class Factor>
|
||||
template<class DerivedFactor>
|
||||
inline void FactorGraph<Factor>::push_back(const boost::shared_ptr<DerivedFactor>& factor) {
|
||||
#ifndef NDEBUG
|
||||
factors_.push_back(boost::dynamic_pointer_cast<Factor>(factor)); // add the actual factor
|
||||
#else
|
||||
factors_.push_back(boost::static_pointer_cast<Factor>(factor));
|
||||
#endif
|
||||
}
|
||||
|
||||
|
||||
} // namespace gtsam
|
||||
|
||||
|
|
|
|||
|
|
@ -27,55 +27,37 @@ namespace gtsam {
|
|||
|
||||
/* ************************************************************************* */
|
||||
template<class Conditional>
|
||||
template<class Factor>
|
||||
void ISAM<Conditional>::update_internal(const FactorGraph<Factor>& newFactors, Cliques& orphans) {
|
||||
template<class FactorGraph>
|
||||
void ISAM<Conditional>::update_internal(const FactorGraph& newFactors, Cliques& orphans) {
|
||||
|
||||
// Remove the contaminated part of the Bayes tree
|
||||
BayesNet<Conditional> bn;
|
||||
removeTop(newFactors.keys(), bn, orphans);
|
||||
FactorGraph<Factor> factors(bn);
|
||||
FactorGraph factors(bn);
|
||||
|
||||
// add the factors themselves
|
||||
factors.push_back(newFactors);
|
||||
|
||||
// create an ordering for the new and contaminated factors
|
||||
Ordering ordering;
|
||||
#ifndef SORT_KEYS
|
||||
ordering = factors.getOrdering();
|
||||
#else
|
||||
list<Symbol> keys = factors.keys();
|
||||
keys.sort(); // todo: correct sorting order?
|
||||
ordering = keys;
|
||||
#endif
|
||||
|
||||
// Create Index from ordering
|
||||
IndexTable<Symbol> index(ordering);
|
||||
|
||||
// eliminate into a Bayes net
|
||||
BayesNet<Conditional> bayesNet = eliminate<Factor, Conditional>(factors,ordering);
|
||||
typename BayesNet<Conditional>::shared_ptr bayesNet = Inference::Eliminate(factors);
|
||||
|
||||
// insert conditionals back in, straight into the topless bayesTree
|
||||
typename BayesNet<Conditional>::const_reverse_iterator rit;
|
||||
for ( rit=bayesNet.rbegin(); rit != bayesNet.rend(); ++rit )
|
||||
this->insert(*rit, index);
|
||||
for ( rit=bayesNet->rbegin(); rit != bayesNet->rend(); ++rit )
|
||||
this->insert(*rit);
|
||||
|
||||
// add orphans to the bottom of the new tree
|
||||
BOOST_FOREACH(sharedClique orphan, orphans) {
|
||||
|
||||
Symbol parentRepresentative = findParentClique(orphan->separator_, index);
|
||||
sharedClique parent = (*this)[parentRepresentative];
|
||||
|
||||
parent->children_ += orphan;
|
||||
orphan->parent_ = parent; // set new parent!
|
||||
this->insert(orphan);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
template<class Conditional>
|
||||
template<class Factor>
|
||||
void ISAM<Conditional>::update(const FactorGraph<Factor>& newFactors) {
|
||||
template<class FactorGraph>
|
||||
void ISAM<Conditional>::update(const FactorGraph& newFactors) {
|
||||
Cliques orphans;
|
||||
this->update_internal<Factor>(newFactors, orphans);
|
||||
this->update_internal(newFactors, orphans);
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
|||
|
|
@ -11,8 +11,10 @@
|
|||
#include <map>
|
||||
#include <list>
|
||||
#include <vector>
|
||||
#include <deque>
|
||||
//#include <boost/serialization/map.hpp>
|
||||
//#include <boost/serialization/list.hpp>
|
||||
#include <boost/pool/pool_alloc.hpp>
|
||||
#include <stdexcept>
|
||||
|
||||
#include <gtsam/base/Testable.h>
|
||||
|
|
@ -33,10 +35,6 @@ namespace gtsam {
|
|||
/** Create a Bayes Tree from a Bayes Net */
|
||||
ISAM(const BayesNet<Conditional>& bayesNet);
|
||||
|
||||
/** Destructor */
|
||||
virtual ~ISAM() {
|
||||
}
|
||||
|
||||
typedef typename BayesTree<Conditional>::sharedClique sharedClique;
|
||||
|
||||
typedef typename BayesTree<Conditional>::Cliques Cliques;
|
||||
|
|
@ -44,10 +42,10 @@ namespace gtsam {
|
|||
/**
|
||||
* iSAM. (update_internal provides access to list of orphans for drawing purposes)
|
||||
*/
|
||||
template<class Factor>
|
||||
void update_internal(const FactorGraph<Factor>& newFactors, Cliques& orphans);
|
||||
template<class Factor>
|
||||
void update(const FactorGraph<Factor>& newFactors);
|
||||
template<class FactorGraph>
|
||||
void update_internal(const FactorGraph& newFactors, Cliques& orphans);
|
||||
template<class FactorGraph>
|
||||
void update(const FactorGraph& newFactors);
|
||||
|
||||
}; // ISAM
|
||||
|
||||
|
|
|
|||
File diff suppressed because it is too large
Load Diff
|
|
@ -15,17 +15,18 @@
|
|||
//#include <boost/serialization/list.hpp>
|
||||
#include <stdexcept>
|
||||
|
||||
#include <gtsam/base/types.h>
|
||||
#include <gtsam/base/Testable.h>
|
||||
#include <gtsam/inference/FactorGraph.h>
|
||||
#include <gtsam/nonlinear/NonlinearFactorGraph.h>
|
||||
#include <gtsam/nonlinear/Ordering.h>
|
||||
#include <gtsam/inference/BayesNet.h>
|
||||
#include <gtsam/inference/BayesTree.h>
|
||||
#include <gtsam/inference/Key.h>
|
||||
#include <gtsam/inference/SymbolMap.h>
|
||||
#include <gtsam/linear/GaussianFactorGraph.h>
|
||||
|
||||
namespace gtsam {
|
||||
|
||||
typedef SymbolMap<GaussianFactor::shared_ptr> CachedFactors;
|
||||
//typedef std::vector<GaussianFactor::shared_ptr> CachedFactors;
|
||||
|
||||
template<class Conditional, class Config>
|
||||
class ISAM2: public BayesTree<Conditional> {
|
||||
|
|
@ -35,22 +36,40 @@ protected:
|
|||
// current linearization point
|
||||
Config theta_;
|
||||
|
||||
// VariableIndex lets us look up factors by involved variable and keeps track of dimensions
|
||||
typedef GaussianVariableIndex<VariableIndexStorage_deque> VariableIndexType;
|
||||
VariableIndexType variableIndex_;
|
||||
|
||||
// the linear solution, an update to the estimate in theta
|
||||
VectorConfig delta_;
|
||||
VectorConfig deltaUnpermuted_;
|
||||
|
||||
// The residual permutation through which the deltaUnpermuted_ is
|
||||
// referenced. Permuting the VectorConfig is slow, so for performance the
|
||||
// permutation is applied at access time instead of to the VectorConfig
|
||||
// itself.
|
||||
Permuted<VectorConfig> delta_;
|
||||
|
||||
// for keeping all original nonlinear factors
|
||||
NonlinearFactorGraph<Config> nonlinearFactors_;
|
||||
|
||||
// The "ordering" allows converting Symbols to varid_t (integer) keys. We
|
||||
// keep it up to date as we add and reorder variables.
|
||||
Ordering ordering_;
|
||||
|
||||
// cached intermediate results for restarting computation in the middle
|
||||
CachedFactors cached_;
|
||||
// CachedFactors cached_;
|
||||
|
||||
#ifndef NDEBUG
|
||||
std::vector<bool> lastRelinVariables_;
|
||||
#endif
|
||||
|
||||
public:
|
||||
|
||||
/** Create an empty Bayes Tree */
|
||||
ISAM2();
|
||||
|
||||
/** Create a Bayes Tree from a Bayes Net */
|
||||
ISAM2(const NonlinearFactorGraph<Config>& fg, const Ordering& ordering, const Config& config);
|
||||
// /** Create a Bayes Tree from a Bayes Net */
|
||||
// ISAM2(const NonlinearFactorGraph<Config>& fg, const Ordering& ordering, const Config& config);
|
||||
|
||||
/** Destructor */
|
||||
virtual ~ISAM2() {}
|
||||
|
|
@ -66,16 +85,20 @@ public:
|
|||
double wildfire_threshold = 0., double relinearize_threshold = 0., bool relinearize = true);
|
||||
|
||||
// needed to create initial estimates
|
||||
const Config getLinearizationPoint() const {return theta_;}
|
||||
const Config& getLinearizationPoint() const {return theta_;}
|
||||
|
||||
// estimate based on incomplete delta (threshold!)
|
||||
const Config calculateEstimate() const {return theta_.expmap(delta_);}
|
||||
Config calculateEstimate() const;
|
||||
|
||||
// estimate based on full delta (note that this is based on the current linearization point)
|
||||
const Config calculateBestEstimate() const {return theta_.expmap(*optimize2(this->root()));}
|
||||
Config calculateBestEstimate() const;
|
||||
|
||||
const Permuted<VectorConfig>& getDelta() const { return delta_; }
|
||||
|
||||
const NonlinearFactorGraph<Config>& getFactorsUnsafe() const { return nonlinearFactors_; }
|
||||
|
||||
const Ordering& getOrdering() const { return ordering_; }
|
||||
|
||||
size_t lastAffectedVariableCount;
|
||||
size_t lastAffectedFactorCount;
|
||||
size_t lastAffectedCliqueCount;
|
||||
|
|
@ -85,13 +108,13 @@ public:
|
|||
|
||||
private:
|
||||
|
||||
std::list<size_t> getAffectedFactors(const std::list<Symbol>& keys) const;
|
||||
boost::shared_ptr<GaussianFactorGraph> relinearizeAffectedFactors(const std::list<Symbol>& affectedKeys) const;
|
||||
FactorGraph<GaussianFactor> getCachedBoundaryFactors(Cliques& orphans);
|
||||
std::list<size_t> getAffectedFactors(const std::list<varid_t>& keys) const;
|
||||
boost::shared_ptr<GaussianFactorGraph> relinearizeAffectedFactors(const std::list<varid_t>& affectedKeys) const;
|
||||
GaussianFactorGraph getCachedBoundaryFactors(Cliques& orphans);
|
||||
|
||||
boost::shared_ptr<std::set<Symbol> > recalculate(const std::list<Symbol>& markedKeys, const FactorGraph<GaussianFactor>* newFactors = NULL);
|
||||
void linear_update(const FactorGraph<GaussianFactor>& newFactors);
|
||||
void find_all(sharedClique clique, std::list<Symbol>& keys, const std::list<Symbol>& marked); // helper function
|
||||
boost::shared_ptr<std::set<varid_t> > recalculate(const std::set<varid_t>& markedKeys, const std::vector<varid_t>& newKeys, const GaussianFactorGraph* newFactors = NULL);
|
||||
// void linear_update(const GaussianFactorGraph& newFactors);
|
||||
void find_all(sharedClique clique, std::set<varid_t>& keys, const std::vector<bool>& marked); // helper function
|
||||
|
||||
}; // ISAM2
|
||||
|
||||
|
|
|
|||
|
|
@ -8,11 +8,16 @@
|
|||
|
||||
#pragma once
|
||||
|
||||
#include <boost/foreach.hpp>
|
||||
|
||||
#include <gtsam/inference/SymbolicFactorGraph.h>
|
||||
#include <gtsam/inference/BayesTree-inl.h>
|
||||
#include <gtsam/inference/JunctionTree.h>
|
||||
#include <gtsam/inference/inference-inl.h>
|
||||
#include <gtsam/inference/VariableSlots-inl.h>
|
||||
|
||||
#include <boost/foreach.hpp>
|
||||
#include <boost/pool/pool_alloc.hpp>
|
||||
#include <boost/lambda/bind.hpp>
|
||||
#include <boost/lambda/lambda.hpp>
|
||||
|
||||
namespace gtsam {
|
||||
|
||||
|
|
@ -20,86 +25,170 @@ namespace gtsam {
|
|||
|
||||
/* ************************************************************************* */
|
||||
template <class FG>
|
||||
JunctionTree<FG>::JunctionTree(FG& fg, const Ordering& ordering) {
|
||||
JunctionTree<FG>::JunctionTree(const FG& fg) {
|
||||
tic("JT 1 constructor");
|
||||
// Symbolic factorization: GaussianFactorGraph -> SymbolicFactorGraph
|
||||
// -> SymbolicBayesNet -> SymbolicBayesTree
|
||||
SymbolicFactorGraph sfg(fg);
|
||||
SymbolicBayesNet sbn = sfg.eliminate(ordering);
|
||||
BayesTree<SymbolicConditional> sbt(sbn);
|
||||
tic("JT 1.1 symbolic elimination");
|
||||
SymbolicBayesNet::shared_ptr sbn = Inference::EliminateSymbolic(fg);
|
||||
toc("JT 1.1 symbolic elimination");
|
||||
tic("JT 1.2 symbolic BayesTree");
|
||||
SymbolicBayesTree sbt(*sbn);
|
||||
toc("JT 1.2 symbolic BayesTree");
|
||||
|
||||
// distribtue factors
|
||||
// distribute factors
|
||||
tic("JT 1.3 distributeFactors");
|
||||
this->root_ = distributeFactors(fg, sbt.root());
|
||||
toc("JT 1.3 distributeFactors");
|
||||
toc("JT 1 constructor");
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class FG>
|
||||
typename JunctionTree<FG>::sharedClique JunctionTree<FG>::distributeFactors(
|
||||
FG& fg, const BayesTree<SymbolicConditional>::sharedClique bayesClique) {
|
||||
// create a new clique in the junction tree
|
||||
sharedClique clique(new Clique());
|
||||
clique->frontal_ = bayesClique->ordering();
|
||||
clique->separator_.insert(bayesClique->separator_.begin(),
|
||||
bayesClique->separator_.end());
|
||||
const FG& fg, const typename SymbolicBayesTree::sharedClique& bayesClique) {
|
||||
|
||||
// recursively call the children
|
||||
BOOST_FOREACH(const BayesTree<SymbolicConditional>::sharedClique bayesChild, bayesClique->children()) {
|
||||
sharedClique child = distributeFactors(fg, bayesChild);
|
||||
clique->children_.push_back(child);
|
||||
child->parent_ = clique;
|
||||
}
|
||||
// Build "target" index. This is an index for each variable of the factors
|
||||
// that involve this variable as their *lowest-ordered* variable. For each
|
||||
// factor, it is the lowest-ordered variable of that factor that pulls the
|
||||
// factor into elimination, after which all of the information in the
|
||||
// factor is contained in the eliminated factors that are passed up the
|
||||
// tree as elimination continues.
|
||||
|
||||
// collect the factors
|
||||
typedef vector<typename FG::sharedFactor> Factors;
|
||||
BOOST_FOREACH(const Symbol& frontal, clique->frontal_) {
|
||||
Factors factors = fg.template findAndRemoveFactors(frontal);
|
||||
BOOST_FOREACH(const typename FG::sharedFactor& factor_, factors)
|
||||
clique->push_back(factor_);
|
||||
}
|
||||
// Two stages - first build an array of the lowest-ordered variable in each
|
||||
// factor and find the last variable to be eliminated.
|
||||
vector<varid_t> lowestOrdered(fg.size());
|
||||
varid_t maxVar = 0;
|
||||
for(size_t i=0; i<fg.size(); ++i)
|
||||
if(fg[i]) {
|
||||
typename FG::factor_type::const_iterator min = std::min_element(fg[i]->begin(), fg[i]->end());
|
||||
if(min == fg[i]->end())
|
||||
lowestOrdered[i] = numeric_limits<varid_t>::max();
|
||||
else {
|
||||
lowestOrdered[i] = *min;
|
||||
maxVar = std::max(maxVar, *min);
|
||||
}
|
||||
}
|
||||
|
||||
return clique;
|
||||
// Now add each factor to the list corresponding to its lowest-ordered
|
||||
// variable.
|
||||
vector<list<size_t, boost::fast_pool_allocator<size_t> > > targets(maxVar+1);
|
||||
for(size_t i=0; i<lowestOrdered.size(); ++i)
|
||||
if(lowestOrdered[i] != numeric_limits<varid_t>::max())
|
||||
targets[lowestOrdered[i]].push_back(i);
|
||||
|
||||
// Now call the recursive distributeFactors
|
||||
return distributeFactors(fg, targets, bayesClique);
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class FG>
|
||||
typename JunctionTree<FG>::sharedClique JunctionTree<FG>::distributeFactors(const FG& fg,
|
||||
const std::vector<std::list<size_t,boost::fast_pool_allocator<size_t> > >& targets,
|
||||
const SymbolicBayesTree::sharedClique& bayesClique) {
|
||||
|
||||
if(bayesClique) {
|
||||
// create a new clique in the junction tree
|
||||
list<varid_t> frontals = bayesClique->ordering();
|
||||
sharedClique clique(new Clique(frontals.begin(), frontals.end(), bayesClique->separator_.begin(), bayesClique->separator_.end()));
|
||||
|
||||
// count the factors for this cluster to pre-allocate space
|
||||
{
|
||||
size_t nFactors = 0;
|
||||
BOOST_FOREACH(const varid_t frontal, clique->frontal) {
|
||||
// There may be less variables in "targets" than there really are if
|
||||
// some of the highest-numbered variables do not pull in any factors.
|
||||
if(frontal < targets.size())
|
||||
nFactors += targets[frontal].size(); }
|
||||
clique->reserve(nFactors);
|
||||
}
|
||||
// add the factors to this cluster
|
||||
BOOST_FOREACH(const varid_t frontal, clique->frontal) {
|
||||
if(frontal < targets.size()) {
|
||||
BOOST_FOREACH(const size_t factorI, targets[frontal]) {
|
||||
clique->push_back(fg[factorI]); } } }
|
||||
|
||||
// recursively call the children
|
||||
BOOST_FOREACH(const typename SymbolicBayesTree::sharedClique bayesChild, bayesClique->children()) {
|
||||
sharedClique child = distributeFactors(fg, targets, bayesChild);
|
||||
clique->addChild(child);
|
||||
child->parent() = clique;
|
||||
}
|
||||
return clique;
|
||||
} else
|
||||
return sharedClique();
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
template <class FG> template <class Conditional>
|
||||
pair<FG, typename BayesTree<Conditional>::sharedClique>
|
||||
JunctionTree<FG>::eliminateOneClique(sharedClique current) {
|
||||
template <class FG>
|
||||
pair<typename JunctionTree<FG>::BayesTree::sharedClique, typename FG::sharedFactor>
|
||||
JunctionTree<FG>::eliminateOneClique(const boost::shared_ptr<const Clique>& current) const {
|
||||
|
||||
typedef typename BayesTree<Conditional>::sharedClique sharedBtreeClique;
|
||||
FG fg; // factor graph will be assembled from local factors and marginalized children
|
||||
list<sharedBtreeClique> children;
|
||||
fg.push_back(*current); // add the local factor graph
|
||||
fg.reserve(current->size() + current->children().size());
|
||||
fg.push_back(*current); // add the local factors
|
||||
|
||||
BOOST_FOREACH(sharedClique& child, current->children_) {
|
||||
// receive the factors from the child and its clique point
|
||||
FG fgChild; sharedBtreeClique childRoot;
|
||||
boost::tie(fgChild, childRoot) = eliminateOneClique<Conditional>(child);
|
||||
|
||||
fg.push_back(fgChild);
|
||||
children.push_back(childRoot);
|
||||
// receive the factors from the child and its clique point
|
||||
list<typename BayesTree::sharedClique> children;
|
||||
BOOST_FOREACH(const boost::shared_ptr<const Clique>& child, current->children()) {
|
||||
pair<typename BayesTree::sharedClique, typename FG::sharedFactor> tree_factor(
|
||||
eliminateOneClique(child));
|
||||
children.push_back(tree_factor.first);
|
||||
fg.push_back(tree_factor.second);
|
||||
}
|
||||
|
||||
// eliminate the combined factors
|
||||
// warning: fg is being eliminated in-place and will contain marginal afterwards
|
||||
BayesNet<Conditional> bn = fg.eliminateFrontals(current->frontal_);
|
||||
tic("JT 2.1 VariableSlots");
|
||||
VariableSlots variableSlots(fg);
|
||||
toc("JT 2.1 VariableSlots");
|
||||
#ifndef NDEBUG
|
||||
// Debug check that the keys found in the factors match the frontal and
|
||||
// separator keys of the clique.
|
||||
list<varid_t> allKeys;
|
||||
allKeys.insert(allKeys.end(), current->frontal.begin(), current->frontal.end());
|
||||
allKeys.insert(allKeys.end(), current->separator.begin(), current->separator.end());
|
||||
vector<varid_t> varslotsKeys(variableSlots.size());
|
||||
std::transform(variableSlots.begin(), variableSlots.end(), varslotsKeys.begin(),
|
||||
boost::lambda::bind(&VariableSlots::iterator::value_type::first, boost::lambda::_1));
|
||||
assert(std::equal(allKeys.begin(), allKeys.end(), varslotsKeys.begin()));
|
||||
#endif
|
||||
|
||||
// Now that we know which factors and variables, and where variables
|
||||
// come from and go to, create and eliminate the new joint factor.
|
||||
tic("JT 2.2 Combine");
|
||||
typename FG::sharedFactor jointFactor = FG::factor_type::Combine(fg, variableSlots);
|
||||
toc("JT 2.2 Combine");
|
||||
tic("JT 2.3 Eliminate");
|
||||
typename FG::bayesnet_type::shared_ptr fragment = jointFactor->eliminate(current->frontal.size());
|
||||
toc("JT 2.3 Eliminate");
|
||||
assert(std::equal(jointFactor->begin(), jointFactor->end(), current->separator.begin()));
|
||||
|
||||
tic("JT 2.4 Update tree");
|
||||
// create a new clique corresponding the combined factors
|
||||
// BayesTree<Conditional> bayesTree(bn, children);
|
||||
sharedBtreeClique new_clique(new typename BayesTree<Conditional>::Clique(bn));
|
||||
typename BayesTree::sharedClique new_clique(new typename BayesTree::Clique(*fragment));
|
||||
new_clique->children_ = children;
|
||||
|
||||
BOOST_FOREACH(sharedBtreeClique& childRoot, children)
|
||||
BOOST_FOREACH(typename BayesTree::sharedClique& childRoot, children)
|
||||
childRoot->parent_ = new_clique;
|
||||
|
||||
return make_pair(fg, new_clique);
|
||||
new_clique->cachedFactor() = jointFactor;
|
||||
toc("JT 2.4 Update tree");
|
||||
return make_pair(new_clique, jointFactor);
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
template <class FG> template <class Conditional>
|
||||
typename BayesTree<Conditional>::sharedClique JunctionTree<FG>::eliminate() {
|
||||
pair<FG, typename BayesTree<Conditional>::sharedClique> ret = this->eliminateOneClique<Conditional>(this->root());
|
||||
if (ret.first.nrFactors() != 0)
|
||||
throw runtime_error("JuntionTree::eliminate: elimination failed because of factors left over!");
|
||||
return ret.second;
|
||||
template <class FG>
|
||||
typename JunctionTree<FG>::BayesTree::sharedClique JunctionTree<FG>::eliminate() const {
|
||||
if(this->root()) {
|
||||
tic("JT 2 eliminate");
|
||||
pair<typename BayesTree::sharedClique, typename FG::sharedFactor> ret = this->eliminateOneClique(this->root());
|
||||
if (ret.second->size() != 0)
|
||||
throw runtime_error("JuntionTree::eliminate: elimination failed because of factors left over!");
|
||||
toc("JT 2 eliminate");
|
||||
return ret.first;
|
||||
} else
|
||||
return typename BayesTree::sharedClique();
|
||||
}
|
||||
|
||||
} //namespace gtsam
|
||||
|
|
|
|||
|
|
@ -9,10 +9,13 @@
|
|||
#pragma once
|
||||
|
||||
#include <set>
|
||||
#include <vector>
|
||||
#include <list>
|
||||
#include <boost/shared_ptr.hpp>
|
||||
#include <boost/pool/pool_alloc.hpp>
|
||||
#include <gtsam/inference/BayesTree.h>
|
||||
#include <gtsam/inference/ClusterTree.h>
|
||||
#include <gtsam/inference/SymbolicConditional.h>
|
||||
#include <gtsam/inference/Conditional.h>
|
||||
|
||||
namespace gtsam {
|
||||
|
||||
|
|
@ -32,17 +35,23 @@ namespace gtsam {
|
|||
typedef typename ClusterTree<FG>::Cluster Clique;
|
||||
typedef typename Clique::shared_ptr sharedClique;
|
||||
|
||||
typedef class BayesTree<typename FG::factor_type::Conditional> BayesTree;
|
||||
|
||||
// And we will frequently refer to a symbolic Bayes tree
|
||||
typedef BayesTree<SymbolicConditional> SymbolicBayesTree;
|
||||
typedef gtsam::BayesTree<Conditional> SymbolicBayesTree;
|
||||
|
||||
private:
|
||||
// distribute the factors along the cluster tree
|
||||
sharedClique distributeFactors(FG& fg,
|
||||
const SymbolicBayesTree::sharedClique clique);
|
||||
sharedClique distributeFactors(const FG& fg,
|
||||
const SymbolicBayesTree::sharedClique& clique);
|
||||
|
||||
// utility function called by eliminate
|
||||
template<class Conditional>
|
||||
std::pair<FG, typename BayesTree<Conditional>::sharedClique> eliminateOneClique(sharedClique fg_);
|
||||
// distribute the factors along the cluster tree
|
||||
sharedClique distributeFactors(const FG& fg, const std::vector<std::list<size_t,boost::fast_pool_allocator<size_t> > >& targets,
|
||||
const SymbolicBayesTree::sharedClique& clique);
|
||||
|
||||
// recursive elimination function
|
||||
std::pair<typename BayesTree::sharedClique, typename FG::sharedFactor>
|
||||
eliminateOneClique(const boost::shared_ptr<const Clique>& clique) const;
|
||||
|
||||
public:
|
||||
// constructor
|
||||
|
|
@ -50,11 +59,10 @@ namespace gtsam {
|
|||
}
|
||||
|
||||
// constructor given a factor graph and the elimination ordering
|
||||
JunctionTree(FG& fg, const Ordering& ordering);
|
||||
JunctionTree(const FG& fg);
|
||||
|
||||
// eliminate the factors in the subgraphs
|
||||
template<class Conditional>
|
||||
typename BayesTree<Conditional>::sharedClique eliminate();
|
||||
typename BayesTree::sharedClique eliminate() const;
|
||||
|
||||
}; // JunctionTree
|
||||
|
||||
|
|
|
|||
|
|
@ -15,17 +15,17 @@ check_PROGRAMS =
|
|||
#----------------------------------------------------------------------------------------------------
|
||||
|
||||
# GTSAM core
|
||||
headers += Key.h SymbolMap.h Factor.h Conditional.h IndexTable.h
|
||||
sources += Ordering.cpp
|
||||
check_PROGRAMS += tests/testOrdering tests/testKey
|
||||
headers += SymbolMap.h Factor.h Conditional.h IndexTable.h
|
||||
|
||||
# Symbolic Inference
|
||||
headers += SymbolicConditional.h
|
||||
sources += SymbolicFactor.cpp SymbolicFactorGraph.cpp SymbolicBayesNet.cpp
|
||||
check_PROGRAMS += tests/testSymbolicFactor tests/testSymbolicFactorGraph tests/testSymbolicBayesNet
|
||||
headers += SymbolicFactorGraph.h
|
||||
sources += Factor.cpp SymbolicFactorGraph.cpp
|
||||
check_PROGRAMS += tests/testSymbolicFactor tests/testSymbolicFactorGraph tests/testSymbolicBayesNet
|
||||
check_PROGRAMS += tests/testVariableIndex tests/testVariableSlots
|
||||
|
||||
# Inference
|
||||
headers += inference.h inference-inl.h
|
||||
headers += inference-inl.h VariableSlots-inl.h
|
||||
sources += inference.cpp VariableSlots.cpp
|
||||
headers += graph.h graph-inl.h
|
||||
headers += FactorGraph.h FactorGraph-inl.h
|
||||
headers += ClusterTree.h ClusterTree-inl.h
|
||||
|
|
@ -35,18 +35,24 @@ headers += BayesNet.h BayesNet-inl.h
|
|||
headers += BayesTree.h BayesTree-inl.h
|
||||
headers += ISAM.h ISAM-inl.h
|
||||
headers += ISAM2.h ISAM2-inl.h
|
||||
check_PROGRAMS += tests/testFactorGraph tests/testClusterTree tests/testEliminationTree tests/testJunctionTree tests/testBayesTree tests/testISAM
|
||||
check_PROGRAMS += tests/testFactorGraph
|
||||
check_PROGRAMS += tests/testFactorGraph
|
||||
check_PROGRAMS += tests/testBayesTree
|
||||
check_PROGRAMS += tests/testISAM
|
||||
check_PROGRAMS += tests/testEliminationTree
|
||||
check_PROGRAMS += tests/testClusterTree
|
||||
check_PROGRAMS += tests/testJunctionTree
|
||||
|
||||
#----------------------------------------------------------------------------------------------------
|
||||
# discrete
|
||||
#----------------------------------------------------------------------------------------------------
|
||||
|
||||
# Binary Inference
|
||||
headers += BinaryConditional.h
|
||||
check_PROGRAMS += tests/testBinaryBayesNet
|
||||
#headers += BinaryConditional.h
|
||||
#check_PROGRAMS += tests/testBinaryBayesNet
|
||||
|
||||
# Timing tests
|
||||
noinst_PROGRAMS = tests/timeSymbolMaps
|
||||
#noinst_PROGRAMS = tests/timeSymbolMaps
|
||||
|
||||
#----------------------------------------------------------------------------------------------------
|
||||
# Create a libtool library that is not installed
|
||||
|
|
@ -66,10 +72,14 @@ AM_CXXFLAGS =
|
|||
#----------------------------------------------------------------------------------------------------
|
||||
TESTS = $(check_PROGRAMS)
|
||||
AM_LDFLAGS = $(BOOST_LDFLAGS) $(boost_serialization)
|
||||
LDADD = libinference.la ../base/libbase.la
|
||||
LDADD = libinference.la ../base/libbase.la
|
||||
LDADD += ../CppUnitLite/libCppUnitLite.a ../colamd/libcolamd.la
|
||||
AM_DEFAULT_SOURCE_EXT = .cpp
|
||||
|
||||
if USE_ACCELERATE_MACOS
|
||||
AM_LDFLAGS += -Wl,/System/Library/Frameworks/Accelerate.framework/Accelerate
|
||||
endif
|
||||
|
||||
# rule to run an executable
|
||||
%.run: % $(LDADD)
|
||||
./$^
|
||||
|
|
|
|||
|
|
@ -1,52 +0,0 @@
|
|||
/**
|
||||
* @file Ordering.cpp
|
||||
* @brief Ordering
|
||||
* @author Christian Potthast
|
||||
*/
|
||||
|
||||
#include <iostream>
|
||||
#include <stdexcept>
|
||||
#include <boost/foreach.hpp>
|
||||
#include <boost/tuple/tuple.hpp>
|
||||
|
||||
#include <gtsam/inference/Ordering.h>
|
||||
|
||||
using namespace std;
|
||||
using namespace gtsam;
|
||||
|
||||
/* ************************************************************************* */
|
||||
void Ordering::print(const string& s) const {
|
||||
cout << s << " (" << size() << "):";
|
||||
BOOST_FOREACH(const Symbol& key, *this)
|
||||
cout << " " << (string)key;
|
||||
cout << endl;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
bool Ordering::equals(const Ordering &other, double tol) const {
|
||||
return *this == other;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
Ordering Ordering::subtract(const Ordering& keys) const {
|
||||
Ordering newOrdering = *this;
|
||||
BOOST_FOREACH(const Symbol& key, keys) {
|
||||
newOrdering.remove(key);
|
||||
}
|
||||
return newOrdering;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
void Unordered::print(const string& s) const {
|
||||
cout << s << " (" << size() << "):";
|
||||
BOOST_FOREACH(const Symbol& key, *this)
|
||||
cout << " " << (string)key;
|
||||
cout << endl;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
bool Unordered::equals(const Unordered &other, double tol) const {
|
||||
return *this == other;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
|
|
@ -1,69 +0,0 @@
|
|||
/**
|
||||
* @file Ordering.h
|
||||
* @brief Ordering of indices for eliminating a factor graph
|
||||
* @author Frank Dellaert
|
||||
*/
|
||||
|
||||
#pragma once
|
||||
|
||||
#include <list>
|
||||
#include <set>
|
||||
#include <string>
|
||||
#include <gtsam/base/Testable.h>
|
||||
#include <gtsam/inference/Key.h>
|
||||
|
||||
namespace gtsam {
|
||||
|
||||
/**
|
||||
* @class Ordering
|
||||
* @brief ordering of indices for eliminating a factor graph
|
||||
*/
|
||||
class Ordering: public std::list<Symbol>, public Testable<Ordering> {
|
||||
public:
|
||||
/** Default constructor creates empty ordering */
|
||||
Ordering() { }
|
||||
|
||||
/** Create from a single symbol */
|
||||
Ordering(Symbol key) { push_back(key); }
|
||||
|
||||
/** Copy constructor */
|
||||
Ordering(const std::list<Symbol>& keys_in) : std::list<Symbol>(keys_in) {}
|
||||
|
||||
/** whether a key exists */
|
||||
bool exists(const Symbol& key) { return std::find(begin(), end(), key) != end(); }
|
||||
|
||||
// Testable
|
||||
void print(const std::string& s = "Ordering") const;
|
||||
bool equals(const Ordering &ord, double tol=0) const;
|
||||
|
||||
/**
|
||||
* Remove a set of keys from an ordering
|
||||
* @param keys to remove
|
||||
* @return a new ordering without the selected keys
|
||||
*/
|
||||
Ordering subtract(const Ordering& keys) const;
|
||||
};
|
||||
|
||||
/**
|
||||
* @class Unordered
|
||||
* @brief a set of unordered indice
|
||||
*/
|
||||
class Unordered: public std::set<Symbol>, public Testable<Unordered> {
|
||||
public:
|
||||
/** Default constructor creates empty ordering */
|
||||
Unordered() { }
|
||||
|
||||
/** Create from a single symbol */
|
||||
Unordered(Symbol key) { insert(key); }
|
||||
|
||||
/** Copy constructor */
|
||||
Unordered(const std::set<Symbol>& keys_in) : std::set<Symbol>(keys_in) {}
|
||||
|
||||
/** whether a key exists */
|
||||
bool exists(const Symbol& key) { return find(key) != end(); }
|
||||
|
||||
// Testable
|
||||
void print(const std::string& s = "Unordered") const;
|
||||
bool equals(const Unordered &t, double tol=0) const;
|
||||
};
|
||||
}
|
||||
|
|
@ -0,0 +1,231 @@
|
|||
/**
|
||||
* @file Permutation.h
|
||||
* @brief
|
||||
* @author Richard Roberts
|
||||
* @created Sep 12, 2010
|
||||
*/
|
||||
|
||||
#pragma once
|
||||
|
||||
#include <gtsam/base/types.h>
|
||||
|
||||
#include <vector>
|
||||
#include <iostream>
|
||||
#include <boost/shared_ptr.hpp>
|
||||
|
||||
namespace gtsam {
|
||||
|
||||
class Inference;
|
||||
|
||||
/**
|
||||
* A permutation reorders variables, for example to reduce fill-in during
|
||||
* elimination. To save computation, the permutation can be applied to
|
||||
* the necessary data structures only once, then multiple computations
|
||||
* performed on the permuted problem. For example, in an iterative
|
||||
* non-linear setting, a permutation can be created from the symbolic graph
|
||||
* structure and applied to the ordering of nonlinear variables once, so
|
||||
* that linearized factor graphs are already correctly ordered and need
|
||||
* not be permuted.
|
||||
*
|
||||
* For convenience, there is also a helper class "Permuted" that transforms
|
||||
* arguments supplied through the square-bracket [] operator through the
|
||||
* permutation. Note that this helper class stores a reference to the original
|
||||
* container.
|
||||
*/
|
||||
class Permutation {
|
||||
protected:
|
||||
std::vector<varid_t> rangeIndices_;
|
||||
|
||||
public:
|
||||
typedef boost::shared_ptr<Permutation> shared_ptr;
|
||||
typedef std::vector<varid_t>::const_iterator const_iterator;
|
||||
typedef std::vector<varid_t>::iterator iterator;
|
||||
|
||||
/**
|
||||
* Create an empty permutation. This cannot do anything, but you can later
|
||||
* assign to it.
|
||||
*/
|
||||
Permutation() {}
|
||||
|
||||
/**
|
||||
* Create an uninitialized permutation. You must assign all values using the
|
||||
* square bracket [] operator or they will be undefined!
|
||||
*/
|
||||
Permutation(varid_t nVars) : rangeIndices_(nVars) {}
|
||||
|
||||
/**
|
||||
* Permute the given variable, i.e. determine it's new index after the
|
||||
* permutation.
|
||||
*/
|
||||
varid_t operator[](varid_t variable) const { check(variable); return rangeIndices_[variable]; }
|
||||
varid_t& operator[](varid_t variable) { check(variable); return rangeIndices_[variable]; }
|
||||
|
||||
/**
|
||||
* The number of variables in the range of this permutation, i.e. the output
|
||||
* space.
|
||||
*/
|
||||
varid_t size() const { return rangeIndices_.size(); }
|
||||
|
||||
/** Whether the permutation contains any entries */
|
||||
bool empty() const { return rangeIndices_.empty(); }
|
||||
|
||||
/**
|
||||
* Resize the permutation. You must fill in the new entries if new new size
|
||||
* is larger than the old one. If the new size is smaller, entries from the
|
||||
* end are discarded.
|
||||
*/
|
||||
void resize(size_t newSize) { rangeIndices_.resize(newSize); }
|
||||
|
||||
/**
|
||||
* Return an identity permutation.
|
||||
*/
|
||||
static Permutation Identity(varid_t nVars);
|
||||
|
||||
iterator begin() { return rangeIndices_.begin(); }
|
||||
const_iterator begin() const { return rangeIndices_.begin(); }
|
||||
iterator end() { return rangeIndices_.end(); }
|
||||
const_iterator end() const { return rangeIndices_.end(); }
|
||||
|
||||
/** Print for debugging */
|
||||
void print(const std::string& str = "Permutation: ") const;
|
||||
|
||||
/** Equals */
|
||||
bool equals(const Permutation& rhs) const { return rangeIndices_ == rhs.rangeIndices_; }
|
||||
|
||||
/**
|
||||
* Syntactic sugar for accessing another container through a permutation.
|
||||
* Allows the syntax:
|
||||
* Permuted<Container> permuted(permutation, container);
|
||||
* permuted[index1];
|
||||
* permuted[index2];
|
||||
* which is equivalent to:
|
||||
* container[permutation[index1]];
|
||||
* container[permutation[index2]];
|
||||
* but more concise.
|
||||
*/
|
||||
|
||||
/**
|
||||
* Permute the permutation, p1.permute(p2)[i] is equivalent to p2[p1[i]].
|
||||
*/
|
||||
Permutation::shared_ptr permute(const Permutation& permutation) const;
|
||||
|
||||
/**
|
||||
* A partial permutation, reorders the variables selected by selector through
|
||||
* partialPermutation. selector and partialPermutation should have the same
|
||||
* size, this is checked if NDEBUG is not defined.
|
||||
*/
|
||||
Permutation::shared_ptr partialPermutation(const Permutation& selector, const Permutation& partialPermutation) const;
|
||||
|
||||
/**
|
||||
* Return the inverse permutation. This is only possible if this is a non-
|
||||
* reducing permutation, that is, (*this)[i] < this->size() for all i<size().
|
||||
* If NDEBUG is not defined, this conditional will be checked.
|
||||
*/
|
||||
Permutation::shared_ptr inverse() const;
|
||||
|
||||
protected:
|
||||
void check(varid_t variable) const { assert(variable < rangeIndices_.size()); }
|
||||
|
||||
friend class Inference;
|
||||
};
|
||||
|
||||
|
||||
/**
|
||||
* Definition of Permuted class, see above comment for details.
|
||||
*/
|
||||
template<typename Container, typename ValueType = typename Container::value_reference_type>
|
||||
class Permuted {
|
||||
Permutation permutation_;
|
||||
Container& container_;
|
||||
public:
|
||||
typedef ValueType value_type;
|
||||
|
||||
/** Construct as a permuted view on the Container. The permutation is copied
|
||||
* but only a reference to the container is stored.
|
||||
*/
|
||||
Permuted(const Permutation& permutation, Container& container) : permutation_(permutation), container_(container) {}
|
||||
|
||||
/** Construct as a view on the Container with an identity permutation. Only
|
||||
* a reference to the container is stored.
|
||||
*/
|
||||
Permuted(Container& container) : permutation_(Permutation::Identity(container.size())), container_(container) {}
|
||||
|
||||
/** Access the container through the permutation */
|
||||
value_type operator[](size_t index) const { return container_[permutation_[index]]; }
|
||||
|
||||
// /** Access the container through the permutation (const version) */
|
||||
// const_value_type operator[](size_t index) const;
|
||||
|
||||
/** Permute this view by applying a permutation to the underlying permutation */
|
||||
void permute(const Permutation& permutation) { assert(permutation.size() == this->size()); permutation_ = *permutation_.permute(permutation); }
|
||||
|
||||
/** Access the underlying container */
|
||||
Container* operator->() { return &container_; }
|
||||
|
||||
/** Access the underlying container (const version) */
|
||||
const Container* operator->() const { return &container_; }
|
||||
|
||||
/** Size of the underlying container */
|
||||
size_t size() const { return container_.size(); }
|
||||
|
||||
/** Access to the underlying container */
|
||||
Container& container() { return container_; }
|
||||
|
||||
/** Access to the underlying container (const version) */
|
||||
const Container& container() const { return container_; }
|
||||
|
||||
/** Access the underlying permutation */
|
||||
Permutation& permutation() { return permutation_; }
|
||||
const Permutation& permutation() const { return permutation_; }
|
||||
};
|
||||
|
||||
|
||||
/* ************************************************************************* */
|
||||
inline Permutation Permutation::Identity(varid_t nVars) {
|
||||
Permutation ret(nVars);
|
||||
for(varid_t i=0; i<nVars; ++i)
|
||||
ret[i] = i;
|
||||
return ret;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
inline Permutation::shared_ptr Permutation::permute(const Permutation& permutation) const {
|
||||
const size_t nVars = permutation.size();
|
||||
Permutation::shared_ptr result(new Permutation(nVars));
|
||||
for(size_t j=0; j<nVars; ++j) {
|
||||
assert(permutation[j] < rangeIndices_.size());
|
||||
(*result)[j] = operator[](permutation[j]);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
inline Permutation::shared_ptr Permutation::partialPermutation(
|
||||
const Permutation& selector, const Permutation& partialPermutation) const {
|
||||
assert(selector.size() == partialPermutation.size());
|
||||
Permutation::shared_ptr result(new Permutation(*this));
|
||||
|
||||
for(varid_t subsetPos=0; subsetPos<selector.size(); ++subsetPos)
|
||||
(*result)[selector[subsetPos]] = (*this)[selector[partialPermutation[subsetPos]]];
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
inline Permutation::shared_ptr Permutation::inverse() const {
|
||||
Permutation::shared_ptr result(new Permutation(this->size()));
|
||||
for(varid_t i=0; i<this->size(); ++i) {
|
||||
assert((*this)[i] < this->size());
|
||||
(*result)[(*this)[i]] = i;
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
inline void Permutation::print(const std::string& str) const {
|
||||
std::cout << str;
|
||||
BOOST_FOREACH(varid_t s, rangeIndices_) { std::cout << s << " "; }
|
||||
std::cout << std::endl;
|
||||
}
|
||||
|
||||
}
|
||||
|
|
@ -1,19 +0,0 @@
|
|||
/**
|
||||
* @file SymbolicBayesNet.cpp
|
||||
* @brief Chordal Bayes Net, the result of eliminating a factor graph
|
||||
* @author Frank Dellaert
|
||||
*/
|
||||
|
||||
#include <gtsam/inference/SymbolicBayesNet.h>
|
||||
#include <gtsam/inference/BayesNet-inl.h>
|
||||
|
||||
using namespace std;
|
||||
|
||||
namespace gtsam {
|
||||
|
||||
// Explicitly instantiate so we don't have to include everywhere
|
||||
template class BayesNet<SymbolicConditional>;
|
||||
|
||||
/* ************************************************************************* */
|
||||
|
||||
} // namespace gtsam
|
||||
|
|
@ -1,44 +0,0 @@
|
|||
/**
|
||||
* @file SymbolicBayesNet.h
|
||||
* @brief Symbolic Chordal Bayes Net, the result of eliminating a factor graph
|
||||
* @author Frank Dellaert
|
||||
*/
|
||||
|
||||
// \callgraph
|
||||
|
||||
#pragma once
|
||||
|
||||
#include <list>
|
||||
//#include <boost/serialization/map.hpp>
|
||||
//#include <boost/serialization/list.hpp>
|
||||
|
||||
#include <gtsam/base/Testable.h>
|
||||
#include <gtsam/inference/BayesNet.h>
|
||||
#include <gtsam/inference/SymbolicConditional.h>
|
||||
#include <gtsam/inference/Key.h>
|
||||
|
||||
namespace gtsam {
|
||||
|
||||
/**
|
||||
* Symbolic Bayes Chain, the (symbolic) result of eliminating a factor graph
|
||||
*/
|
||||
typedef BayesNet<SymbolicConditional> SymbolicBayesNet;
|
||||
|
||||
/**
|
||||
* Construct from a Bayes net of any type
|
||||
*/
|
||||
template<class Conditional>
|
||||
SymbolicBayesNet symbolic(const BayesNet<Conditional>& bn) {
|
||||
SymbolicBayesNet result;
|
||||
typename BayesNet<Conditional>::const_iterator it = bn.begin();
|
||||
for (; it != bn.end(); it++) {
|
||||
boost::shared_ptr<Conditional> conditional = *it;
|
||||
Symbol key = conditional->key();
|
||||
std::list<Symbol> parents = conditional->parents();
|
||||
SymbolicConditional::shared_ptr c(new SymbolicConditional(key, parents));
|
||||
result.push_back(c);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
} /// namespace gtsam
|
||||
|
|
@ -1,118 +0,0 @@
|
|||
/**
|
||||
* @file SymbolicConditional.h
|
||||
* @brief Symbolic Conditional node for use in Bayes nets
|
||||
* @author Frank Dellaert
|
||||
*/
|
||||
|
||||
// \callgraph
|
||||
|
||||
#pragma once
|
||||
|
||||
#include <list>
|
||||
#include <string>
|
||||
#include <iostream>
|
||||
#include <boost/shared_ptr.hpp>
|
||||
#include <boost/foreach.hpp> // TODO: make cpp file
|
||||
//#include <boost/serialization/list.hpp>
|
||||
#include <boost/serialization/nvp.hpp>
|
||||
#include <gtsam/inference/Conditional.h>
|
||||
#include <gtsam/inference/Key.h>
|
||||
|
||||
namespace gtsam {
|
||||
|
||||
/**
|
||||
* Conditional node for use in a Bayes net
|
||||
*/
|
||||
class SymbolicConditional: public Conditional {
|
||||
|
||||
private:
|
||||
std::list<Symbol> parents_;
|
||||
|
||||
public:
|
||||
|
||||
/** convenience typename for a shared pointer to this class */
|
||||
typedef boost::shared_ptr<SymbolicConditional> shared_ptr;
|
||||
|
||||
/**
|
||||
* Empty Constructor to make serialization possible
|
||||
*/
|
||||
SymbolicConditional(){}
|
||||
|
||||
/**
|
||||
* No parents
|
||||
*/
|
||||
SymbolicConditional(const Symbol& key) :
|
||||
Conditional(key) {
|
||||
}
|
||||
|
||||
/**
|
||||
* Single parent
|
||||
*/
|
||||
SymbolicConditional(const Symbol& key, const Symbol& parent) :
|
||||
Conditional(key) {
|
||||
parents_.push_back(parent);
|
||||
}
|
||||
|
||||
/**
|
||||
* Two parents
|
||||
*/
|
||||
SymbolicConditional(const Symbol& key, const Symbol& parent1,
|
||||
const Symbol& parent2) :
|
||||
Conditional(key) {
|
||||
parents_.push_back(parent1);
|
||||
parents_.push_back(parent2);
|
||||
}
|
||||
|
||||
/**
|
||||
* Three parents
|
||||
*/
|
||||
SymbolicConditional(const Symbol& key, const Symbol& parent1,
|
||||
const Symbol& parent2, const Symbol& parent3) :
|
||||
Conditional(key) {
|
||||
parents_.push_back(parent1);
|
||||
parents_.push_back(parent2);
|
||||
parents_.push_back(parent3);
|
||||
}
|
||||
|
||||
/**
|
||||
* A list
|
||||
*/
|
||||
SymbolicConditional(const Symbol& key,
|
||||
const std::list<Symbol>& parents) :
|
||||
Conditional(key), parents_(parents) {
|
||||
}
|
||||
|
||||
/** print */
|
||||
void print(const std::string& s = "SymbolicConditional") const {
|
||||
std::cout << s << " P(" << (std::string)key_;
|
||||
if (parents_.size()>0) std::cout << " |";
|
||||
BOOST_FOREACH(const Symbol& parent, parents_) std::cout << " " << (std::string)parent;
|
||||
std::cout << ")" << std::endl;
|
||||
}
|
||||
|
||||
/** check equality */
|
||||
bool equals(const Conditional& c, double tol = 1e-9) const {
|
||||
if (!Conditional::equals(c)) return false;
|
||||
const SymbolicConditional* p = dynamic_cast<const SymbolicConditional*> (&c);
|
||||
if (p == NULL) return false;
|
||||
return parents_ == p->parents_;
|
||||
}
|
||||
|
||||
/** return parents */
|
||||
std::list<Symbol> parents() const { return parents_;}
|
||||
|
||||
/** find the number of parents */
|
||||
size_t nrParents() const {
|
||||
return parents_.size();
|
||||
}
|
||||
|
||||
private:
|
||||
/** Serialization function */
|
||||
friend class boost::serialization::access;
|
||||
template<class Archive>
|
||||
void serialize(Archive & ar, const unsigned int version) {
|
||||
ar & boost::serialization::make_nvp("Conditional", boost::serialization::base_object<Conditional>(*this));
|
||||
ar & BOOST_SERIALIZATION_NVP(parents_);
|
||||
}
|
||||
};
|
||||
} /// namespace gtsam
|
||||
|
|
@ -1,78 +0,0 @@
|
|||
/*
|
||||
* SymbolicFactor.cpp
|
||||
*
|
||||
* Created on: Oct 29, 2009
|
||||
* Author: Frank Dellaert
|
||||
*/
|
||||
|
||||
#include <map>
|
||||
#include <iostream>
|
||||
#include <boost/foreach.hpp>
|
||||
#include <boost/tuple/tuple.hpp>
|
||||
#include <gtsam/inference/SymbolicConditional.h>
|
||||
#include <gtsam/inference/SymbolicFactor.h>
|
||||
#include <gtsam/inference/SymbolMap.h>
|
||||
|
||||
using namespace std;
|
||||
|
||||
// trick from some reading group
|
||||
#define FOREACH_PAIR( KEY, VAL, COL) BOOST_FOREACH (boost::tie(KEY,VAL),COL)
|
||||
|
||||
namespace gtsam {
|
||||
|
||||
/* ************************************************************************* */
|
||||
SymbolicFactor::SymbolicFactor(const boost::shared_ptr<SymbolicConditional>& c) {
|
||||
// initialize keys_ with parents
|
||||
keys_ = c->parents();
|
||||
// add key on which conditional is defined
|
||||
keys_.push_back(c->key());
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
SymbolicFactor::SymbolicFactor(const vector<shared_ptr> & factors) {
|
||||
|
||||
// store keys in a map to make them unique (set is not portable)
|
||||
SymbolMap<Symbol> map;
|
||||
BOOST_FOREACH(shared_ptr factor, factors)
|
||||
BOOST_FOREACH(const Symbol& key, factor->keys())
|
||||
map.insert(make_pair(key,key));
|
||||
|
||||
// create the unique keys
|
||||
Symbol key,val;
|
||||
FOREACH_PAIR(key, val, map)
|
||||
keys_.push_back(key);
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
void SymbolicFactor::print(const string& s) const {
|
||||
cout << s << " ";
|
||||
BOOST_FOREACH(const Symbol& key, keys_) cout << " " << (string)key;
|
||||
cout << endl;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
bool SymbolicFactor::equals(const SymbolicFactor& other, double tol) const {
|
||||
return keys_ == other.keys_;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
pair<SymbolicConditional::shared_ptr, SymbolicFactor::shared_ptr>
|
||||
SymbolicFactor::eliminate(const Symbol& key) const
|
||||
{
|
||||
// get keys from input factor
|
||||
list<Symbol> separator;
|
||||
BOOST_FOREACH(const Symbol& j,keys_)
|
||||
if (j!=key) separator.push_back(j);
|
||||
|
||||
// start empty remaining factor to be returned
|
||||
boost::shared_ptr<SymbolicFactor> lf(new SymbolicFactor(separator));
|
||||
|
||||
// create SymbolicConditional on separator
|
||||
SymbolicConditional::shared_ptr cg (new SymbolicConditional(key,separator));
|
||||
|
||||
return make_pair(cg,lf);
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
|
||||
}
|
||||
|
|
@ -1,111 +0,0 @@
|
|||
/*
|
||||
* SymbolicFactor.h
|
||||
*
|
||||
* Created on: Oct 29, 2009
|
||||
* Author: Frank Dellaert
|
||||
*/
|
||||
|
||||
#ifndef SYMBOLICFACTOR_H_
|
||||
#define SYMBOLICFACTOR_H_
|
||||
|
||||
#include <list>
|
||||
#include <string>
|
||||
#include <vector>
|
||||
#include <boost/shared_ptr.hpp>
|
||||
#include <gtsam/base/Testable.h>
|
||||
#include <gtsam/inference/Ordering.h>
|
||||
#include <gtsam/inference/Key.h>
|
||||
|
||||
namespace gtsam {
|
||||
|
||||
class SymbolicConditional;
|
||||
|
||||
/** Symbolic Factor */
|
||||
class SymbolicFactor: public Testable<SymbolicFactor> {
|
||||
private:
|
||||
|
||||
std::list<Symbol> keys_;
|
||||
|
||||
public:
|
||||
|
||||
typedef boost::shared_ptr<SymbolicFactor> shared_ptr;
|
||||
|
||||
/** Construct from SymbolicConditional */
|
||||
SymbolicFactor(const boost::shared_ptr<SymbolicConditional>& c);
|
||||
|
||||
/** Constructor from a list of keys */
|
||||
SymbolicFactor(const Ordering& keys) :
|
||||
keys_(keys) {
|
||||
}
|
||||
|
||||
/** Construct unary factor */
|
||||
SymbolicFactor(const Symbol& key) {
|
||||
keys_.push_back(key);
|
||||
}
|
||||
|
||||
/** Construct binary factor */
|
||||
SymbolicFactor(const Symbol& key1, const Symbol& key2) {
|
||||
keys_.push_back(key1);
|
||||
keys_.push_back(key2);
|
||||
}
|
||||
|
||||
/** Construct ternary factor */
|
||||
SymbolicFactor(const Symbol& key1, const Symbol& key2, const Symbol& key3) {
|
||||
keys_.push_back(key1);
|
||||
keys_.push_back(key2);
|
||||
keys_.push_back(key3);
|
||||
}
|
||||
|
||||
/** Construct 4-way factor */
|
||||
SymbolicFactor(const Symbol& key1, const Symbol& key2, const Symbol& key3, const Symbol& key4) {
|
||||
keys_.push_back(key1);
|
||||
keys_.push_back(key2);
|
||||
keys_.push_back(key3);
|
||||
keys_.push_back(key4);
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructor that combines a set of factors
|
||||
* @param factors Set of factors to combine
|
||||
*/
|
||||
SymbolicFactor(const std::vector<shared_ptr> & factors);
|
||||
|
||||
/** print */
|
||||
void print(const std::string& s = "SymbolicFactor") const;
|
||||
|
||||
/** check equality */
|
||||
bool equals(const SymbolicFactor& other, double tol = 1e-9) const;
|
||||
|
||||
/**
|
||||
* Find all variables
|
||||
* @return The set of all variable keys
|
||||
*/
|
||||
std::list<Symbol> keys() const {
|
||||
return keys_;
|
||||
}
|
||||
|
||||
/**
|
||||
* eliminate one of the variables connected to this factor
|
||||
* @param key the key of the node to be eliminated
|
||||
* @return a new factor and a symbolic conditional on the eliminated variable
|
||||
*/
|
||||
std::pair<boost::shared_ptr<SymbolicConditional>, shared_ptr>
|
||||
eliminate(const Symbol& key) const;
|
||||
|
||||
/**
|
||||
* Check if empty factor
|
||||
*/
|
||||
inline bool empty() const {
|
||||
return keys_.empty();
|
||||
}
|
||||
|
||||
/** get the size of the factor */
|
||||
std::size_t size() const {
|
||||
return keys_.size();
|
||||
}
|
||||
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
#endif /* SYMBOLICFACTOR_H_ */
|
||||
|
|
@ -9,9 +9,9 @@
|
|||
#include <fstream>
|
||||
#include <boost/format.hpp>
|
||||
#include <boost/foreach.hpp>
|
||||
#include <gtsam/inference/Ordering.h>
|
||||
#include <gtsam/inference/SymbolicFactorGraph.h>
|
||||
#include <gtsam/inference/SymbolicBayesNet.h>
|
||||
#include <gtsam/inference/BayesNet-inl.h>
|
||||
#include <gtsam/inference/Factor-inl.h>
|
||||
#include <gtsam/inference/inference-inl.h>
|
||||
|
||||
using namespace std;
|
||||
|
|
@ -19,34 +19,53 @@ using namespace std;
|
|||
namespace gtsam {
|
||||
|
||||
// Explicitly instantiate so we don't have to include everywhere
|
||||
template class FactorGraph<SymbolicFactor>;
|
||||
template class FactorGraph<Factor>;
|
||||
template class BayesNet<Conditional>;
|
||||
|
||||
/* ************************************************************************* */
|
||||
SymbolicFactorGraph::SymbolicFactorGraph(const BayesNet<Conditional>& bayesNet) :
|
||||
FactorGraph<Factor>(bayesNet) {}
|
||||
|
||||
/* ************************************************************************* */
|
||||
boost::shared_ptr<SymbolicConditional>
|
||||
SymbolicFactorGraph::eliminateOne(const Symbol& key){
|
||||
return gtsam::eliminateOne<SymbolicFactor,SymbolicConditional>(*this, key);
|
||||
void SymbolicFactorGraph::push_factor(varid_t key) {
|
||||
boost::shared_ptr<Factor> factor(new Factor(key));
|
||||
push_back(factor);
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
SymbolicBayesNet
|
||||
SymbolicFactorGraph::eliminate(const Ordering& ordering)
|
||||
{
|
||||
SymbolicBayesNet bayesNet;
|
||||
/** Push back binary factor */
|
||||
void SymbolicFactorGraph::push_factor(varid_t key1, varid_t key2) {
|
||||
boost::shared_ptr<Factor> factor(new Factor(key1,key2));
|
||||
push_back(factor);
|
||||
}
|
||||
|
||||
BOOST_FOREACH(const Symbol& key, ordering) {
|
||||
SymbolicConditional::shared_ptr conditional =
|
||||
gtsam::eliminateOne<SymbolicFactor,SymbolicConditional>(*this,key);
|
||||
bayesNet.push_back(conditional);
|
||||
}
|
||||
return bayesNet;
|
||||
}
|
||||
/** Push back ternary factor */
|
||||
void SymbolicFactorGraph::push_factor(varid_t key1, varid_t key2, varid_t key3) {
|
||||
boost::shared_ptr<Factor> factor(new Factor(key1,key2,key3));
|
||||
push_back(factor);
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
SymbolicBayesNet
|
||||
SymbolicFactorGraph::eliminateFrontals(const Ordering& ordering)
|
||||
{
|
||||
return eliminate(ordering);
|
||||
}
|
||||
/** Push back 4-way factor */
|
||||
void SymbolicFactorGraph::push_factor(varid_t key1, varid_t key2, varid_t key3, varid_t key4) {
|
||||
boost::shared_ptr<Factor> factor(new Factor(key1,key2,key3,key4));
|
||||
push_back(factor);
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
std::set<varid_t, std::less<varid_t>, boost::fast_pool_allocator<varid_t> >
|
||||
SymbolicFactorGraph::keys() const {
|
||||
std::set<varid_t, std::less<varid_t>, boost::fast_pool_allocator<varid_t> > keys;
|
||||
BOOST_FOREACH(const sharedFactor& factor, *this) {
|
||||
if(factor) keys.insert(factor->begin(), factor->end()); }
|
||||
return keys;
|
||||
}
|
||||
|
||||
|
||||
// /* ************************************************************************* */
|
||||
// SymbolicBayesNet
|
||||
// SymbolicFactorGraph::eliminateFrontals(const Ordering& ordering)
|
||||
// {
|
||||
// return Inference::Eliminate(ordering);
|
||||
// }
|
||||
|
||||
/* ************************************************************************* */
|
||||
}
|
||||
|
|
|
|||
|
|
@ -9,76 +9,68 @@
|
|||
|
||||
#include <string>
|
||||
#include <list>
|
||||
#include <gtsam/base/types.h>
|
||||
#include <gtsam/inference/FactorGraph.h>
|
||||
#include <gtsam/inference/SymbolicFactor.h>
|
||||
#include <gtsam/inference/SymbolicBayesNet.h>
|
||||
#include <gtsam/inference/Key.h>
|
||||
#include <gtsam/inference/Factor-inl.h>
|
||||
#include <gtsam/inference/BayesNet.h>
|
||||
#include <gtsam/inference/Conditional.h>
|
||||
|
||||
namespace gtsam {
|
||||
|
||||
class SymbolicConditional;
|
||||
typedef BayesNet<Conditional> SymbolicBayesNet;
|
||||
|
||||
/** Symbolic Factor Graph */
|
||||
class SymbolicFactorGraph: public FactorGraph<SymbolicFactor> {
|
||||
public:
|
||||
/** Symbolic Factor Graph */
|
||||
class SymbolicFactorGraph: public FactorGraph<Factor> {
|
||||
public:
|
||||
typedef SymbolicBayesNet bayesnet_type;
|
||||
typedef VariableIndex<> variableindex_type;
|
||||
|
||||
/** Construct empty factor graph */
|
||||
SymbolicFactorGraph() {}
|
||||
/** Construct empty factor graph */
|
||||
SymbolicFactorGraph() {}
|
||||
|
||||
/** Push back unary factor */
|
||||
void push_factor(const Symbol& key) {
|
||||
boost::shared_ptr<SymbolicFactor> factor(new SymbolicFactor(key));
|
||||
push_back(factor);
|
||||
}
|
||||
/** Construct from a BayesNet */
|
||||
SymbolicFactorGraph(const BayesNet<Conditional>& bayesNet);
|
||||
|
||||
/** Push back binary factor */
|
||||
void push_factor(const Symbol& key1, const Symbol& key2) {
|
||||
boost::shared_ptr<SymbolicFactor> factor(new SymbolicFactor(key1,key2));
|
||||
push_back(factor);
|
||||
}
|
||||
/** Push back unary factor */
|
||||
void push_factor(varid_t key);
|
||||
|
||||
/** Push back ternary factor */
|
||||
void push_factor(const Symbol& key1, const Symbol& key2, const Symbol& key3) {
|
||||
boost::shared_ptr<SymbolicFactor> factor(new SymbolicFactor(key1,key2,key3));
|
||||
push_back(factor);
|
||||
}
|
||||
/** Push back binary factor */
|
||||
void push_factor(varid_t key1, varid_t key2);
|
||||
|
||||
/** Push back 4-way factor */
|
||||
void push_factor(const Symbol& key1, const Symbol& key2, const Symbol& key3, const Symbol& key4) {
|
||||
boost::shared_ptr<SymbolicFactor> factor(new SymbolicFactor(key1,key2,key3,key4));
|
||||
push_back(factor);
|
||||
}
|
||||
/** Push back ternary factor */
|
||||
void push_factor(varid_t key1, varid_t key2, varid_t key3);
|
||||
|
||||
/**
|
||||
* Construct from a factor graph of any type
|
||||
*/
|
||||
template<class Factor>
|
||||
SymbolicFactorGraph(const FactorGraph<Factor>& fg) {
|
||||
for (size_t i = 0; i < fg.size(); i++) {
|
||||
boost::shared_ptr<Factor> f = fg[i];
|
||||
std::list<Symbol> keys = f->keys();
|
||||
SymbolicFactor::shared_ptr factor(new SymbolicFactor(keys));
|
||||
push_back(factor);
|
||||
}
|
||||
}
|
||||
/** Push back 4-way factor */
|
||||
void push_factor(varid_t key1, varid_t key2, varid_t key3, varid_t key4);
|
||||
|
||||
/**
|
||||
* Eliminate a single node yielding a conditional Gaussian
|
||||
* Eliminates the factors from the factor graph through findAndRemoveFactors
|
||||
* and adds a new factor on the separator to the factor graph
|
||||
*/
|
||||
boost::shared_ptr<SymbolicConditional> eliminateOne(const Symbol& key);
|
||||
/**
|
||||
* Construct from a factor graph of any type
|
||||
*/
|
||||
template<class Factor>
|
||||
SymbolicFactorGraph(const FactorGraph<Factor>& fg);
|
||||
|
||||
/**
|
||||
* eliminate factor graph in place(!) in the given order, yielding
|
||||
* a chordal Bayes net
|
||||
*/
|
||||
SymbolicBayesNet eliminate(const Ordering& ordering);
|
||||
/**
|
||||
* Return the set of variables involved in the factors (computes a set
|
||||
* union).
|
||||
*/
|
||||
std::set<varid_t, std::less<varid_t>, boost::fast_pool_allocator<varid_t> > keys() const;
|
||||
|
||||
/**
|
||||
* Same as eliminate in the SymbolicFactorGraph case
|
||||
*/
|
||||
SymbolicBayesNet eliminateFrontals(const Ordering& ordering);
|
||||
};
|
||||
/**
|
||||
* Same as eliminate in the SymbolicFactorGraph case
|
||||
*/
|
||||
// SymbolicBayesNet eliminateFrontals(const Ordering& ordering);
|
||||
};
|
||||
|
||||
/* Template function implementation */
|
||||
template<class FactorType>
|
||||
SymbolicFactorGraph::SymbolicFactorGraph(const FactorGraph<FactorType>& fg) {
|
||||
for (size_t i = 0; i < fg.size(); i++) {
|
||||
if(fg[i]) {
|
||||
Factor::shared_ptr factor(new Factor(*fg[i]));
|
||||
push_back(factor);
|
||||
} else
|
||||
push_back(Factor::shared_ptr());
|
||||
}
|
||||
}
|
||||
|
||||
} // namespace gtsam
|
||||
|
|
|
|||
|
|
@ -0,0 +1,242 @@
|
|||
/**
|
||||
* @file VariableIndex.h
|
||||
* @brief
|
||||
* @author Richard Roberts
|
||||
* @created Sep 12, 2010
|
||||
*/
|
||||
|
||||
#pragma once
|
||||
|
||||
#include <gtsam/base/types.h>
|
||||
#include <gtsam/inference/Permutation.h>
|
||||
|
||||
#include <vector>
|
||||
#include <deque>
|
||||
#include <iostream>
|
||||
#include <boost/shared_ptr.hpp>
|
||||
|
||||
namespace gtsam {
|
||||
|
||||
class Inference;
|
||||
|
||||
/**
|
||||
* The VariableIndex can internally use either a vector or a deque to store
|
||||
* variables. For one-shot elimination, using a vector will give the best
|
||||
* performance, and this is the default. For incremental updates, such as in
|
||||
* ISAM and ISAM2, deque storage is used to prevent frequent reallocation.
|
||||
*/
|
||||
struct VariableIndexStorage_vector { template<typename T> struct type_factory { typedef std::vector<T> type; }; };
|
||||
struct VariableIndexStorage_deque { template<typename T> struct type_factory { typedef std::deque<T> type; }; };
|
||||
|
||||
/**
|
||||
* The VariableIndex class stores the information necessary to perform
|
||||
* elimination, including an index of factors involving each variable and
|
||||
* the structure of the intermediate joint factors that develop during
|
||||
* elimination. This maps variables to deque's of pair<size_t,size_t>,
|
||||
* which is pairs the factor index with the position of the variable within
|
||||
* the factor.
|
||||
*/
|
||||
struct _mapped_factor_type {
|
||||
size_t factorIndex;
|
||||
size_t variablePosition;
|
||||
_mapped_factor_type(size_t _factorIndex, size_t _variablePosition) : factorIndex(_factorIndex), variablePosition(_variablePosition) {}
|
||||
bool operator==(const _mapped_factor_type& o) const { return factorIndex == o.factorIndex && variablePosition == o.variablePosition; }
|
||||
};
|
||||
template<class VariableIndexStorage=VariableIndexStorage_vector>
|
||||
class VariableIndex {
|
||||
public:
|
||||
|
||||
typedef _mapped_factor_type mapped_factor_type;
|
||||
typedef boost::shared_ptr<VariableIndex> shared_ptr;
|
||||
typedef std::deque<mapped_factor_type> mapped_type;
|
||||
typedef typename mapped_type::iterator factor_iterator;
|
||||
typedef typename mapped_type::const_iterator const_factor_iterator;
|
||||
|
||||
protected:
|
||||
typedef typename VariableIndexStorage::template type_factory<mapped_type>::type storage_type;
|
||||
storage_type indexUnpermuted_;
|
||||
Permuted<storage_type, typename storage_type::value_type&> index_;
|
||||
size_t nFactors_;
|
||||
size_t nEntries_; // Sum of involved variable counts of each factor
|
||||
|
||||
public:
|
||||
VariableIndex() : index_(indexUnpermuted_), nFactors_(0), nEntries_(0) {}
|
||||
template<class FactorGraph> VariableIndex(const FactorGraph& factorGraph);
|
||||
|
||||
varid_t size() const { return index_.size(); }
|
||||
size_t nFactors() const { return nFactors_; }
|
||||
size_t nEntries() const { return nEntries_; }
|
||||
const mapped_type& operator[](varid_t variable) const { checkVar(variable); return index_[variable]; }
|
||||
mapped_type& operator[](varid_t variable) { checkVar(variable); return index_[variable]; }
|
||||
void permute(const Permutation& permutation);
|
||||
// template<class Derived> void augment(const Derived& varIndex);
|
||||
template<class FactorGraph> void augment(const FactorGraph& factorGraph);
|
||||
void rebaseFactors(ptrdiff_t baseIndexChange);
|
||||
|
||||
template<class Derived> bool equals(const Derived& other, double tol=0.0) const;
|
||||
void print(const std::string& str = "VariableIndex: ") const;
|
||||
|
||||
protected:
|
||||
VariableIndex(size_t nVars) : indexUnpermuted_(nVars), index_(indexUnpermuted_), nFactors_(0), nEntries_(0) {}
|
||||
void checkVar(varid_t variable) const { assert(variable < index_.size()); }
|
||||
|
||||
factor_iterator factorsBegin(varid_t variable) { checkVar(variable); return index_[variable].begin(); }
|
||||
const_factor_iterator factorsBegin(varid_t variable) const { checkVar(variable); return index_[variable].begin(); }
|
||||
factor_iterator factorsEnd(varid_t variable) { checkVar(variable); return index_[variable].end(); }
|
||||
const_factor_iterator factorsEnd(varid_t variable) const { checkVar(variable); return index_[variable].end(); }
|
||||
|
||||
friend class Inference;
|
||||
};
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class Storage>
|
||||
void VariableIndex<Storage>::permute(const Permutation& permutation) {
|
||||
#ifndef NDEBUG
|
||||
// Assert that the permutation does not leave behind any non-empty variables,
|
||||
// otherwise the nFactors and nEntries counts would be incorrect.
|
||||
for(varid_t j=0; j<this->index_.size(); ++j)
|
||||
if(find(permutation.begin(), permutation.end(), j) == permutation.end())
|
||||
assert(this->operator[](j).empty());
|
||||
#endif
|
||||
index_.permute(permutation);
|
||||
// storage_type original(this->index_.size());
|
||||
// this->index_.swap(original);
|
||||
// for(varid_t j=0; j<permutation.size(); ++j)
|
||||
// this->index_[j].swap(original[permutation[j]]);
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class Storage>
|
||||
template<class FactorGraph>
|
||||
VariableIndex<Storage>::VariableIndex(const FactorGraph& factorGraph) :
|
||||
index_(indexUnpermuted_), nFactors_(0), nEntries_(0) {
|
||||
|
||||
// If the factor graph is empty, return an empty index because inside this
|
||||
// if block we assume at least one factor.
|
||||
if(factorGraph.size() > 0) {
|
||||
// Find highest-numbered variable
|
||||
varid_t maxVar = 0;
|
||||
BOOST_FOREACH(const typename FactorGraph::sharedFactor& factor, factorGraph) {
|
||||
if(factor) {
|
||||
BOOST_FOREACH(const varid_t key, factor->keys()) {
|
||||
if(key > maxVar)
|
||||
maxVar = key;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Allocate index
|
||||
index_.container().resize(maxVar+1);
|
||||
index_.permutation() = Permutation::Identity(maxVar+1);
|
||||
|
||||
// Build index mapping from variable id to factor index
|
||||
for(size_t fi=0; fi<factorGraph.size(); ++fi)
|
||||
if(factorGraph[fi]) {
|
||||
varid_t fvari = 0;
|
||||
BOOST_FOREACH(const varid_t key, factorGraph[fi]->keys()) {
|
||||
index_[key].push_back(mapped_factor_type(fi, fvari));
|
||||
++ fvari;
|
||||
++ nEntries_;
|
||||
}
|
||||
++ nFactors_;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
///* ************************************************************************* */
|
||||
//template<class Storage>
|
||||
//template<class Derived>
|
||||
//void VariableIndex<Storage>::augment(const Derived& varIndex) {
|
||||
// nFactors_ = std::max(nFactors_, varIndex.nFactors());
|
||||
// nEntries_ = nEntries_ + varIndex.nEntries();
|
||||
// varid_t originalSize = index_.size();
|
||||
// index_.container().resize(std::max(index_.size(), varIndex.size()));
|
||||
// index_.permutation().resize(index_.container().size());
|
||||
// for(varid_t var=originalSize; var<index_.permutation().size(); ++var)
|
||||
// index_.permutation()[var] = var;
|
||||
// for(varid_t var=0; var<varIndex.size(); ++var)
|
||||
// index_[var].insert(index_[var].begin(), varIndex[var].begin(), varIndex[var].end());
|
||||
//}
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class Storage>
|
||||
template<class FactorGraph>
|
||||
void VariableIndex<Storage>::augment(const FactorGraph& factorGraph) {
|
||||
// If the factor graph is empty, return an empty index because inside this
|
||||
// if block we assume at least one factor.
|
||||
if(factorGraph.size() > 0) {
|
||||
// Find highest-numbered variable
|
||||
varid_t maxVar = 0;
|
||||
BOOST_FOREACH(const typename FactorGraph::sharedFactor& factor, factorGraph) {
|
||||
if(factor) {
|
||||
BOOST_FOREACH(const varid_t key, factor->keys()) {
|
||||
if(key > maxVar)
|
||||
maxVar = key;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Allocate index
|
||||
varid_t originalSize = index_.size();
|
||||
index_.container().resize(std::max(index_.size(), maxVar+1));
|
||||
index_.permutation().resize(index_.container().size());
|
||||
for(varid_t var=originalSize; var<index_.permutation().size(); ++var)
|
||||
index_.permutation()[var] = var;
|
||||
|
||||
// Augment index mapping from variable id to factor index
|
||||
size_t orignFactors = nFactors_;
|
||||
for(size_t fi=0; fi<factorGraph.size(); ++fi)
|
||||
if(factorGraph[fi]) {
|
||||
varid_t fvari = 0;
|
||||
BOOST_FOREACH(const varid_t key, factorGraph[fi]->keys()) {
|
||||
index_[key].push_back(mapped_factor_type(orignFactors + fi, fvari));
|
||||
++ fvari;
|
||||
++ nEntries_;
|
||||
}
|
||||
++ nFactors_;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class Storage>
|
||||
void VariableIndex<Storage>::rebaseFactors(ptrdiff_t baseIndexChange) {
|
||||
BOOST_FOREACH(mapped_type& factors, index_.container()) {
|
||||
BOOST_FOREACH(mapped_factor_type& factor, factors) {
|
||||
factor.factorIndex += baseIndexChange;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class Storage>
|
||||
template<class Derived>
|
||||
bool VariableIndex<Storage>::equals(const Derived& other, double tol) const {
|
||||
if(this->nEntries_ == other.nEntries_ && this->nFactors_ == other.nFactors_ && this->index_.size() == other.index_.size()) {
|
||||
for(size_t var=0; var<this->index_.size(); ++var)
|
||||
if(this->index_[var] != other.index_[var])
|
||||
return false;
|
||||
} else
|
||||
return false;
|
||||
return true;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class Storage>
|
||||
void VariableIndex<Storage>::print(const std::string& str) const {
|
||||
std::cout << str;
|
||||
varid_t var = 0;
|
||||
BOOST_FOREACH(const mapped_type& variable, index_.container()) {
|
||||
Permutation::const_iterator rvar = find(index_.permutation().begin(), index_.permutation().end(), var);
|
||||
assert(rvar != index_.permutation().end());
|
||||
std::cout << "var " << *rvar << ":";
|
||||
BOOST_FOREACH(const mapped_factor_type& factor, variable) {
|
||||
std::cout << " " << factor.factorIndex << "-" << factor.variablePosition;
|
||||
}
|
||||
std::cout << "\n";
|
||||
++ var;
|
||||
}
|
||||
std::cout << std::flush;
|
||||
}
|
||||
|
||||
}
|
||||
|
|
@ -0,0 +1,55 @@
|
|||
/**
|
||||
* @file VariableSlots-inl.h
|
||||
* @brief
|
||||
* @author Richard Roberts
|
||||
* @created Oct 5, 2010
|
||||
*/
|
||||
|
||||
#pragma once
|
||||
|
||||
#include <gtsam/inference/VariableSlots.h>
|
||||
|
||||
#include <iostream>
|
||||
|
||||
#include <boost/foreach.hpp>
|
||||
#include <boost/tuple/tuple.hpp>
|
||||
|
||||
namespace gtsam {
|
||||
|
||||
using namespace std;
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class FG>
|
||||
VariableSlots::VariableSlots(const FG& factorGraph) {
|
||||
static const bool debug = false;
|
||||
|
||||
// Compute a mapping (called variableSlots) *from* each involved
|
||||
// variable that will be in the new joint factor *to* the slot in each
|
||||
// removed factor in which that variable appears. For each variable,
|
||||
// this is stored as a vector of slot numbers, stored in order of the
|
||||
// removed factors. The slot number is the max integer value if the
|
||||
// factor does not involve that variable.
|
||||
size_t jointFactorPos = 0;
|
||||
BOOST_FOREACH(const typename FG::sharedFactor& factor, factorGraph) {
|
||||
assert(factor);
|
||||
varid_t factorVarSlot = 0;
|
||||
BOOST_FOREACH(const varid_t involvedVariable, *factor) {
|
||||
// Set the slot in this factor for this variable. If the
|
||||
// variable was not already discovered, create an array for it
|
||||
// that we'll fill with the slot indices for each factor that
|
||||
// we're combining. Initially we put the max integer value in
|
||||
// the array entry for each factor that will indicate the factor
|
||||
// does not involve the variable.
|
||||
iterator thisVarSlots; bool inserted;
|
||||
boost::tie(thisVarSlots, inserted) = this->insert(make_pair(involvedVariable, vector<varid_t>()));
|
||||
if(inserted)
|
||||
thisVarSlots->second.resize(factorGraph.size(), numeric_limits<varid_t>::max());
|
||||
thisVarSlots->second[jointFactorPos] = factorVarSlot;
|
||||
if(debug) cout << " var " << involvedVariable << " rowblock " << jointFactorPos << " comes from factor's slot " << factorVarSlot << endl;
|
||||
++ factorVarSlot;
|
||||
}
|
||||
++ jointFactorPos;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
|
@ -0,0 +1,45 @@
|
|||
/**
|
||||
* @file VariableSlots.cpp
|
||||
* @brief
|
||||
* @author Richard Roberts
|
||||
* @created Oct 5, 2010
|
||||
*/
|
||||
|
||||
#include <gtsam/inference/VariableSlots.h>
|
||||
|
||||
#include <iostream>
|
||||
#include <boost/foreach.hpp>
|
||||
|
||||
using namespace std;
|
||||
|
||||
namespace gtsam {
|
||||
|
||||
/** print */
|
||||
void VariableSlots::print(const std::string& str) const {
|
||||
if(this->empty())
|
||||
cout << "empty" << endl;
|
||||
else {
|
||||
cout << str << "\n";
|
||||
cout << "Var:\t";
|
||||
BOOST_FOREACH(const value_type& slot, *this) { cout << slot.first << "\t"; }
|
||||
cout << "\n";
|
||||
|
||||
for(size_t i=0; i<this->begin()->second.size(); ++i) {
|
||||
cout << " \t";
|
||||
BOOST_FOREACH(const value_type& slot, *this) {
|
||||
if(slot.second[i] == numeric_limits<varid_t>::max())
|
||||
cout << "x" << "\t";
|
||||
else
|
||||
cout << slot.second[i] << "\t";
|
||||
}
|
||||
cout << "\n";
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/** equals */
|
||||
bool VariableSlots::equals(const VariableSlots& rhs, double tol) const {
|
||||
return *this == rhs;
|
||||
}
|
||||
|
||||
}
|
||||
|
|
@ -0,0 +1,68 @@
|
|||
/**
|
||||
* @file VariableSlots.h
|
||||
* @brief VariableSlots describes the structure of a combined factor in terms of where each block comes from in the source factors.
|
||||
* @author Richard Roberts
|
||||
* @created Oct 4, 2010
|
||||
*/
|
||||
|
||||
#pragma once
|
||||
|
||||
#include <gtsam/base/types.h>
|
||||
#include <gtsam/base/Testable.h>
|
||||
|
||||
#include <map>
|
||||
#include <vector>
|
||||
#include <string>
|
||||
#include <boost/pool/pool_alloc.hpp>
|
||||
|
||||
namespace gtsam {
|
||||
|
||||
/**
|
||||
* A combined factor is assembled as one block of rows for each component
|
||||
* factor. In each row-block (factor), some of the column-blocks (variables)
|
||||
* may be empty since factors involving different sets of variables are
|
||||
* interleaved.
|
||||
*
|
||||
* VariableSlots describes the 2D block structure of the combined factor. It
|
||||
* is essentially a map<varid_t, vector<size_t> >. The varid_t is the real
|
||||
* variable index of the combined factor slot. The vector<size_t> tells, for
|
||||
* each row-block (factor), which column-block (variable slot) from the
|
||||
* component factor appears in this block of the combined factor.
|
||||
*
|
||||
* As an example, if the combined factor contains variables 1, 3, and 5, then
|
||||
* "variableSlots[3][2] == 0" indicates that column-block 1 (corresponding to
|
||||
* variable index 3), row-block 2 (also meaning component factor 2), comes from
|
||||
* column-block 0 of component factor 2.
|
||||
*
|
||||
* Note that in the case of GaussianFactor, the rows of the combined factor are
|
||||
* further sorted so that the column-block position of the first structural
|
||||
* non-zero increases monotonically through the rows. This additional process
|
||||
* is not performed by this class.
|
||||
*/
|
||||
|
||||
// Internal-use-only typedef for the VariableSlots map base class because this is such a long type name
|
||||
typedef std::map<varid_t, std::vector<varid_t>, std::less<varid_t>,
|
||||
boost::fast_pool_allocator<std::pair<const varid_t, std::vector<varid_t> > > > _VariableSlots_map;
|
||||
|
||||
class VariableSlots : public _VariableSlots_map, public Testable<VariableSlots> {
|
||||
|
||||
public:
|
||||
|
||||
typedef _VariableSlots_map Base;
|
||||
|
||||
/**
|
||||
* Constructor from a set of factors to be combined. Sorts the variables
|
||||
* and keeps track of which variable from each factor ends up in each slot
|
||||
* of the combined factor, as described in the class comment.
|
||||
*/
|
||||
template<class FG>
|
||||
VariableSlots(const FG& factorGraph);
|
||||
|
||||
/** print */
|
||||
void print(const std::string& str = "VariableSlots: ") const;
|
||||
|
||||
/** equals */
|
||||
bool equals(const VariableSlots& rhs, double tol = 0.0) const;
|
||||
};
|
||||
|
||||
}
|
||||
|
|
@ -48,46 +48,46 @@ list<Key> predecessorMap2Keys(const PredecessorMap<Key>& p_map) {
|
|||
return keys;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class G, class F, class Key>
|
||||
SDGraph<Key> toBoostGraph(const G& graph) {
|
||||
// convert the factor graph to boost graph
|
||||
SDGraph<Key> g;
|
||||
typedef typename boost::graph_traits<SDGraph<Key> >::vertex_descriptor BoostVertex;
|
||||
map<Key, BoostVertex> key2vertex;
|
||||
BoostVertex v1, v2;
|
||||
typename G::const_iterator itFactor;
|
||||
for(itFactor=graph.begin(); itFactor!=graph.end(); itFactor++) {
|
||||
if ((*itFactor)->keys().size() > 2)
|
||||
throw(invalid_argument("toBoostGraph: only support factors with at most two keys"));
|
||||
|
||||
if ((*itFactor)->keys().size() == 1)
|
||||
continue;
|
||||
|
||||
boost::shared_ptr<F> factor = boost::dynamic_pointer_cast<F>(*itFactor);
|
||||
if (!factor) continue;
|
||||
|
||||
Key key1 = factor->key1();
|
||||
Key key2 = factor->key2();
|
||||
|
||||
if (key2vertex.find(key1) == key2vertex.end()) {
|
||||
v1 = add_vertex(key1, g);
|
||||
key2vertex.insert(make_pair(key1, v1));
|
||||
} else
|
||||
v1 = key2vertex[key1];
|
||||
|
||||
if (key2vertex.find(key2) == key2vertex.end()) {
|
||||
v2 = add_vertex(key2, g);
|
||||
key2vertex.insert(make_pair(key2, v2));
|
||||
} else
|
||||
v2 = key2vertex[key2];
|
||||
|
||||
boost::property<boost::edge_weight_t, double> edge_property(1.0); // assume constant edge weight here
|
||||
boost::add_edge(v1, v2, edge_property, g);
|
||||
}
|
||||
|
||||
return g;
|
||||
}
|
||||
///* ************************************************************************* */
|
||||
//template<class G, class F, class Key>
|
||||
// SL-NEEDED? SDGraph<Key> toBoostGraph(const G& graph) {
|
||||
// // convert the factor graph to boost graph
|
||||
// SDGraph<Key> g;
|
||||
// typedef typename boost::graph_traits<SDGraph<Key> >::vertex_descriptor BoostVertex;
|
||||
// map<Key, BoostVertex> key2vertex;
|
||||
// BoostVertex v1, v2;
|
||||
// typename G::const_iterator itFactor;
|
||||
// for(itFactor=graph.begin(); itFactor!=graph.end(); itFactor++) {
|
||||
// if ((*itFactor)->keys().size() > 2)
|
||||
// throw(invalid_argument("toBoostGraph: only support factors with at most two keys"));
|
||||
//
|
||||
// if ((*itFactor)->keys().size() == 1)
|
||||
// continue;
|
||||
//
|
||||
// boost::shared_ptr<F> factor = boost::dynamic_pointer_cast<F>(*itFactor);
|
||||
// if (!factor) continue;
|
||||
//
|
||||
// Key key1 = factor->key1();
|
||||
// Key key2 = factor->key2();
|
||||
//
|
||||
// if (key2vertex.find(key1) == key2vertex.end()) {
|
||||
// v1 = add_vertex(key1, g);
|
||||
// key2vertex.insert(make_pair(key1, v1));
|
||||
// } else
|
||||
// v1 = key2vertex[key1];
|
||||
//
|
||||
// if (key2vertex.find(key2) == key2vertex.end()) {
|
||||
// v2 = add_vertex(key2, g);
|
||||
// key2vertex.insert(make_pair(key2, v2));
|
||||
// } else
|
||||
// v2 = key2vertex[key2];
|
||||
//
|
||||
// boost::property<boost::edge_weight_t, double> edge_property(1.0); // assume constant edge weight here
|
||||
// boost::add_edge(v1, v2, edge_property, g);
|
||||
// }
|
||||
//
|
||||
// return g;
|
||||
//}
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class G, class V, class Key>
|
||||
|
|
|
|||
|
|
@ -57,13 +57,13 @@ namespace gtsam {
|
|||
template<class Key>
|
||||
std::list<Key> predecessorMap2Keys(const PredecessorMap<Key>& p_map);
|
||||
|
||||
/**
|
||||
* Convert the factor graph to an SDGraph
|
||||
* G = Graph type
|
||||
* F = Factor type
|
||||
* Key = Key type
|
||||
*/
|
||||
template<class G, class F, class Key> SDGraph<Key> toBoostGraph(const G& graph);
|
||||
// /**
|
||||
// * Convert the factor graph to an SDGraph
|
||||
// * G = Graph type
|
||||
// * F = Factor type
|
||||
// * Key = Key type
|
||||
// */
|
||||
// SL-NEEDED? template<class G, class F, class Key> SDGraph<Key> toBoostGraph(const G& graph);
|
||||
|
||||
/**
|
||||
* Build takes a predecessor map, and builds a directed graph corresponding to the tree.
|
||||
|
|
|
|||
|
|
@ -1,89 +1,416 @@
|
|||
/**
|
||||
* @file inference-inl.h
|
||||
* @brief inference template definitions
|
||||
* @author Frank Dellaert
|
||||
* @author Frank Dellaert, Richard Roberts
|
||||
*/
|
||||
|
||||
#pragma once
|
||||
|
||||
#include <gtsam/base/timing.h>
|
||||
#include <gtsam/inference/inference.h>
|
||||
#include <gtsam/inference/FactorGraph-inl.h>
|
||||
#include <gtsam/inference/BayesNet-inl.h>
|
||||
#include <gtsam/inference/Key.h>
|
||||
#include <gtsam/colamd/ccolamd.h>
|
||||
|
||||
#include <boost/foreach.hpp>
|
||||
#include <boost/format.hpp>
|
||||
#include <boost/lambda/bind.hpp>
|
||||
#include <boost/lambda/lambda.hpp>
|
||||
#include <boost/pool/pool_alloc.hpp>
|
||||
#include <limits>
|
||||
#include <map>
|
||||
#include <stdexcept>
|
||||
|
||||
using namespace std;
|
||||
|
||||
namespace gtsam {
|
||||
|
||||
/* ************************************************************************* */
|
||||
/* eliminate one node from the factor graph */
|
||||
/* ************************************************************************* */
|
||||
template<class Factor,class Conditional>
|
||||
boost::shared_ptr<Conditional> eliminateOne(FactorGraph<Factor>& graph, const Symbol& key) {
|
||||
/* ************************************************************************* */
|
||||
template<class FactorGraph>
|
||||
inline typename FactorGraph::bayesnet_type::shared_ptr Inference::Eliminate(const FactorGraph& factorGraph) {
|
||||
|
||||
// combine the factors of all nodes connected to the variable to be eliminated
|
||||
// if no factors are connected to key, returns an empty factor
|
||||
boost::shared_ptr<Factor> joint_factor = removeAndCombineFactors(graph,key);
|
||||
// Create a copy of the factor graph to eliminate in-place
|
||||
FactorGraph eliminationGraph(factorGraph);
|
||||
typename FactorGraph::variableindex_type variableIndex(eliminationGraph);
|
||||
|
||||
// eliminate that joint factor
|
||||
boost::shared_ptr<Factor> factor;
|
||||
boost::shared_ptr<Conditional> conditional;
|
||||
boost::tie(conditional, factor) = joint_factor->eliminate(key);
|
||||
return Eliminate(eliminationGraph, variableIndex);
|
||||
}
|
||||
|
||||
// add new factor on separator back into the graph
|
||||
if (!factor->empty()) graph.push_back(factor);
|
||||
/* ************************************************************************* */
|
||||
template<class Factor>
|
||||
BayesNet<Conditional>::shared_ptr Inference::EliminateSymbolic(const FactorGraph<Factor>& factorGraph) {
|
||||
|
||||
// return the conditional Gaussian
|
||||
return conditional;
|
||||
}
|
||||
// Create a copy of the factor graph to eliminate in-place
|
||||
FactorGraph<gtsam::Factor> eliminationGraph(factorGraph);
|
||||
VariableIndex<> variableIndex(eliminationGraph);
|
||||
|
||||
/* ************************************************************************* */
|
||||
// This doubly templated function is generic. There is a GaussianFactorGraph
|
||||
// version that returns a more specific GaussianBayesNet.
|
||||
// Note, you will need to include this file to instantiate the function.
|
||||
/* ************************************************************************* */
|
||||
template<class Factor,class Conditional>
|
||||
BayesNet<Conditional> eliminate(FactorGraph<Factor>& factorGraph, const Ordering& ordering)
|
||||
{
|
||||
BayesNet<Conditional> bayesNet; // empty
|
||||
typename BayesNet<Conditional>::shared_ptr bayesnet(new BayesNet<Conditional>());
|
||||
|
||||
BOOST_FOREACH(Symbol key, ordering) {
|
||||
boost::shared_ptr<Conditional> cg = eliminateOne<Factor,Conditional>(factorGraph,key);
|
||||
bayesNet.push_back(cg);
|
||||
}
|
||||
// Eliminate variables one-by-one, updating the eliminated factor graph and
|
||||
// the variable index.
|
||||
for(varid_t var = 0; var < variableIndex.size(); ++var) {
|
||||
Conditional::shared_ptr conditional(EliminateOneSymbolic(eliminationGraph, variableIndex, var));
|
||||
if(conditional) // Will be NULL if the variable did not appear in the factor graph.
|
||||
bayesnet->push_back(conditional);
|
||||
}
|
||||
|
||||
return bayesNet;
|
||||
}
|
||||
return bayesnet;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class Factor, class Conditional>
|
||||
pair< BayesNet<Conditional>, FactorGraph<Factor> >
|
||||
factor(const BayesNet<Conditional>& bn, const Ordering& keys) {
|
||||
// Convert to factor graph
|
||||
FactorGraph<Factor> factorGraph(bn);
|
||||
/* ************************************************************************* */
|
||||
template<class FactorGraph>
|
||||
inline typename FactorGraph::bayesnet_type::shared_ptr
|
||||
Inference::Eliminate(FactorGraph& factorGraph, typename FactorGraph::variableindex_type& variableIndex) {
|
||||
|
||||
// Get the keys of all variables and remove all keys we want the marginal for
|
||||
Ordering ord = bn.ordering();
|
||||
BOOST_FOREACH(const Symbol& key, keys) ord.remove(key); // TODO: O(n*k), faster possible?
|
||||
return EliminateUntil(factorGraph, variableIndex.size(), variableIndex);
|
||||
}
|
||||
|
||||
// eliminate partially,
|
||||
BayesNet<Conditional> conditional = eliminate<Factor,Conditional>(factorGraph,ord);
|
||||
/* ************************************************************************* */
|
||||
template<class FactorGraph>
|
||||
inline typename FactorGraph::bayesnet_type::shared_ptr
|
||||
Inference::EliminateUntil(const FactorGraph& factorGraph, varid_t bound) {
|
||||
|
||||
// at this moment, the factor graph only encodes P(keys)
|
||||
return make_pair(conditional,factorGraph);
|
||||
}
|
||||
// Create a copy of the factor graph to eliminate in-place
|
||||
FactorGraph eliminationGraph(factorGraph);
|
||||
typename FactorGraph::variableindex_type variableIndex(eliminationGraph);
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class Factor, class Conditional>
|
||||
FactorGraph<Factor> marginalize(const BayesNet<Conditional>& bn, const Ordering& keys) {
|
||||
return EliminateUntil(eliminationGraph, bound, variableIndex);
|
||||
}
|
||||
|
||||
// factor P(X,Y) as P(X|Y)P(Y), where Y corresponds to keys
|
||||
pair< BayesNet<Conditional>, FactorGraph<Factor> > factors =
|
||||
gtsam::factor<Factor,Conditional>(bn,keys);
|
||||
/* ************************************************************************* */
|
||||
template<class FactorGraph>
|
||||
typename FactorGraph::bayesnet_type::shared_ptr
|
||||
Inference::EliminateUntil(FactorGraph& factorGraph, varid_t bound, typename FactorGraph::variableindex_type& variableIndex) {
|
||||
|
||||
// throw away conditional, return marginal P(Y)
|
||||
return factors.second;
|
||||
}
|
||||
typename FactorGraph::bayesnet_type::shared_ptr bayesnet(new typename FactorGraph::bayesnet_type);
|
||||
|
||||
// Eliminate variables one-by-one, updating the eliminated factor graph and
|
||||
// the variable index.
|
||||
for(varid_t var = 0; var < bound; ++var) {
|
||||
typename FactorGraph::bayesnet_type::sharedConditional conditional(EliminateOne(factorGraph, variableIndex, var));
|
||||
if(conditional) // Will be NULL if the variable did not appear in the factor graph.
|
||||
bayesnet->push_back(conditional);
|
||||
}
|
||||
|
||||
return bayesnet;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class FactorGraph>
|
||||
typename FactorGraph::bayesnet_type::sharedConditional
|
||||
Inference::EliminateOne(FactorGraph& factorGraph, typename FactorGraph::variableindex_type& variableIndex, varid_t var) {
|
||||
|
||||
/* This function performs symbolic elimination of a variable, comprising
|
||||
* combining involved factors (analogous to "assembly" in SPQR) followed by
|
||||
* eliminating to an upper-trapezoidal factor using spqr_front. This
|
||||
* function performs the bookkeeping necessary for high performance.
|
||||
*
|
||||
* When combining factors, variables are merge sorted so that they remain
|
||||
* in elimination order in the combined factor. GaussianFactor combines
|
||||
* rows such that the row index after the last structural non-zero in each
|
||||
* column increases monotonically (referred to as the "staircase" pattern in
|
||||
* SPQR). The variable ordering is passed into the factor's Combine(...)
|
||||
* function, which does the work of actually building the combined factor
|
||||
* (for a GaussianFactor this assembles the augmented matrix).
|
||||
*
|
||||
* Next, this function calls the factor's eliminateFirst() function, which
|
||||
* factorizes the factor into a conditional on the first variable and a
|
||||
* factor on the remaining variables. In addition, this function updates the
|
||||
* bookkeeping of the pattern of structural non-zeros. The GaussianFactor
|
||||
* calls spqr_front during eliminateFirst(), which reduces its matrix to
|
||||
* upper-trapezoidal form.
|
||||
*
|
||||
* Returns NULL if the variable does not appear in factorGraph.
|
||||
*/
|
||||
|
||||
tic("EliminateOne");
|
||||
|
||||
// Get the factors involving the eliminated variable
|
||||
typename FactorGraph::variableindex_type::mapped_type& varIndexEntry(variableIndex[var]);
|
||||
typedef typename FactorGraph::variableindex_type::mapped_factor_type mapped_factor_type;
|
||||
|
||||
if(!varIndexEntry.empty()) {
|
||||
|
||||
vector<size_t> removedFactors(varIndexEntry.size());
|
||||
transform(varIndexEntry.begin(), varIndexEntry.end(), removedFactors.begin(),
|
||||
boost::lambda::bind(&FactorGraph::variableindex_type::mapped_factor_type::factorIndex, boost::lambda::_1));
|
||||
|
||||
// The new joint factor will be the last one in the factor graph
|
||||
size_t jointFactorIndex = factorGraph.size();
|
||||
|
||||
static const bool debug = false;
|
||||
|
||||
if(debug) {
|
||||
cout << "Eliminating " << var;
|
||||
factorGraph.print(" from graph: ");
|
||||
cout << removedFactors.size() << " factors to remove" << endl;
|
||||
}
|
||||
|
||||
// Compute the involved keys, uses the variableIndex to mark whether each
|
||||
// key has been added yet, but the positions stored in the variableIndex are
|
||||
// from the unsorted positions and will be fixed later.
|
||||
tic("EliminateOne: Find involved vars");
|
||||
map<varid_t, size_t, std::less<varid_t>, boost::fast_pool_allocator<pair<const varid_t,size_t> > > involvedKeys; // Variable and original order as discovered
|
||||
BOOST_FOREACH(size_t removedFactorI, removedFactors) {
|
||||
if(debug) cout << removedFactorI << " is involved" << endl;
|
||||
// If the factor has not previously been removed
|
||||
if(removedFactorI < factorGraph.size() && factorGraph[removedFactorI]) {
|
||||
// Loop over the variables involved in the removed factor to update the
|
||||
// variable index and joint factor positions of each variable.
|
||||
BOOST_FOREACH(varid_t involvedVariable, factorGraph[removedFactorI]->keys()) {
|
||||
// Mark the new joint factor as involving each variable in the removed factor.
|
||||
assert(!variableIndex[involvedVariable].empty());
|
||||
if(variableIndex[involvedVariable].back().factorIndex != jointFactorIndex) {
|
||||
if(debug) cout << " pulls in variable " << involvedVariable << endl;
|
||||
size_t varpos = involvedKeys.size();
|
||||
variableIndex[involvedVariable].push_back(mapped_factor_type(jointFactorIndex, varpos));
|
||||
#ifndef NDEBUG
|
||||
bool inserted =
|
||||
#endif
|
||||
involvedKeys.insert(make_pair(involvedVariable, varpos)).second;
|
||||
assert(inserted);
|
||||
} else if(debug)
|
||||
cout << " involves variable " << involvedVariable << " which was previously discovered" << endl;
|
||||
}
|
||||
}
|
||||
}
|
||||
toc("EliminateOne: Find involved vars");
|
||||
if(debug) cout << removedFactors.size() << " factors to remove" << endl;
|
||||
|
||||
// Compute the permutation to go from the original varpos to the sorted
|
||||
// joint factor varpos
|
||||
if(debug) cout << "Sorted keys:";
|
||||
tic("EliminateOne: Sort involved vars");
|
||||
vector<size_t> varposPermutation(involvedKeys.size(), numeric_limits<size_t>::max());
|
||||
vector<varid_t> sortedKeys(involvedKeys.size());
|
||||
{
|
||||
size_t sortedVarpos = 0;
|
||||
const map<varid_t, size_t, std::less<varid_t>, boost::fast_pool_allocator<pair<const varid_t,size_t> > >& involvedKeysC(involvedKeys);
|
||||
for(map<varid_t, size_t, std::less<varid_t>, boost::fast_pool_allocator<pair<const varid_t,size_t> > >::const_iterator key_pos=involvedKeysC.begin(); key_pos!=involvedKeysC.end(); ++key_pos) {
|
||||
sortedKeys[sortedVarpos] = key_pos->first;
|
||||
assert(varposPermutation[key_pos->second] == numeric_limits<size_t>::max());
|
||||
varposPermutation[key_pos->second] = sortedVarpos;
|
||||
if(debug) cout << " " << key_pos->first << " (" << key_pos->second << "->" << sortedVarpos << ") ";
|
||||
++ sortedVarpos;
|
||||
}
|
||||
}
|
||||
toc("EliminateOne: Sort involved vars");
|
||||
if(debug) cout << endl;
|
||||
|
||||
assert(sortedKeys.front() == var);
|
||||
if(debug) cout << removedFactors.size() << " factors to remove" << endl;
|
||||
|
||||
// Fix the variable positions in the variableIndex
|
||||
tic("EliminateOne: Fix varIndex");
|
||||
for(size_t sortedPos=0; sortedPos<sortedKeys.size(); ++sortedPos) {
|
||||
varid_t var = sortedKeys[sortedPos];
|
||||
assert(!variableIndex[var].empty());
|
||||
assert(variableIndex[var].back().factorIndex == jointFactorIndex);
|
||||
assert(sortedPos == varposPermutation[variableIndex[var].back().variablePosition]);
|
||||
if(debug) cout << "Fixing " << var << " " << variableIndex[var].back().variablePosition << "->" << sortedPos << endl;
|
||||
variableIndex[var].back().variablePosition = sortedPos;
|
||||
}
|
||||
toc("EliminateOne: Fix varIndex");
|
||||
|
||||
// Fill in the jointFactorPositions
|
||||
tic("EliminateOne: Fill jointFactorPositions");
|
||||
vector<size_t> removedFactorIdxs;
|
||||
removedFactorIdxs.reserve(removedFactors.size());
|
||||
vector<vector<size_t> > jointFactorPositions;
|
||||
jointFactorPositions.reserve(removedFactors.size());
|
||||
if(debug) cout << removedFactors.size() << " factors to remove" << endl;
|
||||
BOOST_FOREACH(size_t removedFactorI, removedFactors) {
|
||||
if(debug) cout << "Fixing variable positions for factor " << removedFactorI << endl;
|
||||
// If the factor has not previously been removed
|
||||
if(removedFactorI < factorGraph.size() && factorGraph[removedFactorI]) {
|
||||
|
||||
// Allocate space
|
||||
jointFactorPositions.push_back(vector<size_t>());
|
||||
vector<size_t>& jointFactorPositionsCur(jointFactorPositions.back());
|
||||
jointFactorPositionsCur.reserve(factorGraph[removedFactorI]->keys().size());
|
||||
removedFactorIdxs.push_back(removedFactorI);
|
||||
|
||||
// Loop over the variables involved in the removed factor to update the
|
||||
// variable index and joint factor positions of each variable.
|
||||
BOOST_FOREACH(varid_t involvedVariable, factorGraph[removedFactorI]->keys()) {
|
||||
// Mark the new joint factor as involving each variable in the removed factor
|
||||
assert(!variableIndex[involvedVariable].empty());
|
||||
assert(variableIndex[involvedVariable].back().factorIndex == jointFactorIndex);
|
||||
const size_t varpos = variableIndex[involvedVariable].back().variablePosition;
|
||||
jointFactorPositionsCur.push_back(varpos);
|
||||
if(debug) cout << "Variable " << involvedVariable << " from factor " << removedFactorI;
|
||||
if(debug) cout << " goes in position " << varpos << " of the joint factor" << endl;
|
||||
assert(sortedKeys[varpos] == involvedVariable);
|
||||
}
|
||||
}
|
||||
}
|
||||
toc("EliminateOne: Fill jointFactorPositions");
|
||||
|
||||
// Join the factors and eliminate the variable from the joint factor
|
||||
tic("EliminateOne: Combine");
|
||||
typename FactorGraph::sharedFactor jointFactor(FactorGraph::factor_type::Combine(factorGraph, variableIndex, removedFactorIdxs, sortedKeys, jointFactorPositions));
|
||||
toc("EliminateOne: Combine");
|
||||
|
||||
// Remove the original factors
|
||||
BOOST_FOREACH(size_t removedFactorI, removedFactors) {
|
||||
if(removedFactorI < factorGraph.size() && factorGraph[removedFactorI])
|
||||
factorGraph.remove(removedFactorI);
|
||||
}
|
||||
|
||||
typename FactorGraph::bayesnet_type::sharedConditional conditional;
|
||||
tic("EliminateOne: eliminateFirst");
|
||||
conditional = jointFactor->eliminateFirst(); // Eliminate the first variable in-place
|
||||
toc("EliminateOne: eliminateFirst");
|
||||
tic("EliminateOne: store eliminated");
|
||||
variableIndex[sortedKeys.front()].pop_back(); // Unmark the joint factor from involving the eliminated variable
|
||||
factorGraph.push_back(jointFactor); // Put the eliminated factor into the factor graph
|
||||
toc("EliminateOne: store eliminated");
|
||||
|
||||
toc("EliminateOne");
|
||||
|
||||
return conditional;
|
||||
|
||||
} else { // varIndexEntry.empty()
|
||||
toc("EliminateOne");
|
||||
return typename FactorGraph::bayesnet_type::sharedConditional();
|
||||
}
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class VariableIndexType, typename ConstraintContainer>
|
||||
Permutation::shared_ptr Inference::PermutationCOLAMD(const VariableIndexType& variableIndex, const ConstraintContainer& constrainLast) {
|
||||
size_t nEntries = variableIndex.nEntries(), nFactors = variableIndex.nFactors(), nVars = variableIndex.size();
|
||||
// Convert to compressed column major format colamd wants it in (== MATLAB format!)
|
||||
int Alen = ccolamd_recommended(nEntries, nFactors, nVars); /* colamd arg 3: size of the array A */
|
||||
int * A = new int[Alen]; /* colamd arg 4: row indices of A, of size Alen */
|
||||
int * p = new int[nVars + 1]; /* colamd arg 5: column pointers of A, of size n_col+1 */
|
||||
int * cmember = new int[nVars]; /* Constraint set of A, of size n_col */
|
||||
|
||||
static const bool debug = false;
|
||||
|
||||
p[0] = 0;
|
||||
int count = 0;
|
||||
for(varid_t var = 0; var < variableIndex.size(); ++var) {
|
||||
const typename VariableIndexType::mapped_type& column(variableIndex[var]);
|
||||
size_t lastFactorId = numeric_limits<size_t>::max();
|
||||
BOOST_FOREACH(const typename VariableIndexType::mapped_factor_type& factor_pos, column) {
|
||||
if(lastFactorId != numeric_limits<size_t>::max())
|
||||
assert(factor_pos.factorIndex > lastFactorId);
|
||||
A[count++] = factor_pos.factorIndex; // copy sparse column
|
||||
if(debug) cout << "A[" << count-1 << "] = " << factor_pos.factorIndex << endl;
|
||||
}
|
||||
p[var+1] = count; // column j (base 1) goes from A[j-1] to A[j]-1
|
||||
cmember[var] = 0;
|
||||
}
|
||||
|
||||
// If at least some variables are not constrained to be last, constrain the
|
||||
// ones that should be constrained.
|
||||
if(constrainLast.size() < variableIndex.size()) {
|
||||
BOOST_FOREACH(varid_t var, constrainLast) {
|
||||
assert(var < nVars);
|
||||
cmember[var] = 1;
|
||||
}
|
||||
}
|
||||
|
||||
assert((size_t)count == variableIndex.nEntries());
|
||||
|
||||
if(debug)
|
||||
for(size_t i=0; i<nVars+1; ++i)
|
||||
cout << "p[" << i << "] = " << p[i] << endl;
|
||||
|
||||
double* knobs = NULL; /* colamd arg 6: parameters (uses defaults if NULL) */
|
||||
int stats[CCOLAMD_STATS]; /* colamd arg 7: colamd output statistics and error codes */
|
||||
|
||||
// call colamd, result will be in p
|
||||
/* returns (1) if successful, (0) otherwise*/
|
||||
int rv = ccolamd(nFactors, nVars, Alen, A, p, knobs, stats, cmember);
|
||||
if(rv != 1)
|
||||
throw runtime_error((boost::format("ccolamd failed with return value %1%")%rv).str());
|
||||
delete[] A; // delete symbolic A
|
||||
delete[] cmember;
|
||||
|
||||
// Convert elimination ordering in p to an ordering
|
||||
Permutation::shared_ptr permutation(new Permutation(nVars));
|
||||
for (varid_t j = 0; j < nVars; j++) {
|
||||
permutation->operator[](j) = p[j];
|
||||
if(debug) cout << "COLAMD: " << j << "->" << p[j] << endl;
|
||||
}
|
||||
if(debug) cout << "COLAMD: p[" << nVars << "] = " << p[nVars] << endl;
|
||||
delete[] p; // delete colamd result vector
|
||||
|
||||
return permutation;
|
||||
}
|
||||
|
||||
|
||||
// /* ************************************************************************* */
|
||||
// /* eliminate one node from the factor graph */
|
||||
// /* ************************************************************************* */
|
||||
// template<class Factor,class Conditional>
|
||||
// boost::shared_ptr<Conditional> eliminateOne(FactorGraph<Factor>& graph, varid_t key) {
|
||||
//
|
||||
// // combine the factors of all nodes connected to the variable to be eliminated
|
||||
// // if no factors are connected to key, returns an empty factor
|
||||
// boost::shared_ptr<Factor> joint_factor = removeAndCombineFactors(graph,key);
|
||||
//
|
||||
// // eliminate that joint factor
|
||||
// boost::shared_ptr<Factor> factor;
|
||||
// boost::shared_ptr<Conditional> conditional;
|
||||
// boost::tie(conditional, factor) = joint_factor->eliminate(key);
|
||||
//
|
||||
// // add new factor on separator back into the graph
|
||||
// if (!factor->empty()) graph.push_back(factor);
|
||||
//
|
||||
// // return the conditional Gaussian
|
||||
// return conditional;
|
||||
// }
|
||||
//
|
||||
// /* ************************************************************************* */
|
||||
// // This doubly templated function is generic. There is a GaussianFactorGraph
|
||||
// // version that returns a more specific GaussianBayesNet.
|
||||
// // Note, you will need to include this file to instantiate the function.
|
||||
// /* ************************************************************************* */
|
||||
// template<class Factor,class Conditional>
|
||||
// BayesNet<Conditional> eliminate(FactorGraph<Factor>& factorGraph, const Ordering& ordering)
|
||||
// {
|
||||
// BayesNet<Conditional> bayesNet; // empty
|
||||
//
|
||||
// BOOST_FOREACH(varid_t key, ordering) {
|
||||
// boost::shared_ptr<Conditional> cg = eliminateOne<Factor,Conditional>(factorGraph,key);
|
||||
// bayesNet.push_back(cg);
|
||||
// }
|
||||
//
|
||||
// return bayesNet;
|
||||
// }
|
||||
|
||||
// /* ************************************************************************* */
|
||||
// template<class Factor, class Conditional>
|
||||
// pair< BayesNet<Conditional>, FactorGraph<Factor> >
|
||||
// factor(const BayesNet<Conditional>& bn, const Ordering& keys) {
|
||||
// // Convert to factor graph
|
||||
// FactorGraph<Factor> factorGraph(bn);
|
||||
//
|
||||
// // Get the keys of all variables and remove all keys we want the marginal for
|
||||
// Ordering ord = bn.ordering();
|
||||
// BOOST_FOREACH(varid_t key, keys) ord.remove(key); // TODO: O(n*k), faster possible?
|
||||
//
|
||||
// // eliminate partially,
|
||||
// BayesNet<Conditional> conditional = eliminate<Factor,Conditional>(factorGraph,ord);
|
||||
//
|
||||
// // at this moment, the factor graph only encodes P(keys)
|
||||
// return make_pair(conditional,factorGraph);
|
||||
// }
|
||||
//
|
||||
// /* ************************************************************************* */
|
||||
// template<class Factor, class Conditional>
|
||||
// FactorGraph<Factor> marginalize(const BayesNet<Conditional>& bn, const Ordering& keys) {
|
||||
//
|
||||
// // factor P(X,Y) as P(X|Y)P(Y), where Y corresponds to keys
|
||||
// pair< BayesNet<Conditional>, FactorGraph<Factor> > factors =
|
||||
// gtsam::factor<Factor,Conditional>(bn,keys);
|
||||
//
|
||||
// // throw away conditional, return marginal P(Y)
|
||||
// return factors.second;
|
||||
// }
|
||||
|
||||
/* ************************************************************************* */
|
||||
// pair<Vector,Matrix> marginalGaussian(const GaussianFactorGraph& fg, const Symbol& key) {
|
||||
|
|
|
|||
|
|
@ -0,0 +1,93 @@
|
|||
/**
|
||||
* @file inference-inl.h
|
||||
* @brief inference definitions
|
||||
* @author Frank Dellaert, Richard Roberts
|
||||
*/
|
||||
|
||||
#include <gtsam/inference/inference-inl.h>
|
||||
|
||||
#include <gtsam/inference/SymbolicFactorGraph.h>
|
||||
|
||||
namespace gtsam {
|
||||
|
||||
|
||||
/* ************************************************************************* */
|
||||
Conditional::shared_ptr
|
||||
Inference::EliminateOneSymbolic(FactorGraph<Factor>& factorGraph, VariableIndex<>& variableIndex, varid_t var) {
|
||||
|
||||
tic("EliminateOne");
|
||||
|
||||
// Get the factors involving the eliminated variable
|
||||
typename VariableIndex<>::mapped_type& varIndexEntry(variableIndex[var]);
|
||||
typedef typename VariableIndex<>::mapped_factor_type mapped_factor_type;
|
||||
|
||||
if(!varIndexEntry.empty()) {
|
||||
|
||||
vector<size_t> removedFactors(varIndexEntry.size());
|
||||
transform(varIndexEntry.begin(), varIndexEntry.end(), removedFactors.begin(),
|
||||
boost::lambda::bind(&VariableIndex<>::mapped_factor_type::factorIndex, boost::lambda::_1));
|
||||
|
||||
// The new joint factor will be the last one in the factor graph
|
||||
size_t jointFactorIndex = factorGraph.size();
|
||||
|
||||
static const bool debug = false;
|
||||
|
||||
if(debug) {
|
||||
cout << "Eliminating " << var;
|
||||
factorGraph.print(" from graph: ");
|
||||
cout << removedFactors.size() << " factors to remove" << endl;
|
||||
}
|
||||
|
||||
// Compute the involved keys, uses the variableIndex to mark whether each
|
||||
// key has been added yet, but the positions stored in the variableIndex are
|
||||
// from the unsorted positions and will be fixed later.
|
||||
tic("EliminateOne: Find involved vars");
|
||||
typedef set<varid_t, std::less<varid_t>, boost::fast_pool_allocator<varid_t> > InvolvedKeys;
|
||||
InvolvedKeys involvedKeys;
|
||||
BOOST_FOREACH(size_t removedFactorI, removedFactors) {
|
||||
if(debug) cout << removedFactorI << " is involved" << endl;
|
||||
// If the factor has not previously been removed
|
||||
if(removedFactorI < factorGraph.size() && factorGraph[removedFactorI]) {
|
||||
// Loop over the variables involved in the removed factor to update the
|
||||
// variable index and joint factor positions of each variable.
|
||||
BOOST_FOREACH(varid_t involvedVariable, factorGraph[removedFactorI]->keys()) {
|
||||
if(debug) cout << " pulls in variable " << involvedVariable << endl;
|
||||
// Mark the new joint factor as involving each variable in the removed factor.
|
||||
assert(!variableIndex[involvedVariable].empty());
|
||||
involvedKeys.insert(involvedVariable);
|
||||
}
|
||||
}
|
||||
|
||||
// Remove the original factor
|
||||
factorGraph.remove(removedFactorI);
|
||||
}
|
||||
|
||||
// We need only mark the next variable to be eliminated as involved with the joint factor
|
||||
if(involvedKeys.size() > 1) {
|
||||
InvolvedKeys::const_iterator next = involvedKeys.begin(); ++ next;
|
||||
variableIndex[*next].push_back(mapped_factor_type(jointFactorIndex,0));
|
||||
}
|
||||
toc("EliminateOne: Find involved vars");
|
||||
if(debug) cout << removedFactors.size() << " factors to remove" << endl;
|
||||
|
||||
// Join the factors and eliminate the variable from the joint factor
|
||||
tic("EliminateOne: Combine");
|
||||
Conditional::shared_ptr conditional(new Conditional(involvedKeys.begin(), involvedKeys.end(), 1));
|
||||
Factor::shared_ptr eliminated(new Factor(conditional->beginParents(), conditional->endParents()));
|
||||
toc("EliminateOne: Combine");
|
||||
|
||||
tic("EliminateOne: store eliminated");
|
||||
factorGraph.push_back(eliminated); // Put the eliminated factor into the factor graph
|
||||
toc("EliminateOne: store eliminated");
|
||||
|
||||
toc("EliminateOne");
|
||||
|
||||
return conditional;
|
||||
|
||||
} else { // varIndexEntry.empty()
|
||||
toc("EliminateOne");
|
||||
return Conditional::shared_ptr();
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
|
@ -2,54 +2,143 @@
|
|||
* @file inference.h
|
||||
* @brief Contains *generic* inference algorithms that convert between templated
|
||||
* graphical models, i.e., factor graphs, Bayes nets, and Bayes trees
|
||||
* @author Frank Dellaert
|
||||
* @author Frank Dellaert, Richard Roberts
|
||||
*/
|
||||
|
||||
#pragma once
|
||||
|
||||
#include <gtsam/base/types.h>
|
||||
#include <gtsam/inference/FactorGraph.h>
|
||||
#include <gtsam/inference/BayesNet.h>
|
||||
#include <gtsam/inference/Key.h>
|
||||
#include <gtsam/inference/VariableIndex.h>
|
||||
#include <gtsam/inference/Permutation.h>
|
||||
|
||||
#include <vector>
|
||||
#include <deque>
|
||||
|
||||
namespace gtsam {
|
||||
|
||||
class Ordering;
|
||||
class GaussianFactorGraph;
|
||||
class Factor;
|
||||
class Conditional;
|
||||
|
||||
class Inference {
|
||||
private:
|
||||
/* Static members only, private constructor */
|
||||
Inference() {}
|
||||
|
||||
public:
|
||||
|
||||
/**
|
||||
* Eliminate a factor graph in its natural ordering, i.e. eliminating
|
||||
* variables in order starting from 0.
|
||||
*/
|
||||
template<class FactorGraph>
|
||||
static typename FactorGraph::bayesnet_type::shared_ptr Eliminate(const FactorGraph& factorGraph);
|
||||
|
||||
/**
|
||||
* Eliminate a factor graph in its natural ordering, i.e. eliminating
|
||||
* variables in order starting from 0. Special fast version for symbolic
|
||||
* elimination.
|
||||
*/
|
||||
template<class Factor>
|
||||
static BayesNet<Conditional>::shared_ptr EliminateSymbolic(const FactorGraph<Factor>& factorGraph);
|
||||
|
||||
/**
|
||||
* Eliminate a factor graph in its natural ordering, i.e. eliminating
|
||||
* variables in order starting from 0. Uses an existing
|
||||
* variable index instead of building one from scratch.
|
||||
*/
|
||||
template<class FactorGraph>
|
||||
static typename FactorGraph::bayesnet_type::shared_ptr Eliminate(
|
||||
FactorGraph& factorGraph, typename FactorGraph::variableindex_type& variableIndex);
|
||||
|
||||
/**
|
||||
* Partially eliminate a factor graph, up to but not including the given
|
||||
* variable.
|
||||
*/
|
||||
template<class FactorGraph>
|
||||
static typename FactorGraph::bayesnet_type::shared_ptr
|
||||
EliminateUntil(const FactorGraph& factorGraph, varid_t bound);
|
||||
|
||||
/**
|
||||
* Partially eliminate a factor graph, up to but not including the given
|
||||
* variable. Use an existing variable index instead of building one from
|
||||
* scratch.
|
||||
*/
|
||||
template<class FactorGraph>
|
||||
static typename FactorGraph::bayesnet_type::shared_ptr
|
||||
EliminateUntil(FactorGraph& factorGraph, varid_t bound, typename FactorGraph::variableindex_type& variableIndex);
|
||||
|
||||
/**
|
||||
* Eliminate a single variable, updating an existing factor graph and
|
||||
* variable index.
|
||||
*/
|
||||
template<class FactorGraph>
|
||||
static typename FactorGraph::bayesnet_type::sharedConditional
|
||||
EliminateOne(FactorGraph& factorGraph, typename FactorGraph::variableindex_type& variableIndex, varid_t var);
|
||||
|
||||
/**
|
||||
* Eliminate a single variable, updating an existing factor graph and
|
||||
* variable index. This is a specialized faster version for purely
|
||||
* symbolic factor graphs.
|
||||
*/
|
||||
static boost::shared_ptr<Conditional>
|
||||
EliminateOneSymbolic(FactorGraph<Factor>& factorGraph, VariableIndex<>& variableIndex, varid_t var);
|
||||
|
||||
/**
|
||||
* Compute a permutation (variable ordering) using colamd
|
||||
*/
|
||||
template<class VariableIndexType>
|
||||
static boost::shared_ptr<Permutation> PermutationCOLAMD(const VariableIndexType& variableIndex) { return PermutationCOLAMD(variableIndex, std::vector<varid_t>()); }
|
||||
template<class VariableIndexType, typename ConstraintContainer>
|
||||
static boost::shared_ptr<Permutation> PermutationCOLAMD(const VariableIndexType& variableIndex, const ConstraintContainer& constrainLast);
|
||||
|
||||
// /**
|
||||
// * Join several factors into one. This involves determining the set of
|
||||
// * shared variables and the correct variable positions in the new joint
|
||||
// * factor.
|
||||
// */
|
||||
// template<class FactorGraph, typename InputIterator>
|
||||
// static typename FactorGraph::shared_factor Combine(const FactorGraph& factorGraph,
|
||||
// InputIterator indicesBegin, InputIterator indicesEnd);
|
||||
|
||||
|
||||
};
|
||||
|
||||
// ELIMINATE: FACTOR GRAPH -> BAYES NET
|
||||
|
||||
/**
|
||||
* Eliminate a single node yielding a Conditional
|
||||
* Eliminates the factors from the factor graph through findAndRemoveFactors
|
||||
* and adds a new factor on the separator to the factor graph
|
||||
*/
|
||||
template<class Factor, class Conditional>
|
||||
boost::shared_ptr<Conditional>
|
||||
eliminateOne(FactorGraph<Factor>& factorGraph, const Symbol& key);
|
||||
|
||||
/**
|
||||
* eliminate factor graph using the given (not necessarily complete)
|
||||
* ordering, yielding a chordal Bayes net and (partially eliminated) FG
|
||||
*/
|
||||
template<class Factor, class Conditional>
|
||||
BayesNet<Conditional> eliminate(FactorGraph<Factor>& factorGraph, const Ordering& ordering);
|
||||
// /**
|
||||
// * Eliminate a single node yielding a Conditional
|
||||
// * Eliminates the factors from the factor graph through findAndRemoveFactors
|
||||
// * and adds a new factor on the separator to the factor graph
|
||||
// */
|
||||
// template<class Factor, class Conditional>
|
||||
// boost::shared_ptr<Conditional>
|
||||
// eliminateOne(FactorGraph<Factor>& factorGraph, varid_t key);
|
||||
//
|
||||
// /**
|
||||
// * eliminate factor graph using the given (not necessarily complete)
|
||||
// * ordering, yielding a chordal Bayes net and (partially eliminated) FG
|
||||
// */
|
||||
// template<class Factor, class Conditional>
|
||||
// BayesNet<Conditional> eliminate(FactorGraph<Factor>& factorGraph, const Ordering& ordering);
|
||||
|
||||
// FACTOR/MARGINALIZE: BAYES NET -> FACTOR GRAPH
|
||||
|
||||
/**
|
||||
* Factor P(X) as P(not keys|keys) P(keys)
|
||||
* @return P(not keys|keys) as an incomplete BayesNet, and P(keys) as a factor graph
|
||||
*/
|
||||
template<class Factor, class Conditional>
|
||||
std::pair< BayesNet<Conditional>, FactorGraph<Factor> >
|
||||
factor(const BayesNet<Conditional>& bn, const Ordering& keys);
|
||||
|
||||
/**
|
||||
* integrate out all except ordering, might be inefficient as the ordering
|
||||
* will simply be the current ordering with the keys put in the back
|
||||
*/
|
||||
template<class Factor, class Conditional>
|
||||
FactorGraph<Factor> marginalize(const BayesNet<Conditional>& bn, const Ordering& keys);
|
||||
// /**
|
||||
// * Factor P(X) as P(not keys|keys) P(keys)
|
||||
// * @return P(not keys|keys) as an incomplete BayesNet, and P(keys) as a factor graph
|
||||
// */
|
||||
// template<class Factor, class Conditional>
|
||||
// std::pair< BayesNet<Conditional>, FactorGraph<Factor> >
|
||||
// factor(const BayesNet<Conditional>& bn, const Ordering& keys);
|
||||
//
|
||||
// /**
|
||||
// * integrate out all except ordering, might be inefficient as the ordering
|
||||
// * will simply be the current ordering with the keys put in the back
|
||||
// */
|
||||
// template<class Factor, class Conditional>
|
||||
// FactorGraph<Factor> marginalize(const BayesNet<Conditional>& bn, const Ordering& keys);
|
||||
|
||||
/**
|
||||
* Hacked-together function to compute a Gaussian marginal for the given variable.
|
||||
|
|
|
|||
|
|
@ -10,61 +10,61 @@
|
|||
using namespace boost::assign;
|
||||
|
||||
#include <gtsam/CppUnitLite/TestHarness.h>
|
||||
#include <gtsam/base/TestableAssertions.h>
|
||||
|
||||
#include <gtsam/inference/SymbolicBayesNet.h>
|
||||
#include <gtsam/inference/SymbolicFactorGraph.h>
|
||||
#include <gtsam/inference/Ordering.h>
|
||||
#include <gtsam/inference/BayesTree-inl.h>
|
||||
#include <gtsam/inference/IndexTable.h>
|
||||
#include <gtsam/inference/Factor-inl.h>
|
||||
#include <gtsam/inference/inference-inl.h>
|
||||
|
||||
using namespace gtsam;
|
||||
|
||||
typedef BayesTree<SymbolicConditional> SymbolicBayesTree;
|
||||
typedef BayesTree<Conditional> SymbolicBayesTree;
|
||||
|
||||
/* ************************************************************************* */
|
||||
// SLAM example from RSS sqrtSAM paper
|
||||
Symbol _x1_('x', 1), _x2_('x', 2), _x3_('x', 3), _l1_('l', 1), _l2_('l', 2);
|
||||
SymbolicConditional::shared_ptr
|
||||
x3(new SymbolicConditional(_x3_)),
|
||||
x2(new SymbolicConditional(_x2_,_x3_)),
|
||||
x1(new SymbolicConditional(_x1_,_x2_,_x3_)),
|
||||
l1(new SymbolicConditional(_l1_,_x1_,_x2_)),
|
||||
l2(new SymbolicConditional(_l2_,_x1_,_x3_));
|
||||
|
||||
// Bayes Tree for sqrtSAM example
|
||||
SymbolicBayesTree createSlamSymbolicBayesTree(){
|
||||
// Create using insert
|
||||
Ordering slamOrdering; slamOrdering += _x3_, _x2_, _x1_, _l2_, _l1_;
|
||||
SymbolicBayesTree bayesTree_slam;
|
||||
bayesTree_slam.insert(x3,slamOrdering);
|
||||
bayesTree_slam.insert(x2,slamOrdering);
|
||||
bayesTree_slam.insert(x1,slamOrdering);
|
||||
bayesTree_slam.insert(l2,slamOrdering);
|
||||
bayesTree_slam.insert(l1,slamOrdering);
|
||||
return bayesTree_slam;
|
||||
}
|
||||
///* ************************************************************************* */
|
||||
//// SLAM example from RSS sqrtSAM paper
|
||||
static const varid_t _x3_=0, _x2_=1, _x1_=2, _l2_=3, _l1_=4;
|
||||
//Conditional::shared_ptr
|
||||
// x3(new Conditional(_x3_)),
|
||||
// x2(new Conditional(_x2_,_x3_)),
|
||||
// x1(new Conditional(_x1_,_x2_,_x3_)),
|
||||
// l1(new Conditional(_l1_,_x1_,_x2_)),
|
||||
// l2(new Conditional(_l2_,_x1_,_x3_));
|
||||
//
|
||||
//// Bayes Tree for sqrtSAM example
|
||||
//SymbolicBayesTree createSlamSymbolicBayesTree(){
|
||||
// // Create using insert
|
||||
//// Ordering slamOrdering; slamOrdering += _x3_, _x2_, _x1_, _l2_, _l1_;
|
||||
// SymbolicBayesTree bayesTree_slam;
|
||||
// bayesTree_slam.insert(x3);
|
||||
// bayesTree_slam.insert(x2);
|
||||
// bayesTree_slam.insert(x1);
|
||||
// bayesTree_slam.insert(l2);
|
||||
// bayesTree_slam.insert(l1);
|
||||
// return bayesTree_slam;
|
||||
//}
|
||||
|
||||
/* ************************************************************************* */
|
||||
// Conditionals for ASIA example from the tutorial with A and D evidence
|
||||
Symbol _B_('B', 0), _L_('L', 0), _E_('E', 0), _S_('S', 0), _T_('T', 0), _X_('X',0);
|
||||
SymbolicConditional::shared_ptr
|
||||
B(new SymbolicConditional(_B_)),
|
||||
L(new SymbolicConditional(_L_, _B_)),
|
||||
E(new SymbolicConditional(_E_, _B_, _L_)),
|
||||
S(new SymbolicConditional(_S_, _L_, _B_)),
|
||||
T(new SymbolicConditional(_T_, _E_, _L_)),
|
||||
X(new SymbolicConditional(_X_, _E_));
|
||||
static const varid_t _X_=0, _T_=1, _S_=2, _E_=3, _L_=4, _B_=5;
|
||||
Conditional::shared_ptr
|
||||
B(new Conditional(_B_)),
|
||||
L(new Conditional(_L_, _B_)),
|
||||
E(new Conditional(_E_, _L_, _B_)),
|
||||
S(new Conditional(_S_, _L_, _B_)),
|
||||
T(new Conditional(_T_, _E_, _L_)),
|
||||
X(new Conditional(_X_, _E_));
|
||||
|
||||
// Bayes Tree for Asia example
|
||||
SymbolicBayesTree createAsiaSymbolicBayesTree() {
|
||||
SymbolicBayesTree bayesTree;
|
||||
Ordering asiaOrdering; asiaOrdering += _X_, _T_, _S_, _E_, _L_, _B_;
|
||||
bayesTree.insert(B,asiaOrdering);
|
||||
bayesTree.insert(L,asiaOrdering);
|
||||
bayesTree.insert(E,asiaOrdering);
|
||||
bayesTree.insert(S,asiaOrdering);
|
||||
bayesTree.insert(T,asiaOrdering);
|
||||
bayesTree.insert(X,asiaOrdering);
|
||||
// Ordering asiaOrdering; asiaOrdering += _X_, _T_, _S_, _E_, _L_, _B_;
|
||||
bayesTree.insert(B);
|
||||
bayesTree.insert(L);
|
||||
bayesTree.insert(E);
|
||||
bayesTree.insert(S);
|
||||
bayesTree.insert(T);
|
||||
bayesTree.insert(X);
|
||||
return bayesTree;
|
||||
}
|
||||
|
||||
|
|
@ -78,15 +78,15 @@ TEST( BayesTree, constructor )
|
|||
LONGS_EQUAL(4,bayesTree.size());
|
||||
|
||||
// Check root
|
||||
BayesNet<SymbolicConditional> expected_root;
|
||||
BayesNet<Conditional> expected_root;
|
||||
expected_root.push_back(E);
|
||||
expected_root.push_back(L);
|
||||
expected_root.push_back(B);
|
||||
boost::shared_ptr<SymbolicBayesNet> actual_root = bayesTree.root();
|
||||
boost::shared_ptr<BayesNet<Conditional> > actual_root = bayesTree.root();
|
||||
CHECK(assert_equal(expected_root,*actual_root));
|
||||
|
||||
// Create from symbolic Bayes chain in which we want to discover cliques
|
||||
SymbolicBayesNet ASIA;
|
||||
BayesNet<Conditional> ASIA;
|
||||
ASIA.push_back(X);
|
||||
ASIA.push_back(T);
|
||||
ASIA.push_back(S);
|
||||
|
|
@ -99,17 +99,17 @@ TEST( BayesTree, constructor )
|
|||
CHECK(assert_equal(bayesTree,bayesTree2));
|
||||
|
||||
// CHECK findParentClique, should *not depend on order of parents*
|
||||
Ordering ordering; ordering += _X_, _T_, _S_, _E_, _L_, _B_;
|
||||
IndexTable<Symbol> index(ordering);
|
||||
// Ordering ordering; ordering += _X_, _T_, _S_, _E_, _L_, _B_;
|
||||
// IndexTable<Symbol> index(ordering);
|
||||
|
||||
list<Symbol> parents1; parents1 += _E_, _L_;
|
||||
CHECK(assert_equal(_E_,bayesTree.findParentClique(parents1, index)));
|
||||
list<varid_t> parents1; parents1 += _E_, _L_;
|
||||
CHECK(assert_equal(_E_, bayesTree.findParentClique(parents1)));
|
||||
|
||||
list<Symbol> parents2; parents2 += _L_, _E_;
|
||||
CHECK(assert_equal(_E_,bayesTree.findParentClique(parents2, index)));
|
||||
list<varid_t> parents2; parents2 += _L_, _E_;
|
||||
CHECK(assert_equal(_E_, bayesTree.findParentClique(parents2)));
|
||||
|
||||
list<Symbol> parents3; parents3 += _L_, _B_;
|
||||
CHECK(assert_equal(_L_,bayesTree.findParentClique(parents3, index)));
|
||||
list<varid_t> parents3; parents3 += _L_, _B_;
|
||||
CHECK(assert_equal(_L_, bayesTree.findParentClique(parents3)));
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
|
|
@ -129,53 +129,54 @@ Bayes Tree for testing conversion to a forest of orphans needed for incremental.
|
|||
A,B
|
||||
C|A E|B
|
||||
D|C F|E
|
||||
*/
|
||||
/* ************************************************************************* */
|
||||
TEST( BayesTree, removePath )
|
||||
{
|
||||
Symbol _A_('A', 0), _B_('B', 0), _C_('C', 0), _D_('D', 0), _E_('E', 0), _F_('F',0);
|
||||
SymbolicConditional::shared_ptr
|
||||
A(new SymbolicConditional(_A_)),
|
||||
B(new SymbolicConditional(_B_, _A_)),
|
||||
C(new SymbolicConditional(_C_, _A_)),
|
||||
D(new SymbolicConditional(_D_, _C_)),
|
||||
E(new SymbolicConditional(_E_, _B_)),
|
||||
F(new SymbolicConditional(_F_, _E_));
|
||||
const varid_t _A_=5, _B_=4, _C_=3, _D_=2, _E_=1, _F_=0;
|
||||
Conditional::shared_ptr
|
||||
A(new Conditional(_A_)),
|
||||
B(new Conditional(_B_, _A_)),
|
||||
C(new Conditional(_C_, _A_)),
|
||||
D(new Conditional(_D_, _C_)),
|
||||
E(new Conditional(_E_, _B_)),
|
||||
F(new Conditional(_F_, _E_));
|
||||
SymbolicBayesTree bayesTree;
|
||||
Ordering ord; ord += _A_,_B_,_C_,_D_,_E_,_F_;
|
||||
bayesTree.insert(A,ord);
|
||||
bayesTree.insert(B,ord);
|
||||
bayesTree.insert(C,ord);
|
||||
bayesTree.insert(D,ord);
|
||||
bayesTree.insert(E,ord);
|
||||
bayesTree.insert(F,ord);
|
||||
// Ordering ord; ord += _A_,_B_,_C_,_D_,_E_,_F_;
|
||||
bayesTree.insert(A);
|
||||
bayesTree.insert(B);
|
||||
bayesTree.insert(C);
|
||||
bayesTree.insert(D);
|
||||
bayesTree.insert(E);
|
||||
bayesTree.insert(F);
|
||||
|
||||
// remove C, expected outcome: factor graph with ABC,
|
||||
// Bayes Tree now contains two orphan trees: D|C and E|B,F|E
|
||||
SymbolicFactorGraph expected;
|
||||
expected.push_factor(_A_,_B_);
|
||||
expected.push_factor(_B_,_A_);
|
||||
expected.push_factor(_A_);
|
||||
expected.push_factor(_A_,_C_);
|
||||
expected.push_factor(_C_,_A_);
|
||||
SymbolicBayesTree::Cliques expectedOrphans;
|
||||
expectedOrphans += bayesTree[_D_], bayesTree[_E_];
|
||||
|
||||
BayesNet<SymbolicConditional> bn;
|
||||
BayesNet<Conditional> bn;
|
||||
SymbolicBayesTree::Cliques orphans;
|
||||
bayesTree.removePath(bayesTree[_C_], bn, orphans);
|
||||
FactorGraph<SymbolicFactor> factors(bn);
|
||||
CHECK(assert_equal((FactorGraph<SymbolicFactor>)expected, factors));
|
||||
SymbolicFactorGraph factors(bn);
|
||||
CHECK(assert_equal((SymbolicFactorGraph)expected, factors));
|
||||
CHECK(assert_equal(expectedOrphans, orphans));
|
||||
|
||||
// remove E: factor graph with EB; E|B removed from second orphan tree
|
||||
SymbolicFactorGraph expected2;
|
||||
expected2.push_factor(_B_,_E_);
|
||||
expected2.push_factor(_E_,_B_);
|
||||
SymbolicBayesTree::Cliques expectedOrphans2;
|
||||
expectedOrphans2 += bayesTree[_F_];
|
||||
|
||||
BayesNet<SymbolicConditional> bn2;
|
||||
BayesNet<Conditional> bn2;
|
||||
SymbolicBayesTree::Cliques orphans2;
|
||||
bayesTree.removePath(bayesTree[_E_], bn2, orphans2);
|
||||
FactorGraph<SymbolicFactor> factors2(bn2);
|
||||
CHECK(assert_equal((FactorGraph<SymbolicFactor>)expected2, factors2));
|
||||
SymbolicFactorGraph factors2(bn2);
|
||||
CHECK(assert_equal((SymbolicFactorGraph)expected2, factors2));
|
||||
CHECK(assert_equal(expectedOrphans2, orphans2));
|
||||
}
|
||||
|
||||
|
|
@ -185,17 +186,17 @@ TEST( BayesTree, removePath2 )
|
|||
SymbolicBayesTree bayesTree = createAsiaSymbolicBayesTree();
|
||||
|
||||
// Call remove-path with clique B
|
||||
BayesNet<SymbolicConditional> bn;
|
||||
BayesNet<Conditional> bn;
|
||||
SymbolicBayesTree::Cliques orphans;
|
||||
bayesTree.removePath(bayesTree[_B_], bn, orphans);
|
||||
FactorGraph<SymbolicFactor> factors(bn);
|
||||
SymbolicFactorGraph factors(bn);
|
||||
|
||||
// Check expected outcome
|
||||
SymbolicFactorGraph expected;
|
||||
expected.push_factor(_B_,_L_,_E_);
|
||||
expected.push_factor(_B_,_L_);
|
||||
expected.push_factor(_E_,_L_,_B_);
|
||||
expected.push_factor(_L_,_B_);
|
||||
expected.push_factor(_B_);
|
||||
CHECK(assert_equal((FactorGraph<SymbolicFactor>)expected, factors));
|
||||
CHECK(assert_equal(expected, factors));
|
||||
SymbolicBayesTree::Cliques expectedOrphans;
|
||||
expectedOrphans += bayesTree[_S_], bayesTree[_T_], bayesTree[_X_];
|
||||
CHECK(assert_equal(expectedOrphans, orphans));
|
||||
|
|
@ -207,18 +208,18 @@ TEST( BayesTree, removePath3 )
|
|||
SymbolicBayesTree bayesTree = createAsiaSymbolicBayesTree();
|
||||
|
||||
// Call remove-path with clique S
|
||||
BayesNet<SymbolicConditional> bn;
|
||||
BayesNet<Conditional> bn;
|
||||
SymbolicBayesTree::Cliques orphans;
|
||||
bayesTree.removePath(bayesTree[_S_], bn, orphans);
|
||||
FactorGraph<SymbolicFactor> factors(bn);
|
||||
SymbolicFactorGraph factors(bn);
|
||||
|
||||
// Check expected outcome
|
||||
SymbolicFactorGraph expected;
|
||||
expected.push_factor(_B_,_L_,_E_);
|
||||
expected.push_factor(_B_,_L_);
|
||||
expected.push_factor(_E_,_L_,_B_);
|
||||
expected.push_factor(_L_,_B_);
|
||||
expected.push_factor(_B_);
|
||||
expected.push_factor(_L_,_B_,_S_);
|
||||
CHECK(assert_equal((FactorGraph<SymbolicFactor>)expected, factors));
|
||||
expected.push_factor(_S_,_L_,_B_);
|
||||
CHECK(assert_equal(expected, factors));
|
||||
SymbolicBayesTree::Cliques expectedOrphans;
|
||||
expectedOrphans += bayesTree[_T_], bayesTree[_X_];
|
||||
CHECK(assert_equal(expectedOrphans, orphans));
|
||||
|
|
@ -230,33 +231,35 @@ TEST( BayesTree, removeTop )
|
|||
SymbolicBayesTree bayesTree = createAsiaSymbolicBayesTree();
|
||||
|
||||
// create a new factor to be inserted
|
||||
boost::shared_ptr<SymbolicFactor> newFactor(new SymbolicFactor(_B_,_S_));
|
||||
boost::shared_ptr<Factor> newFactor(new Factor(_S_,_B_));
|
||||
|
||||
// Remove the contaminated part of the Bayes tree
|
||||
BayesNet<SymbolicConditional> bn;
|
||||
BayesNet<Conditional> bn;
|
||||
SymbolicBayesTree::Cliques orphans;
|
||||
bayesTree.removeTop(newFactor->keys(), bn, orphans);
|
||||
FactorGraph<SymbolicFactor> factors(bn);
|
||||
list<varid_t> keys; keys += _B_,_S_;
|
||||
bayesTree.removeTop(keys, bn, orphans);
|
||||
SymbolicFactorGraph factors(bn);
|
||||
|
||||
// Check expected outcome
|
||||
SymbolicFactorGraph expected;
|
||||
expected.push_factor(_B_,_L_,_E_);
|
||||
expected.push_factor(_B_,_L_);
|
||||
expected.push_factor(_E_,_L_,_B_);
|
||||
expected.push_factor(_L_,_B_);
|
||||
expected.push_factor(_B_);
|
||||
expected.push_factor(_L_,_B_,_S_);
|
||||
CHECK(assert_equal((FactorGraph<SymbolicFactor>)expected, factors));
|
||||
expected.push_factor(_S_,_L_,_B_);
|
||||
CHECK(assert_equal(expected, factors));
|
||||
SymbolicBayesTree::Cliques expectedOrphans;
|
||||
expectedOrphans += bayesTree[_T_], bayesTree[_X_];
|
||||
CHECK(assert_equal(expectedOrphans, orphans));
|
||||
|
||||
// Try removeTop again with a factor that should not change a thing
|
||||
boost::shared_ptr<SymbolicFactor> newFactor2(new SymbolicFactor(_B_));
|
||||
BayesNet<SymbolicConditional> bn2;
|
||||
boost::shared_ptr<Factor> newFactor2(new Factor(_B_));
|
||||
BayesNet<Conditional> bn2;
|
||||
SymbolicBayesTree::Cliques orphans2;
|
||||
bayesTree.removeTop(newFactor2->keys(), bn2, orphans2);
|
||||
FactorGraph<SymbolicFactor> factors2(bn2);
|
||||
keys.clear(); keys += _B_;
|
||||
bayesTree.removeTop(keys, bn2, orphans2);
|
||||
SymbolicFactorGraph factors2(bn2);
|
||||
SymbolicFactorGraph expected2;
|
||||
CHECK(assert_equal((FactorGraph<SymbolicFactor>)expected2, factors2));
|
||||
CHECK(assert_equal(expected2, factors2));
|
||||
SymbolicBayesTree::Cliques expectedOrphans2;
|
||||
CHECK(assert_equal(expectedOrphans2, orphans2));
|
||||
}
|
||||
|
|
@ -272,18 +275,19 @@ TEST( BayesTree, removeTop2 )
|
|||
newFactors.push_factor(_S_);
|
||||
|
||||
// Remove the contaminated part of the Bayes tree
|
||||
BayesNet<SymbolicConditional> bn;
|
||||
BayesNet<Conditional> bn;
|
||||
SymbolicBayesTree::Cliques orphans;
|
||||
bayesTree.removeTop(newFactors.keys(), bn, orphans);
|
||||
FactorGraph<SymbolicFactor> factors(bn);
|
||||
list<varid_t> keys; keys += _B_,_S_;
|
||||
bayesTree.removeTop(keys, bn, orphans);
|
||||
SymbolicFactorGraph factors(bn);
|
||||
|
||||
// Check expected outcome
|
||||
SymbolicFactorGraph expected;
|
||||
expected.push_factor(_B_,_L_,_E_);
|
||||
expected.push_factor(_B_,_L_);
|
||||
expected.push_factor(_E_,_L_,_B_);
|
||||
expected.push_factor(_L_,_B_);
|
||||
expected.push_factor(_B_);
|
||||
expected.push_factor(_L_,_B_,_S_);
|
||||
CHECK(assert_equal((FactorGraph<SymbolicFactor>)expected, factors));
|
||||
expected.push_factor(_S_,_L_,_B_);
|
||||
CHECK(assert_equal(expected, factors));
|
||||
SymbolicBayesTree::Cliques expectedOrphans;
|
||||
expectedOrphans += bayesTree[_T_], bayesTree[_X_];
|
||||
CHECK(assert_equal(expectedOrphans, orphans));
|
||||
|
|
@ -292,29 +296,29 @@ TEST( BayesTree, removeTop2 )
|
|||
/* ************************************************************************* */
|
||||
TEST( BayesTree, removeTop3 )
|
||||
{
|
||||
Symbol _l5_('l', 5), _x4_('x', 4);
|
||||
const varid_t _x4_=5, _l5_=6;
|
||||
// simple test case that failed after COLAMD was fixed/activated
|
||||
SymbolicConditional::shared_ptr
|
||||
X(new SymbolicConditional(_l5_)),
|
||||
A(new SymbolicConditional(_x4_, _l5_)),
|
||||
B(new SymbolicConditional(_x3_, _x4_)),
|
||||
C(new SymbolicConditional(_x2_, _x3_));
|
||||
Conditional::shared_ptr
|
||||
X(new Conditional(_l5_)),
|
||||
A(new Conditional(_x4_, _l5_)),
|
||||
B(new Conditional(_x2_, _x4_)),
|
||||
C(new Conditional(_x3_, _x2_));
|
||||
|
||||
Ordering newOrdering;
|
||||
newOrdering += _x3_, _x2_, _x1_, _l2_, _l1_, _x4_, _l5_;
|
||||
// Ordering newOrdering;
|
||||
// newOrdering += _x3_, _x2_, _x1_, _l2_, _l1_, _x4_, _l5_;
|
||||
SymbolicBayesTree bayesTree;
|
||||
bayesTree.insert(X,newOrdering);
|
||||
bayesTree.insert(A,newOrdering);
|
||||
bayesTree.insert(B,newOrdering);
|
||||
bayesTree.insert(C,newOrdering);
|
||||
bayesTree.insert(X);
|
||||
bayesTree.insert(A);
|
||||
bayesTree.insert(B);
|
||||
bayesTree.insert(C);
|
||||
|
||||
// remove all
|
||||
list<Symbol> keys;
|
||||
list<varid_t> keys;
|
||||
keys += _l5_, _x2_, _x3_, _x4_;
|
||||
BayesNet<SymbolicConditional> bn;
|
||||
BayesNet<Conditional> bn;
|
||||
SymbolicBayesTree::Cliques orphans;
|
||||
bayesTree.removeTop(keys, bn, orphans);
|
||||
FactorGraph<SymbolicFactor> factors(bn);
|
||||
SymbolicFactorGraph factors(bn);
|
||||
|
||||
CHECK(orphans.size() == 0);
|
||||
}
|
||||
|
|
@ -327,24 +331,24 @@ TEST( BayesTree, removeTop3 )
|
|||
TEST( BayesTree, insert )
|
||||
{
|
||||
// construct bayes tree by split the graph along the separator x3 - x4
|
||||
Symbol _x4_('x', 4), _x5_('x', 5), _x6_('x', 6);
|
||||
const varid_t _x1_=0, _x2_=1, _x6_=2, _x5_=3, _x3_=4, _x4_=5;
|
||||
SymbolicFactorGraph fg1, fg2, fg3;
|
||||
fg1.push_factor(_x3_, _x4_);
|
||||
fg2.push_factor(_x1_, _x2_);
|
||||
fg2.push_factor(_x2_, _x3_);
|
||||
fg2.push_factor(_x1_, _x3_);
|
||||
fg3.push_factor(_x4_, _x5_);
|
||||
fg3.push_factor(_x5_, _x6_);
|
||||
fg3.push_factor(_x4_, _x6_);
|
||||
fg3.push_factor(_x5_, _x4_);
|
||||
fg3.push_factor(_x6_, _x5_);
|
||||
fg3.push_factor(_x6_, _x4_);
|
||||
|
||||
Ordering ordering1; ordering1 += _x3_, _x4_;
|
||||
Ordering ordering2; ordering2 += _x1_, _x2_;
|
||||
Ordering ordering3; ordering3 += _x6_, _x5_;
|
||||
// Ordering ordering1; ordering1 += _x3_, _x4_;
|
||||
// Ordering ordering2; ordering2 += _x1_, _x2_;
|
||||
// Ordering ordering3; ordering3 += _x6_, _x5_;
|
||||
|
||||
BayesNet<SymbolicConditional> bn1, bn2, bn3;
|
||||
bn1 = fg1.eliminate(ordering1);
|
||||
bn2 = fg2.eliminate(ordering2);
|
||||
bn3 = fg3.eliminate(ordering3);
|
||||
BayesNet<Conditional> bn1, bn2, bn3;
|
||||
bn1 = *Inference::EliminateUntil(fg1, _x4_+1);
|
||||
bn2 = *Inference::EliminateUntil(fg2, _x2_+1);
|
||||
bn3 = *Inference::EliminateUntil(fg3, _x5_+1);
|
||||
|
||||
// insert child cliques
|
||||
SymbolicBayesTree actual;
|
||||
|
|
@ -363,13 +367,13 @@ TEST( BayesTree, insert )
|
|||
fg.push_factor(_x1_, _x2_);
|
||||
fg.push_factor(_x2_, _x3_);
|
||||
fg.push_factor(_x1_, _x3_);
|
||||
fg.push_factor(_x4_, _x5_);
|
||||
fg.push_factor(_x5_, _x6_);
|
||||
fg.push_factor(_x4_, _x6_);
|
||||
fg.push_factor(_x5_, _x4_);
|
||||
fg.push_factor(_x6_, _x5_);
|
||||
fg.push_factor(_x6_, _x4_);
|
||||
|
||||
Ordering ordering; ordering += _x1_, _x2_, _x6_, _x5_, _x3_, _x4_;
|
||||
BayesNet<SymbolicConditional> bn;
|
||||
bn = fg.eliminate(ordering);
|
||||
// Ordering ordering; ordering += _x1_, _x2_, _x6_, _x5_, _x3_, _x4_;
|
||||
BayesNet<Conditional> bn;
|
||||
bn = *Inference::Eliminate(fg);
|
||||
SymbolicBayesTree expected(bn);
|
||||
CHECK(assert_equal(expected, actual));
|
||||
|
||||
|
|
|
|||
|
|
@ -8,6 +8,7 @@
|
|||
// for operator +=
|
||||
#include <boost/assign/std/list.hpp>
|
||||
#include <boost/assign/std/map.hpp>
|
||||
#include <boost/make_shared.hpp>
|
||||
using namespace boost::assign;
|
||||
|
||||
#include <gtsam/CppUnitLite/TestHarness.h>
|
||||
|
|
@ -20,40 +21,92 @@ using namespace boost::assign;
|
|||
|
||||
using namespace std;
|
||||
using namespace gtsam;
|
||||
using namespace boost;
|
||||
|
||||
// explicit instantiation and typedef
|
||||
template class EliminationTree<SymbolicFactorGraph>;
|
||||
typedef EliminationTree<SymbolicFactorGraph> SymbolicEliminationTree;
|
||||
|
||||
/* ************************************************************************* *
|
||||
* graph: f(1,2) f(1,3) f(2,5) f(3,5) f(4,5)
|
||||
* tree: x1 -> x2 -> x3 -> x5 <- x4 (arrow is parent pointer)
|
||||
****************************************************************************/
|
||||
//TEST( EliminationTree, constructor )
|
||||
//{
|
||||
// Ordering ordering; ordering += "x1","x2","x3","x4","x5";
|
||||
//
|
||||
// /** build expected tree using constructor variant 1 */
|
||||
// SymbolicEliminationTree::OrderedGraphs graphs;
|
||||
// SymbolicFactorGraph c1,c2,c3,c4,c5;
|
||||
// c1.push_factor("x1","x2"); c1.push_factor("x1","x3"); graphs += make_pair("x1",c1);
|
||||
// c2.push_factor("x2","x5"); graphs += make_pair("x2",c2);
|
||||
// c3.push_factor("x3","x5"); graphs += make_pair("x3",c3);
|
||||
// c4.push_factor("x4","x5"); graphs += make_pair("x4",c4);
|
||||
// graphs += make_pair("x5",c5);
|
||||
// SymbolicEliminationTree expected(graphs);
|
||||
//
|
||||
// /** build actual tree from factor graph (variant 2) */
|
||||
// SymbolicFactorGraph fg;
|
||||
// fg.push_factor("x1","x2");
|
||||
// fg.push_factor("x1","x3");
|
||||
// fg.push_factor("x2","x5");
|
||||
// fg.push_factor("x3","x5");
|
||||
// fg.push_factor("x4","x5");
|
||||
// SymbolicEliminationTree actual(fg, ordering);
|
||||
//// GTSAM_PRINT(actual);
|
||||
//
|
||||
// CHECK(assert_equal<SymbolicEliminationTree>(expected, actual));
|
||||
//}
|
||||
|
||||
/* ************************************************************************* *
|
||||
* graph: f(1,2) f(1,3) f(2,5) f(3,5) f(4,5)
|
||||
* tree: x1 -> x2 -> x3 -> x5 <- x4 (arrow is parent pointer)
|
||||
****************************************************************************/
|
||||
TEST( EliminationTree, constructor )
|
||||
{
|
||||
Ordering ordering; ordering += "x1","x2","x3","x4","x5";
|
||||
varid_t x1=1, x2=2, x3=3, x4=4, x5=5;
|
||||
SymbolicFactorGraph fc1,fc2,fc3,fc4,fc5;
|
||||
|
||||
/** build expected tree using constructor variant 1 */
|
||||
SymbolicEliminationTree::OrderedGraphs graphs;
|
||||
SymbolicFactorGraph c1,c2,c3,c4,c5;
|
||||
c1.push_factor("x1","x2"); c1.push_factor("x1","x3"); graphs += make_pair("x1",c1);
|
||||
c2.push_factor("x2","x5"); graphs += make_pair("x2",c2);
|
||||
c3.push_factor("x3","x5"); graphs += make_pair("x3",c3);
|
||||
c4.push_factor("x4","x5"); graphs += make_pair("x4",c4);
|
||||
graphs += make_pair("x5",c5);
|
||||
SymbolicEliminationTree expected(graphs);
|
||||
fc1.push_factor(x1,x2); fc1.push_factor(x1,x3);
|
||||
list<varid_t> c1sep; c1sep += x2,x3;
|
||||
SymbolicEliminationTree::sharedNode c1(new SymbolicEliminationTree::Node(fc1, x1, c1sep.begin(), c1sep.end()));
|
||||
|
||||
/** build actual tree from factor graph (variant 2) */
|
||||
SymbolicFactorGraph fg;
|
||||
fg.push_factor("x1","x2");
|
||||
fg.push_factor("x1","x3");
|
||||
fg.push_factor("x2","x5");
|
||||
fg.push_factor("x3","x5");
|
||||
fg.push_factor("x4","x5");
|
||||
SymbolicEliminationTree actual(fg, ordering);
|
||||
// GTSAM_PRINT(actual);
|
||||
fc2.push_factor(x2,x5);
|
||||
list<varid_t> c2sep; c2sep += x3,x5;
|
||||
SymbolicEliminationTree::sharedNode c2(new SymbolicEliminationTree::Node(fc2, x2, c2sep.begin(), c2sep.end()));
|
||||
|
||||
CHECK(assert_equal<SymbolicEliminationTree>(expected, actual));
|
||||
fc3.push_factor(x3,x5);
|
||||
list<varid_t> c3sep; c3sep += x5;
|
||||
SymbolicEliminationTree::sharedNode c3(new SymbolicEliminationTree::Node(fc3, x3, c3sep.begin(), c3sep.end()));
|
||||
|
||||
fc4.push_factor(x4,x5);
|
||||
list<varid_t> c4sep; c4sep += x5;
|
||||
SymbolicEliminationTree::sharedNode c4(new SymbolicEliminationTree::Node(fc4, x4, c4sep.begin(), c4sep.end()));
|
||||
|
||||
list<varid_t> c5sep;
|
||||
SymbolicEliminationTree::sharedNode c5(new SymbolicEliminationTree::Node(fc5, x5, c5sep.begin(), c5sep.end()));
|
||||
|
||||
/** build expected tree using test accessor */
|
||||
SymbolicEliminationTree expected;
|
||||
_EliminationTreeTester<SymbolicFactorGraph> expected_(expected);
|
||||
expected_.nodes().resize(6);
|
||||
expected_.root() = c5; expected_.nodes()[x5]=c5;
|
||||
c5->addChild(c4); c4->parent()=c5; expected_.nodes()[x4]=c4;
|
||||
c5->addChild(c3); c3->parent()=c5; expected_.nodes()[x3]=c3;
|
||||
c3->addChild(c2); c2->parent()=c3; expected_.nodes()[x2]=c2;
|
||||
c2->addChild(c1); c1->parent()=c2; expected_.nodes()[x1]=c1;
|
||||
|
||||
/** build actual tree from factor graph (variant 2) */
|
||||
SymbolicFactorGraph fg;
|
||||
fg.push_factor(x1,x2);
|
||||
fg.push_factor(x1,x3);
|
||||
fg.push_factor(x2,x5);
|
||||
fg.push_factor(x3,x5);
|
||||
fg.push_factor(x4,x5);
|
||||
SymbolicEliminationTree actual(fg);
|
||||
// GTSAM_PRINT(actual);
|
||||
|
||||
CHECK(assert_equal<SymbolicEliminationTree>(expected, actual));
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
|
|
|
|||
|
|
@ -22,64 +22,64 @@ using namespace gtsam;
|
|||
|
||||
typedef boost::shared_ptr<SymbolicFactorGraph> shared;
|
||||
|
||||
/* ************************************************************************* */
|
||||
TEST( FactorGraph, splitMinimumSpanningTree )
|
||||
{
|
||||
SymbolicFactorGraph G;
|
||||
G.push_factor("x1", "x2");
|
||||
G.push_factor("x1", "x3");
|
||||
G.push_factor("x1", "x4");
|
||||
G.push_factor("x2", "x3");
|
||||
G.push_factor("x2", "x4");
|
||||
G.push_factor("x3", "x4");
|
||||
///* ************************************************************************* */
|
||||
// SL-FIX TEST( FactorGraph, splitMinimumSpanningTree )
|
||||
//{
|
||||
// SymbolicFactorGraph G;
|
||||
// G.push_factor("x1", "x2");
|
||||
// G.push_factor("x1", "x3");
|
||||
// G.push_factor("x1", "x4");
|
||||
// G.push_factor("x2", "x3");
|
||||
// G.push_factor("x2", "x4");
|
||||
// G.push_factor("x3", "x4");
|
||||
//
|
||||
// SymbolicFactorGraph T, C;
|
||||
// boost::tie(T, C) = G.splitMinimumSpanningTree();
|
||||
//
|
||||
// SymbolicFactorGraph expectedT, expectedC;
|
||||
// expectedT.push_factor("x1", "x2");
|
||||
// expectedT.push_factor("x1", "x3");
|
||||
// expectedT.push_factor("x1", "x4");
|
||||
// expectedC.push_factor("x2", "x3");
|
||||
// expectedC.push_factor("x2", "x4");
|
||||
// expectedC.push_factor("x3", "x4");
|
||||
// CHECK(assert_equal(expectedT,T));
|
||||
// CHECK(assert_equal(expectedC,C));
|
||||
//}
|
||||
|
||||
SymbolicFactorGraph T, C;
|
||||
boost::tie(T, C) = G.splitMinimumSpanningTree();
|
||||
|
||||
SymbolicFactorGraph expectedT, expectedC;
|
||||
expectedT.push_factor("x1", "x2");
|
||||
expectedT.push_factor("x1", "x3");
|
||||
expectedT.push_factor("x1", "x4");
|
||||
expectedC.push_factor("x2", "x3");
|
||||
expectedC.push_factor("x2", "x4");
|
||||
expectedC.push_factor("x3", "x4");
|
||||
CHECK(assert_equal(expectedT,T));
|
||||
CHECK(assert_equal(expectedC,C));
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
/**
|
||||
* x1 - x2 - x3 - x4 - x5
|
||||
* | | / |
|
||||
* l1 l2 l3
|
||||
*/
|
||||
TEST( FactorGraph, removeSingletons )
|
||||
{
|
||||
SymbolicFactorGraph G;
|
||||
G.push_factor("x1", "x2");
|
||||
G.push_factor("x2", "x3");
|
||||
G.push_factor("x3", "x4");
|
||||
G.push_factor("x4", "x5");
|
||||
G.push_factor("x2", "l1");
|
||||
G.push_factor("x3", "l2");
|
||||
G.push_factor("x4", "l2");
|
||||
G.push_factor("x4", "l3");
|
||||
|
||||
SymbolicFactorGraph singletonGraph;
|
||||
set<Symbol> singletons;
|
||||
boost::tie(singletonGraph, singletons) = G.removeSingletons();
|
||||
|
||||
set<Symbol> singletons_excepted; singletons_excepted += "x1", "x2", "x5", "l1", "l3";
|
||||
CHECK(singletons_excepted == singletons);
|
||||
|
||||
SymbolicFactorGraph singletonGraph_excepted;
|
||||
singletonGraph_excepted.push_factor("x2", "l1");
|
||||
singletonGraph_excepted.push_factor("x4", "l3");
|
||||
singletonGraph_excepted.push_factor("x1", "x2");
|
||||
singletonGraph_excepted.push_factor("x4", "x5");
|
||||
singletonGraph_excepted.push_factor("x2", "x3");
|
||||
CHECK(singletonGraph_excepted.equals(singletonGraph));
|
||||
}
|
||||
///* ************************************************************************* */
|
||||
///**
|
||||
// * x1 - x2 - x3 - x4 - x5
|
||||
// * | | / |
|
||||
// * l1 l2 l3
|
||||
// */
|
||||
// SL-FIX TEST( FactorGraph, removeSingletons )
|
||||
//{
|
||||
// SymbolicFactorGraph G;
|
||||
// G.push_factor("x1", "x2");
|
||||
// G.push_factor("x2", "x3");
|
||||
// G.push_factor("x3", "x4");
|
||||
// G.push_factor("x4", "x5");
|
||||
// G.push_factor("x2", "l1");
|
||||
// G.push_factor("x3", "l2");
|
||||
// G.push_factor("x4", "l2");
|
||||
// G.push_factor("x4", "l3");
|
||||
//
|
||||
// SymbolicFactorGraph singletonGraph;
|
||||
// set<Symbol> singletons;
|
||||
// boost::tie(singletonGraph, singletons) = G.removeSingletons();
|
||||
//
|
||||
// set<Symbol> singletons_excepted; singletons_excepted += "x1", "x2", "x5", "l1", "l3";
|
||||
// CHECK(singletons_excepted == singletons);
|
||||
//
|
||||
// SymbolicFactorGraph singletonGraph_excepted;
|
||||
// singletonGraph_excepted.push_factor("x2", "l1");
|
||||
// singletonGraph_excepted.push_factor("x4", "l3");
|
||||
// singletonGraph_excepted.push_factor("x1", "x2");
|
||||
// singletonGraph_excepted.push_factor("x4", "x5");
|
||||
// singletonGraph_excepted.push_factor("x2", "x3");
|
||||
// CHECK(singletonGraph_excepted.equals(singletonGraph));
|
||||
//}
|
||||
|
||||
|
||||
/* ************************************************************************* */
|
||||
|
|
|
|||
|
|
@ -12,15 +12,15 @@ using namespace boost::assign;
|
|||
|
||||
#define GTSAM_MAGIC_KEY
|
||||
|
||||
#include <gtsam/inference/SymbolicBayesNet.h>
|
||||
#include <gtsam/inference/BayesNet.h>
|
||||
#include <gtsam/inference/Conditional.h>
|
||||
#include <gtsam/inference/SymbolicFactorGraph.h>
|
||||
#include <gtsam/inference/Ordering.h>
|
||||
#include <gtsam/inference/ISAM-inl.h>
|
||||
|
||||
using namespace std;
|
||||
using namespace gtsam;
|
||||
|
||||
typedef ISAM<SymbolicConditional> SymbolicISAM;
|
||||
typedef ISAM<Conditional> SymbolicISAM;
|
||||
|
||||
/* ************************************************************************* */
|
||||
// Some numbers that should be consistent among all smoother tests
|
||||
|
|
|
|||
|
|
@ -10,20 +10,21 @@
|
|||
using namespace boost::assign;
|
||||
|
||||
#include <gtsam/CppUnitLite/TestHarness.h>
|
||||
#include <gtsam/base/TestableAssertions.h>
|
||||
|
||||
#define GTSAM_MAGIC_KEY
|
||||
|
||||
#include <gtsam/inference/Ordering.h>
|
||||
#include <gtsam/nonlinear/Ordering.h>
|
||||
#include <gtsam/inference/SymbolicFactorGraph.h>
|
||||
#include <gtsam/inference/JunctionTree.h>
|
||||
#include <gtsam/inference/ClusterTree-inl.h>
|
||||
#include <gtsam/inference/JunctionTree-inl.h>
|
||||
#include <gtsam/inference/Factor-inl.h>
|
||||
|
||||
using namespace gtsam;
|
||||
|
||||
// explicit instantiation and typedef
|
||||
template class JunctionTree<SymbolicFactorGraph>;
|
||||
typedef JunctionTree<SymbolicFactorGraph> SymbolicJunctionTree;
|
||||
typedef BayesTree<Conditional> SymbolicBayesTree;
|
||||
|
||||
/* ************************************************************************* *
|
||||
* x1 - x2 - x3 - x4
|
||||
|
|
@ -32,27 +33,52 @@ typedef JunctionTree<SymbolicFactorGraph> SymbolicJunctionTree;
|
|||
****************************************************************************/
|
||||
TEST( JunctionTree, constructor )
|
||||
{
|
||||
const varid_t x2=0, x1=1, x3=2, x4=3;
|
||||
SymbolicFactorGraph fg;
|
||||
fg.push_factor("x1","x2");
|
||||
fg.push_factor("x2","x3");
|
||||
fg.push_factor("x3","x4");
|
||||
fg.push_factor(x2,x1);
|
||||
fg.push_factor(x2,x3);
|
||||
fg.push_factor(x3,x4);
|
||||
|
||||
SymbolicJunctionTree expected;
|
||||
SymbolicJunctionTree actual(fg);
|
||||
|
||||
Ordering ordering; ordering += "x2","x1","x3","x4";
|
||||
SymbolicJunctionTree actual(fg, ordering);
|
||||
// CHECK(assert_equal<SymbolicJunctionTree>(expected, actual));
|
||||
|
||||
Ordering frontal1; frontal1 += "x3", "x4";
|
||||
Ordering frontal2; frontal2 += "x2", "x1";
|
||||
Unordered sep1;
|
||||
Unordered sep2; sep2 += "x3";
|
||||
CHECK(assert_equal(frontal1, actual.root()->frontal_));
|
||||
CHECK(assert_equal(sep1, actual.root()->separator_));
|
||||
vector<varid_t> frontal1; frontal1 += x3, x4;
|
||||
vector<varid_t> frontal2; frontal2 += x2, x1;
|
||||
vector<varid_t> sep1;
|
||||
vector<varid_t> sep2; sep2 += x3;
|
||||
CHECK(assert_equal(frontal1, actual.root()->frontal));
|
||||
CHECK(assert_equal(sep1, actual.root()->separator));
|
||||
LONGS_EQUAL(1, actual.root()->size());
|
||||
CHECK(assert_equal(frontal2, actual.root()->children_[0]->frontal_));
|
||||
CHECK(assert_equal(sep2, actual.root()->children_[0]->separator_));
|
||||
LONGS_EQUAL(2, actual.root()->children_[0]->size());
|
||||
CHECK(assert_equal(frontal2, actual.root()->children().front()->frontal));
|
||||
CHECK(assert_equal(sep2, actual.root()->children().front()->separator));
|
||||
LONGS_EQUAL(2, actual.root()->children().front()->size());
|
||||
CHECK(assert_equal(*fg[2], *(*actual.root())[0]));
|
||||
CHECK(assert_equal(*fg[0], *(*actual.root()->children().front())[0]));
|
||||
CHECK(assert_equal(*fg[1], *(*actual.root()->children().front())[1]));
|
||||
}
|
||||
|
||||
/* ************************************************************************* *
|
||||
* x1 - x2 - x3 - x4
|
||||
* x3 x4
|
||||
* x2 x1 : x3
|
||||
****************************************************************************/
|
||||
TEST( JunctionTree, eliminate)
|
||||
{
|
||||
const varid_t x2=0, x1=1, x3=2, x4=3;
|
||||
SymbolicFactorGraph fg;
|
||||
fg.push_factor(x2,x1);
|
||||
fg.push_factor(x2,x3);
|
||||
fg.push_factor(x3,x4);
|
||||
|
||||
SymbolicJunctionTree jt(fg);
|
||||
SymbolicBayesTree::sharedClique actual = jt.eliminate();
|
||||
|
||||
BayesNet<Conditional> bn = *Inference::Eliminate(fg);
|
||||
SymbolicBayesTree expected(bn);
|
||||
|
||||
// cout << "BT from JT:\n";
|
||||
// actual->printTree("");
|
||||
|
||||
CHECK(assert_equal(*expected.root(), *actual));
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
|
|
|
|||
|
|
@ -10,27 +10,29 @@ using namespace boost::assign;
|
|||
|
||||
#include <gtsam/CppUnitLite/TestHarness.h>
|
||||
|
||||
#define GTSAM_MAGIC_KEY
|
||||
//#define GTSAM_MAGIC_KEY
|
||||
|
||||
#include <gtsam/inference/Ordering.h>
|
||||
#include <gtsam/inference/SymbolicBayesNet.h>
|
||||
#include <gtsam/inference/SymbolicFactorGraph.h>
|
||||
|
||||
using namespace std;
|
||||
using namespace gtsam;
|
||||
|
||||
Symbol _B_('B', 0), _L_('L', 0);
|
||||
SymbolicConditional::shared_ptr
|
||||
B(new SymbolicConditional(_B_)),
|
||||
L(new SymbolicConditional(_L_, _B_));
|
||||
static const varid_t _L_ = 0;
|
||||
static const varid_t _A_ = 1;
|
||||
static const varid_t _B_ = 2;
|
||||
static const varid_t _C_ = 3;
|
||||
|
||||
Conditional::shared_ptr
|
||||
B(new Conditional(_B_)),
|
||||
L(new Conditional(_L_, _B_));
|
||||
|
||||
/* ************************************************************************* */
|
||||
TEST( SymbolicBayesNet, equals )
|
||||
{
|
||||
SymbolicBayesNet f1;
|
||||
BayesNet<Conditional> f1;
|
||||
f1.push_back(B);
|
||||
f1.push_back(L);
|
||||
SymbolicBayesNet f2;
|
||||
BayesNet<Conditional> f2;
|
||||
f2.push_back(L);
|
||||
f2.push_back(B);
|
||||
CHECK(f1.equals(f1));
|
||||
|
|
@ -40,18 +42,18 @@ TEST( SymbolicBayesNet, equals )
|
|||
/* ************************************************************************* */
|
||||
TEST( SymbolicBayesNet, pop_front )
|
||||
{
|
||||
SymbolicConditional::shared_ptr
|
||||
A(new SymbolicConditional("A","B","C")),
|
||||
B(new SymbolicConditional("B","C")),
|
||||
C(new SymbolicConditional("C"));
|
||||
Conditional::shared_ptr
|
||||
A(new Conditional(_A_,_B_,_C_)),
|
||||
B(new Conditional(_B_,_C_)),
|
||||
C(new Conditional(_C_));
|
||||
|
||||
// Expected after pop_front
|
||||
SymbolicBayesNet expected;
|
||||
BayesNet<Conditional> expected;
|
||||
expected.push_back(B);
|
||||
expected.push_back(C);
|
||||
|
||||
// Actual
|
||||
SymbolicBayesNet actual;
|
||||
BayesNet<Conditional> actual;
|
||||
actual.push_back(A);
|
||||
actual.push_back(B);
|
||||
actual.push_back(C);
|
||||
|
|
@ -63,24 +65,24 @@ TEST( SymbolicBayesNet, pop_front )
|
|||
/* ************************************************************************* */
|
||||
TEST( SymbolicBayesNet, combine )
|
||||
{
|
||||
SymbolicConditional::shared_ptr
|
||||
A(new SymbolicConditional("A","B","C")),
|
||||
B(new SymbolicConditional("B","C")),
|
||||
C(new SymbolicConditional("C"));
|
||||
Conditional::shared_ptr
|
||||
A(new Conditional(_A_,_B_,_C_)),
|
||||
B(new Conditional(_B_,_C_)),
|
||||
C(new Conditional(_C_));
|
||||
|
||||
// p(A|BC)
|
||||
SymbolicBayesNet p_ABC;
|
||||
BayesNet<Conditional> p_ABC;
|
||||
p_ABC.push_back(A);
|
||||
|
||||
// P(BC)=P(B|C)P(C)
|
||||
SymbolicBayesNet p_BC;
|
||||
BayesNet<Conditional> p_BC;
|
||||
p_BC.push_back(B);
|
||||
p_BC.push_back(C);
|
||||
|
||||
// P(ABC) = P(A|BC) P(BC)
|
||||
p_ABC.push_back(p_BC);
|
||||
|
||||
SymbolicBayesNet expected;
|
||||
BayesNet<Conditional> expected;
|
||||
expected.push_back(A);
|
||||
expected.push_back(B);
|
||||
expected.push_back(C);
|
||||
|
|
|
|||
|
|
@ -5,11 +5,34 @@
|
|||
*/
|
||||
|
||||
#include <gtsam/CppUnitLite/TestHarness.h>
|
||||
#include <gtsam/base/TestableAssertions.h>
|
||||
#include <gtsam/inference/Factor-inl.h>
|
||||
#include <gtsam/inference/Conditional.h>
|
||||
|
||||
#include <gtsam/inference/SymbolicFactor.h>
|
||||
#include <boost/assign/std/vector.hpp>
|
||||
|
||||
using namespace std;
|
||||
using namespace gtsam;
|
||||
using namespace boost::assign;
|
||||
|
||||
/* ************************************************************************* */
|
||||
TEST(SymbolicFactor, eliminate) {
|
||||
vector<varid_t> keys; keys += 2, 3, 4, 6, 7, 9, 10, 11;
|
||||
Factor actual(keys.begin(), keys.end());
|
||||
BayesNet<Conditional> fragment = *actual.eliminate(3);
|
||||
|
||||
Factor expected(keys.begin()+3, keys.end());
|
||||
Conditional expected0(keys.begin(), keys.end(), 1);
|
||||
Conditional expected1(keys.begin()+1, keys.end(), 1);
|
||||
Conditional expected2(keys.begin()+2, keys.end(), 1);
|
||||
|
||||
CHECK(assert_equal(fragment.size(), size_t(3)));
|
||||
CHECK(assert_equal(expected, actual));
|
||||
BayesNet<Conditional>::const_iterator fragmentCond = fragment.begin();
|
||||
CHECK(assert_equal(**fragmentCond++, expected0));
|
||||
CHECK(assert_equal(**fragmentCond++, expected1));
|
||||
CHECK(assert_equal(**fragmentCond++, expected2));
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
int main() {
|
||||
|
|
|
|||
|
|
@ -9,27 +9,31 @@ using namespace boost::assign;
|
|||
|
||||
#include <gtsam/CppUnitLite/TestHarness.h>
|
||||
|
||||
#define GTSAM_MAGIC_KEY
|
||||
|
||||
#include <gtsam/inference/Ordering.h>
|
||||
#include <gtsam/inference/SymbolicFactorGraph.h>
|
||||
#include <gtsam/inference/SymbolicBayesNet.h>
|
||||
#include <gtsam/inference/BayesNet-inl.h>
|
||||
#include <gtsam/inference/Factor-inl.h>
|
||||
#include <gtsam/inference/FactorGraph-inl.h>
|
||||
#include <gtsam/inference/inference-inl.h>
|
||||
|
||||
using namespace std;
|
||||
using namespace gtsam;
|
||||
|
||||
static const varid_t vx2=0;
|
||||
static const varid_t vx1=1;
|
||||
static const varid_t vl1=2;
|
||||
|
||||
/* ************************************************************************* */
|
||||
TEST( SymbolicFactorGraph, eliminate2 )
|
||||
{
|
||||
// create a test graph
|
||||
SymbolicFactorGraph fg;
|
||||
fg.push_factor("x1", "x2");
|
||||
fg.push_factor(vx2, vx1);
|
||||
|
||||
fg.eliminateOne("x1");
|
||||
VariableIndex<> variableIndex(fg);
|
||||
Inference::EliminateOne(fg, variableIndex, vx2);
|
||||
SymbolicFactorGraph expected;
|
||||
expected.push_back(boost::shared_ptr<SymbolicFactor>());
|
||||
expected.push_factor("x2");
|
||||
expected.push_back(boost::shared_ptr<Factor>());
|
||||
expected.push_factor(vx1);
|
||||
|
||||
CHECK(assert_equal(expected, fg));
|
||||
}
|
||||
|
|
@ -39,24 +43,24 @@ TEST( SymbolicFactorGraph, constructFromBayesNet )
|
|||
{
|
||||
// create expected factor graph
|
||||
SymbolicFactorGraph expected;
|
||||
expected.push_factor("l1","x1","x2");
|
||||
expected.push_factor("x1","l1");
|
||||
expected.push_factor("x1");
|
||||
expected.push_factor(vx2,vx1,vl1);
|
||||
expected.push_factor(vx1,vl1);
|
||||
expected.push_factor(vx1);
|
||||
|
||||
// create Bayes Net
|
||||
SymbolicConditional::shared_ptr x2(new SymbolicConditional("x2", "l1", "x1"));
|
||||
SymbolicConditional::shared_ptr l1(new SymbolicConditional("l1", "x1"));
|
||||
SymbolicConditional::shared_ptr x1(new SymbolicConditional("x1"));
|
||||
Conditional::shared_ptr x2(new Conditional(vx2, vx1, vl1));
|
||||
Conditional::shared_ptr l1(new Conditional(vx1, vl1));
|
||||
Conditional::shared_ptr x1(new Conditional(vx1));
|
||||
|
||||
SymbolicBayesNet bayesNet;
|
||||
BayesNet<Conditional> bayesNet;
|
||||
bayesNet.push_back(x2);
|
||||
bayesNet.push_back(l1);
|
||||
bayesNet.push_back(x1);
|
||||
|
||||
// create actual factor graph from a Bayes Net
|
||||
FactorGraph<SymbolicFactor> actual(bayesNet);
|
||||
SymbolicFactorGraph actual(bayesNet);
|
||||
|
||||
CHECK(assert_equal((FactorGraph<SymbolicFactor>)expected,actual));
|
||||
CHECK(assert_equal((SymbolicFactorGraph)expected,actual));
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
|
|
@ -65,16 +69,16 @@ TEST( SymbolicFactorGraph, push_back )
|
|||
// Create two factor graphs and expected combined graph
|
||||
SymbolicFactorGraph fg1, fg2, expected;
|
||||
|
||||
fg1.push_factor("x1");
|
||||
fg1.push_factor("x1","x2");
|
||||
fg1.push_factor(vx1);
|
||||
fg1.push_factor(vx2,vx1);
|
||||
|
||||
fg2.push_factor("l1","x1");
|
||||
fg2.push_factor("l1","x2");
|
||||
fg2.push_factor(vx1,vl1);
|
||||
fg2.push_factor(vx2,vl1);
|
||||
|
||||
expected.push_factor("x1");
|
||||
expected.push_factor("x1","x2");
|
||||
expected.push_factor("l1","x1");
|
||||
expected.push_factor("l1","x2");
|
||||
expected.push_factor(vx1);
|
||||
expected.push_factor(vx2,vx1);
|
||||
expected.push_factor(vx1,vl1);
|
||||
expected.push_factor(vx2,vl1);
|
||||
|
||||
// combine
|
||||
SymbolicFactorGraph actual = combine(fg1,fg2);
|
||||
|
|
|
|||
|
|
@ -0,0 +1,43 @@
|
|||
/**
|
||||
* @file testVariableIndex.cpp
|
||||
* @brief
|
||||
* @author Richard Roberts
|
||||
* @created Sep 26, 2010
|
||||
*/
|
||||
|
||||
#include <gtsam/CppUnitLite/TestHarness.h>
|
||||
#include <gtsam/base/TestableAssertions.h>
|
||||
|
||||
#include <gtsam/inference/VariableIndex.h>
|
||||
#include <gtsam/inference/SymbolicFactorGraph.h>
|
||||
|
||||
using namespace gtsam;
|
||||
|
||||
/* ************************************************************************* */
|
||||
TEST(VariableIndex, augment) {
|
||||
|
||||
SymbolicFactorGraph fg1, fg2;
|
||||
fg1.push_factor(0, 1);
|
||||
fg1.push_factor(0, 2);
|
||||
fg1.push_factor(5, 9);
|
||||
fg1.push_factor(2, 3);
|
||||
fg2.push_factor(1, 3);
|
||||
fg2.push_factor(2, 4);
|
||||
fg2.push_factor(3, 5);
|
||||
fg2.push_factor(5, 6);
|
||||
|
||||
SymbolicFactorGraph fgCombined; fgCombined.push_back(fg1); fgCombined.push_back(fg2);
|
||||
|
||||
VariableIndex<> expected(fgCombined);
|
||||
VariableIndex<> actual(fg1);
|
||||
actual.augment(fg2);
|
||||
|
||||
CHECK(assert_equal(expected, actual));
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
int main() {
|
||||
TestResult tr;
|
||||
return TestRegistry::runAllTests(tr);
|
||||
}
|
||||
/* ************************************************************************* */
|
||||
|
|
@ -0,0 +1,49 @@
|
|||
/**
|
||||
* @file testVariableSlots.cpp
|
||||
* @brief
|
||||
* @author Richard Roberts
|
||||
* @created Oct 5, 2010
|
||||
*/
|
||||
|
||||
#include <gtsam/CppUnitLite/TestHarness.h>
|
||||
#include <gtsam/base/TestableAssertions.h>
|
||||
|
||||
#include <gtsam/inference/VariableSlots-inl.h>
|
||||
#include <gtsam/inference/SymbolicFactorGraph.h>
|
||||
|
||||
#include <boost/assign/std/vector.hpp>
|
||||
|
||||
using namespace gtsam;
|
||||
using namespace std;
|
||||
using namespace boost::assign;
|
||||
|
||||
/* ************************************************************************* */
|
||||
TEST(VariableSlots, constructor) {
|
||||
|
||||
SymbolicFactorGraph fg;
|
||||
fg.push_factor(2, 3);
|
||||
fg.push_factor(0, 1);
|
||||
fg.push_factor(0, 2);
|
||||
fg.push_factor(5, 9);
|
||||
|
||||
VariableSlots actual(fg);
|
||||
|
||||
static const size_t none = numeric_limits<size_t>::max();
|
||||
VariableSlots expected((SymbolicFactorGraph()));
|
||||
expected[0] += none, 0, 0, none;
|
||||
expected[1] += none, 1, none, none;
|
||||
expected[2] += 0, none, 1, none;
|
||||
expected[3] += 1, none, none, none;
|
||||
expected[5] += none, none, none, 0;
|
||||
expected[9] += none, none, none, 1;
|
||||
|
||||
CHECK(assert_equal(expected, actual));
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
int main() {
|
||||
TestResult tr;
|
||||
return TestRegistry::runAllTests(tr);
|
||||
}
|
||||
/* ************************************************************************* */
|
||||
|
||||
|
|
@ -11,6 +11,7 @@
|
|||
|
||||
#include <stdexcept>
|
||||
#include <gtsam/linear/GaussianFactorGraph.h>
|
||||
#include <gtsam/inference/inference-inl.h>
|
||||
|
||||
namespace gtsam {
|
||||
|
||||
|
|
@ -22,12 +23,11 @@ namespace gtsam {
|
|||
template <class NonlinearGraph, class Config>
|
||||
class Factorization {
|
||||
private:
|
||||
boost::shared_ptr<const Ordering> ordering_;
|
||||
bool useOldEliminate_;
|
||||
boost::shared_ptr<Ordering> ordering_;
|
||||
|
||||
public:
|
||||
Factorization(boost::shared_ptr<const Ordering> ordering, bool old=true)
|
||||
: ordering_(ordering), useOldEliminate_(old) {
|
||||
Factorization(boost::shared_ptr<Ordering> ordering)
|
||||
: ordering_(ordering) {
|
||||
if (!ordering) throw std::invalid_argument("Factorization constructor: ordering = NULL");
|
||||
}
|
||||
|
||||
|
|
@ -36,14 +36,14 @@ namespace gtsam {
|
|||
* the resulted linear system
|
||||
*/
|
||||
VectorConfig optimize(GaussianFactorGraph& fg) const {
|
||||
return fg.optimize(*ordering_, useOldEliminate_);
|
||||
return gtsam::optimize(*Inference::Eliminate(fg));
|
||||
}
|
||||
|
||||
/**
|
||||
* linearize the non-linear graph around the current config
|
||||
*/
|
||||
boost::shared_ptr<GaussianFactorGraph> linearize(const NonlinearGraph& g, const Config& config) const {
|
||||
return g.linearize(config);
|
||||
return g.linearize(config, *ordering_);
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
@ -52,6 +52,11 @@ namespace gtsam {
|
|||
boost::shared_ptr<Factorization> prepareLinear(const GaussianFactorGraph& fg) const {
|
||||
return boost::shared_ptr<Factorization>(new Factorization(*this));
|
||||
}
|
||||
|
||||
/** expmap the Config given the stored Ordering */
|
||||
Config expmap(const Config& config, const VectorConfig& delta) const {
|
||||
return config.expmap(delta, *ordering_);
|
||||
}
|
||||
};
|
||||
|
||||
}
|
||||
|
|
|
|||
|
|
@ -8,9 +8,9 @@
|
|||
#include <boost/foreach.hpp>
|
||||
#include <boost/tuple/tuple.hpp>
|
||||
|
||||
#include <gtsam/base/Matrix-inl.h>
|
||||
#include <gtsam/linear/GaussianBayesNet.h>
|
||||
#include <gtsam/linear/VectorConfig.h>
|
||||
#include <gtsam/inference/SymbolMap.h>
|
||||
|
||||
using namespace std;
|
||||
using namespace gtsam;
|
||||
|
|
@ -26,7 +26,7 @@ template class BayesNet<GaussianConditional>;
|
|||
namespace gtsam {
|
||||
|
||||
/* ************************************************************************* */
|
||||
GaussianBayesNet scalarGaussian(const Symbol& key, double mu, double sigma) {
|
||||
GaussianBayesNet scalarGaussian(varid_t key, double mu, double sigma) {
|
||||
GaussianBayesNet bn;
|
||||
GaussianConditional::shared_ptr
|
||||
conditional(new GaussianConditional(key, Vector_(1,mu)/sigma, eye(1)/sigma, ones(1)));
|
||||
|
|
@ -35,7 +35,7 @@ GaussianBayesNet scalarGaussian(const Symbol& key, double mu, double sigma) {
|
|||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
GaussianBayesNet simpleGaussian(const Symbol& key, const Vector& mu, double sigma) {
|
||||
GaussianBayesNet simpleGaussian(varid_t key, const Vector& mu, double sigma) {
|
||||
GaussianBayesNet bn;
|
||||
size_t n = mu.size();
|
||||
GaussianConditional::shared_ptr
|
||||
|
|
@ -45,19 +45,29 @@ GaussianBayesNet simpleGaussian(const Symbol& key, const Vector& mu, double sigm
|
|||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
void push_front(GaussianBayesNet& bn, const Symbol& key, Vector d, Matrix R,
|
||||
const Symbol& name1, Matrix S, Vector sigmas) {
|
||||
void push_front(GaussianBayesNet& bn, varid_t key, Vector d, Matrix R,
|
||||
varid_t name1, Matrix S, Vector sigmas) {
|
||||
GaussianConditional::shared_ptr cg(new GaussianConditional(key, d, R, name1, S, sigmas));
|
||||
bn.push_front(cg);
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
void push_front(GaussianBayesNet& bn, const Symbol& key, Vector d, Matrix R,
|
||||
const Symbol& name1, Matrix S, const Symbol& name2, Matrix T, Vector sigmas) {
|
||||
void push_front(GaussianBayesNet& bn, varid_t key, Vector d, Matrix R,
|
||||
varid_t name1, Matrix S, varid_t name2, Matrix T, Vector sigmas) {
|
||||
GaussianConditional::shared_ptr cg(new GaussianConditional(key, d, R, name1, S, name2, T, sigmas));
|
||||
bn.push_front(cg);
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
boost::shared_ptr<VectorConfig> allocateVectorConfig(const GaussianBayesNet& bn) {
|
||||
vector<size_t> dimensions(bn.size());
|
||||
varid_t var = 0;
|
||||
BOOST_FOREACH(const boost::shared_ptr<const GaussianConditional> conditional, bn) {
|
||||
dimensions[var++] = conditional->get_R().size1();
|
||||
}
|
||||
return boost::shared_ptr<VectorConfig>(new VectorConfig(dimensions));
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
VectorConfig optimize(const GaussianBayesNet& bn)
|
||||
{
|
||||
|
|
@ -67,19 +77,19 @@ VectorConfig optimize(const GaussianBayesNet& bn)
|
|||
/* ************************************************************************* */
|
||||
boost::shared_ptr<VectorConfig> optimize_(const GaussianBayesNet& bn)
|
||||
{
|
||||
boost::shared_ptr<VectorConfig> result(new VectorConfig);
|
||||
boost::shared_ptr<VectorConfig> result(allocateVectorConfig(bn));
|
||||
|
||||
/** solve each node in turn in topological sort order (parents first)*/
|
||||
BOOST_REVERSE_FOREACH(GaussianConditional::shared_ptr cg, bn) {
|
||||
Vector x = cg->solve(*result); // Solve for that variable
|
||||
result->insert(cg->key(),x); // store result in partial solution
|
||||
(*result)[cg->key()] = x; // store result in partial solution
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
VectorConfig backSubstitute(const GaussianBayesNet& bn, const VectorConfig& y) {
|
||||
VectorConfig x = y;
|
||||
VectorConfig x(y);
|
||||
backSubstituteInPlace(bn,x);
|
||||
return x;
|
||||
}
|
||||
|
|
@ -89,16 +99,14 @@ VectorConfig backSubstitute(const GaussianBayesNet& bn, const VectorConfig& y) {
|
|||
void backSubstituteInPlace(const GaussianBayesNet& bn, VectorConfig& y) {
|
||||
VectorConfig& x = y;
|
||||
/** solve each node in turn in topological sort order (parents first)*/
|
||||
BOOST_REVERSE_FOREACH(GaussianConditional::shared_ptr cg, bn) {
|
||||
BOOST_REVERSE_FOREACH(const boost::shared_ptr<const GaussianConditional> cg, bn) {
|
||||
// i^th part of R*x=y, x=inv(R)*y
|
||||
// (Rii*xi + R_i*x(i+1:))./si = yi <-> xi = inv(Rii)*(yi.*si - R_i*x(i+1:))
|
||||
const Symbol& i = cg->key();
|
||||
varid_t i = cg->key();
|
||||
Vector zi = emul(y[i],cg->get_sigmas());
|
||||
GaussianConditional::const_iterator it;
|
||||
for (it = cg->parentsBegin(); it!= cg->parentsEnd(); it++) {
|
||||
const Symbol& j = it->first;
|
||||
const Matrix& Rij = it->second;
|
||||
multiplyAdd(-1.0,Rij,x[j],zi);
|
||||
for (it = cg->beginParents(); it!= cg->endParents(); it++) {
|
||||
multiplyAdd(-1.0,cg->get_S(it),x[*it],zi);
|
||||
}
|
||||
x[i] = gtsam::backSubstituteUpper(cg->get_R(), zi);
|
||||
}
|
||||
|
|
@ -117,20 +125,19 @@ VectorConfig backSubstituteTranspose(const GaussianBayesNet& bn,
|
|||
|
||||
// we loop from first-eliminated to last-eliminated
|
||||
// i^th part of L*gy=gx is done block-column by block-column of L
|
||||
BOOST_FOREACH(GaussianConditional::shared_ptr cg, bn) {
|
||||
const Symbol& j = cg->key();
|
||||
BOOST_FOREACH(const boost::shared_ptr<const GaussianConditional> cg, bn) {
|
||||
varid_t j = cg->key();
|
||||
gy[j] = gtsam::backSubstituteUpper(gy[j],cg->get_R());
|
||||
GaussianConditional::const_iterator it;
|
||||
for (it = cg->parentsBegin(); it!= cg->parentsEnd(); it++) {
|
||||
const Symbol& i = it->first;
|
||||
const Matrix& Rij = it->second;
|
||||
transposeMultiplyAdd(-1.0,Rij,gy[j],gy[i]);
|
||||
for (it = cg->beginParents(); it!= cg->endParents(); it++) {
|
||||
const varid_t i = *it;
|
||||
transposeMultiplyAdd(-1.0,cg->get_S(it),gy[j],gy[i]);
|
||||
}
|
||||
}
|
||||
|
||||
// Scale gy
|
||||
BOOST_FOREACH(GaussianConditional::shared_ptr cg, bn) {
|
||||
const Symbol& j = cg->key();
|
||||
varid_t j = cg->key();
|
||||
gy[j] = emul(gy[j],cg->get_sigmas());
|
||||
}
|
||||
|
||||
|
|
@ -142,7 +149,7 @@ pair<Matrix,Vector> matrix(const GaussianBayesNet& bn) {
|
|||
|
||||
// add the dimensions of all variables to get matrix dimension
|
||||
// and at the same time create a mapping from keys to indices
|
||||
size_t N=0; SymbolMap<size_t> mapping;
|
||||
size_t N=0; map<varid_t,size_t> mapping;
|
||||
BOOST_FOREACH(GaussianConditional::shared_ptr cg,bn) {
|
||||
mapping.insert(make_pair(cg->key(),N));
|
||||
N += cg->dim();
|
||||
|
|
@ -151,32 +158,32 @@ pair<Matrix,Vector> matrix(const GaussianBayesNet& bn) {
|
|||
// create matrix and copy in values
|
||||
Matrix R = zeros(N,N);
|
||||
Vector d(N);
|
||||
Symbol key; size_t I;
|
||||
varid_t key; size_t I;
|
||||
FOREACH_PAIR(key,I,mapping) {
|
||||
// find corresponding conditional
|
||||
GaussianConditional::shared_ptr cg = bn[key];
|
||||
boost::shared_ptr<const GaussianConditional> cg = bn[key];
|
||||
|
||||
// get sigmas
|
||||
Vector sigmas = cg->get_sigmas();
|
||||
|
||||
// get RHS and copy to d
|
||||
const Vector& d_ = cg->get_d();
|
||||
GaussianConditional::const_d_type d_ = cg->get_d();
|
||||
const size_t n = d_.size();
|
||||
for (size_t i=0;i<n;i++)
|
||||
d(I+i) = d_(i)/sigmas(i);
|
||||
|
||||
// get leading R matrix and copy to R
|
||||
const Matrix& R_ = cg->get_R();
|
||||
GaussianConditional::const_r_type R_ = cg->get_R();
|
||||
for (size_t i=0;i<n;i++)
|
||||
for(size_t j=0;j<n;j++)
|
||||
R(I+i,I+j) = R_(i,j)/sigmas(i);
|
||||
|
||||
// loop over S matrices and copy them into R
|
||||
GaussianConditional::const_iterator keyS = cg->parentsBegin();
|
||||
for (; keyS!=cg->parentsEnd(); keyS++) {
|
||||
Matrix S = keyS->second; // get S matrix
|
||||
GaussianConditional::const_iterator keyS = cg->beginParents();
|
||||
for (; keyS!=cg->endParents(); keyS++) {
|
||||
Matrix S = cg->get_S(keyS); // get S matrix
|
||||
const size_t m = S.size1(), n = S.size2(); // find S size
|
||||
const size_t J = mapping[keyS->first]; // find column index
|
||||
const size_t J = mapping[*keyS]; // find column index
|
||||
for (size_t i=0;i<m;i++)
|
||||
for(size_t j=0;j<n;j++)
|
||||
R(I+i,J+j) = S(i,j)/sigmas(i);
|
||||
|
|
@ -189,18 +196,18 @@ pair<Matrix,Vector> matrix(const GaussianBayesNet& bn) {
|
|||
|
||||
/* ************************************************************************* */
|
||||
VectorConfig rhs(const GaussianBayesNet& bn) {
|
||||
VectorConfig result;
|
||||
BOOST_FOREACH(GaussianConditional::shared_ptr cg,bn) {
|
||||
const Symbol& key = cg->key();
|
||||
boost::shared_ptr<VectorConfig> result(allocateVectorConfig(bn));
|
||||
BOOST_FOREACH(boost::shared_ptr<const GaussianConditional> cg,bn) {
|
||||
varid_t key = cg->key();
|
||||
// get sigmas
|
||||
Vector sigmas = cg->get_sigmas();
|
||||
const Vector& sigmas = cg->get_sigmas();
|
||||
|
||||
// get RHS and copy to d
|
||||
const Vector& d = cg->get_d();
|
||||
result.insert(key,ediv_(d,sigmas)); // TODO ediv_? I think not
|
||||
GaussianConditional::const_d_type d = cg->get_d();
|
||||
(*result)[key] = ediv_(d,sigmas); // TODO ediv_? I think not
|
||||
}
|
||||
|
||||
return result;
|
||||
return *result;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
|
|
|
|||
|
|
@ -11,9 +11,11 @@
|
|||
|
||||
#include <list>
|
||||
|
||||
#include <boost/shared_ptr.hpp>
|
||||
|
||||
#include <gtsam/base/types.h>
|
||||
#include <gtsam/linear/GaussianConditional.h>
|
||||
#include <gtsam/inference/BayesNet.h>
|
||||
#include <gtsam/inference/Key.h>
|
||||
|
||||
namespace gtsam {
|
||||
|
||||
|
|
@ -21,24 +23,29 @@ namespace gtsam {
|
|||
typedef BayesNet<GaussianConditional> GaussianBayesNet;
|
||||
|
||||
/** Create a scalar Gaussian */
|
||||
GaussianBayesNet scalarGaussian(const Symbol& key, double mu=0.0, double sigma=1.0);
|
||||
GaussianBayesNet scalarGaussian(varid_t key, double mu=0.0, double sigma=1.0);
|
||||
|
||||
/** Create a simple Gaussian on a single multivariate variable */
|
||||
GaussianBayesNet simpleGaussian(const Symbol& key, const Vector& mu, double sigma=1.0);
|
||||
GaussianBayesNet simpleGaussian(varid_t key, const Vector& mu, double sigma=1.0);
|
||||
|
||||
/**
|
||||
* Add a conditional node with one parent
|
||||
* |Rx+Sy-d|
|
||||
*/
|
||||
void push_front(GaussianBayesNet& bn, const Symbol& key, Vector d, Matrix R,
|
||||
const Symbol& name1, Matrix S, Vector sigmas);
|
||||
void push_front(GaussianBayesNet& bn, varid_t key, Vector d, Matrix R,
|
||||
varid_t name1, Matrix S, Vector sigmas);
|
||||
|
||||
/**
|
||||
* Add a conditional node with two parents
|
||||
* |Rx+Sy+Tz-d|
|
||||
*/
|
||||
void push_front(GaussianBayesNet& bn, const Symbol& key, Vector d, Matrix R,
|
||||
const Symbol& name1, Matrix S, const Symbol& name2, Matrix T, Vector sigmas);
|
||||
void push_front(GaussianBayesNet& bn, varid_t key, Vector d, Matrix R,
|
||||
varid_t name1, Matrix S, varid_t name2, Matrix T, Vector sigmas);
|
||||
|
||||
/**
|
||||
* Allocate a VectorConfig for the variables in a BayesNet
|
||||
*/
|
||||
boost::shared_ptr<VectorConfig> allocateVectorConfig(const GaussianBayesNet& bn);
|
||||
|
||||
/**
|
||||
* optimize, i.e. return x = inv(R)*d
|
||||
|
|
|
|||
|
|
@ -6,77 +6,115 @@
|
|||
|
||||
#include <string.h>
|
||||
#include <boost/numeric/ublas/vector.hpp>
|
||||
#include <gtsam/inference/Ordering.h>
|
||||
#include <boost/numeric/ublas/operation.hpp>
|
||||
#include <boost/format.hpp>
|
||||
#include <boost/lambda/bind.hpp>
|
||||
|
||||
#include <gtsam/linear/GaussianConditional.h>
|
||||
#include <gtsam/base/Matrix-inl.h>
|
||||
|
||||
using namespace std;
|
||||
using namespace gtsam;
|
||||
namespace ublas = boost::numeric::ublas;
|
||||
|
||||
namespace gtsam {
|
||||
|
||||
/* ************************************************************************* */
|
||||
GaussianConditional::GaussianConditional(const Symbol& key,Vector d, Matrix R, Vector sigmas) :
|
||||
Conditional (key), R_(R),d_(d),sigmas_(sigmas)
|
||||
{
|
||||
GaussianConditional::GaussianConditional() : rsd_(matrix_) {}
|
||||
|
||||
/* ************************************************************************* */
|
||||
GaussianConditional::GaussianConditional(varid_t key) : Conditional(key), rsd_(matrix_) {}
|
||||
|
||||
/* ************************************************************************* */
|
||||
GaussianConditional::GaussianConditional(varid_t key,const Vector& d, const Matrix& R, const Vector& sigmas) :
|
||||
Conditional(key), rsd_(matrix_), sigmas_(sigmas) {
|
||||
assert(R.size1() <= R.size2());
|
||||
size_t dims[] = { R.size2(), 1 };
|
||||
rsd_.copyStructureFrom(rsd_type(matrix_, dims, dims+2, d.size()));
|
||||
ublas::noalias(rsd_(0)) = ublas::triangular_adaptor<const Matrix, ublas::upper>(R);
|
||||
ublas::noalias(get_d_()) = d;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
GaussianConditional::GaussianConditional(const Symbol& key, Vector d, Matrix R,
|
||||
const Symbol& name1, Matrix S, Vector sigmas) :
|
||||
Conditional (key), R_(R), d_(d), sigmas_(sigmas) {
|
||||
parents_.insert(make_pair(name1, S));
|
||||
GaussianConditional::GaussianConditional(varid_t key, const Vector& d, const Matrix& R,
|
||||
varid_t name1, const Matrix& S, const Vector& sigmas) :
|
||||
Conditional(key,name1), rsd_(matrix_), sigmas_(sigmas) {
|
||||
assert(R.size1() <= R.size2());
|
||||
size_t dims[] = { R.size2(), S.size2(), 1 };
|
||||
rsd_.copyStructureFrom(rsd_type(matrix_, dims, dims+3, d.size()));
|
||||
ublas::noalias(rsd_(0)) = ublas::triangular_adaptor<const Matrix, ublas::upper>(R);
|
||||
ublas::noalias(rsd_(1)) = S;
|
||||
ublas::noalias(get_d_()) = d;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
GaussianConditional::GaussianConditional(const Symbol& key, Vector d, Matrix R,
|
||||
const Symbol& name1, Matrix S, const Symbol& name2, Matrix T, Vector sigmas) :
|
||||
Conditional (key), R_(R), d_(d),sigmas_(sigmas) {
|
||||
parents_.insert(make_pair(name1, S));
|
||||
parents_.insert(make_pair(name2, T));
|
||||
GaussianConditional::GaussianConditional(varid_t key, const Vector& d, const Matrix& R,
|
||||
varid_t name1, const Matrix& S, varid_t name2, const Matrix& T, const Vector& sigmas) :
|
||||
Conditional(key,name1,name2), rsd_(matrix_), sigmas_(sigmas) {
|
||||
assert(R.size1() <= R.size2());
|
||||
size_t dims[] = { R.size2(), S.size2(), T.size2(), 1 };
|
||||
rsd_.copyStructureFrom(rsd_type(matrix_, dims, dims+4, d.size()));
|
||||
ublas::noalias(rsd_(0)) = ublas::triangular_adaptor<const Matrix, ublas::upper>(R);
|
||||
ublas::noalias(rsd_(1)) = S;
|
||||
ublas::noalias(rsd_(2)) = T;
|
||||
ublas::noalias(get_d_()) = d;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
GaussianConditional::GaussianConditional(const Symbol& key,
|
||||
const Vector& d, const Matrix& R, const SymbolMap<Matrix>& parents, Vector sigmas) :
|
||||
Conditional (key), R_(R), parents_(parents), d_(d),sigmas_(sigmas) {
|
||||
GaussianConditional::GaussianConditional(varid_t key, const Vector& d, const Matrix& R, const list<pair<varid_t, Matrix> >& parents, const Vector& sigmas) :
|
||||
rsd_(matrix_), sigmas_(sigmas) {
|
||||
assert(R.size1() <= R.size2());
|
||||
Conditional::nFrontal_ = 1;
|
||||
Conditional::factor_.keys_.resize(1+parents.size());
|
||||
size_t dims[1+parents.size()+1];
|
||||
dims[0] = R.size2();
|
||||
Conditional::factor_.keys_[0] = key;
|
||||
size_t j=1;
|
||||
for(std::list<std::pair<varid_t, Matrix> >::const_iterator parent=parents.begin(); parent!=parents.end(); ++parent) {
|
||||
Conditional::factor_.keys_[j] = parent->first;
|
||||
dims[j] = parent->second.size2();
|
||||
++ j;
|
||||
}
|
||||
dims[j] = 1;
|
||||
rsd_.copyStructureFrom(rsd_type(matrix_, dims, dims+1+parents.size()+1, d.size()));
|
||||
ublas::noalias(rsd_(0)) = ublas::triangular_adaptor<const Matrix, ublas::upper>(R);
|
||||
j = 1;
|
||||
for(std::list<std::pair<varid_t, Matrix> >::const_iterator parent=parents.begin(); parent!=parents.end(); ++parent) {
|
||||
ublas::noalias(rsd_(j)) = parent->second;
|
||||
++ j;
|
||||
}
|
||||
ublas::noalias(get_d_()) = d;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
void GaussianConditional::print(const string &s) const
|
||||
{
|
||||
cout << s << ": density on " << (string)key_ << endl;
|
||||
gtsam::print(R_,"R");
|
||||
for(Parents::const_iterator it = parents_.begin() ; it != parents_.end() ; it++ ) {
|
||||
const Symbol& j = it->first;
|
||||
const Matrix& Aj = it->second;
|
||||
gtsam::print(Aj, "A["+(string)j+"]");
|
||||
cout << s << ": density on " << key() << endl;
|
||||
gtsam::print(get_R(),"R");
|
||||
for(const_iterator it = beginParents() ; it != endParents() ; it++ ) {
|
||||
gtsam::print(get_S(it), (boost::format("A[%1%]")%(*it)).str());
|
||||
}
|
||||
gtsam::print(d_,"d");
|
||||
gtsam::print(get_d(),"d");
|
||||
gtsam::print(sigmas_,"sigmas");
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
bool GaussianConditional::equals(const Conditional &c, double tol) const {
|
||||
if (!Conditional::equals(c)) return false;
|
||||
const GaussianConditional* p = dynamic_cast<const GaussianConditional*> (&c);
|
||||
if (p == NULL) return false;
|
||||
Parents::const_iterator it = parents_.begin();
|
||||
|
||||
bool GaussianConditional::equals(const GaussianConditional &c, double tol) const {
|
||||
// check if the size of the parents_ map is the same
|
||||
if (parents_.size() != p->parents_.size()) return false;
|
||||
if (parents().size() != c.parents().size()) return false;
|
||||
|
||||
// check if R_ and d_ are linear independent
|
||||
for (size_t i=0; i<d_.size(); i++) {
|
||||
list<Vector> rows1; rows1.push_back(row_(R_, i));
|
||||
list<Vector> rows2; rows2.push_back(row_(p->R_, i));
|
||||
for (size_t i=0; i<rsd_.size1(); i++) {
|
||||
list<Vector> rows1; rows1.push_back(row_(get_R(), i));
|
||||
list<Vector> rows2; rows2.push_back(row_(c.get_R(), i));
|
||||
|
||||
// check if the matrices are the same
|
||||
// iterate over the parents_ map
|
||||
for (it = parents_.begin(); it != parents_.end(); it++) {
|
||||
Parents::const_iterator it2 = p->parents_.find(it->first);
|
||||
if (it2 != p->parents_.end()) {
|
||||
rows1.push_back(row_(it->second, i));
|
||||
rows2.push_back(row_(it2->second,i));
|
||||
} else
|
||||
return false;
|
||||
for (const_iterator it = beginParents(); it != endParents(); ++it) {
|
||||
const_iterator it2 = c.beginParents() + (it-beginParents());
|
||||
if(*it != *(it2))
|
||||
return false;
|
||||
rows1.push_back(row_(get_S(it), i));
|
||||
rows2.push_back(row_(c.get_S(it2), i));
|
||||
}
|
||||
|
||||
Vector row1 = concatVectors(rows1);
|
||||
|
|
@ -85,28 +123,70 @@ bool GaussianConditional::equals(const Conditional &c, double tol) const {
|
|||
}
|
||||
|
||||
// check if sigmas are equal
|
||||
if (!(::equal_with_abs_tol(sigmas_, p->sigmas_, tol))) return false;
|
||||
if (!(equal_with_abs_tol(sigmas_, c.sigmas_, tol))) return false;
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
list<Symbol> GaussianConditional::parents() const {
|
||||
list<Symbol> result;
|
||||
for (Parents::const_iterator it = parents_.begin(); it != parents_.end(); it++)
|
||||
result.push_back(it->first);
|
||||
return result;
|
||||
}
|
||||
///* ************************************************************************* */
|
||||
//void GaussianConditional::permuteWithInverse(const Permutation& inversePermutation) {
|
||||
// Conditional::permuteWithInverse(inversePermutation);
|
||||
// BOOST_FOREACH(Parents::value_type& parent, parents_) {
|
||||
// parent.first = inversePermutation[parent.first];
|
||||
// }
|
||||
//#ifndef NDEBUG
|
||||
// const_iterator parent = parents_.begin();
|
||||
// Conditional::const_iterator baseParent = Conditional::parents_.begin();
|
||||
// while(parent != parents_.end())
|
||||
// assert((parent++)->first == *(baseParent++));
|
||||
// assert(baseParent == Conditional::parents_.end());
|
||||
//#endif
|
||||
//}
|
||||
//
|
||||
///* ************************************************************************* */
|
||||
//bool GaussianConditional::permuteSeparatorWithInverse(const Permutation& inversePermutation) {
|
||||
// bool separatorChanged = Conditional::permuteSeparatorWithInverse(inversePermutation);
|
||||
// BOOST_FOREACH(Parents::value_type& parent, parents_) {
|
||||
// parent.first = inversePermutation[parent.first];
|
||||
// }
|
||||
//#ifndef NDEBUG
|
||||
// const_iterator parent = parents_.begin();
|
||||
// Conditional::const_iterator baseParent = Conditional::parents_.begin();
|
||||
// while(parent != parents_.end())
|
||||
// assert((parent++)->first == *(baseParent++));
|
||||
// assert(baseParent == Conditional::parents_.end());
|
||||
//#endif
|
||||
// return separatorChanged;
|
||||
//}
|
||||
|
||||
/* ************************************************************************* */
|
||||
Vector GaussianConditional::solve(const VectorConfig& x) const {
|
||||
Vector rhs = d_;
|
||||
for (Parents::const_iterator it = parents_.begin(); it!= parents_.end(); it++) {
|
||||
const Symbol& j = it->first;
|
||||
const Matrix& Aj = it->second;
|
||||
multiplyAdd(-1.0,Aj,x[j],rhs);
|
||||
static const bool debug = false;
|
||||
if(debug) print("Solving conditional ");
|
||||
Vector rhs(get_d());
|
||||
for (const_iterator parent = beginParents(); parent != endParents(); ++parent) {
|
||||
ublas::axpy_prod(-get_S(parent), x[*parent], rhs, false);
|
||||
// multiplyAdd(-1.0, get_S(parent), x[*parent], rhs);
|
||||
}
|
||||
return backSubstituteUpper(R_, rhs, false);
|
||||
if(debug) gtsam::print(get_R(), "Calling backSubstituteUpper on ");
|
||||
if(debug) gtsam::print(rhs, "rhs: ");
|
||||
if(debug) {
|
||||
Vector soln = backSubstituteUpper(get_R(), rhs, false);
|
||||
gtsam::print(soln, "back-substitution solution: ");
|
||||
return soln;
|
||||
} else
|
||||
return backSubstituteUpper(get_R(), rhs, false);
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
Vector GaussianConditional::solve(const Permuted<VectorConfig>& x) const {
|
||||
Vector rhs(get_d());
|
||||
for (const_iterator parent = beginParents(); parent != endParents(); ++parent) {
|
||||
ublas::axpy_prod(-get_S(parent), x[*parent], rhs, false);
|
||||
// multiplyAdd(-1.0, get_S(parent), x[*parent], rhs);
|
||||
}
|
||||
return backSubstituteUpper(get_R(), rhs, false);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -8,22 +8,20 @@
|
|||
|
||||
#pragma once
|
||||
|
||||
#include <map>
|
||||
#include <list>
|
||||
#include <boost/utility.hpp>
|
||||
#include <boost/shared_ptr.hpp>
|
||||
//#include <boost/serialization/map.hpp>
|
||||
//#include <boost/serialization/shared_ptr.hpp>
|
||||
#include <boost/numeric/ublas/triangular.hpp>
|
||||
|
||||
#include <gtsam/base/types.h>
|
||||
#include <gtsam/inference/Conditional.h>
|
||||
#include <gtsam/linear/VectorConfig.h>
|
||||
#include <gtsam/base/Matrix.h>
|
||||
#include <gtsam/inference/Key.h>
|
||||
#include <gtsam/inference/SymbolMap.h>
|
||||
#include <gtsam/base/blockMatrices.h>
|
||||
|
||||
namespace gtsam {
|
||||
|
||||
class Ordering;
|
||||
class GaussianFactor;
|
||||
|
||||
/**
|
||||
* A conditional Gaussian functions as the node in a Bayes network
|
||||
|
|
@ -33,20 +31,24 @@ class Ordering;
|
|||
class GaussianConditional : public Conditional {
|
||||
|
||||
public:
|
||||
typedef SymbolMap<Matrix> Parents;
|
||||
typedef Parents::const_iterator const_iterator;
|
||||
typedef GaussianFactor FactorType;
|
||||
typedef boost::shared_ptr<GaussianConditional> shared_ptr;
|
||||
|
||||
/** Store the conditional matrix as upper-triangular column-major */
|
||||
typedef boost::numeric::ublas::triangular_matrix<double, boost::numeric::ublas::upper, boost::numeric::ublas::column_major> matrix_type;
|
||||
typedef VerticalBlockView<matrix_type> rsd_type;
|
||||
|
||||
typedef rsd_type::block_type r_type;
|
||||
typedef rsd_type::const_block_type const_r_type;
|
||||
typedef rsd_type::column_type d_type;
|
||||
typedef rsd_type::const_column_type const_d_type;
|
||||
|
||||
protected:
|
||||
|
||||
/** the triangular matrix (square root information matrix) - unit normalized */
|
||||
Matrix R_;
|
||||
|
||||
/** the names and the matrices connecting to parent nodes */
|
||||
Parents parents_;
|
||||
|
||||
/** the RHS vector */
|
||||
Vector d_;
|
||||
/** Store the conditional as one big upper-triangular wide matrix, arranged
|
||||
* as [ R S1 S2 ... d ]. Access these blocks using a VerticalBlockView. */
|
||||
matrix_type matrix_;
|
||||
rsd_type rsd_;
|
||||
|
||||
/** vector of standard deviations */
|
||||
Vector sigmas_;
|
||||
|
|
@ -54,99 +56,106 @@ protected:
|
|||
public:
|
||||
|
||||
/** default constructor needed for serialization */
|
||||
GaussianConditional(){}
|
||||
GaussianConditional();
|
||||
|
||||
/** constructor */
|
||||
GaussianConditional(const Symbol& key) :
|
||||
Conditional (key) {}
|
||||
GaussianConditional(varid_t key);
|
||||
|
||||
/** constructor with no parents
|
||||
* |Rx-d|
|
||||
*/
|
||||
GaussianConditional(const Symbol& key, Vector d, Matrix R, Vector sigmas);
|
||||
GaussianConditional(varid_t key, const Vector& d, const Matrix& R, const Vector& sigmas);
|
||||
|
||||
/** constructor with only one parent
|
||||
* |Rx+Sy-d|
|
||||
*/
|
||||
GaussianConditional(const Symbol& key, Vector d, Matrix R,
|
||||
const Symbol& name1, Matrix S, Vector sigmas);
|
||||
GaussianConditional(varid_t key, const Vector& d, const Matrix& R,
|
||||
varid_t name1, const Matrix& S, const Vector& sigmas);
|
||||
|
||||
/** constructor with two parents
|
||||
* |Rx+Sy+Tz-d|
|
||||
*/
|
||||
GaussianConditional(const Symbol& key, Vector d, Matrix R,
|
||||
const Symbol& name1, Matrix S, const Symbol& name2, Matrix T, Vector sigmas);
|
||||
GaussianConditional(varid_t key, const Vector& d, const Matrix& R,
|
||||
varid_t name1, const Matrix& S, varid_t name2, const Matrix& T, const Vector& sigmas);
|
||||
|
||||
/**
|
||||
* constructor with number of arbitrary parents
|
||||
* |Rx+sum(Ai*xi)-d|
|
||||
*/
|
||||
GaussianConditional(const Symbol& key, const Vector& d,
|
||||
const Matrix& R, const Parents& parents, Vector sigmas);
|
||||
GaussianConditional(varid_t key, const Vector& d,
|
||||
const Matrix& R, const std::list<std::pair<varid_t, Matrix> >& parents, const Vector& sigmas);
|
||||
|
||||
/** deconstructor */
|
||||
virtual ~GaussianConditional() {}
|
||||
/**
|
||||
* Constructor when matrices are already stored in a combined matrix, allows
|
||||
* for multiple frontal variables.
|
||||
*/
|
||||
template<typename Iterator, class Matrix>
|
||||
GaussianConditional(Iterator firstKey, Iterator lastKey, size_t nFrontals, const VerticalBlockView<Matrix>& matrices, const Vector& sigmas);
|
||||
|
||||
/** print */
|
||||
void print(const std::string& = "GaussianConditional") const;
|
||||
|
||||
/** equals function */
|
||||
bool equals(const Conditional &cg, double tol = 1e-9) const;
|
||||
bool equals(const GaussianConditional &cg, double tol = 1e-9) const;
|
||||
|
||||
// /** permute the variables in the conditional */
|
||||
// void permuteWithInverse(const Permutation& inversePermutation);
|
||||
//
|
||||
// /** Permute the variables when only separator variables need to be permuted.
|
||||
// * Returns true if any reordered variables appeared in the separator and
|
||||
// * false if not.
|
||||
// */
|
||||
// bool permuteSeparatorWithInverse(const Permutation& inversePermutation);
|
||||
|
||||
/** dimension of multivariate variable */
|
||||
size_t dim() const { return R_.size2();}
|
||||
|
||||
/** return all parents */
|
||||
std::list<Symbol> parents() const;
|
||||
size_t dim() const { return rsd_.size1(); }
|
||||
|
||||
/** return stuff contained in GaussianConditional */
|
||||
const Vector& get_d() const {return d_;}
|
||||
const Matrix& get_R() const {return R_;}
|
||||
rsd_type::const_column_type get_d() const { return rsd_.column(1+nrParents(), 0); }
|
||||
rsd_type::const_block_type get_R() const { return rsd_(0); }
|
||||
rsd_type::const_block_type get_S(const_iterator variable) const { return rsd_(variable-Conditional::factor_.begin()); }
|
||||
const Vector& get_sigmas() const {return sigmas_;}
|
||||
|
||||
/** STL like, return the iterator pointing to the first node */
|
||||
const_iterator const parentsBegin() const {
|
||||
return parents_.begin();
|
||||
}
|
||||
|
||||
/** STL like, return the iterator pointing to the last node */
|
||||
const_iterator const parentsEnd() const {
|
||||
return parents_.end();
|
||||
}
|
||||
|
||||
/** find the number of parents */
|
||||
size_t nrParents() const {
|
||||
return parents_.size();
|
||||
}
|
||||
|
||||
/** determine whether a key is among the parents */
|
||||
size_t contains(const Symbol& key) const {
|
||||
return parents_.count(key);
|
||||
}
|
||||
/**
|
||||
* solve a conditional Gaussian
|
||||
* @param x configuration in which the parents values (y,z,...) are known
|
||||
* @return solution x = R \ (d - Sy - Tz - ...)
|
||||
*/
|
||||
virtual Vector solve(const VectorConfig& x) const;
|
||||
Vector solve(const VectorConfig& x) const;
|
||||
|
||||
/**
|
||||
* adds a parent
|
||||
*/
|
||||
void add(const Symbol& key, Matrix S){
|
||||
parents_.insert(make_pair(key, S));
|
||||
}
|
||||
/**
|
||||
* solve a conditional Gaussian
|
||||
* @param x configuration in which the parents values (y,z,...) are known
|
||||
* @return solution x = R \ (d - Sy - Tz - ...)
|
||||
*/
|
||||
Vector solve(const Permuted<VectorConfig>& x) const;
|
||||
|
||||
protected:
|
||||
rsd_type::column_type get_d_() { return rsd_.column(1+nrParents(), 0); }
|
||||
rsd_type::block_type get_R_() { return rsd_(0); }
|
||||
rsd_type::block_type get_S_(iterator variable) { return rsd_(variable-const_cast<const Factor&>(Conditional::factor_).begin()); }
|
||||
|
||||
friend class GaussianFactor;
|
||||
|
||||
private:
|
||||
/** Serialization function */
|
||||
friend class boost::serialization::access;
|
||||
template<class Archive>
|
||||
void serialize(Archive & ar, const unsigned int version) {
|
||||
ar & boost::serialization::make_nvp("Conditional", boost::serialization::base_object<Conditional>(*this));
|
||||
ar & BOOST_SERIALIZATION_NVP(R_);
|
||||
ar & BOOST_SERIALIZATION_NVP(parents_);
|
||||
ar & BOOST_SERIALIZATION_NVP(d_);
|
||||
ar & BOOST_SERIALIZATION_NVP(sigmas_);
|
||||
}
|
||||
// /** Serialization function */
|
||||
// friend class boost::serialization::access;
|
||||
// template<class Archive>
|
||||
// void serialize(Archive & ar, const unsigned int version) {
|
||||
// ar & boost::serialization::make_nvp("Conditional", boost::serialization::base_object<Conditional>(*this));
|
||||
// ar & BOOST_SERIALIZATION_NVP(R_);
|
||||
// ar & BOOST_SERIALIZATION_NVP(parents_);
|
||||
// ar & BOOST_SERIALIZATION_NVP(d_);
|
||||
// ar & BOOST_SERIALIZATION_NVP(sigmas_);
|
||||
// }
|
||||
};
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<typename Iterator, class Matrix>
|
||||
GaussianConditional::GaussianConditional(Iterator firstKey, Iterator lastKey, size_t nFrontals, const VerticalBlockView<Matrix>& matrices, const Vector& sigmas) :
|
||||
Conditional(firstKey, lastKey, nFrontals), rsd_(matrix_), sigmas_(sigmas) {
|
||||
rsd_.assignNoalias(matrices);
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
|
|
|||
File diff suppressed because it is too large
Load Diff
|
|
@ -13,100 +13,149 @@
|
|||
#include <boost/tuple/tuple.hpp>
|
||||
//#include <boost/serialization/map.hpp>
|
||||
#include <boost/foreach.hpp>
|
||||
#include <boost/lambda/lambda.hpp>
|
||||
#include <boost/bind.hpp>
|
||||
#include <boost/numeric/ublas/matrix_proxy.hpp>
|
||||
#include <boost/pool/pool_alloc.hpp>
|
||||
#include <list>
|
||||
#include <set>
|
||||
#include <vector>
|
||||
#include <map>
|
||||
#include <deque>
|
||||
|
||||
#include <gtsam/inference/Factor.h>
|
||||
#include <gtsam/base/types.h>
|
||||
#include <gtsam/base/Matrix.h>
|
||||
#include <gtsam/base/blockMatrices.h>
|
||||
#include <gtsam/inference/Factor-inl.h>
|
||||
#include <gtsam/inference/inference.h>
|
||||
#include <gtsam/inference/VariableSlots.h>
|
||||
#include <gtsam/linear/VectorConfig.h>
|
||||
#include <gtsam/linear/SharedDiagonal.h>
|
||||
#include <gtsam/linear/GaussianConditional.h>
|
||||
#include <gtsam/linear/GaussianBayesNet.h>
|
||||
#include <gtsam/inference/SymbolMap.h>
|
||||
|
||||
namespace gtsam {
|
||||
|
||||
class Ordering;
|
||||
class GaussianFactorGraph;
|
||||
template<class VariableIndexStorage=VariableIndexStorage_vector> class GaussianVariableIndex;
|
||||
|
||||
/** A map from key to dimension, useful in various contexts */
|
||||
typedef SymbolMap<size_t> Dimensions;
|
||||
/** A map from key to dimension, useful in various contexts */
|
||||
typedef std::map<varid_t, size_t> Dimensions;
|
||||
|
||||
/**
|
||||
* Base Class for a linear factor.
|
||||
* GaussianFactor is non-mutable (all methods const!).
|
||||
* The factor value is exp(-0.5*||Ax-b||^2)
|
||||
*/
|
||||
class GaussianFactor: boost::noncopyable, public Factor<VectorConfig> {
|
||||
class GaussianFactor: public Factor {
|
||||
|
||||
public:
|
||||
|
||||
typedef GaussianConditional Conditional;
|
||||
typedef boost::shared_ptr<GaussianFactor> shared_ptr;
|
||||
typedef SymbolMap<Matrix>::iterator iterator;
|
||||
typedef SymbolMap<Matrix>::const_iterator const_iterator;
|
||||
typedef boost::numeric::ublas::matrix<double, boost::numeric::ublas::column_major> matrix_type;
|
||||
typedef VerticalBlockView<matrix_type> ab_type;
|
||||
|
||||
protected:
|
||||
|
||||
SharedDiagonal model_; // Gaussian noise model with diagonal covariance matrix
|
||||
SymbolMap<Matrix> As_; // linear matrices
|
||||
Vector b_; // right-hand-side
|
||||
std::vector<size_t> firstNonzeroBlocks_;
|
||||
matrix_type matrix_;
|
||||
ab_type Ab_;
|
||||
|
||||
public:
|
||||
|
||||
/* default constructor for I/O */
|
||||
GaussianFactor() {}
|
||||
/** Copy constructor */
|
||||
GaussianFactor(const GaussianFactor& gf);
|
||||
|
||||
/** default constructor for I/O */
|
||||
GaussianFactor();
|
||||
|
||||
/** Construct Null factor */
|
||||
GaussianFactor(const Vector& b_in);
|
||||
|
||||
/** Construct unary factor */
|
||||
GaussianFactor(const Symbol& key1, const Matrix& A1,
|
||||
GaussianFactor(varid_t i1, const Matrix& A1,
|
||||
const Vector& b, const SharedDiagonal& model);
|
||||
|
||||
/** Construct binary factor */
|
||||
GaussianFactor(const Symbol& key1, const Matrix& A1,
|
||||
const Symbol& key2, const Matrix& A2,
|
||||
GaussianFactor(varid_t i1, const Matrix& A1,
|
||||
varid_t i2, const Matrix& A2,
|
||||
const Vector& b, const SharedDiagonal& model);
|
||||
|
||||
/** Construct ternary factor */
|
||||
GaussianFactor(const Symbol& key1, const Matrix& A1, const Symbol& key2,
|
||||
const Matrix& A2, const Symbol& key3, const Matrix& A3,
|
||||
GaussianFactor(varid_t i1, const Matrix& A1, varid_t i2,
|
||||
const Matrix& A2, varid_t i3, const Matrix& A3,
|
||||
const Vector& b, const SharedDiagonal& model);
|
||||
|
||||
/** Construct an n-ary factor */
|
||||
GaussianFactor(const std::vector<std::pair<Symbol, Matrix> > &terms,
|
||||
GaussianFactor(const std::vector<std::pair<varid_t, Matrix> > &terms,
|
||||
const Vector &b, const SharedDiagonal& model);
|
||||
|
||||
GaussianFactor(const std::list<std::pair<Symbol, Matrix> > &terms,
|
||||
GaussianFactor(const std::list<std::pair<varid_t, Matrix> > &terms,
|
||||
const Vector &b, const SharedDiagonal& model);
|
||||
|
||||
/** Construct from Conditional Gaussian */
|
||||
GaussianFactor(const boost::shared_ptr<GaussianConditional>& cg);
|
||||
GaussianFactor(const GaussianConditional& cg);
|
||||
|
||||
/** Constructor that combines a set of factors */
|
||||
GaussianFactor(const std::vector<shared_ptr> & factors);
|
||||
|
||||
// Implementing Testable virtual functions
|
||||
// /** Constructor that combines a set of factors */
|
||||
// GaussianFactor(const std::vector<shared_ptr> & factors);
|
||||
|
||||
// Implementing Testable interface
|
||||
void print(const std::string& s = "") const;
|
||||
bool equals(const Factor<VectorConfig>& lf, double tol = 1e-9) const;
|
||||
|
||||
// Implementing Factor virtual functions
|
||||
bool equals(const GaussianFactor& lf, double tol = 1e-9) const;
|
||||
|
||||
Vector unweighted_error(const VectorConfig& c) const; /** (A*x-b) */
|
||||
Vector error_vector(const VectorConfig& c) const; /** (A*x-b)/sigma */
|
||||
double error(const VectorConfig& c) const; /** 0.5*(A*x-b)'*D*(A*x-b) */
|
||||
std::size_t size() const { return As_.size();}
|
||||
|
||||
/** STL like, return the iterator pointing to the first node */
|
||||
const_iterator const begin() const { return As_.begin();}
|
||||
/** Check if the factor contains no information, i.e. zero rows. This does
|
||||
* not necessarily mean that the factor involves no variables (to check for
|
||||
* involving no variables use keys().empty()).
|
||||
*/
|
||||
bool empty() const { return Ab_.size1() == 0;}
|
||||
|
||||
/** STL like, return the iterator pointing to the last node */
|
||||
const_iterator const end() const { return As_.end(); }
|
||||
/** Get a view of the r.h.s. vector b */
|
||||
ab_type::const_column_type getb() const { return Ab_.column(size(), 0); }
|
||||
ab_type::column_type getb() { return Ab_.column(size(), 0); }
|
||||
|
||||
/** check if empty */
|
||||
bool empty() const { return b_.size() == 0;}
|
||||
/** Get a view of the A matrix for the variable pointed to be the given key iterator */
|
||||
ab_type::const_block_type getA(const_iterator variable) const { return Ab_(variable - keys_.begin()); }
|
||||
ab_type::block_type getA(iterator variable) { return Ab_(variable - keys_.begin()); }
|
||||
|
||||
/** get a copy of b */
|
||||
const Vector& get_b() const { return b_; }
|
||||
/** Return the dimension of the variable pointed to by the given key iterator
|
||||
* todo: Remove this in favor of keeping track of dimensions with variables?
|
||||
*/
|
||||
size_t getDim(const_iterator variable) const { return Ab_(variable - keys_.begin()).size2(); }
|
||||
|
||||
/**
|
||||
* Permutes the GaussianFactor, but for efficiency requires the permutation
|
||||
* to already be inverted. This acts just as a change-of-name for each
|
||||
* variable. The order of the variables within the factor is not changed.
|
||||
*/
|
||||
void permuteWithInverse(const Permutation& inversePermutation);
|
||||
|
||||
/** Named constructor for combining a set of factors with pre-computed set of
|
||||
* variables. */
|
||||
template<class Storage>
|
||||
static shared_ptr Combine(const GaussianFactorGraph& factorGraph,
|
||||
const GaussianVariableIndex<Storage>& variableIndex, const std::vector<size_t>& factors,
|
||||
const std::vector<varid_t>& variables, const std::vector<std::vector<size_t> >& variablePositions);
|
||||
|
||||
/**
|
||||
* Named constructor for combining a set of factors with pre-computed set of
|
||||
* variables.
|
||||
*/
|
||||
static shared_ptr Combine(const GaussianFactorGraph& factors, const VariableSlots& variableSlots);
|
||||
|
||||
protected:
|
||||
|
||||
/** Protected mutable accessor for the r.h.s. b. */
|
||||
|
||||
/** Internal debug check to make sure variables are sorted */
|
||||
void checkSorted() const;
|
||||
|
||||
public:
|
||||
|
||||
/** get a copy of sigmas */
|
||||
const Vector& get_sigmas() const { return model_->sigmas(); }
|
||||
|
|
@ -114,85 +163,17 @@ public:
|
|||
/** get a copy of model */
|
||||
const SharedDiagonal& get_model() const { return model_; }
|
||||
|
||||
/**
|
||||
* get a copy of the A matrix from a specific node
|
||||
* O(log n)
|
||||
*/
|
||||
const Matrix& get_A(const Symbol& key) const {
|
||||
return As_.at(key);
|
||||
}
|
||||
|
||||
/** erase the A associated with the input key */
|
||||
size_t erase_A(const Symbol& key) { return As_.erase(key); }
|
||||
|
||||
/** operator[] syntax for get */
|
||||
inline const Matrix& operator[](const Symbol& name) const {
|
||||
return get_A(name);
|
||||
}
|
||||
|
||||
/** Check if factor involves variable with key */
|
||||
bool involves(const Symbol& key) const {
|
||||
const_iterator it = As_.find(key);
|
||||
return (it != As_.end());
|
||||
}
|
||||
|
||||
/**
|
||||
* return the number of rows from the b vector
|
||||
* @return a integer with the number of rows from the b vector
|
||||
*/
|
||||
int numberOfRows() const { return b_.size();}
|
||||
|
||||
/**
|
||||
* Find all variables
|
||||
* @return The set of all variable keys
|
||||
*/
|
||||
std::list<Symbol> keys() const;
|
||||
|
||||
/**
|
||||
* return the first key
|
||||
* TODO: this function should be removed and the minimum spanning tree code
|
||||
* modified accordingly.
|
||||
* @return The set of all variable keys
|
||||
*/
|
||||
Symbol key1() const { return As_.begin()->first; }
|
||||
|
||||
/**
|
||||
* return the first key
|
||||
* TODO: this function should be removed and the minimum spanning tree code
|
||||
* modified accordingly.
|
||||
* @return The set of all variable keys
|
||||
*/
|
||||
Symbol key2() const {
|
||||
if (As_.size() < 2) throw std::invalid_argument("GaussianFactor: less than 2 keys!");
|
||||
return (++(As_.begin()))->first;
|
||||
}
|
||||
|
||||
/**
|
||||
* Find all variables and their dimensions
|
||||
* @return The set of all variable/dimension pairs
|
||||
*/
|
||||
Dimensions dimensions() const;
|
||||
|
||||
/**
|
||||
* Get the dimension of a particular variable
|
||||
* @param key is the name of the variable
|
||||
* @return the size of the variable
|
||||
*/
|
||||
size_t getDim(const Symbol& key) const;
|
||||
|
||||
/**
|
||||
* Add to separator set if this factor involves key, but don't add key itself
|
||||
* @param key
|
||||
* @param separator set to add to
|
||||
*/
|
||||
void tally_separator(const Symbol& key,
|
||||
std::set<Symbol>& separator) const;
|
||||
size_t numberOfRows() const { return Ab_.size1(); }
|
||||
|
||||
/** Return A*x */
|
||||
Vector operator*(const VectorConfig& x) const;
|
||||
|
||||
/** Return A'*e */
|
||||
VectorConfig operator^(const Vector& e) const;
|
||||
// /** Return A'*e */
|
||||
// VectorConfig operator^(const Vector& e) const;
|
||||
|
||||
/** x += A'*e */
|
||||
void transposeMultiplyAdd(double alpha, const Vector& e, VectorConfig& x) const;
|
||||
|
|
@ -202,7 +183,7 @@ public:
|
|||
* @param ordering of variables needed for matrix column order
|
||||
* @param set weight to true to bake in the weights
|
||||
*/
|
||||
std::pair<Matrix, Vector> matrix(const Ordering& ordering, bool weight = true) const;
|
||||
std::pair<Matrix, Vector> matrix(bool weight = true) const;
|
||||
|
||||
/**
|
||||
* Return (dense) matrix associated with factor
|
||||
|
|
@ -210,7 +191,7 @@ public:
|
|||
* @param ordering of variables needed for matrix column order
|
||||
* @param set weight to use whitening to bake in weights
|
||||
*/
|
||||
Matrix matrix_augmented(const Ordering& ordering, bool weight = true) const;
|
||||
Matrix matrix_augmented(bool weight = true) const;
|
||||
|
||||
/**
|
||||
* Return vectors i, j, and s to generate an m-by-n sparse matrix
|
||||
|
|
@ -225,52 +206,12 @@ public:
|
|||
// MUTABLE functions. FD:on the path to being eradicated
|
||||
/* ************************************************************************* */
|
||||
|
||||
/** insert, copies A */
|
||||
void insert(const Symbol& key, const Matrix& A) {
|
||||
As_.insert(std::make_pair(key, A));
|
||||
}
|
||||
GaussianConditional::shared_ptr eliminateFirst();
|
||||
|
||||
/** set RHS, copies b */
|
||||
void set_b(const Vector& b) {
|
||||
this->b_ = b;
|
||||
}
|
||||
GaussianBayesNet::shared_ptr eliminate(varid_t nFrontals = 1);
|
||||
|
||||
// set A matrices for the linear factor, same as insert ?
|
||||
inline void set_A(const Symbol& key, const Matrix &A) {
|
||||
insert(key, A);
|
||||
}
|
||||
|
||||
/**
|
||||
* Current Implementation: Full QR factorization
|
||||
* eliminate (in place!) one of the variables connected to this factor
|
||||
* @param key the key of the node to be eliminated
|
||||
* @return a new factor and a conditional gaussian on the eliminated variable
|
||||
*/
|
||||
std::pair<boost::shared_ptr<GaussianConditional>, shared_ptr>
|
||||
eliminate(const Symbol& key) const;
|
||||
|
||||
/**
|
||||
* Performs elimination given an augmented matrix
|
||||
* @param
|
||||
*/
|
||||
static std::pair<GaussianBayesNet, shared_ptr>
|
||||
eliminateMatrix(Matrix& Ab, SharedDiagonal model,
|
||||
const Ordering& frontal, const Ordering& separator,
|
||||
const Dimensions& dimensions);
|
||||
|
||||
static std::pair<GaussianConditional::shared_ptr, shared_ptr>
|
||||
eliminateMatrix(Matrix& Ab, SharedDiagonal model,
|
||||
const Symbol& frontal, const Ordering& separator,
|
||||
const Dimensions& dimensions);
|
||||
|
||||
|
||||
/**
|
||||
* Take the factor f, and append to current matrices. Not very general.
|
||||
* @param f linear factor graph
|
||||
* @param m final number of rows of f, needs to be known in advance
|
||||
* @param pos where to insert in the m-sized matrices
|
||||
*/
|
||||
void append_factor(GaussianFactor::shared_ptr f, size_t m, size_t pos);
|
||||
friend class GaussianFactorGraph;
|
||||
friend class Inference;
|
||||
|
||||
}; // GaussianFactor
|
||||
|
||||
|
|
|
|||
|
|
@ -9,14 +9,13 @@
|
|||
#include <boost/tuple/tuple.hpp>
|
||||
#include <boost/numeric/ublas/lu.hpp>
|
||||
#include <boost/numeric/ublas/io.hpp>
|
||||
#include <boost/assign/std/list.hpp> // for operator += in Ordering
|
||||
|
||||
#include <gtsam/linear/GaussianFactorGraph.h>
|
||||
#include <gtsam/linear/GaussianFactorSet.h>
|
||||
#include <gtsam/inference/FactorGraph-inl.h>
|
||||
#include <gtsam/inference/inference-inl.h>
|
||||
#include <gtsam/linear/iterative.h>
|
||||
#include <gtsam/linear/GaussianJunctionTree.h>
|
||||
//#include <gtsam/linear/GaussianJunctionTree.h>
|
||||
|
||||
using namespace std;
|
||||
using namespace gtsam;
|
||||
|
|
@ -35,6 +34,22 @@ GaussianFactorGraph::GaussianFactorGraph(const GaussianBayesNet& CBN) :
|
|||
FactorGraph<GaussianFactor> (CBN) {
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
std::set<varid_t, std::less<varid_t>, boost::fast_pool_allocator<varid_t> >
|
||||
GaussianFactorGraph::keys() const {
|
||||
std::set<varid_t, std::less<varid_t>, boost::fast_pool_allocator<varid_t> > keys;
|
||||
BOOST_FOREACH(const sharedFactor& factor, *this) {
|
||||
if(factor) keys.insert(factor->begin(), factor->end()); }
|
||||
return keys;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
void GaussianFactorGraph::permuteWithInverse(const Permutation& inversePermutation) {
|
||||
BOOST_FOREACH(const sharedFactor& factor, factors_) {
|
||||
factor->permuteWithInverse(inversePermutation);
|
||||
}
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
double GaussianFactorGraph::error(const VectorConfig& x) const {
|
||||
double total_error = 0.;
|
||||
|
|
@ -79,17 +94,17 @@ void GaussianFactorGraph::multiplyInPlace(const VectorConfig& x,
|
|||
}
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
VectorConfig GaussianFactorGraph::operator^(const Errors& e) const {
|
||||
VectorConfig x;
|
||||
// For each factor add the gradient contribution
|
||||
Errors::const_iterator it = e.begin();
|
||||
BOOST_FOREACH(const sharedFactor& Ai,factors_) {
|
||||
VectorConfig xi = (*Ai)^(*(it++));
|
||||
x.insertAdd(xi);
|
||||
}
|
||||
return x;
|
||||
}
|
||||
///* ************************************************************************* */
|
||||
//VectorConfig GaussianFactorGraph::operator^(const Errors& e) const {
|
||||
// VectorConfig x;
|
||||
// // For each factor add the gradient contribution
|
||||
// Errors::const_iterator it = e.begin();
|
||||
// BOOST_FOREACH(const sharedFactor& Ai,factors_) {
|
||||
// VectorConfig xi = (*Ai)^(*(it++));
|
||||
// x.insertAdd(xi);
|
||||
// }
|
||||
// return x;
|
||||
//}
|
||||
|
||||
/* ************************************************************************* */
|
||||
// x += alpha*A'*e
|
||||
|
|
@ -101,227 +116,206 @@ void GaussianFactorGraph::transposeMultiplyAdd(double alpha, const Errors& e,
|
|||
Ai->transposeMultiplyAdd(alpha,*(ei++),x);
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
VectorConfig GaussianFactorGraph::gradient(const VectorConfig& x) const {
|
||||
// It is crucial for performance to make a zero-valued clone of x
|
||||
VectorConfig g = VectorConfig::zero(x);
|
||||
transposeMultiplyAdd(1.0, errors(x), g);
|
||||
return g;
|
||||
}
|
||||
///* ************************************************************************* */
|
||||
//VectorConfig GaussianFactorGraph::gradient(const VectorConfig& x) const {
|
||||
// // It is crucial for performance to make a zero-valued clone of x
|
||||
// VectorConfig g = VectorConfig::zero(x);
|
||||
// transposeMultiplyAdd(1.0, errors(x), g);
|
||||
// return g;
|
||||
//}
|
||||
|
||||
/* ************************************************************************* */
|
||||
set<Symbol> GaussianFactorGraph::find_separator(const Symbol& key) const
|
||||
{
|
||||
set<Symbol> separator;
|
||||
BOOST_FOREACH(const sharedFactor& factor,factors_)
|
||||
factor->tally_separator(key,separator);
|
||||
///* ************************************************************************* */
|
||||
//set<varid_t> GaussianFactorGraph::find_separator(varid_t key) const
|
||||
//{
|
||||
// set<varid_t> separator;
|
||||
// BOOST_FOREACH(const sharedFactor& factor,factors_)
|
||||
// factor->tally_separator(key,separator);
|
||||
//
|
||||
// return separator;
|
||||
//}
|
||||
|
||||
return separator;
|
||||
}
|
||||
///* ************************************************************************* */
|
||||
//template <class Factors>
|
||||
//std::pair<Matrix, SharedDiagonal> combineFactorsAndCreateMatrix(
|
||||
// const Factors& factors,
|
||||
// const Ordering& order, const Dimensions& dimensions) {
|
||||
// // find the size of Ab
|
||||
// size_t m = 0, n = 1;
|
||||
//
|
||||
// // number of rows
|
||||
// BOOST_FOREACH(GaussianFactor::shared_ptr factor, factors) {
|
||||
// m += factor->numberOfRows();
|
||||
// }
|
||||
//
|
||||
// // find the number of columns
|
||||
// BOOST_FOREACH(varid_t key, order) {
|
||||
// n += dimensions.at(key);
|
||||
// }
|
||||
//
|
||||
// // Allocate the new matrix
|
||||
// Matrix Ab = zeros(m,n);
|
||||
//
|
||||
// // Allocate a sigmas vector to make into a full noisemodel
|
||||
// Vector sigmas = ones(m);
|
||||
//
|
||||
// // copy data over
|
||||
// size_t cur_m = 0;
|
||||
// bool constrained = false;
|
||||
// bool unit = true;
|
||||
// BOOST_FOREACH(GaussianFactor::shared_ptr factor, factors) {
|
||||
// // loop through ordering
|
||||
// size_t cur_n = 0;
|
||||
// BOOST_FOREACH(varid_t key, order) {
|
||||
// // copy over matrix if it exists
|
||||
// if (factor->involves(key)) {
|
||||
// insertSub(Ab, factor->get_A(key), cur_m, cur_n);
|
||||
// }
|
||||
// // move onto next element
|
||||
// cur_n += dimensions.at(key);
|
||||
// }
|
||||
// // copy over the RHS
|
||||
// insertColumn(Ab, factor->get_b(), cur_m, n-1);
|
||||
//
|
||||
// // check if the model is unit already
|
||||
// if (!boost::shared_dynamic_cast<noiseModel::Unit>(factor->get_model())) {
|
||||
// unit = false;
|
||||
// const Vector& subsigmas = factor->get_model()->sigmas();
|
||||
// subInsert(sigmas, subsigmas, cur_m);
|
||||
//
|
||||
// // check for constraint
|
||||
// if (boost::shared_dynamic_cast<noiseModel::Constrained>(factor->get_model()))
|
||||
// constrained = true;
|
||||
// }
|
||||
//
|
||||
// // move to next row
|
||||
// cur_m += factor->numberOfRows();
|
||||
// }
|
||||
//
|
||||
// // combine the noisemodels
|
||||
// SharedDiagonal model;
|
||||
// if (unit) {
|
||||
// model = noiseModel::Unit::Create(m);
|
||||
// } else if (constrained) {
|
||||
// model = noiseModel::Constrained::MixedSigmas(sigmas);
|
||||
// } else {
|
||||
// model = noiseModel::Diagonal::Sigmas(sigmas);
|
||||
// }
|
||||
// return make_pair(Ab, model);
|
||||
//}
|
||||
|
||||
/* ************************************************************************* */
|
||||
GaussianConditional::shared_ptr
|
||||
GaussianFactorGraph::eliminateOne(const Symbol& key, bool enableJoinFactor) {
|
||||
if (enableJoinFactor)
|
||||
return gtsam::eliminateOne<GaussianFactor,GaussianConditional>(*this, key);
|
||||
else
|
||||
return eliminateOneMatrixJoin(key);
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
template <class Factors>
|
||||
std::pair<Matrix, SharedDiagonal> combineFactorsAndCreateMatrix(
|
||||
const Factors& factors,
|
||||
const Ordering& order, const Dimensions& dimensions) {
|
||||
// find the size of Ab
|
||||
size_t m = 0, n = 1;
|
||||
|
||||
// number of rows
|
||||
BOOST_FOREACH(GaussianFactor::shared_ptr factor, factors) {
|
||||
m += factor->numberOfRows();
|
||||
}
|
||||
|
||||
// find the number of columns
|
||||
BOOST_FOREACH(const Symbol& key, order) {
|
||||
n += dimensions.at(key);
|
||||
}
|
||||
|
||||
// Allocate the new matrix
|
||||
Matrix Ab = zeros(m,n);
|
||||
|
||||
// Allocate a sigmas vector to make into a full noisemodel
|
||||
Vector sigmas = ones(m);
|
||||
|
||||
// copy data over
|
||||
size_t cur_m = 0;
|
||||
bool constrained = false;
|
||||
bool unit = true;
|
||||
BOOST_FOREACH(GaussianFactor::shared_ptr factor, factors) {
|
||||
// loop through ordering
|
||||
size_t cur_n = 0;
|
||||
BOOST_FOREACH(const Symbol& key, order) {
|
||||
// copy over matrix if it exists
|
||||
if (factor->involves(key)) {
|
||||
insertSub(Ab, factor->get_A(key), cur_m, cur_n);
|
||||
}
|
||||
// move onto next element
|
||||
cur_n += dimensions.at(key);
|
||||
}
|
||||
// copy over the RHS
|
||||
insertColumn(Ab, factor->get_b(), cur_m, n-1);
|
||||
|
||||
// check if the model is unit already
|
||||
if (!boost::shared_dynamic_cast<noiseModel::Unit>(factor->get_model())) {
|
||||
unit = false;
|
||||
const Vector& subsigmas = factor->get_model()->sigmas();
|
||||
subInsert(sigmas, subsigmas, cur_m);
|
||||
|
||||
// check for constraint
|
||||
if (boost::shared_dynamic_cast<noiseModel::Constrained>(factor->get_model()))
|
||||
constrained = true;
|
||||
}
|
||||
|
||||
// move to next row
|
||||
cur_m += factor->numberOfRows();
|
||||
}
|
||||
|
||||
// combine the noisemodels
|
||||
SharedDiagonal model;
|
||||
if (unit) {
|
||||
model = noiseModel::Unit::Create(m);
|
||||
} else if (constrained) {
|
||||
model = noiseModel::Constrained::MixedSigmas(sigmas);
|
||||
} else {
|
||||
model = noiseModel::Diagonal::Sigmas(sigmas);
|
||||
}
|
||||
return make_pair(Ab, model);
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
GaussianConditional::shared_ptr
|
||||
GaussianFactorGraph::eliminateOneMatrixJoin(const Symbol& key) {
|
||||
// find and remove all factors connected to key
|
||||
vector<GaussianFactor::shared_ptr> factors = findAndRemoveFactors(key);
|
||||
|
||||
// Collect all dimensions as well as the set of separator keys
|
||||
set<Symbol> separator;
|
||||
Dimensions dimensions;
|
||||
BOOST_FOREACH(const sharedFactor& factor, factors) {
|
||||
Dimensions factor_dim = factor->dimensions();
|
||||
dimensions.insert(factor_dim.begin(), factor_dim.end());
|
||||
BOOST_FOREACH(const Symbol& k, factor->keys())
|
||||
if (!k.equals(key))
|
||||
separator.insert(k);
|
||||
}
|
||||
|
||||
// add the keys to the rendering
|
||||
Ordering render; render += key;
|
||||
BOOST_FOREACH(const Symbol& k, separator)
|
||||
if (k != key) render += k;
|
||||
|
||||
// combine the factors to get a noisemodel and a combined matrix
|
||||
Matrix Ab; SharedDiagonal model;
|
||||
|
||||
boost::tie(Ab, model) = combineFactorsAndCreateMatrix(factors,render,dimensions);
|
||||
|
||||
// eliminate that joint factor
|
||||
GaussianFactor::shared_ptr factor;
|
||||
GaussianConditional::shared_ptr conditional;
|
||||
render.pop_front();
|
||||
boost::tie(conditional, factor) =
|
||||
GaussianFactor::eliminateMatrix(Ab, model, key, render, dimensions);
|
||||
|
||||
// add new factor on separator back into the graph
|
||||
if (!factor->empty()) push_back(factor);
|
||||
|
||||
// return the conditional Gaussian
|
||||
return conditional;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
GaussianBayesNet
|
||||
GaussianFactorGraph::eliminate(const Ordering& ordering, bool enableJoinFactor)
|
||||
{
|
||||
GaussianBayesNet chordalBayesNet; // empty
|
||||
BOOST_FOREACH(const Symbol& key, ordering) {
|
||||
GaussianConditional::shared_ptr cg = eliminateOne(key, enableJoinFactor);
|
||||
chordalBayesNet.push_back(cg);
|
||||
}
|
||||
return chordalBayesNet;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
GaussianBayesNet
|
||||
GaussianFactorGraph::eliminateFrontals(const Ordering& frontals)
|
||||
{
|
||||
// find the factors that contain at least one of the frontal variables
|
||||
Dimensions dimensions = this->dimensions();
|
||||
|
||||
// collect separator
|
||||
Ordering separator;
|
||||
set<Symbol> frontal_set(frontals.begin(), frontals.end());
|
||||
BOOST_FOREACH(const Symbol& key, this->keys()) {
|
||||
if (frontal_set.find(key) == frontal_set.end())
|
||||
separator.push_back(key);
|
||||
}
|
||||
|
||||
Matrix Ab; SharedDiagonal model;
|
||||
Ordering ord = frontals;
|
||||
ord.insert(ord.end(), separator.begin(), separator.end());
|
||||
boost::tie(Ab, model) = combineFactorsAndCreateMatrix(*this, ord, dimensions);
|
||||
|
||||
// eliminate that joint factor
|
||||
GaussianFactor::shared_ptr factor;
|
||||
GaussianBayesNet bn;
|
||||
boost::tie(bn, factor) =
|
||||
GaussianFactor::eliminateMatrix(Ab, model, frontals, separator, dimensions);
|
||||
|
||||
// add new factor on separator back into the graph
|
||||
*this = GaussianFactorGraph();
|
||||
if (!factor->empty()) push_back(factor);
|
||||
|
||||
return bn;
|
||||
}
|
||||
///* ************************************************************************* */
|
||||
//GaussianConditional::shared_ptr
|
||||
//GaussianFactorGraph::eliminateOneMatrixJoin(varid_t key) {
|
||||
// // find and remove all factors connected to key
|
||||
// vector<GaussianFactor::shared_ptr> factors = findAndRemoveFactors(key);
|
||||
//
|
||||
// // Collect all dimensions as well as the set of separator keys
|
||||
// set<varid_t> separator;
|
||||
// Dimensions dimensions;
|
||||
// BOOST_FOREACH(const sharedFactor& factor, factors) {
|
||||
// Dimensions factor_dim = factor->dimensions();
|
||||
// dimensions.insert(factor_dim.begin(), factor_dim.end());
|
||||
// BOOST_FOREACH(varid_t k, factor->keys())
|
||||
// if (!k == key)
|
||||
// separator.insert(k);
|
||||
// }
|
||||
//
|
||||
// // add the keys to the rendering
|
||||
// Ordering render; render += key;
|
||||
// BOOST_FOREACH(varid_t k, separator)
|
||||
// if (k != key) render += k;
|
||||
//
|
||||
// // combine the factors to get a noisemodel and a combined matrix
|
||||
// Matrix Ab; SharedDiagonal model;
|
||||
//
|
||||
// boost::tie(Ab, model) = combineFactorsAndCreateMatrix(factors,render,dimensions);
|
||||
//
|
||||
// // eliminate that joint factor
|
||||
// GaussianFactor::shared_ptr factor;
|
||||
// GaussianConditional::shared_ptr conditional;
|
||||
// render.pop_front();
|
||||
// boost::tie(conditional, factor) =
|
||||
// GaussianFactor::eliminateMatrix(Ab, model, key, render, dimensions);
|
||||
//
|
||||
// // add new factor on separator back into the graph
|
||||
// if (!factor->empty()) push_back(factor);
|
||||
//
|
||||
// // return the conditional Gaussian
|
||||
// return conditional;
|
||||
//}
|
||||
//
|
||||
///* ************************************************************************* */
|
||||
//GaussianBayesNet
|
||||
//GaussianFactorGraph::eliminateFrontals(const Ordering& frontals)
|
||||
//{
|
||||
// // find the factors that contain at least one of the frontal variables
|
||||
// Dimensions dimensions = this->dimensions();
|
||||
//
|
||||
// // collect separator
|
||||
// Ordering separator;
|
||||
// set<varid_t> frontal_set(frontals.begin(), frontals.end());
|
||||
// BOOST_FOREACH(varid_t key, this->keys()) {
|
||||
// if (frontal_set.find(key) == frontal_set.end())
|
||||
// separator.push_back(key);
|
||||
// }
|
||||
//
|
||||
// Matrix Ab; SharedDiagonal model;
|
||||
// Ordering ord = frontals;
|
||||
// ord.insert(ord.end(), separator.begin(), separator.end());
|
||||
// boost::tie(Ab, model) = combineFactorsAndCreateMatrix(*this, ord, dimensions);
|
||||
//
|
||||
// // eliminate that joint factor
|
||||
// GaussianFactor::shared_ptr factor;
|
||||
// GaussianBayesNet bn;
|
||||
// boost::tie(bn, factor) =
|
||||
// GaussianFactor::eliminateMatrix(Ab, model, frontals, separator, dimensions);
|
||||
//
|
||||
// // add new factor on separator back into the graph
|
||||
// *this = GaussianFactorGraph();
|
||||
// if (!factor->empty()) push_back(factor);
|
||||
//
|
||||
// return bn;
|
||||
//}
|
||||
|
||||
|
||||
/* ************************************************************************* */
|
||||
VectorConfig GaussianFactorGraph::optimize(const Ordering& ordering, bool old)
|
||||
{
|
||||
// eliminate all nodes in the given ordering -> chordal Bayes net
|
||||
GaussianBayesNet chordalBayesNet = eliminate(ordering, old);
|
||||
///* ************************************************************************* */
|
||||
//VectorConfig GaussianFactorGraph::optimize(const Ordering& ordering, bool old)
|
||||
//{
|
||||
// // eliminate all nodes in the given ordering -> chordal Bayes net
|
||||
// GaussianBayesNet chordalBayesNet = eliminate(ordering, old);
|
||||
//
|
||||
// // calculate new configuration (using backsubstitution)
|
||||
// VectorConfig delta = ::optimize(chordalBayesNet);
|
||||
// return delta;
|
||||
//}
|
||||
//
|
||||
///* ************************************************************************* */
|
||||
//VectorConfig GaussianFactorGraph::optimizeMultiFrontals(const Ordering& ordering)
|
||||
//{
|
||||
// GaussianJunctionTree junctionTree(*this, ordering);
|
||||
//
|
||||
// // calculate new configuration (using backsubstitution)
|
||||
// VectorConfig delta = junctionTree.optimize();
|
||||
// return delta;
|
||||
//}
|
||||
|
||||
// calculate new configuration (using backsubstitution)
|
||||
VectorConfig delta = ::optimize(chordalBayesNet);
|
||||
return delta;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
VectorConfig GaussianFactorGraph::optimizeMultiFrontals(const Ordering& ordering)
|
||||
{
|
||||
GaussianJunctionTree junctionTree(*this, ordering);
|
||||
|
||||
// calculate new configuration (using backsubstitution)
|
||||
VectorConfig delta = junctionTree.optimize();
|
||||
return delta;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
boost::shared_ptr<GaussianBayesNet>
|
||||
GaussianFactorGraph::eliminate_(const Ordering& ordering)
|
||||
{
|
||||
boost::shared_ptr<GaussianBayesNet> chordalBayesNet(new GaussianBayesNet); // empty
|
||||
BOOST_FOREACH(const Symbol& key, ordering) {
|
||||
GaussianConditional::shared_ptr cg = eliminateOne(key);
|
||||
chordalBayesNet->push_back(cg);
|
||||
}
|
||||
return chordalBayesNet;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
boost::shared_ptr<VectorConfig>
|
||||
GaussianFactorGraph::optimize_(const Ordering& ordering) {
|
||||
return boost::shared_ptr<VectorConfig>(new VectorConfig(optimize(ordering)));
|
||||
}
|
||||
///* ************************************************************************* */
|
||||
//boost::shared_ptr<GaussianBayesNet>
|
||||
//GaussianFactorGraph::eliminate_(const Ordering& ordering)
|
||||
//{
|
||||
// boost::shared_ptr<GaussianBayesNet> chordalBayesNet(new GaussianBayesNet); // empty
|
||||
// BOOST_FOREACH(varid_t key, ordering) {
|
||||
// GaussianConditional::shared_ptr cg = eliminateOne(key);
|
||||
// chordalBayesNet->push_back(cg);
|
||||
// }
|
||||
// return chordalBayesNet;
|
||||
//}
|
||||
//
|
||||
///* ************************************************************************* */
|
||||
//boost::shared_ptr<VectorConfig>
|
||||
//GaussianFactorGraph::optimize_(const Ordering& ordering) {
|
||||
// return boost::shared_ptr<VectorConfig>(new VectorConfig(optimize(ordering)));
|
||||
//}
|
||||
|
||||
/* ************************************************************************* */
|
||||
void GaussianFactorGraph::combine(const GaussianFactorGraph &lfg){
|
||||
|
|
@ -345,209 +339,211 @@ GaussianFactorGraph GaussianFactorGraph::combine2(const GaussianFactorGraph& lfg
|
|||
return fg;
|
||||
}
|
||||
|
||||
//
|
||||
///* ************************************************************************* */
|
||||
//Dimensions GaussianFactorGraph::dimensions() const {
|
||||
// Dimensions result;
|
||||
// BOOST_FOREACH(const sharedFactor& factor,factors_) {
|
||||
// Dimensions vs = factor->dimensions();
|
||||
// varid_t key; size_t dim;
|
||||
// FOREACH_PAIR(key,dim,vs) result.insert(make_pair(key,dim));
|
||||
// }
|
||||
// return result;
|
||||
//}
|
||||
|
||||
/* ************************************************************************* */
|
||||
Dimensions GaussianFactorGraph::dimensions() const {
|
||||
Dimensions result;
|
||||
BOOST_FOREACH(const sharedFactor& factor,factors_) {
|
||||
Dimensions vs = factor->dimensions();
|
||||
Symbol key; size_t dim;
|
||||
FOREACH_PAIR(key,dim,vs) result.insert(make_pair(key,dim));
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
GaussianFactorGraph GaussianFactorGraph::add_priors(double sigma) const {
|
||||
GaussianFactorGraph GaussianFactorGraph::add_priors(double sigma, const GaussianVariableIndex<>& variableIndex) const {
|
||||
|
||||
// start with this factor graph
|
||||
GaussianFactorGraph result = *this;
|
||||
|
||||
// find all variables and their dimensions
|
||||
Dimensions vs = dimensions();
|
||||
|
||||
// for each of the variables, add a prior
|
||||
Symbol key; size_t dim;
|
||||
FOREACH_PAIR(key,dim,vs) {
|
||||
|
||||
for(varid_t var=0; var<variableIndex.size(); ++var) {
|
||||
const GaussianVariableIndex<>::mapped_factor_type& factor_pos(variableIndex[var].front());
|
||||
const GaussianFactor& factor(*operator[](factor_pos.factorIndex));
|
||||
size_t dim = factor.getDim(factor.begin() + factor_pos.variablePosition);
|
||||
Matrix A = eye(dim);
|
||||
Vector b = zero(dim);
|
||||
SharedDiagonal model = noiseModel::Isotropic::Sigma(dim,sigma);
|
||||
sharedFactor prior(new GaussianFactor(key,A,b, model));
|
||||
sharedFactor prior(new GaussianFactor(var,A,b, model));
|
||||
result.push_back(prior);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
Errors GaussianFactorGraph::rhs() const {
|
||||
Errors e;
|
||||
BOOST_FOREACH(const sharedFactor& factor,factors_)
|
||||
e.push_back(ediv(factor->get_b(),factor->get_sigmas()));
|
||||
return e;
|
||||
}
|
||||
///* ************************************************************************* */
|
||||
//Errors GaussianFactorGraph::rhs() const {
|
||||
// Errors e;
|
||||
// BOOST_FOREACH(const sharedFactor& factor,factors_)
|
||||
// e.push_back(ediv(factor->get_b(),factor->get_sigmas()));
|
||||
// return e;
|
||||
//}
|
||||
//
|
||||
///* ************************************************************************* */
|
||||
//Vector GaussianFactorGraph::rhsVector() const {
|
||||
// Errors e = rhs();
|
||||
// return concatVectors(e);
|
||||
//}
|
||||
//
|
||||
///* ************************************************************************* */
|
||||
//pair<Matrix,Vector> GaussianFactorGraph::matrix(const Ordering& ordering) const {
|
||||
//
|
||||
// // get all factors
|
||||
// GaussianFactorSet found;
|
||||
// BOOST_FOREACH(const sharedFactor& factor,factors_)
|
||||
// found.push_back(factor);
|
||||
//
|
||||
// // combine them
|
||||
// GaussianFactor lf(found);
|
||||
//
|
||||
// // Return Matrix and Vector
|
||||
// return lf.matrix(ordering);
|
||||
//}
|
||||
|
||||
/* ************************************************************************* */
|
||||
Vector GaussianFactorGraph::rhsVector() const {
|
||||
Errors e = rhs();
|
||||
return concatVectors(e);
|
||||
}
|
||||
///* ************************************************************************* */
|
||||
//VectorConfig GaussianFactorGraph::assembleConfig(const Vector& vs, const Ordering& ordering) const {
|
||||
// Dimensions dims = dimensions();
|
||||
// VectorConfig config;
|
||||
// Vector::const_iterator itSrc = vs.begin();
|
||||
// Vector::iterator itDst;
|
||||
// BOOST_FOREACH(varid_t key, ordering){
|
||||
// int dim = dims.find(key)->second;
|
||||
// Vector v(dim);
|
||||
// for (itDst=v.begin(); itDst!=v.end(); itDst++, itSrc++)
|
||||
// *itDst = *itSrc;
|
||||
// config.insert(key, v);
|
||||
// }
|
||||
// if (itSrc != vs.end())
|
||||
// throw runtime_error("assembleConfig: input vector and ordering are not compatible with the graph");
|
||||
// return config;
|
||||
//}
|
||||
//
|
||||
///* ************************************************************************* */
|
||||
//pair<Dimensions, size_t> GaussianFactorGraph::columnIndices_(const Ordering& ordering) const {
|
||||
//
|
||||
// // get the dimensions for all variables
|
||||
// Dimensions variableSet = dimensions();
|
||||
//
|
||||
// // Find the starting index and dimensions for all variables given the order
|
||||
// size_t j = 1;
|
||||
// Dimensions result;
|
||||
// BOOST_FOREACH(varid_t key, ordering) {
|
||||
// // associate key with first column index
|
||||
// result.insert(make_pair(key,j));
|
||||
// // find dimension for this key
|
||||
// Dimensions::const_iterator it = variableSet.find(key);
|
||||
// if (it==variableSet.end()) // key not found, now what ?
|
||||
// throw invalid_argument("ColumnIndices: this ordering contains keys not in the graph");
|
||||
// // advance column index to next block by adding dim(key)
|
||||
// j += it->second;
|
||||
// }
|
||||
//
|
||||
// return make_pair(result, j-1);
|
||||
//}
|
||||
//
|
||||
///* ************************************************************************* */
|
||||
//Dimensions GaussianFactorGraph::columnIndices(const Ordering& ordering) const {
|
||||
// return columnIndices_(ordering).first;
|
||||
//}
|
||||
//
|
||||
///* ************************************************************************* */
|
||||
//pair<size_t, size_t> GaussianFactorGraph::sizeOfA() const {
|
||||
// size_t m = 0, n = 0;
|
||||
// Dimensions variableSet = dimensions();
|
||||
// BOOST_FOREACH(const Dimensions::value_type value, variableSet)
|
||||
// n += value.second;
|
||||
// BOOST_FOREACH(const sharedFactor& factor,factors_)
|
||||
// m += factor->numberOfRows();
|
||||
// return make_pair(m, n);
|
||||
//}
|
||||
|
||||
/* ************************************************************************* */
|
||||
pair<Matrix,Vector> GaussianFactorGraph::matrix(const Ordering& ordering) const {
|
||||
///* ************************************************************************* */
|
||||
//Matrix GaussianFactorGraph::sparse(const Ordering& ordering) const {
|
||||
//
|
||||
// // get the starting column indices for all variables
|
||||
// Dimensions indices = columnIndices(ordering);
|
||||
//
|
||||
// return sparse(indices);
|
||||
//}
|
||||
//
|
||||
///* ************************************************************************* */
|
||||
//Matrix GaussianFactorGraph::sparse(const Dimensions& indices) const {
|
||||
//
|
||||
// // return values
|
||||
// list<int> I,J;
|
||||
// list<double> S;
|
||||
//
|
||||
// // Collect the I,J,S lists for all factors
|
||||
// int row_index = 0;
|
||||
// BOOST_FOREACH(const sharedFactor& factor,factors_) {
|
||||
//
|
||||
// // get sparse lists for the factor
|
||||
// list<int> i1,j1;
|
||||
// list<double> s1;
|
||||
// boost::tie(i1,j1,s1) = factor->sparse(indices);
|
||||
//
|
||||
// // add row_start to every row index
|
||||
// transform(i1.begin(), i1.end(), i1.begin(), bind2nd(plus<int>(), row_index));
|
||||
//
|
||||
// // splice lists from factor to the end of the global lists
|
||||
// I.splice(I.end(), i1);
|
||||
// J.splice(J.end(), j1);
|
||||
// S.splice(S.end(), s1);
|
||||
//
|
||||
// // advance row start
|
||||
// row_index += factor->numberOfRows();
|
||||
// }
|
||||
//
|
||||
// // Convert them to vectors for MATLAB
|
||||
// // TODO: just create a sparse matrix class already
|
||||
// size_t nzmax = S.size();
|
||||
// Matrix ijs(3,nzmax);
|
||||
// copy(I.begin(),I.end(),ijs.begin2());
|
||||
// copy(J.begin(),J.end(),ijs.begin2()+nzmax);
|
||||
// copy(S.begin(),S.end(),ijs.begin2()+2*nzmax);
|
||||
//
|
||||
// // return the result
|
||||
// return ijs;
|
||||
//}
|
||||
|
||||
// get all factors
|
||||
GaussianFactorSet found;
|
||||
BOOST_FOREACH(const sharedFactor& factor,factors_)
|
||||
found.push_back(factor);
|
||||
///* ************************************************************************* */
|
||||
//VectorConfig GaussianFactorGraph::steepestDescent(const VectorConfig& x0,
|
||||
// bool verbose, double epsilon, size_t maxIterations) const {
|
||||
// return gtsam::steepestDescent(*this, x0, verbose, epsilon, maxIterations);
|
||||
//}
|
||||
//
|
||||
///* ************************************************************************* */
|
||||
//boost::shared_ptr<VectorConfig> GaussianFactorGraph::steepestDescent_(
|
||||
// const VectorConfig& x0, bool verbose, double epsilon, size_t maxIterations) const {
|
||||
// return boost::shared_ptr<VectorConfig>(new VectorConfig(
|
||||
// gtsam::conjugateGradientDescent(*this, x0, verbose, epsilon,
|
||||
// maxIterations)));
|
||||
//}
|
||||
//
|
||||
///* ************************************************************************* */
|
||||
//VectorConfig GaussianFactorGraph::conjugateGradientDescent(
|
||||
// const VectorConfig& x0, bool verbose, double epsilon, size_t maxIterations) const {
|
||||
// return gtsam::conjugateGradientDescent(*this, x0, verbose, epsilon,
|
||||
// maxIterations);
|
||||
//}
|
||||
//
|
||||
///* ************************************************************************* */
|
||||
//boost::shared_ptr<VectorConfig> GaussianFactorGraph::conjugateGradientDescent_(
|
||||
// const VectorConfig& x0, bool verbose, double epsilon, size_t maxIterations) const {
|
||||
// return boost::shared_ptr<VectorConfig>(new VectorConfig(
|
||||
// gtsam::conjugateGradientDescent(*this, x0, verbose, epsilon,
|
||||
// maxIterations)));
|
||||
//}
|
||||
|
||||
// combine them
|
||||
GaussianFactor lf(found);
|
||||
|
||||
// Return Matrix and Vector
|
||||
return lf.matrix(ordering);
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
VectorConfig GaussianFactorGraph::assembleConfig(const Vector& vs, const Ordering& ordering) const {
|
||||
Dimensions dims = dimensions();
|
||||
VectorConfig config;
|
||||
Vector::const_iterator itSrc = vs.begin();
|
||||
Vector::iterator itDst;
|
||||
BOOST_FOREACH(const Symbol& key, ordering){
|
||||
int dim = dims.find(key)->second;
|
||||
Vector v(dim);
|
||||
for (itDst=v.begin(); itDst!=v.end(); itDst++, itSrc++)
|
||||
*itDst = *itSrc;
|
||||
config.insert(key, v);
|
||||
}
|
||||
if (itSrc != vs.end())
|
||||
throw runtime_error("assembleConfig: input vector and ordering are not compatible with the graph");
|
||||
return config;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
pair<Dimensions, size_t> GaussianFactorGraph::columnIndices_(const Ordering& ordering) const {
|
||||
|
||||
// get the dimensions for all variables
|
||||
Dimensions variableSet = dimensions();
|
||||
|
||||
// Find the starting index and dimensions for all variables given the order
|
||||
size_t j = 1;
|
||||
Dimensions result;
|
||||
BOOST_FOREACH(const Symbol& key, ordering) {
|
||||
// associate key with first column index
|
||||
result.insert(make_pair(key,j));
|
||||
// find dimension for this key
|
||||
Dimensions::const_iterator it = variableSet.find(key);
|
||||
if (it==variableSet.end()) // key not found, now what ?
|
||||
throw invalid_argument("ColumnIndices: this ordering contains keys not in the graph");
|
||||
// advance column index to next block by adding dim(key)
|
||||
j += it->second;
|
||||
}
|
||||
|
||||
return make_pair(result, j-1);
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
Dimensions GaussianFactorGraph::columnIndices(const Ordering& ordering) const {
|
||||
return columnIndices_(ordering).first;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
pair<size_t, size_t> GaussianFactorGraph::sizeOfA() const {
|
||||
size_t m = 0, n = 0;
|
||||
Dimensions variableSet = dimensions();
|
||||
BOOST_FOREACH(const Dimensions::value_type value, variableSet)
|
||||
n += value.second;
|
||||
BOOST_FOREACH(const sharedFactor& factor,factors_)
|
||||
m += factor->numberOfRows();
|
||||
return make_pair(m, n);
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
Matrix GaussianFactorGraph::sparse(const Ordering& ordering) const {
|
||||
|
||||
// get the starting column indices for all variables
|
||||
Dimensions indices = columnIndices(ordering);
|
||||
|
||||
return sparse(indices);
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
Matrix GaussianFactorGraph::sparse(const Dimensions& indices) const {
|
||||
|
||||
// return values
|
||||
list<int> I,J;
|
||||
list<double> S;
|
||||
|
||||
// Collect the I,J,S lists for all factors
|
||||
int row_index = 0;
|
||||
BOOST_FOREACH(const sharedFactor& factor,factors_) {
|
||||
|
||||
// get sparse lists for the factor
|
||||
list<int> i1,j1;
|
||||
list<double> s1;
|
||||
boost::tie(i1,j1,s1) = factor->sparse(indices);
|
||||
|
||||
// add row_start to every row index
|
||||
transform(i1.begin(), i1.end(), i1.begin(), bind2nd(plus<int>(), row_index));
|
||||
|
||||
// splice lists from factor to the end of the global lists
|
||||
I.splice(I.end(), i1);
|
||||
J.splice(J.end(), j1);
|
||||
S.splice(S.end(), s1);
|
||||
|
||||
// advance row start
|
||||
row_index += factor->numberOfRows();
|
||||
}
|
||||
|
||||
// Convert them to vectors for MATLAB
|
||||
// TODO: just create a sparse matrix class already
|
||||
size_t nzmax = S.size();
|
||||
Matrix ijs(3,nzmax);
|
||||
copy(I.begin(),I.end(),ijs.begin2());
|
||||
copy(J.begin(),J.end(),ijs.begin2()+nzmax);
|
||||
copy(S.begin(),S.end(),ijs.begin2()+2*nzmax);
|
||||
|
||||
// return the result
|
||||
return ijs;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
VectorConfig GaussianFactorGraph::steepestDescent(const VectorConfig& x0,
|
||||
bool verbose, double epsilon, size_t maxIterations) const {
|
||||
return gtsam::steepestDescent(*this, x0, verbose, epsilon, maxIterations);
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
boost::shared_ptr<VectorConfig> GaussianFactorGraph::steepestDescent_(
|
||||
const VectorConfig& x0, bool verbose, double epsilon, size_t maxIterations) const {
|
||||
return boost::shared_ptr<VectorConfig>(new VectorConfig(
|
||||
gtsam::conjugateGradientDescent(*this, x0, verbose, epsilon,
|
||||
maxIterations)));
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
VectorConfig GaussianFactorGraph::conjugateGradientDescent(
|
||||
const VectorConfig& x0, bool verbose, double epsilon, size_t maxIterations) const {
|
||||
return gtsam::conjugateGradientDescent(*this, x0, verbose, epsilon,
|
||||
maxIterations);
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
boost::shared_ptr<VectorConfig> GaussianFactorGraph::conjugateGradientDescent_(
|
||||
const VectorConfig& x0, bool verbose, double epsilon, size_t maxIterations) const {
|
||||
return boost::shared_ptr<VectorConfig>(new VectorConfig(
|
||||
gtsam::conjugateGradientDescent(*this, x0, verbose, epsilon,
|
||||
maxIterations)));
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
|
||||
template std::pair<Matrix, SharedDiagonal> combineFactorsAndCreateMatrix<vector<GaussianFactor::shared_ptr> >(
|
||||
const vector<GaussianFactor::shared_ptr>& factors, const Ordering& order, const Dimensions& dimensions);
|
||||
|
||||
template std::pair<Matrix, SharedDiagonal> combineFactorsAndCreateMatrix<GaussianFactorGraph>(
|
||||
const GaussianFactorGraph& factors, const Ordering& order, const Dimensions& dimensions);
|
||||
//template std::pair<Matrix, SharedDiagonal> combineFactorsAndCreateMatrix<vector<GaussianFactor::shared_ptr> >(
|
||||
// const vector<GaussianFactor::shared_ptr>& factors, const Ordering& order, const Dimensions& dimensions);
|
||||
//
|
||||
//template std::pair<Matrix, SharedDiagonal> combineFactorsAndCreateMatrix<GaussianFactorGraph>(
|
||||
// const GaussianFactorGraph& factors, const Ordering& order, const Dimensions& dimensions);
|
||||
|
||||
} // namespace gtsam
|
||||
|
|
|
|||
|
|
@ -21,8 +21,6 @@
|
|||
|
||||
namespace gtsam {
|
||||
|
||||
class Ordering;
|
||||
|
||||
/**
|
||||
* A Linear Factor Graph is a factor graph where all factors are Gaussian, i.e.
|
||||
* Factor == GaussianFactor
|
||||
|
|
@ -32,6 +30,10 @@ namespace gtsam {
|
|||
class GaussianFactorGraph : public FactorGraph<GaussianFactor> {
|
||||
public:
|
||||
|
||||
typedef boost::shared_ptr<GaussianFactorGraph> shared_ptr;
|
||||
typedef GaussianBayesNet bayesnet_type;
|
||||
typedef GaussianVariableIndex<> variableindex_type;
|
||||
|
||||
/**
|
||||
* Default constructor
|
||||
*/
|
||||
|
|
@ -48,32 +50,41 @@ namespace gtsam {
|
|||
}
|
||||
|
||||
/** Add a unary factor */
|
||||
inline void add(const Symbol& key1, const Matrix& A1,
|
||||
inline void add(varid_t key1, const Matrix& A1,
|
||||
const Vector& b, const SharedDiagonal& model) {
|
||||
push_back(sharedFactor(new GaussianFactor(key1,A1,b,model)));
|
||||
}
|
||||
|
||||
/** Add a binary factor */
|
||||
inline void add(const Symbol& key1, const Matrix& A1,
|
||||
const Symbol& key2, const Matrix& A2,
|
||||
inline void add(varid_t key1, const Matrix& A1,
|
||||
varid_t key2, const Matrix& A2,
|
||||
const Vector& b, const SharedDiagonal& model) {
|
||||
push_back(sharedFactor(new GaussianFactor(key1,A1,key2,A2,b,model)));
|
||||
}
|
||||
|
||||
/** Add a ternary factor */
|
||||
inline void add(const Symbol& key1, const Matrix& A1,
|
||||
const Symbol& key2, const Matrix& A2,
|
||||
const Symbol& key3, const Matrix& A3,
|
||||
inline void add(varid_t key1, const Matrix& A1,
|
||||
varid_t key2, const Matrix& A2,
|
||||
varid_t key3, const Matrix& A3,
|
||||
const Vector& b, const SharedDiagonal& model) {
|
||||
push_back(sharedFactor(new GaussianFactor(key1,A1,key2,A2,key3,A3,b,model)));
|
||||
}
|
||||
|
||||
/** Add an n-ary factor */
|
||||
inline void add(const std::vector<std::pair<Symbol, Matrix> > &terms,
|
||||
inline void add(const std::vector<std::pair<varid_t, Matrix> > &terms,
|
||||
const Vector &b, const SharedDiagonal& model) {
|
||||
push_back(sharedFactor(new GaussianFactor(terms,b,model)));
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the set of variables involved in the factors (computes a set
|
||||
* union).
|
||||
*/
|
||||
std::set<varid_t, std::less<varid_t>, boost::fast_pool_allocator<varid_t> > keys() const;
|
||||
|
||||
/** Permute the variables in the factors */
|
||||
void permuteWithInverse(const Permutation& inversePermutation);
|
||||
|
||||
/** return A*x-b */
|
||||
Errors errors(const VectorConfig& x) const;
|
||||
|
||||
|
|
@ -92,79 +103,61 @@ namespace gtsam {
|
|||
/* In-place version e <- A*x that takes an iterator. */
|
||||
void multiplyInPlace(const VectorConfig& x, const Errors::iterator& e) const;
|
||||
|
||||
/** return A^e */
|
||||
VectorConfig operator^(const Errors& e) const;
|
||||
// /** return A^e */
|
||||
// VectorConfig operator^(const Errors& e) const;
|
||||
|
||||
/** x += alpha*A'*e */
|
||||
void transposeMultiplyAdd(double alpha, const Errors& e, VectorConfig& x) const;
|
||||
|
||||
/**
|
||||
* Calculate Gradient of A^(A*x-b) for a given config
|
||||
* @param x: VectorConfig specifying where to calculate gradient
|
||||
* @return gradient, as a VectorConfig as well
|
||||
*/
|
||||
VectorConfig gradient(const VectorConfig& x) const;
|
||||
// /**
|
||||
// * Calculate Gradient of A^(A*x-b) for a given config
|
||||
// * @param x: VectorConfig specifying where to calculate gradient
|
||||
// * @return gradient, as a VectorConfig as well
|
||||
// */
|
||||
// VectorConfig gradient(const VectorConfig& x) const;
|
||||
|
||||
/** Unnormalized probability. O(n) */
|
||||
double probPrime(const VectorConfig& c) const {
|
||||
return exp(-0.5 * error(c));
|
||||
}
|
||||
|
||||
/**
|
||||
* find the separator, i.e. all the nodes that have at least one
|
||||
* common factor with the given node. FD: not used AFAIK.
|
||||
*/
|
||||
std::set<Symbol> find_separator(const Symbol& key) const;
|
||||
// /**
|
||||
// * find the separator, i.e. all the nodes that have at least one
|
||||
// * common factor with the given node. FD: not used AFAIK.
|
||||
// */
|
||||
// std::set<varid_t> find_separator(varid_t key) const;
|
||||
|
||||
/**
|
||||
* Eliminate a single node yielding a conditional Gaussian
|
||||
* Eliminates the factors from the factor graph through findAndRemoveFactors
|
||||
* and adds a new factor on the separator to the factor graph
|
||||
* @param key is the key to eliminate
|
||||
* @param enableJoinFactor uses the older joint factor combine process when true,
|
||||
* and when false uses the newer single matrix combine
|
||||
*/
|
||||
GaussianConditional::shared_ptr eliminateOne(const Symbol& key, bool enableJoinFactor = true);
|
||||
// /**
|
||||
// * Peforms a supposedly-faster (fewer matrix copy) version of elimination
|
||||
// * CURRENTLY IN TESTING
|
||||
// */
|
||||
// GaussianConditional::shared_ptr eliminateOneMatrixJoin(varid_t key);
|
||||
//
|
||||
//
|
||||
// /**
|
||||
// * Eliminate multiple variables at once, mostly used to eliminate frontal variables
|
||||
// */
|
||||
// GaussianBayesNet eliminateFrontals(const Ordering& frontals);
|
||||
|
||||
/**
|
||||
* Peforms a supposedly-faster (fewer matrix copy) version of elimination
|
||||
* CURRENTLY IN TESTING
|
||||
*/
|
||||
GaussianConditional::shared_ptr eliminateOneMatrixJoin(const Symbol& key);
|
||||
// /**
|
||||
// * optimize a linear factor graph
|
||||
// * @param ordering fg in order
|
||||
// * @param enableJoinFactor uses the older joint factor combine process when true,
|
||||
// * and when false uses the newer single matrix combine
|
||||
// */
|
||||
// VectorConfig optimize(const Ordering& ordering, bool enableJoinFactor = true);
|
||||
|
||||
/**
|
||||
* eliminate factor graph in place(!) in the given order, yielding
|
||||
* a chordal Bayes net. Allows for passing an incomplete ordering
|
||||
* that does not completely eliminate the graph
|
||||
* @param enableJoinFactor uses the older joint factor combine process when true,
|
||||
* and when false uses the newer single matrix combine
|
||||
*/
|
||||
GaussianBayesNet eliminate(const Ordering& ordering, bool enableJoinFactor = true);
|
||||
// /**
|
||||
// * optimize a linear factor graph using a multi-frontal solver
|
||||
// * @param ordering fg in order
|
||||
// */
|
||||
// VectorConfig optimizeMultiFrontals(const Ordering& ordering);
|
||||
|
||||
/**
|
||||
* Eliminate multiple variables at once, mostly used to eliminate frontal variables
|
||||
*/
|
||||
GaussianBayesNet eliminateFrontals(const Ordering& frontals);
|
||||
|
||||
/**
|
||||
* optimize a linear factor graph
|
||||
* @param ordering fg in order
|
||||
* @param enableJoinFactor uses the older joint factor combine process when true,
|
||||
* and when false uses the newer single matrix combine
|
||||
*/
|
||||
VectorConfig optimize(const Ordering& ordering, bool enableJoinFactor = true);
|
||||
|
||||
/**
|
||||
* optimize a linear factor graph using a multi-frontal solver
|
||||
* @param ordering fg in order
|
||||
*/
|
||||
VectorConfig optimizeMultiFrontals(const Ordering& ordering);
|
||||
|
||||
/**
|
||||
* shared pointer versions for MATLAB
|
||||
*/
|
||||
boost::shared_ptr<GaussianBayesNet> eliminate_(const Ordering&);
|
||||
boost::shared_ptr<VectorConfig> optimize_(const Ordering&);
|
||||
// /**
|
||||
// * shared pointer versions for MATLAB
|
||||
// */
|
||||
// boost::shared_ptr<GaussianBayesNet> eliminate_(const Ordering&);
|
||||
// boost::shared_ptr<VectorConfig> optimize_(const Ordering&);
|
||||
|
||||
/**
|
||||
* static function that combines two factor graphs
|
||||
|
|
@ -181,108 +174,229 @@ namespace gtsam {
|
|||
*/
|
||||
void combine(const GaussianFactorGraph &lfg);
|
||||
|
||||
/**
|
||||
* Find all variables and their dimensions
|
||||
* @return The set of all variable/dimension pairs
|
||||
*/
|
||||
Dimensions dimensions() const;
|
||||
// /**
|
||||
// * Find all variables and their dimensions
|
||||
// * @return The set of all variable/dimension pairs
|
||||
// */
|
||||
// Dimensions dimensions() const;
|
||||
|
||||
/**
|
||||
* Add zero-mean i.i.d. Gaussian prior terms to each variable
|
||||
* @param sigma Standard deviation of Gaussian
|
||||
*/
|
||||
GaussianFactorGraph add_priors(double sigma) const;
|
||||
GaussianFactorGraph add_priors(double sigma, const GaussianVariableIndex<>& variableIndex) const;
|
||||
|
||||
/**
|
||||
* Return RHS (b./sigmas) as Errors class
|
||||
*/
|
||||
Errors rhs() const;
|
||||
// /**
|
||||
// * Return RHS (b./sigmas) as Errors class
|
||||
// */
|
||||
// Errors rhs() const;
|
||||
//
|
||||
// /**
|
||||
// * Return RHS (b./sigmas) as Vector
|
||||
// */
|
||||
// Vector rhsVector() const;
|
||||
//
|
||||
// /**
|
||||
// * Return (dense) matrix associated with factor graph
|
||||
// * @param ordering of variables needed for matrix column order
|
||||
// */
|
||||
// std::pair<Matrix,Vector> matrix (const Ordering& ordering) const;
|
||||
|
||||
/**
|
||||
* Return RHS (b./sigmas) as Vector
|
||||
*/
|
||||
Vector rhsVector() const;
|
||||
// /**
|
||||
// * split the source vector w.r.t. the given ordering and assemble a vector config
|
||||
// * @param v: the source vector
|
||||
// * @param ordeirng: the ordering corresponding to the vector
|
||||
// */
|
||||
// VectorConfig assembleConfig(const Vector& v, const Ordering& ordering) const;
|
||||
//
|
||||
// /**
|
||||
// * get the 1-based starting column indices for all variables
|
||||
// * @param ordering of variables needed for matrix column order
|
||||
// * @return The set of all variable/index pairs
|
||||
// */
|
||||
// std::pair<Dimensions, size_t> columnIndices_(const Ordering& ordering) const;
|
||||
// Dimensions columnIndices(const Ordering& ordering) const;
|
||||
//
|
||||
// /**
|
||||
// * return the size of corresponding A matrix
|
||||
// */
|
||||
// std::pair<std::size_t, std::size_t> sizeOfA() const;
|
||||
|
||||
/**
|
||||
* Return (dense) matrix associated with factor graph
|
||||
* @param ordering of variables needed for matrix column order
|
||||
*/
|
||||
std::pair<Matrix,Vector> matrix (const Ordering& ordering) const;
|
||||
// /**
|
||||
// * Return 3*nzmax matrix where the rows correspond to the vectors i, j, and s
|
||||
// * to generate an m-by-n sparse matrix, which can be given to MATLAB's sparse function.
|
||||
// * The standard deviations are baked into A and b
|
||||
// * @param ordering of variables needed for matrix column order
|
||||
// */
|
||||
// Matrix sparse(const Ordering& ordering) const;
|
||||
//
|
||||
// /**
|
||||
// * Version that takes column indices rather than ordering
|
||||
// */
|
||||
// Matrix sparse(const Dimensions& indices) const;
|
||||
|
||||
/**
|
||||
* split the source vector w.r.t. the given ordering and assemble a vector config
|
||||
* @param v: the source vector
|
||||
* @param ordeirng: the ordering corresponding to the vector
|
||||
*/
|
||||
VectorConfig assembleConfig(const Vector& v, const Ordering& ordering) const;
|
||||
|
||||
/**
|
||||
* get the 1-based starting column indices for all variables
|
||||
* @param ordering of variables needed for matrix column order
|
||||
* @return The set of all variable/index pairs
|
||||
*/
|
||||
std::pair<Dimensions, size_t> columnIndices_(const Ordering& ordering) const;
|
||||
Dimensions columnIndices(const Ordering& ordering) const;
|
||||
|
||||
/**
|
||||
* return the size of corresponding A matrix
|
||||
*/
|
||||
std::pair<std::size_t, std::size_t> sizeOfA() const;
|
||||
|
||||
/**
|
||||
* Return 3*nzmax matrix where the rows correspond to the vectors i, j, and s
|
||||
* to generate an m-by-n sparse matrix, which can be given to MATLAB's sparse function.
|
||||
* The standard deviations are baked into A and b
|
||||
* @param ordering of variables needed for matrix column order
|
||||
*/
|
||||
Matrix sparse(const Ordering& ordering) const;
|
||||
|
||||
/**
|
||||
* Version that takes column indices rather than ordering
|
||||
*/
|
||||
Matrix sparse(const Dimensions& indices) const;
|
||||
|
||||
/**
|
||||
* Find solution using gradient descent
|
||||
* @param x0: VectorConfig specifying initial estimate
|
||||
* @return solution
|
||||
*/
|
||||
VectorConfig steepestDescent(const VectorConfig& x0, bool verbose = false,
|
||||
double epsilon = 1e-3, size_t maxIterations = 0) const;
|
||||
|
||||
/**
|
||||
* shared pointer versions for MATLAB
|
||||
*/
|
||||
boost::shared_ptr<VectorConfig>
|
||||
steepestDescent_(const VectorConfig& x0, bool verbose = false,
|
||||
double epsilon = 1e-3, size_t maxIterations = 0) const;
|
||||
|
||||
/**
|
||||
* Find solution using conjugate gradient descent
|
||||
* @param x0: VectorConfig specifying initial estimate
|
||||
* @return solution
|
||||
*/
|
||||
VectorConfig conjugateGradientDescent(const VectorConfig& x0, bool verbose =
|
||||
false, double epsilon = 1e-3, size_t maxIterations = 0) const;
|
||||
|
||||
/**
|
||||
* shared pointer versions for MATLAB
|
||||
*/
|
||||
boost::shared_ptr<VectorConfig> conjugateGradientDescent_(
|
||||
const VectorConfig& x0, bool verbose = false, double epsilon = 1e-3,
|
||||
size_t maxIterations = 0) const;
|
||||
// /**
|
||||
// * Find solution using gradient descent
|
||||
// * @param x0: VectorConfig specifying initial estimate
|
||||
// * @return solution
|
||||
// */
|
||||
// VectorConfig steepestDescent(const VectorConfig& x0, bool verbose = false,
|
||||
// double epsilon = 1e-3, size_t maxIterations = 0) const;
|
||||
//
|
||||
// /**
|
||||
// * shared pointer versions for MATLAB
|
||||
// */
|
||||
// boost::shared_ptr<VectorConfig>
|
||||
// steepestDescent_(const VectorConfig& x0, bool verbose = false,
|
||||
// double epsilon = 1e-3, size_t maxIterations = 0) const;
|
||||
//
|
||||
// /**
|
||||
// * Find solution using conjugate gradient descent
|
||||
// * @param x0: VectorConfig specifying initial estimate
|
||||
// * @return solution
|
||||
// */
|
||||
// VectorConfig conjugateGradientDescent(const VectorConfig& x0, bool verbose =
|
||||
// false, double epsilon = 1e-3, size_t maxIterations = 0) const;
|
||||
//
|
||||
// /**
|
||||
// * shared pointer versions for MATLAB
|
||||
// */
|
||||
// boost::shared_ptr<VectorConfig> conjugateGradientDescent_(
|
||||
// const VectorConfig& x0, bool verbose = false, double epsilon = 1e-3,
|
||||
// size_t maxIterations = 0) const;
|
||||
};
|
||||
|
||||
/**
|
||||
* Returns the augmented matrix version of a set of factors
|
||||
* with the corresponding noiseModel
|
||||
* @param factors is the set of factors to combine
|
||||
* @param ordering of variables needed for matrix column order
|
||||
* @return the augmented matrix and a noise model
|
||||
*/
|
||||
template <class Factors>
|
||||
std::pair<Matrix, SharedDiagonal> combineFactorsAndCreateMatrix(
|
||||
const Factors& factors,
|
||||
const Ordering& order, const Dimensions& dimensions);
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class VariableIndexStorage>
|
||||
class GaussianVariableIndex : public VariableIndex<VariableIndexStorage> {
|
||||
public:
|
||||
typedef VariableIndex<VariableIndexStorage> Base;
|
||||
typedef typename VariableIndexStorage::template type_factory<size_t>::type storage_type;
|
||||
|
||||
storage_type dims_;
|
||||
|
||||
public:
|
||||
typedef boost::shared_ptr<GaussianVariableIndex> shared_ptr;
|
||||
|
||||
/** Construct an empty GaussianVariableIndex */
|
||||
GaussianVariableIndex() {}
|
||||
|
||||
/**
|
||||
* Constructor from a GaussianFactorGraph, lets the base class build the
|
||||
* column-wise index then fills the dims_ array.
|
||||
*/
|
||||
GaussianVariableIndex(const GaussianFactorGraph& factorGraph);
|
||||
|
||||
/**
|
||||
* Constructor to "upgrade" from the base class without recomputing the
|
||||
* column index, i.e. just fills the dims_ array.
|
||||
*/
|
||||
GaussianVariableIndex(const VariableIndex<VariableIndexStorage>& variableIndex, const GaussianFactorGraph& factorGraph);
|
||||
|
||||
/**
|
||||
* Another constructor to upgrade from the base class using an existing
|
||||
* array of variable dimensions.
|
||||
*/
|
||||
GaussianVariableIndex(const VariableIndex<VariableIndexStorage>& variableIndex, const storage_type& dimensions);
|
||||
|
||||
const storage_type& dims() const { return dims_; }
|
||||
size_t dim(varid_t variable) const { Base::checkVar(variable); return dims_[variable]; }
|
||||
|
||||
/** Permute */
|
||||
void permute(const Permutation& permutation);
|
||||
|
||||
/** Augment this variable index with the contents of another one */
|
||||
void augment(const GaussianFactorGraph& factorGraph);
|
||||
|
||||
protected:
|
||||
GaussianVariableIndex(size_t nVars) : Base(nVars), dims_(nVars) {}
|
||||
void fillDims(const GaussianFactorGraph& factorGraph);
|
||||
};
|
||||
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class Storage>
|
||||
GaussianVariableIndex<Storage>::GaussianVariableIndex(const GaussianFactorGraph& factorGraph) :
|
||||
Base(factorGraph), dims_(Base::index_.size()) {
|
||||
fillDims(factorGraph); }
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class Storage>
|
||||
GaussianVariableIndex<Storage>::GaussianVariableIndex(
|
||||
const VariableIndex<Storage>& variableIndex, const GaussianFactorGraph& factorGraph) :
|
||||
Base(variableIndex), dims_(Base::index_.size()) {
|
||||
fillDims(factorGraph); }
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class Storage>
|
||||
GaussianVariableIndex<Storage>::GaussianVariableIndex(
|
||||
const VariableIndex<Storage>& variableIndex, const storage_type& dimensions) :
|
||||
Base(variableIndex), dims_(dimensions) {
|
||||
assert(Base::index_.size() == dims_.size()); }
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class Storage>
|
||||
void GaussianVariableIndex<Storage>::fillDims(const GaussianFactorGraph& factorGraph) {
|
||||
// Store dimensions of each variable
|
||||
assert(dims_.size() == Base::index_.size());
|
||||
for(varid_t var=0; var<Base::index_.size(); ++var)
|
||||
if(!Base::index_[var].empty()) {
|
||||
size_t factorIndex = Base::operator [](var).front().factorIndex;
|
||||
size_t variablePosition = Base::operator [](var).front().variablePosition;
|
||||
boost::shared_ptr<const GaussianFactor> factor(factorGraph[factorIndex]);
|
||||
dims_[var] = factor->getDim(factor->begin() + variablePosition);
|
||||
} else
|
||||
dims_[var] = 0;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class Storage>
|
||||
void GaussianVariableIndex<Storage>::permute(const Permutation& permutation) {
|
||||
VariableIndex<Storage>::permute(permutation);
|
||||
storage_type original(this->dims_.size());
|
||||
this->dims_.swap(original);
|
||||
for(varid_t j=0; j<permutation.size(); ++j)
|
||||
this->dims_[j] = original[permutation[j]];
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class Storage>
|
||||
void GaussianVariableIndex<Storage>::augment(const GaussianFactorGraph& factorGraph) {
|
||||
Base::augment(factorGraph);
|
||||
dims_.resize(Base::index_.size(), 0);
|
||||
BOOST_FOREACH(boost::shared_ptr<const GaussianFactor> factor, factorGraph) {
|
||||
for(GaussianFactor::const_iterator var=factor->begin(); var!=factor->end(); ++var) {
|
||||
#ifndef NDEBUG
|
||||
if(dims_[*var] != 0)
|
||||
assert(dims_[*var] == factor->getDim(var));
|
||||
#endif
|
||||
if(dims_[*var] == 0)
|
||||
dims_[*var] = factor->getDim(var);
|
||||
}
|
||||
}
|
||||
// for(varid_t var=0; var<dims_.size(); ++var) {
|
||||
//#ifndef NDEBUG
|
||||
// if(var >= varIndex.dims_.size() || varIndex.dims_[var] == 0)
|
||||
// assert(dims_[var] != 0);
|
||||
// else if(varIndex.dims_[var] != 0 && dims_[var] != 0)
|
||||
// assert(dims_[var] == varIndex.dims_[var]);
|
||||
//#endif
|
||||
// if(dims_[var] == 0)
|
||||
// dims_[var] = varIndex.dims_[var];
|
||||
// }
|
||||
}
|
||||
|
||||
// /**
|
||||
// * Returns the augmented matrix version of a set of factors
|
||||
// * with the corresponding noiseModel
|
||||
// * @param factors is the set of factors to combine
|
||||
// * @param ordering of variables needed for matrix column order
|
||||
// * @return the augmented matrix and a noise model
|
||||
// */
|
||||
// template <class Factors>
|
||||
// std::pair<Matrix, SharedDiagonal> combineFactorsAndCreateMatrix(
|
||||
// const Factors& factors,
|
||||
// const Ordering& order, const Dimensions& dimensions);
|
||||
|
||||
} // namespace gtsam
|
||||
|
|
|
|||
|
|
@ -22,7 +22,7 @@ void optimize(const GaussianISAM::sharedClique& clique, VectorConfig& result) {
|
|||
for (it = clique->rbegin(); it!=clique->rend(); it++) {
|
||||
GaussianConditional::shared_ptr cg = *it;
|
||||
Vector x = cg->solve(result); // Solve for that variable
|
||||
result.insert(cg->key(), x); // store result in partial solution
|
||||
result[cg->key()] = x; // store result in partial solution
|
||||
}
|
||||
BOOST_FOREACH(const GaussianISAM::sharedClique& child, clique->children_) {
|
||||
// list<GaussianISAM::Clique::shared_ptr>::const_iterator child;
|
||||
|
|
@ -33,7 +33,7 @@ void optimize(const GaussianISAM::sharedClique& clique, VectorConfig& result) {
|
|||
|
||||
/* ************************************************************************* */
|
||||
VectorConfig optimize(const GaussianISAM& bayesTree) {
|
||||
VectorConfig result;
|
||||
VectorConfig result(bayesTree.dims_);
|
||||
// starting from the root, call optimize on each conditional
|
||||
optimize(bayesTree.root(), result);
|
||||
return result;
|
||||
|
|
|
|||
|
|
@ -8,12 +8,57 @@
|
|||
|
||||
#pragma once
|
||||
|
||||
#include <gtsam/inference/ISAM.h>
|
||||
#include <gtsam/linear/GaussianConditional.h>
|
||||
#include <gtsam/linear/GaussianFactor.h>
|
||||
#include <gtsam/inference/ISAM.h>
|
||||
|
||||
namespace gtsam {
|
||||
|
||||
typedef ISAM<GaussianConditional> GaussianISAM;
|
||||
class GaussianISAM : public ISAM<GaussianConditional> {
|
||||
|
||||
std::deque<size_t, boost::fast_pool_allocator<size_t> > dims_;
|
||||
|
||||
public:
|
||||
|
||||
/** Create an empty Bayes Tree */
|
||||
GaussianISAM() : ISAM<GaussianConditional>() {}
|
||||
|
||||
/** Create a Bayes Tree from a Bayes Net */
|
||||
GaussianISAM(const GaussianBayesNet& bayesNet) : ISAM<GaussianConditional>(bayesNet) {}
|
||||
|
||||
/** Override update_internal to also keep track of variable dimensions. */
|
||||
template<class FactorGraph>
|
||||
void update_internal(const FactorGraph& newFactors, Cliques& orphans) {
|
||||
|
||||
ISAM<GaussianConditional>::update_internal(newFactors, orphans);
|
||||
|
||||
// update dimensions
|
||||
BOOST_FOREACH(const typename FactorGraph::sharedFactor& factor, newFactors) {
|
||||
for(typename FactorGraph::factor_type::const_iterator key = factor->begin(); key != factor->end(); ++key) {
|
||||
if(*key >= dims_.size())
|
||||
dims_.resize(*key + 1);
|
||||
if(dims_[*key] == 0)
|
||||
dims_[*key] = factor->getDim(key);
|
||||
else
|
||||
assert(dims_[*key] == factor->getDim(key));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
template<class FactorGraph>
|
||||
void update(const FactorGraph& newFactors) {
|
||||
Cliques orphans;
|
||||
this->update_internal(newFactors, orphans);
|
||||
}
|
||||
|
||||
void clear() {
|
||||
ISAM<GaussianConditional>::clear();
|
||||
dims_.clear();
|
||||
}
|
||||
|
||||
friend VectorConfig optimize(const GaussianISAM&);
|
||||
|
||||
};
|
||||
|
||||
// recursively optimize this conditional and all subtrees
|
||||
void optimize(const GaussianISAM::sharedClique& clique, VectorConfig& result);
|
||||
|
|
|
|||
|
|
@ -6,12 +6,14 @@
|
|||
* @brief: the Gaussian junction tree
|
||||
*/
|
||||
|
||||
#include <boost/foreach.hpp>
|
||||
|
||||
#include <gtsam/inference/ClusterTree-inl.h>
|
||||
#include <gtsam/inference/JunctionTree-inl.h>
|
||||
#include <gtsam/linear/GaussianJunctionTree.h>
|
||||
|
||||
#include <vector>
|
||||
|
||||
#include <boost/foreach.hpp>
|
||||
|
||||
namespace gtsam {
|
||||
|
||||
// explicit template instantiation
|
||||
|
|
@ -23,34 +25,51 @@ namespace gtsam {
|
|||
/**
|
||||
* GaussianJunctionTree
|
||||
*/
|
||||
void GaussianJunctionTree::btreeBackSubstitue(
|
||||
BayesTree<GaussianConditional>::sharedClique current,
|
||||
VectorConfig& config) {
|
||||
void GaussianJunctionTree::btreeBackSubstitue(const boost::shared_ptr<const BayesTree::Clique>& current, VectorConfig& config) const {
|
||||
// solve the bayes net in the current node
|
||||
BayesNet<GaussianConditional>::const_reverse_iterator it = current->rbegin();
|
||||
for (; it!=current->rend(); it++) {
|
||||
GaussianBayesNet::const_reverse_iterator it = current->rbegin();
|
||||
for (; it!=current->rend(); ++it) {
|
||||
Vector x = (*it)->solve(config); // Solve for that variable
|
||||
config.insert((*it)->key(),x); // store result in partial solution
|
||||
config[(*it)->key()] = x; // store result in partial solution
|
||||
}
|
||||
|
||||
// solve the bayes nets in the child nodes
|
||||
typedef BayesTree<GaussianConditional>::sharedClique sharedBayesClique;
|
||||
BOOST_FOREACH(sharedBayesClique child, current->children_) {
|
||||
BOOST_FOREACH(const typename BayesTree::sharedClique& child, current->children()) {
|
||||
btreeBackSubstitue(child, config);
|
||||
}
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
void countDims(const boost::shared_ptr<const BayesTree<GaussianConditional>::Clique>& clique, vector<size_t>& dims) {
|
||||
BOOST_FOREACH(const boost::shared_ptr<const GaussianConditional>& cond, *clique) {
|
||||
// There should be no two conditionals on the same variable
|
||||
assert(dims[cond->key()] == 0);
|
||||
dims[cond->key()] = cond->dim();
|
||||
}
|
||||
BOOST_FOREACH(const boost::shared_ptr<const BayesTree<GaussianConditional>::Clique>& child, clique->children()) {
|
||||
countDims(child, dims);
|
||||
}
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
VectorConfig GaussianJunctionTree::optimize() {
|
||||
VectorConfig GaussianJunctionTree::optimize() const {
|
||||
tic("GJT optimize 1: eliminate");
|
||||
// eliminate from leaves to the root
|
||||
BayesTree<GaussianConditional>::sharedClique rootClique;
|
||||
rootClique = this->eliminate<GaussianConditional>();
|
||||
boost::shared_ptr<const BayesTree::Clique> rootClique(this->eliminate());
|
||||
toc("GJT optimize 1: eliminate");
|
||||
|
||||
// Allocate solution vector
|
||||
tic("GJT optimize 2: allocate VectorConfig");
|
||||
vector<size_t> dims(rootClique->back()->key() + 1, 0);
|
||||
countDims(rootClique, dims);
|
||||
VectorConfig result(dims);
|
||||
toc("GJT optimize 2: allocate VectorConfig");
|
||||
|
||||
// back-substitution
|
||||
VectorConfig result;
|
||||
tic("GJT optimize 3: back-substitute");
|
||||
btreeBackSubstitue(rootClique, result);
|
||||
toc("GJT optimize 3: back-substitute");
|
||||
return result;
|
||||
}
|
||||
|
||||
|
||||
} //namespace gtsam
|
||||
|
|
|
|||
|
|
@ -25,17 +25,17 @@ namespace gtsam {
|
|||
|
||||
protected:
|
||||
// back-substitute in topological sort order (parents first)
|
||||
void btreeBackSubstitue(BayesTree<GaussianConditional>::sharedClique current, VectorConfig& config);
|
||||
void btreeBackSubstitue(const boost::shared_ptr<const BayesTree::Clique>& current, VectorConfig& config) const;
|
||||
|
||||
public :
|
||||
|
||||
GaussianJunctionTree() : Base() {}
|
||||
|
||||
// constructor
|
||||
GaussianJunctionTree(GaussianFactorGraph& fg, const Ordering& ordering) : Base(fg, ordering) {}
|
||||
GaussianJunctionTree(const GaussianFactorGraph& fg) : Base(fg) {}
|
||||
|
||||
// optimize the linear graph
|
||||
VectorConfig optimize();
|
||||
VectorConfig optimize() const;
|
||||
}; // GaussianJunctionTree
|
||||
|
||||
} // namespace gtsam
|
||||
|
|
|
|||
|
|
@ -17,8 +17,8 @@ check_PROGRAMS += tests/testNoiseModel tests/testErrors
|
|||
|
||||
# Vector Configurations
|
||||
headers += VectorConfig.h
|
||||
sources += VectorMap.cpp VectorBTree.cpp
|
||||
check_PROGRAMS += tests/testVectorMap tests/testVectorBTree
|
||||
#sources += VectorMap.cpp VectorBTree.cpp
|
||||
#check_PROGRAMS += tests/testVectorMap tests/testVectorBTree
|
||||
|
||||
# Gaussian Factor Graphs
|
||||
headers += GaussianFactorSet.h Factorization.h
|
||||
|
|
@ -26,15 +26,17 @@ sources += GaussianFactor.cpp GaussianFactorGraph.cpp
|
|||
sources += GaussianJunctionTree.cpp
|
||||
sources += GaussianConditional.cpp GaussianBayesNet.cpp
|
||||
sources += GaussianISAM.cpp
|
||||
check_PROGRAMS += tests/testGaussianFactor tests/testGaussianJunctionTree tests/testGaussianConditional
|
||||
check_PROGRAMS += tests/testGaussianFactor tests/testGaussianConditional
|
||||
check_PROGRAMS += tests/testGaussianJunctionTree
|
||||
|
||||
# Iterative Methods
|
||||
headers += iterative-inl.h SubgraphSolver.h SubgraphSolver-inl.h
|
||||
sources += iterative.cpp BayesNetPreconditioner.cpp SubgraphPreconditioner.cpp
|
||||
check_PROGRAMS += tests/testBayesNetPreconditioner
|
||||
#headers += iterative-inl.h SubgraphSolver.h SubgraphSolver-inl.h
|
||||
#sources += iterative.cpp BayesNetPreconditioner.cpp SubgraphPreconditioner.cpp
|
||||
#check_PROGRAMS += tests/testBayesNetPreconditioner
|
||||
|
||||
# Timing tests
|
||||
noinst_PROGRAMS = tests/timeGaussianFactor tests/timeVectorConfig
|
||||
noinst_PROGRAMS = tests/timeGaussianFactor tests/timeFactorOverhead tests/timeSLAMlike
|
||||
#noinst_PROGRAMS += tests/timeVectorConfig
|
||||
|
||||
#----------------------------------------------------------------------------------------------------
|
||||
# Create a libtool library that is not installed
|
||||
|
|
@ -54,10 +56,14 @@ AM_CXXFLAGS =
|
|||
#----------------------------------------------------------------------------------------------------
|
||||
TESTS = $(check_PROGRAMS)
|
||||
AM_LDFLAGS = $(BOOST_LDFLAGS) $(boost_serialization)
|
||||
LDADD = liblinear.la ../inference/libinference.la ../base/libbase.la
|
||||
LDADD = liblinear.la ../inference/libinference.la ../base/libbase.la
|
||||
LDADD += ../CppUnitLite/libCppUnitLite.a ../colamd/libcolamd.la
|
||||
AM_DEFAULT_SOURCE_EXT = .cpp
|
||||
|
||||
if USE_ACCELERATE_MACOS
|
||||
AM_LDFLAGS += -Wl,/System/Library/Frameworks/Accelerate.framework/Accelerate
|
||||
endif
|
||||
|
||||
# rule to run an executable
|
||||
%.run: % $(LDADD)
|
||||
./$^
|
||||
|
|
|
|||
|
|
@ -29,6 +29,7 @@ static double inf = std::numeric_limits<double>::infinity();
|
|||
using namespace std;
|
||||
|
||||
namespace gtsam {
|
||||
|
||||
namespace noiseModel {
|
||||
|
||||
/* ************************************************************************* */
|
||||
|
|
@ -103,8 +104,12 @@ void Gaussian::WhitenInPlace(Matrix& H) const {
|
|||
H = thisR() * H;
|
||||
}
|
||||
|
||||
void Gaussian::WhitenInPlace(MatrixColMajor& H) const {
|
||||
H = ublas::prod(thisR(), H);
|
||||
}
|
||||
|
||||
// General QR, see also special version in Constrained
|
||||
SharedDiagonal Gaussian::QR(Matrix& Ab) const {
|
||||
SharedDiagonal Gaussian::QR(Matrix& Ab, boost::optional<vector<long>&> firstZeroRows) const {
|
||||
|
||||
// get size(A) and maxRank
|
||||
// TODO: really no rank problems ?
|
||||
|
|
@ -116,9 +121,10 @@ SharedDiagonal Gaussian::QR(Matrix& Ab) const {
|
|||
|
||||
// Perform in-place Householder
|
||||
#ifdef GT_USE_LAPACK
|
||||
long* Stair = MakeStairs(Ab);
|
||||
householder_spqr(Ab, Stair);
|
||||
// householder_spqr(Ab);
|
||||
if(firstZeroRows)
|
||||
householder_spqr(Ab, &(*firstZeroRows)[0]);
|
||||
else
|
||||
householder_spqr(Ab);
|
||||
#else
|
||||
householder(Ab, maxRank);
|
||||
#endif
|
||||
|
|
@ -126,6 +132,27 @@ SharedDiagonal Gaussian::QR(Matrix& Ab) const {
|
|||
return Unit::Create(maxRank);
|
||||
}
|
||||
|
||||
// General QR, see also special version in Constrained
|
||||
SharedDiagonal Gaussian::QRColumnWise(ublas::matrix<double, ublas::column_major>& Ab, vector<long>& firstZeroRows) const {
|
||||
|
||||
// get size(A) and maxRank
|
||||
// TODO: really no rank problems ?
|
||||
size_t m = Ab.size1(), n = Ab.size2()-1;
|
||||
size_t maxRank = min(m,n);
|
||||
|
||||
// pre-whiten everything (cheaply if possible)
|
||||
WhitenInPlace(Ab);
|
||||
|
||||
// Perform in-place Householder
|
||||
#ifdef GT_USE_LAPACK
|
||||
householder_spqr_colmajor(Ab, &firstZeroRows[0]);
|
||||
#else
|
||||
householder(Ab, maxRank);
|
||||
#endif
|
||||
|
||||
return Unit::Create(maxRank);
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
Diagonal::Diagonal(const Vector& sigmas) :
|
||||
Gaussian(sigmas.size()), sigmas_(sigmas), invsigmas_(reciprocal(sigmas)) {
|
||||
|
|
@ -172,6 +199,11 @@ void Diagonal::WhitenInPlace(Matrix& H) const {
|
|||
vector_scale_inplace(invsigmas_, H);
|
||||
}
|
||||
|
||||
void Diagonal::WhitenInPlace(MatrixColMajor& H) const {
|
||||
|
||||
vector_scale_inplace(invsigmas_, H);
|
||||
}
|
||||
|
||||
Vector Diagonal::sample() const {
|
||||
Vector result(dim_);
|
||||
for (size_t i = 0; i < dim_; i++) {
|
||||
|
|
@ -202,11 +234,15 @@ void Constrained::WhitenInPlace(Matrix& H) const {
|
|||
throw logic_error("noiseModel::Constrained cannot Whiten");
|
||||
}
|
||||
|
||||
void Constrained::WhitenInPlace(MatrixColMajor& H) const {
|
||||
throw logic_error("noiseModel::Constrained cannot Whiten");
|
||||
}
|
||||
|
||||
// Special version of QR for Constrained calls slower but smarter code
|
||||
// that deals with possibly zero sigmas
|
||||
// It is Gram-Schmidt orthogonalization rather than Householder
|
||||
// Previously Diagonal::QR
|
||||
SharedDiagonal Constrained::QR(Matrix& Ab) const {
|
||||
SharedDiagonal Constrained::QR(Matrix& Ab, boost::optional<std::vector<long>&> firstZeroRows) const {
|
||||
bool verbose = false;
|
||||
if (verbose) cout << "\nStarting Constrained::QR" << endl;
|
||||
|
||||
|
|
@ -277,6 +313,13 @@ SharedDiagonal Constrained::QR(Matrix& Ab) const {
|
|||
return mixed ? Constrained::MixedPrecisions(precisions) : Diagonal::Precisions(precisions);
|
||||
}
|
||||
|
||||
SharedDiagonal Constrained::QRColumnWise(ublas::matrix<double, ublas::column_major>& Ab, vector<long>& firstZeroRows) const {
|
||||
Matrix AbRowWise(Ab);
|
||||
SharedDiagonal result = this->QR(AbRowWise, firstZeroRows);
|
||||
Ab = AbRowWise;
|
||||
return result;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
|
||||
Isotropic::shared_ptr Isotropic::Variance(size_t dim, double variance, bool smart) {
|
||||
|
|
@ -309,6 +352,10 @@ void Isotropic::WhitenInPlace(Matrix& H) const {
|
|||
H *= invsigma_;
|
||||
}
|
||||
|
||||
void Isotropic::WhitenInPlace(MatrixColMajor& H) const {
|
||||
H *= invsigma_;
|
||||
}
|
||||
|
||||
// faster version
|
||||
Vector Isotropic::sample() const {
|
||||
typedef boost::normal_distribution<double> Normal;
|
||||
|
|
|
|||
|
|
@ -141,6 +141,7 @@ namespace gtsam {
|
|||
* In-place version
|
||||
*/
|
||||
virtual void WhitenInPlace(Matrix& H) const;
|
||||
virtual void WhitenInPlace(MatrixColMajor& H) const;
|
||||
|
||||
/**
|
||||
* Whiten a system, in place as well
|
||||
|
|
@ -157,7 +158,13 @@ namespace gtsam {
|
|||
* @param Ab is the m*(n+1) augmented system matrix [A b]
|
||||
* @return in-place QR factorization [R d]. Below-diagonal is undefined !!!!!
|
||||
*/
|
||||
virtual SharedDiagonal QR(Matrix& Ab) const;
|
||||
virtual SharedDiagonal QR(Matrix& Ab, boost::optional<std::vector<long>&> firstZeroRows = boost::none) const;
|
||||
|
||||
/**
|
||||
* Version for column-wise matrices
|
||||
*/
|
||||
virtual SharedDiagonal QRColumnWise(boost::numeric::ublas::matrix<double, boost::numeric::ublas::column_major>& Ab,
|
||||
std::vector<long>& firstZeroRows) const;
|
||||
|
||||
/**
|
||||
* Return R itself, but note that Whiten(H) is cheaper than R*H
|
||||
|
|
@ -219,6 +226,7 @@ namespace gtsam {
|
|||
virtual Vector unwhiten(const Vector& v) const;
|
||||
virtual Matrix Whiten(const Matrix& H) const;
|
||||
virtual void WhitenInPlace(Matrix& H) const;
|
||||
virtual void WhitenInPlace(MatrixColMajor& H) const;
|
||||
|
||||
/**
|
||||
* Return standard deviations (sqrt of diagonal)
|
||||
|
|
@ -300,11 +308,18 @@ namespace gtsam {
|
|||
|
||||
virtual Matrix Whiten(const Matrix& H) const;
|
||||
virtual void WhitenInPlace(Matrix& H) const;
|
||||
virtual void WhitenInPlace(MatrixColMajor& H) const;
|
||||
|
||||
/**
|
||||
* Apply QR factorization to the system [A b], taking into account constraints
|
||||
*/
|
||||
virtual SharedDiagonal QR(Matrix& Ab) const;
|
||||
virtual SharedDiagonal QR(Matrix& Ab, boost::optional<std::vector<long>&> firstZeroRows = boost::none) const;
|
||||
|
||||
/**
|
||||
* Version for column-wise matrices
|
||||
*/
|
||||
virtual SharedDiagonal QRColumnWise(boost::numeric::ublas::matrix<double, boost::numeric::ublas::column_major>& Ab,
|
||||
std::vector<long>& firstZeroRows) const;
|
||||
|
||||
/**
|
||||
* Check constrained is always true
|
||||
|
|
@ -355,6 +370,7 @@ namespace gtsam {
|
|||
virtual Vector unwhiten(const Vector& v) const;
|
||||
virtual Matrix Whiten(const Matrix& H) const;
|
||||
virtual void WhitenInPlace(Matrix& H) const;
|
||||
virtual void WhitenInPlace(MatrixColMajor& H) const;
|
||||
|
||||
/**
|
||||
* Return standard deviation
|
||||
|
|
|
|||
|
|
@ -1,41 +1,206 @@
|
|||
/**
|
||||
* @file VectorConfig.h
|
||||
* @brief Factor Graph Configuration
|
||||
* @author Frank Dellaert
|
||||
* @author Richard Roberts
|
||||
*/
|
||||
|
||||
#pragma once
|
||||
|
||||
/*
|
||||
* There are two interchangeable implementations of VectorConfig.
|
||||
*
|
||||
* VectorMap uses a straightforward stl::map of Vectors. It has O(log n)
|
||||
* insert and access, and is fairly fast at both. However, it has high overhead
|
||||
* for arithmetic operations such as +, scale, axpy etc...
|
||||
*
|
||||
* VectorBTree uses a functional BTree as a way to access SubVectors
|
||||
* in an ordinary Vector. Inserting is O(n) and much slower, but accessing,
|
||||
* is O(log n) and might be a bit slower than VectorMap. Arithmetic operations
|
||||
* are blindingly fast, however. The cost is it is not as KISS as VectorMap.
|
||||
*
|
||||
* Access to vectors is now exclusively via operator[]
|
||||
* Vector access in VectorMap is via a Vector reference
|
||||
* Vector access in VectorBtree is via the SubVector type (see Vector.h)
|
||||
*
|
||||
* Feb 16 2010: FD: I made VectorMap the default, because I decided to try
|
||||
* and speed up conjugate gradients by using Sparse FactorGraphs all the way.
|
||||
*/
|
||||
#include <gtsam/base/Testable.h>
|
||||
#include <gtsam/base/Vector.h>
|
||||
#include <gtsam/base/types.h>
|
||||
|
||||
// we use define and not typedefs as typdefs cannot be forward declared
|
||||
#ifdef VECTORBTREE
|
||||
#include <boost/foreach.hpp>
|
||||
#include <boost/numeric/ublas/vector_proxy.hpp>
|
||||
#include <boost/numeric/ublas/io.hpp>
|
||||
#include <boost/shared_ptr.hpp>
|
||||
|
||||
#include <gtsam/linear/VectorBTree.h>
|
||||
#define VectorConfig VectorBTree
|
||||
#include <vector>
|
||||
|
||||
#else
|
||||
namespace gtsam {
|
||||
|
||||
#include <gtsam/linear/VectorMap.h>
|
||||
#define VectorConfig VectorMap
|
||||
class VectorConfig : public Testable<VectorConfig> {
|
||||
protected:
|
||||
Vector values_;
|
||||
std::vector<size_t> varStarts_;
|
||||
|
||||
#endif
|
||||
public:
|
||||
template<class C> class _impl_iterator; // Forward declaration of iterator implementation
|
||||
typedef boost::shared_ptr<VectorConfig> shared_ptr;
|
||||
typedef _impl_iterator<VectorConfig> iterator;
|
||||
typedef _impl_iterator<const VectorConfig> const_iterator;
|
||||
typedef boost::numeric::ublas::vector_range<Vector> value_reference_type;
|
||||
typedef boost::numeric::ublas::vector_range<const Vector> const_value_reference_type;
|
||||
typedef boost::numeric::ublas::vector_range<Vector> mapped_type;
|
||||
typedef boost::numeric::ublas::vector_range<const Vector> const_mapped_type;
|
||||
|
||||
// /**
|
||||
// * Constructor requires an existing GaussianVariableIndex to get variable
|
||||
// * dimensions.
|
||||
// */
|
||||
// VectorConfig(const GaussianVariableIndex& variableIndex);
|
||||
|
||||
/**
|
||||
* Default constructor creates an empty VectorConfig. reserve(...) must be
|
||||
* called to allocate space before any values can be added. This prevents
|
||||
* slow reallocation of space at runtime.
|
||||
*/
|
||||
VectorConfig() : varStarts_(1,0) {}
|
||||
|
||||
/** Construct from a container of variable dimensions (in variable order). */
|
||||
template<class Container>
|
||||
VectorConfig(const Container& dimensions);
|
||||
|
||||
/** Construct from a container of variable dimensions in variable order and
|
||||
* a combined Vector of all of the variables in order.
|
||||
*/
|
||||
VectorConfig(const std::vector<size_t>& dimensions, const Vector& values);
|
||||
|
||||
/** Element access */
|
||||
mapped_type operator[](varid_t variable);
|
||||
const_mapped_type operator[](varid_t variable) const;
|
||||
|
||||
/** Number of elements */
|
||||
varid_t size() const { return varStarts_.size()-1; }
|
||||
|
||||
/** Total dimensionality used (could be smaller than what has been allocated
|
||||
* with reserve(...) ).
|
||||
*/
|
||||
size_t dim() const { return varStarts_.back(); }
|
||||
|
||||
/** Total dimensions capacity allocated */
|
||||
size_t dimCapacity() const { return values_.size(); }
|
||||
|
||||
/** Iterator access */
|
||||
iterator begin() { return _impl_iterator<VectorConfig>(*this, 0); }
|
||||
const_iterator begin() const { return _impl_iterator<const VectorConfig>(*this, 0); }
|
||||
iterator end() { return _impl_iterator<VectorConfig>(*this, varStarts_.size()-1); }
|
||||
const_iterator end() const { return _impl_iterator<const VectorConfig>(*this, varStarts_.size()-1); }
|
||||
|
||||
/** Reserve space for a total number of variables and dimensionality */
|
||||
void reserve(varid_t nVars, size_t totalDims) { values_.resize(std::max(totalDims, values_.size())); varStarts_.reserve(nVars+1); }
|
||||
|
||||
/**
|
||||
* Append a variable using the next variable ID, and return that ID. Space
|
||||
* must have been allocated ahead of time using reserve(...).
|
||||
*/
|
||||
varid_t push_back_preallocated(const Vector& vector) {
|
||||
varid_t var = varStarts_.size()-1;
|
||||
varStarts_.push_back(varStarts_.back()+vector.size());
|
||||
this->operator[](var) = vector; // This will assert that values_ has enough allocated space.
|
||||
return var;
|
||||
}
|
||||
|
||||
/** Set all elements to zero */
|
||||
void makeZero() { boost::numeric::ublas::noalias(values_) = boost::numeric::ublas::zero_vector<double>(values_.size()); }
|
||||
|
||||
/** print required by Testable for unit testing */
|
||||
void print(const std::string& str = "VectorConfig: ") const {
|
||||
std::cout << str << ": " << varStarts_.size()-1 << " elements\n";
|
||||
for(varid_t var=0; var<size(); ++var) {
|
||||
std::cout << " " << var << " " << operator[](var) << "\n";
|
||||
}
|
||||
std::cout.flush();
|
||||
}
|
||||
|
||||
/** equals required by Testable for unit testing */
|
||||
bool equals(const VectorConfig& expected, double tol=1e-9) const {
|
||||
if(size() != expected.size()) return false;
|
||||
// iterate over all elements
|
||||
for(varid_t var=0; var<size(); ++var)
|
||||
if(!equal_with_abs_tol(expected[var],operator[](var),tol))
|
||||
return false;
|
||||
return true;
|
||||
}
|
||||
|
||||
/** + operator simply adds Vectors. This checks for structural equivalence
|
||||
* when NDEBUG is not defined.
|
||||
*/
|
||||
VectorConfig operator+(const VectorConfig& c) const {
|
||||
assert(varStarts_ == c.varStarts_);
|
||||
VectorConfig result;
|
||||
result.varStarts_ = varStarts_;
|
||||
result.values_ = boost::numeric::ublas::project(values_, boost::numeric::ublas::range(0, varStarts_.back())) +
|
||||
boost::numeric::ublas::project(c.values_, boost::numeric::ublas::range(0, c.varStarts_.back()));
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Iterator (handles both iterator and const_iterator depending on whether
|
||||
* the template type is const.
|
||||
*/
|
||||
template<class C>
|
||||
class _impl_iterator {
|
||||
protected:
|
||||
C& config_;
|
||||
varid_t curVariable_;
|
||||
|
||||
_impl_iterator(C& config, varid_t curVariable) : config_(config), curVariable_(curVariable) {}
|
||||
void checkCompat(const _impl_iterator<C>& r) { assert(&config_ == &r.config_); }
|
||||
friend class VectorConfig;
|
||||
|
||||
public:
|
||||
typedef typename const_selector<C, VectorConfig, VectorConfig::mapped_type, VectorConfig::const_mapped_type>::type value_type;
|
||||
_impl_iterator<C>& operator++() { ++curVariable_; return *this; }
|
||||
_impl_iterator<C>& operator--() { --curVariable_; return *this; }
|
||||
_impl_iterator<C>& operator++(int) { throw std::runtime_error("Use prefix ++ operator"); }
|
||||
_impl_iterator<C>& operator--(int) { throw std::runtime_error("Use prefix -- operator"); }
|
||||
_impl_iterator<C>& operator+=(ptrdiff_t step) { curVariable_ += step; return *this; }
|
||||
_impl_iterator<C>& operator-=(ptrdiff_t step) { curVariable_ += step; return *this; }
|
||||
ptrdiff_t operator-(const _impl_iterator<C>& r) { checkCompat(r); return curVariable_ - r.curVariable_; }
|
||||
bool operator==(const _impl_iterator<C>& r) { checkCompat(r); return curVariable_ == r.curVariable_; }
|
||||
bool operator!=(const _impl_iterator<C>& r) { checkCompat(r); return curVariable_ != r.curVariable_; }
|
||||
value_type operator*() { return config_[curVariable_]; }
|
||||
};
|
||||
|
||||
protected:
|
||||
void checkVariable(varid_t variable) const { assert(variable < varStarts_.size()-1); }
|
||||
};
|
||||
|
||||
|
||||
//inline VectorConfig::VectorConfig(const GaussianVariableIndex& variableIndex) : varStarts_(variableIndex.size()+1) {
|
||||
// size_t varStart = 0;
|
||||
// varStarts_[0] = 0;
|
||||
// for(varid_t var=0; var<variableIndex.size(); ++var) {
|
||||
// varStart += variableIndex.dim(var);
|
||||
// varStarts_[var+1] = varStart;
|
||||
// }
|
||||
// values_.resize(varStarts_.back(), false);
|
||||
//}
|
||||
|
||||
template<class Container>
|
||||
inline VectorConfig::VectorConfig(const Container& dimensions) : varStarts_(dimensions.size()+1) {
|
||||
varStarts_[0] = 0;
|
||||
size_t varStart = 0;
|
||||
varid_t var = 0;
|
||||
BOOST_FOREACH(size_t dim, dimensions) {
|
||||
varStarts_[++var] = (varStart += dim);
|
||||
}
|
||||
values_.resize(varStarts_.back(), false);
|
||||
}
|
||||
|
||||
inline VectorConfig::VectorConfig(const std::vector<size_t>& dimensions, const Vector& values) :
|
||||
values_(values), varStarts_(dimensions.size()+1) {
|
||||
varStarts_[0] = 0;
|
||||
size_t varStart = 0;
|
||||
varid_t var = 0;
|
||||
BOOST_FOREACH(size_t dim, dimensions) {
|
||||
varStarts_[++var] = (varStart += dim);
|
||||
}
|
||||
assert(varStarts_.back() == values.size());
|
||||
}
|
||||
|
||||
inline VectorConfig::mapped_type VectorConfig::operator[](varid_t variable) {
|
||||
checkVariable(variable);
|
||||
return boost::numeric::ublas::project(values_,
|
||||
boost::numeric::ublas::range(varStarts_[variable], varStarts_[variable+1]));
|
||||
}
|
||||
|
||||
inline VectorConfig::const_mapped_type VectorConfig::operator[](varid_t variable) const {
|
||||
checkVariable(variable);
|
||||
return boost::numeric::ublas::project(values_,
|
||||
boost::numeric::ublas::range(varStarts_[variable], varStarts_[variable+1]));
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
|
|
|||
|
|
@ -18,9 +18,9 @@ using namespace std;
|
|||
namespace gtsam {
|
||||
|
||||
/* ************************************************************************* */
|
||||
void check_size(const Symbol& key, const Vector & vj, const Vector & dj) {
|
||||
void check_size(varid_t key, const Vector & vj, const Vector & dj) {
|
||||
if (dj.size()!=vj.size()) {
|
||||
cout << "For key \"" << (string)key << "\"" << endl;
|
||||
cout << "For key \"" << key << "\"" << endl;
|
||||
cout << "vj.size = " << vj.size() << endl;
|
||||
cout << "dj.size = " << dj.size() << endl;
|
||||
throw(std::invalid_argument("VectorMap::+ mismatched dimensions"));
|
||||
|
|
@ -28,21 +28,21 @@ void check_size(const Symbol& key, const Vector & vj, const Vector & dj) {
|
|||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
std::vector<Symbol> VectorMap::get_names() const {
|
||||
std::vector<Symbol> names;
|
||||
std::vector<varid_t> VectorMap::get_names() const {
|
||||
std::vector<varid_t> names;
|
||||
for(const_iterator it=values.begin(); it!=values.end(); it++)
|
||||
names.push_back(it->first);
|
||||
return names;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
VectorMap& VectorMap::insert(const Symbol& name, const Vector& v) {
|
||||
VectorMap& VectorMap::insert(varid_t name, const Vector& v) {
|
||||
values.insert(std::make_pair(name,v));
|
||||
return *this;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
VectorMap& VectorMap::insertAdd(const Symbol& j, const Vector& a) {
|
||||
VectorMap& VectorMap::insertAdd(varid_t j, const Vector& a) {
|
||||
Vector& vj = values[j];
|
||||
if (vj.size()==0) vj = a; else vj += a;
|
||||
return *this;
|
||||
|
|
@ -69,12 +69,12 @@ size_t VectorMap::dim() const {
|
|||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
const Vector& VectorMap::operator[](const Symbol& name) const {
|
||||
const Vector& VectorMap::operator[](varid_t name) const {
|
||||
return values.at(name);
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
Vector& VectorMap::operator[](const Symbol& name) {
|
||||
Vector& VectorMap::operator[](varid_t name) {
|
||||
return values.at(name);
|
||||
}
|
||||
|
||||
|
|
@ -137,7 +137,7 @@ Vector VectorMap::vector() const {
|
|||
Vector result(dim());
|
||||
|
||||
size_t cur_dim = 0;
|
||||
Symbol j; Vector vj;
|
||||
varid_t j; Vector vj;
|
||||
FOREACH_PAIR(j, vj, values) {
|
||||
subInsert(result, vj, cur_dim);
|
||||
cur_dim += vj.size();
|
||||
|
|
@ -149,7 +149,7 @@ Vector VectorMap::vector() const {
|
|||
VectorMap expmap(const VectorMap& original, const VectorMap& delta)
|
||||
{
|
||||
VectorMap newConfig;
|
||||
Symbol j; Vector vj; // rtodo: copying vector?
|
||||
varid_t j; Vector vj; // rtodo: copying vector?
|
||||
FOREACH_PAIR(j, vj, original.values) {
|
||||
if (delta.contains(j)) {
|
||||
const Vector& dj = delta[j];
|
||||
|
|
@ -167,7 +167,7 @@ VectorMap expmap(const VectorMap& original, const Vector& delta)
|
|||
{
|
||||
VectorMap newConfig;
|
||||
size_t i = 0;
|
||||
Symbol j; Vector vj; // rtodo: copying vector?
|
||||
varid_t j; Vector vj; // rtodo: copying vector?
|
||||
FOREACH_PAIR(j, vj, original.values) {
|
||||
size_t mj = vj.size();
|
||||
Vector dj = sub(delta, i, i+mj);
|
||||
|
|
@ -182,7 +182,7 @@ void VectorMap::print(const string& name) const {
|
|||
odprintf("VectorMap %s\n", name.c_str());
|
||||
odprintf("size: %d\n", values.size());
|
||||
for (const_iterator it = begin(); it != end(); it++) {
|
||||
odprintf("%s:", ((string)it->first).c_str());
|
||||
odprintf("%d:", it->first);
|
||||
gtsam::print(it->second);
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -10,164 +10,163 @@
|
|||
#pragma once
|
||||
|
||||
#include <map>
|
||||
//#include <boost/serialization/map.hpp>
|
||||
#include <boost/serialization/map.hpp>
|
||||
|
||||
#include <gtsam/base/types.h>
|
||||
#include <gtsam/base/Testable.h>
|
||||
#include <gtsam/base/Vector.h>
|
||||
#include <gtsam/inference/Key.h>
|
||||
#include <gtsam/inference/SymbolMap.h>
|
||||
|
||||
namespace gtsam {
|
||||
|
||||
/** Factor Graph Configuration */
|
||||
class VectorMap : public Testable<VectorMap> {
|
||||
/** Factor Graph Configuration */
|
||||
class VectorMap : public Testable<VectorMap> {
|
||||
|
||||
protected:
|
||||
/** Map from string indices to values */
|
||||
SymbolMap<Vector> values;
|
||||
protected:
|
||||
/** Map from string indices to values */
|
||||
std::map<varid_t,Vector> values;
|
||||
|
||||
public:
|
||||
typedef SymbolMap<Vector>::iterator iterator;
|
||||
typedef SymbolMap<Vector>::const_iterator const_iterator;
|
||||
public:
|
||||
typedef std::map<varid_t,Vector>::iterator iterator;
|
||||
typedef std::map<varid_t,Vector>::const_iterator const_iterator;
|
||||
|
||||
VectorMap() {}
|
||||
VectorMap(const VectorMap& cfg_in): values(cfg_in.values) {}
|
||||
VectorMap(const Symbol& j, const Vector& a) { insert(j,a); }
|
||||
VectorMap() {}
|
||||
VectorMap(const VectorMap& cfg_in): values(cfg_in.values) {}
|
||||
VectorMap(varid_t j, const Vector& a) { insert(j,a); }
|
||||
|
||||
virtual ~VectorMap() {}
|
||||
|
||||
virtual ~VectorMap() {}
|
||||
/** return all the nodes in the graph **/
|
||||
std::vector<varid_t> get_names() const;
|
||||
|
||||
/** return all the nodes in the graph **/
|
||||
std::vector<Symbol> get_names() const;
|
||||
/** convert into a single large vector */
|
||||
Vector vector() const;
|
||||
|
||||
/** convert into a single large vector */
|
||||
Vector vector() const;
|
||||
/** Insert a value into the configuration with a given index */
|
||||
VectorMap& insert(varid_t name, const Vector& v);
|
||||
|
||||
/** Insert a value into the configuration with a given index */
|
||||
VectorMap& insert(const Symbol& name, const Vector& v);
|
||||
/** Insert or add a value with given index */
|
||||
VectorMap& insertAdd(varid_t j, const Vector& v);
|
||||
|
||||
/** Insert or add a value with given index */
|
||||
VectorMap& insertAdd(const Symbol& j, const Vector& v);
|
||||
/** Insert a config into another config */
|
||||
void insert(const VectorMap& config);
|
||||
|
||||
/** Insert a config into another config */
|
||||
void insert(const VectorMap& config);
|
||||
/** Insert a config into another config, add if key already exists */
|
||||
void insertAdd(const VectorMap& config);
|
||||
|
||||
/** Insert a config into another config, add if key already exists */
|
||||
void insertAdd(const VectorMap& config);
|
||||
const_iterator begin() const {return values.begin();}
|
||||
const_iterator end() const {return values.end();}
|
||||
iterator begin() {return values.begin();}
|
||||
iterator end() {return values.end();}
|
||||
|
||||
const_iterator begin() const {return values.begin();}
|
||||
const_iterator end() const {return values.end();}
|
||||
iterator begin() {return values.begin();}
|
||||
iterator end() {return values.end();}
|
||||
/** Vector access in VectorMap is via a Vector reference */
|
||||
Vector& operator[](varid_t j);
|
||||
const Vector& operator[](varid_t j) const;
|
||||
|
||||
/** Vector access in VectorMap is via a Vector reference */
|
||||
Vector& operator[](const Symbol& j);
|
||||
const Vector& operator[](const Symbol& j) const;
|
||||
/** [set] and [get] provided for access via MATLAB */
|
||||
inline Vector& get(varid_t j) { return (*this)[j];}
|
||||
void set(varid_t j, const Vector& v) { (*this)[j] = v; }
|
||||
inline const Vector& get(varid_t j) const { return (*this)[j];}
|
||||
|
||||
/** [set] and [get] provided for access via MATLAB */
|
||||
inline Vector& get(const Symbol& j) { return (*this)[j];}
|
||||
void set(const Symbol& j, const Vector& v) { (*this)[j] = v; }
|
||||
inline const Vector& get(const Symbol& j) const { return (*this)[j];}
|
||||
bool contains(varid_t name) const {
|
||||
const_iterator it = values.find(name);
|
||||
return (it!=values.end());
|
||||
}
|
||||
|
||||
bool contains(const Symbol& name) const {
|
||||
const_iterator it = values.find(name);
|
||||
return (it!=values.end());
|
||||
}
|
||||
/** Nr of vectors */
|
||||
size_t size() const { return values.size();}
|
||||
|
||||
/** Nr of vectors */
|
||||
size_t size() const { return values.size();}
|
||||
/** Total dimensionality */
|
||||
size_t dim() const;
|
||||
|
||||
/** Total dimensionality */
|
||||
size_t dim() const;
|
||||
/** max of the vectors */
|
||||
inline double max() const {
|
||||
double m = -std::numeric_limits<double>::infinity();
|
||||
for(const_iterator it=begin(); it!=end(); it++)
|
||||
m = std::max(m, gtsam::max(it->second));
|
||||
return m;
|
||||
}
|
||||
|
||||
/** max of the vectors */
|
||||
inline double max() const {
|
||||
double m = -std::numeric_limits<double>::infinity();
|
||||
for(const_iterator it=begin(); it!=end(); it++)
|
||||
m = std::max(m, gtsam::max(it->second));
|
||||
return m;
|
||||
}
|
||||
/** Scale */
|
||||
VectorMap scale(double s) const;
|
||||
VectorMap operator*(double s) const;
|
||||
|
||||
/** Scale */
|
||||
VectorMap scale(double s) const;
|
||||
VectorMap operator*(double s) const;
|
||||
/** Negation */
|
||||
VectorMap operator-() const;
|
||||
|
||||
/** Negation */
|
||||
VectorMap operator-() const;
|
||||
/** Add in place */
|
||||
void operator+=(const VectorMap &b);
|
||||
|
||||
/** Add in place */
|
||||
void operator+=(const VectorMap &b);
|
||||
/** Add */
|
||||
VectorMap operator+(const VectorMap &b) const;
|
||||
|
||||
/** Add */
|
||||
VectorMap operator+(const VectorMap &b) const;
|
||||
/** Subtract */
|
||||
VectorMap operator-(const VectorMap &b) const;
|
||||
|
||||
/** Subtract */
|
||||
VectorMap operator-(const VectorMap &b) const;
|
||||
/** print */
|
||||
void print(const std::string& name = "") const;
|
||||
|
||||
/** print */
|
||||
void print(const std::string& name = "") const;
|
||||
/** equals, for unit testing */
|
||||
bool equals(const VectorMap& expected, double tol=1e-9) const;
|
||||
|
||||
/** equals, for unit testing */
|
||||
bool equals(const VectorMap& expected, double tol=1e-9) const;
|
||||
void clear() {values.clear();}
|
||||
|
||||
void clear() {values.clear();}
|
||||
/** Dot product */
|
||||
double dot(const VectorMap& b) const;
|
||||
|
||||
/** Dot product */
|
||||
double dot(const VectorMap& b) const;
|
||||
/** Set all vectors to zero */
|
||||
VectorMap& zero();
|
||||
|
||||
/** Set all vectors to zero */
|
||||
VectorMap& zero();
|
||||
/** Create a clone of x with exactly same structure, except with zero values */
|
||||
static VectorMap zero(const VectorMap& x);
|
||||
|
||||
/** Create a clone of x with exactly same structure, except with zero values */
|
||||
static VectorMap zero(const VectorMap& x);
|
||||
/**
|
||||
* Add a delta config, needed for use in NonlinearOptimizer
|
||||
* For VectorMap, this is just addition.
|
||||
*/
|
||||
friend VectorMap expmap(const VectorMap& original, const VectorMap& delta);
|
||||
|
||||
/**
|
||||
* Add a delta config, needed for use in NonlinearOptimizer
|
||||
* For VectorMap, this is just addition.
|
||||
*/
|
||||
friend VectorMap expmap(const VectorMap& original, const VectorMap& delta);
|
||||
/**
|
||||
* Add a delta vector (not a config)
|
||||
* Will use the ordering that map uses to loop over vectors
|
||||
*/
|
||||
friend VectorMap expmap(const VectorMap& original, const Vector& delta);
|
||||
|
||||
/**
|
||||
* Add a delta vector (not a config)
|
||||
* Will use the ordering that map uses to loop over vectors
|
||||
*/
|
||||
friend VectorMap expmap(const VectorMap& original, const Vector& delta);
|
||||
private:
|
||||
/** Serialization function */
|
||||
friend class boost::serialization::access;
|
||||
template<class Archive>
|
||||
void serialize(Archive & ar, const unsigned int version)
|
||||
{
|
||||
ar & BOOST_SERIALIZATION_NVP(values);
|
||||
}
|
||||
}; // VectorMap
|
||||
|
||||
private:
|
||||
/** Serialization function */
|
||||
friend class boost::serialization::access;
|
||||
template<class Archive>
|
||||
void serialize(Archive & ar, const unsigned int version)
|
||||
{
|
||||
ar & BOOST_SERIALIZATION_NVP(values);
|
||||
}
|
||||
}; // VectorMap
|
||||
/** scalar product */
|
||||
inline VectorMap operator*(double s, const VectorMap& x) {return x*s;}
|
||||
|
||||
/** scalar product */
|
||||
inline VectorMap operator*(double s, const VectorMap& x) {return x*s;}
|
||||
/** Dot product */
|
||||
double dot(const VectorMap&, const VectorMap&);
|
||||
|
||||
/** Dot product */
|
||||
double dot(const VectorMap&, const VectorMap&);
|
||||
/**
|
||||
* BLAS Level 1 scal: x <- alpha*x
|
||||
*/
|
||||
void scal(double alpha, VectorMap& x);
|
||||
|
||||
/**
|
||||
* BLAS Level 1 scal: x <- alpha*x
|
||||
*/
|
||||
void scal(double alpha, VectorMap& x);
|
||||
/**
|
||||
* BLAS Level 1 axpy: y <- alpha*x + y
|
||||
* UNSAFE !!!! Only works if x and y laid out in exactly same shape
|
||||
* Used in internal loop in iterative for fast conjugate gradients
|
||||
* Consider using other functions if this is not in hotspot
|
||||
*/
|
||||
void axpy(double alpha, const VectorMap& x, VectorMap& y);
|
||||
|
||||
/**
|
||||
* BLAS Level 1 axpy: y <- alpha*x + y
|
||||
* UNSAFE !!!! Only works if x and y laid out in exactly same shape
|
||||
* Used in internal loop in iterative for fast conjugate gradients
|
||||
* Consider using other functions if this is not in hotspot
|
||||
*/
|
||||
void axpy(double alpha, const VectorMap& x, VectorMap& y);
|
||||
/** dim function (for iterative::CGD) */
|
||||
inline double dim(const VectorMap& x) { return x.dim();}
|
||||
|
||||
/** dim function (for iterative::CGD) */
|
||||
inline double dim(const VectorMap& x) { return x.dim();}
|
||||
/** max of the vectors */
|
||||
inline double max(const VectorMap& x) { return x.max();}
|
||||
|
||||
/** max of the vectors */
|
||||
inline double max(const VectorMap& x) { return x.max();}
|
||||
|
||||
/** print with optional string */
|
||||
void print(const VectorMap& v, const std::string& s = "");
|
||||
/** print with optional string */
|
||||
void print(const VectorMap& v, const std::string& s = "");
|
||||
|
||||
} // gtsam
|
||||
|
|
|
|||
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue