diff --git a/.github/scripts/python.sh b/.github/scripts/python.sh
index 0855dbc21..718eee5ea 100644
--- a/.github/scripts/python.sh
+++ b/.github/scripts/python.sh
@@ -43,46 +43,68 @@ if [ -z ${PYTHON_VERSION+x} ]; then
   exit 127
 fi
 
-PYTHON="python${PYTHON_VERSION}"
+export PYTHON="python${PYTHON_VERSION}"
 
-if [[ $(uname) == "Darwin" ]]; then
+function install_dependencies()
+{
+  if [[ $(uname) == "Darwin" ]]; then
     brew install wget
-else
+  else
     # Install a system package required by our library
     sudo apt-get install -y wget libicu-dev python3-pip python3-setuptools
-fi
+  fi
 
-PATH=$PATH:$($PYTHON -c "import site; print(site.USER_BASE)")/bin
+  export PATH=$PATH:$($PYTHON -c "import site; print(site.USER_BASE)")/bin
 
-[ "${GTSAM_WITH_TBB:-OFF}" = "ON" ] && install_tbb
+  [ "${GTSAM_WITH_TBB:-OFF}" = "ON" ] && install_tbb
+
+  $PYTHON -m pip install -r $GITHUB_WORKSPACE/python/requirements.txt
+}
+
+function build()
+{
+  mkdir $GITHUB_WORKSPACE/build
+  cd $GITHUB_WORKSPACE/build
+
+  BUILD_PYBIND="ON"
+
+  cmake $GITHUB_WORKSPACE -DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE} \
+    -DGTSAM_BUILD_TESTS=OFF \
+    -DGTSAM_BUILD_UNSTABLE=${GTSAM_BUILD_UNSTABLE:-ON} \
+    -DGTSAM_USE_QUATERNIONS=OFF \
+    -DGTSAM_WITH_TBB=${GTSAM_WITH_TBB:-OFF} \
+    -DGTSAM_BUILD_EXAMPLES_ALWAYS=OFF \
+    -DGTSAM_BUILD_WITH_MARCH_NATIVE=OFF \
+    -DGTSAM_BUILD_PYTHON=${BUILD_PYBIND} \
+    -DGTSAM_UNSTABLE_BUILD_PYTHON=${GTSAM_BUILD_UNSTABLE:-ON} \
+    -DGTSAM_PYTHON_VERSION=$PYTHON_VERSION \
+    -DPYTHON_EXECUTABLE:FILEPATH=$(which $PYTHON) \
+    -DGTSAM_ALLOW_DEPRECATED_SINCE_V42=OFF \
+    -DCMAKE_INSTALL_PREFIX=$GITHUB_WORKSPACE/gtsam_install
 
-BUILD_PYBIND="ON"
+  # Set to 2 cores so that Actions does not error out during resource provisioning.
+  make -j2 install
 
-sudo $PYTHON -m pip install -r $GITHUB_WORKSPACE/python/requirements.txt
+  cd $GITHUB_WORKSPACE/build/python
+  $PYTHON -m pip install --user .
+}
 
-mkdir $GITHUB_WORKSPACE/build
-cd $GITHUB_WORKSPACE/build
+function test()
+{
+  cd $GITHUB_WORKSPACE/python/gtsam/tests
+  $PYTHON -m unittest discover -v
+}
 
-cmake $GITHUB_WORKSPACE -DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE} \
-    -DGTSAM_BUILD_TESTS=OFF \
-    -DGTSAM_BUILD_UNSTABLE=${GTSAM_BUILD_UNSTABLE:-ON} \
-    -DGTSAM_USE_QUATERNIONS=OFF \
-    -DGTSAM_WITH_TBB=${GTSAM_WITH_TBB:-OFF} \
-    -DGTSAM_BUILD_EXAMPLES_ALWAYS=OFF \
-    -DGTSAM_BUILD_WITH_MARCH_NATIVE=OFF \
-    -DGTSAM_BUILD_PYTHON=${BUILD_PYBIND} \
-    -DGTSAM_UNSTABLE_BUILD_PYTHON=${GTSAM_BUILD_UNSTABLE:-ON} \
-    -DGTSAM_PYTHON_VERSION=$PYTHON_VERSION \
-    -DPYTHON_EXECUTABLE:FILEPATH=$(which $PYTHON) \
-    -DGTSAM_ALLOW_DEPRECATED_SINCE_V42=OFF \
-    -DCMAKE_INSTALL_PREFIX=$GITHUB_WORKSPACE/gtsam_install
-
-
-# Set to 2 cores so that Actions does not error out during resource provisioning.
-make -j2 install
-
-cd $GITHUB_WORKSPACE/build/python
-$PYTHON -m pip install --user .
-cd $GITHUB_WORKSPACE/python/gtsam/tests
-$PYTHON -m unittest discover -v
+# select between build or test
+case $1 in
+  -d)
+    install_dependencies
+    ;;
+  -b)
+    build
+    ;;
+  -t)
+    test
+    ;;
+esac
diff --git a/.github/workflows/build-linux.yml b/.github/workflows/build-linux.yml
index 7b13b6646..fa2425e4d 100644
--- a/.github/workflows/build-linux.yml
+++ b/.github/workflows/build-linux.yml
@@ -20,26 +20,26 @@ jobs:
         # Github Actions requires a single row to be added to the build matrix.
         # See https://help.github.com/en/articles/workflow-syntax-for-github-actions.
        name: [
-          ubuntu-18.04-gcc-5,
-          ubuntu-18.04-gcc-9,
-          ubuntu-18.04-clang-9,
+          ubuntu-20.04-gcc-7,
+          ubuntu-20.04-gcc-9,
+          ubuntu-20.04-clang-9,
         ]
 
         build_type: [Debug, Release]
         build_unstable: [ON]
         include:
-          - name: ubuntu-18.04-gcc-5
-            os: ubuntu-18.04
+          - name: ubuntu-20.04-gcc-7
+            os: ubuntu-20.04
             compiler: gcc
-            version: "5"
+            version: "7"
 
-          - name: ubuntu-18.04-gcc-9
-            os: ubuntu-18.04
+          - name: ubuntu-20.04-gcc-9
+            os: ubuntu-20.04
             compiler: gcc
             version: "9"
 
-          - name: ubuntu-18.04-clang-9
-            os: ubuntu-18.04
+          - name: ubuntu-20.04-clang-9
+            os: ubuntu-20.04
             compiler: clang
             version: "9"
 
@@ -60,9 +60,9 @@ jobs:
             gpg -a --export $LLVM_KEY | sudo apt-key add -
             sudo add-apt-repository "deb http://apt.llvm.org/bionic/ llvm-toolchain-bionic-9 main"
           fi
 
-          sudo apt-get -y update
-          sudo apt-get -y install cmake build-essential pkg-config libpython-dev python-numpy libicu-dev
+          sudo apt-get -y update
+          sudo apt-get -y install cmake build-essential pkg-config libpython3-dev python3-numpy libicu-dev
 
           if [ "${{ matrix.compiler }}" = "gcc" ]; then
             sudo apt-get install -y g++-${{ matrix.version }} g++-${{ matrix.version }}-multilib
diff --git a/.github/workflows/build-python.yml b/.github/workflows/build-python.yml
index f80b9362c..4eb861ecc 100644
--- a/.github/workflows/build-python.yml
+++ b/.github/workflows/build-python.yml
@@ -19,34 +19,34 @@ jobs:
         # Github Actions requires a single row to be added to the build matrix.
         # See https://help.github.com/en/articles/workflow-syntax-for-github-actions.
         name: [
-          ubuntu-18.04-gcc-5,
-          ubuntu-18.04-gcc-9,
-          ubuntu-18.04-clang-9,
+          ubuntu-20.04-gcc-7,
+          ubuntu-20.04-gcc-9,
+          ubuntu-20.04-clang-9,
           macOS-11-xcode-13.4.1,
-          ubuntu-18.04-gcc-5-tbb,
+          ubuntu-20.04-gcc-7-tbb,
         ]
 
         build_type: [Debug, Release]
         python_version: [3]
         include:
-          - name: ubuntu-18.04-gcc-5
-            os: ubuntu-18.04
+          - name: ubuntu-20.04-gcc-7
+            os: ubuntu-20.04
             compiler: gcc
-            version: "5"
+            version: "7"
 
-          - name: ubuntu-18.04-gcc-9
-            os: ubuntu-18.04
+          - name: ubuntu-20.04-gcc-9
+            os: ubuntu-20.04
             compiler: gcc
             version: "9"
 
-          - name: ubuntu-18.04-clang-9
-            os: ubuntu-18.04
+          - name: ubuntu-20.04-clang-9
+            os: ubuntu-20.04
             compiler: clang
             version: "9"
 
           # NOTE temporarily added this as it is a required check.
-          - name: ubuntu-18.04-clang-9
-            os: ubuntu-18.04
+          - name: ubuntu-20.04-clang-9
+            os: ubuntu-20.04
             compiler: clang
             version: "9"
             build_type: Debug
 
@@ -57,10 +57,10 @@ jobs:
             compiler: xcode
             version: "13.4.1"
 
-          - name: ubuntu-18.04-gcc-5-tbb
-            os: ubuntu-18.04
+          - name: ubuntu-20.04-gcc-7-tbb
+            os: ubuntu-20.04
             compiler: gcc
-            version: "5"
+            version: "7"
             flag: tbb
 
     steps:
@@ -79,9 +79,9 @@ jobs:
             gpg -a --export $LLVM_KEY | sudo apt-key add -
             sudo add-apt-repository "deb http://apt.llvm.org/bionic/ llvm-toolchain-bionic-9 main"
           fi
+          sudo apt-get -y update
 
-
-          sudo apt-get -y install cmake build-essential pkg-config libpython-dev python-numpy libboost-all-dev
+          sudo apt-get -y install cmake build-essential pkg-config libpython3-dev python3-numpy libboost-all-dev
 
           if [ "${{ matrix.compiler }}" = "gcc" ]; then
             sudo apt-get install -y g++-${{ matrix.version }} g++-${{ matrix.version }}-multilib
@@ -117,11 +117,12 @@ jobs:
         uses: pierotofy/set-swap-space@master
         with:
           swap-size-gb: 6
-      - name: Build (Linux)
-        if: runner.os == 'Linux'
+      - name: Install Dependencies
         run: |
-          bash .github/scripts/python.sh
-      - name: Build (macOS)
-        if: runner.os == 'macOS'
+          bash .github/scripts/python.sh -d
+      - name: Build
         run: |
-          bash .github/scripts/python.sh
+          bash .github/scripts/python.sh -b
+      - name: Test
+        run: |
+          bash .github/scripts/python.sh -t
diff --git a/.github/workflows/build-special.yml b/.github/workflows/build-special.yml
index d357b9a34..ef7d7723d 100644
--- a/.github/workflows/build-special.yml
+++ b/.github/workflows/build-special.yml
@@ -32,31 +32,31 @@ jobs:
         include:
           - name: ubuntu-gcc-deprecated
-            os: ubuntu-18.04
+            os: ubuntu-20.04
             compiler: gcc
             version: "9"
             flag: deprecated
 
           - name: ubuntu-gcc-quaternions
-            os: ubuntu-18.04
+            os: ubuntu-20.04
             compiler: gcc
             version: "9"
             flag: quaternions
 
           - name: ubuntu-gcc-tbb
-            os: ubuntu-18.04
+            os: ubuntu-20.04
             compiler: gcc
             version: "9"
             flag: tbb
 
           - name: ubuntu-cayleymap
-            os: ubuntu-18.04
+            os: ubuntu-20.04
             compiler: gcc
             version: "9"
             flag: cayley
 
           - name: ubuntu-system-libs
-            os: ubuntu-18.04
+            os: ubuntu-20.04
             compiler: gcc
             version: "9"
             flag: system-libs
@@ -74,9 +74,9 @@ jobs:
            gpg -a --export 15CF4D18AF4F7421 | sudo apt-key add -
            sudo add-apt-repository "deb http://apt.llvm.org/bionic/ llvm-toolchain-bionic-9 main"
          fi
 
-         sudo apt-get -y update
-         sudo apt-get -y install cmake build-essential pkg-config libpython-dev python-numpy libicu-dev
+         sudo apt-get -y update
+         sudo apt-get -y install cmake build-essential pkg-config libpython3-dev python3-numpy libicu-dev
 
          if [ "${{ matrix.compiler }}" = "gcc" ]; then
            sudo apt-get install -y g++-${{ matrix.version }} g++-${{ matrix.version }}-multilib
diff --git a/README.md b/README.md
index 52ac0a5d8..b32ce70e0 100644
--- a/README.md
+++ b/README.md
@@ -64,6 +64,29 @@ GTSAM 4.1 added a new pybind wrapper, and **removed** the deprecated functionali
 
 We provide support for [MATLAB](matlab/README.md) and [Python](python/README.md) wrappers for GTSAM. Please refer to the linked documents for more details.
 
+## Citation
+
+If you are using GTSAM for academic work, please use the following citation:
+
+```
+@software{gtsam,
+  author = {Frank Dellaert and Richard Roberts and Varun Agrawal and Alex Cunningham and Chris Beall and Duy-Nguyen Ta and Fan Jiang and lucacarlone and nikai and Jose Luis Blanco-Claraco and Stephen Williams and ydjian and John Lambert and Andy Melim and Zhaoyang Lv and Akshay Krishnan and Jing Dong and Gerry Chen and Krunal Chande and balderdash-devil and DiffDecisionTrees and Sungtae An and mpaluri and Ellon Paiva Mendes and Mike Bosse and Akash Patel and Ayush Baid and Paul Furgale and matthewbroadwaynavenio and roderick-koehle},
+  title = {borglab/gtsam},
+  month = may,
+  year = 2022,
+  publisher = {Zenodo},
+  version = {4.2a7},
+  doi = {10.5281/zenodo.5794541},
+  url = {https://doi.org/10.5281/zenodo.5794541}
+}
+```
+
+You can also get the latest citation available from Zenodo below:
+
+[![DOI](https://zenodo.org/badge/86362856.svg)](https://doi.org/10.5281/zenodo.5794541)
+
+Specific formats are available in the bottom-right corner of the Zenodo page.
+
 ## The Preintegrated IMU Factor
 
 GTSAM includes a state of the art IMU handling scheme based on
@@ -73,7 +96,7 @@ GTSAM includes a state of the art IMU handling scheme based on
 
 Our implementation improves on this using integration on the manifold, as detailed in
 
 - Luca Carlone, Zsolt Kira, Chris Beall, Vadim Indelman, and Frank Dellaert, _"Eliminating conditionally independent sets in factor graphs: a unifying perspective based on smart factors"_, Int. Conf. on Robotics and Automation (ICRA), 2014. [[link]](https://ieeexplore.ieee.org/abstract/document/6907483)
-- Christian Forster, Luca Carlone, Frank Dellaert, and Davide Scaramuzza, "IMU Preintegration on Manifold for Efficient Visual-Inertial Maximum-a-Posteriori Estimation", Robotics: Science and Systems (RSS), 2015. [[link]](http://www.roboticsproceedings.org/rss11/p06.pdf)
+- Christian Forster, Luca Carlone, Frank Dellaert, and Davide Scaramuzza, _"IMU Preintegration on Manifold for Efficient Visual-Inertial Maximum-a-Posteriori Estimation"_, Robotics: Science and Systems (RSS), 2015. [[link]](http://www.roboticsproceedings.org/rss11/p06.pdf)
 
 If you are using the factor in academic work, please cite the publications above.
diff --git a/cmake/FindBoost.cmake b/cmake/FindBoost.cmake
deleted file mode 100644
index 5f7cb5ddc..000000000
--- a/cmake/FindBoost.cmake
+++ /dev/null
@@ -1,2347 +0,0 @@
-# Distributed under the OSI-approved BSD 3-Clause License. See accompanying
-# file Copyright.txt or https://cmake.org/licensing for details.
-
-#[=======================================================================[.rst:
-FindBoost
----------
-
-Find Boost include dirs and libraries
-
-Use this module by invoking find_package with the form::
-
-  find_package(Boost
-    [version] [EXACT]      # Minimum or EXACT version e.g. 1.67.0
-    [REQUIRED]             # Fail with error if Boost is not found
-    [COMPONENTS <libs>...] # Boost libraries by their canonical name
-                           # e.g. "date_time" for "libboost_date_time"
-    [OPTIONAL_COMPONENTS <libs>...]
-                           # Optional Boost libraries by their canonical name)
-    )                      # e.g. "date_time" for "libboost_date_time"
-
-This module finds headers and requested component libraries OR a CMake
-package configuration file provided by a "Boost CMake" build. For the
-latter case skip to the "Boost CMake" section below.
For the former -case results are reported in variables:: - - Boost_FOUND - True if headers and requested libraries were found - Boost_INCLUDE_DIRS - Boost include directories - Boost_LIBRARY_DIRS - Link directories for Boost libraries - Boost_LIBRARIES - Boost component libraries to be linked - Boost__FOUND - True if component was found ( is upper-case) - Boost__LIBRARY - Libraries to link for component (may include - target_link_libraries debug/optimized keywords) - Boost_VERSION_MACRO - BOOST_VERSION value from boost/version.hpp - Boost_VERSION_STRING - Boost version number in x.y.z format - Boost_VERSION - if CMP0093 NEW => same as Boost_VERSION_STRING - if CMP0093 OLD or unset => same as Boost_VERSION_MACRO - Boost_LIB_VERSION - Version string appended to library filenames - Boost_VERSION_MAJOR - Boost major version number (X in X.y.z) - alias: Boost_MAJOR_VERSION - Boost_VERSION_MINOR - Boost minor version number (Y in x.Y.z) - alias: Boost_MINOR_VERSION - Boost_VERSION_PATCH - Boost subminor version number (Z in x.y.Z) - alias: Boost_SUBMINOR_VERSION - Boost_VERSION_COUNT - Amount of version components (3) - Boost_LIB_DIAGNOSTIC_DEFINITIONS (Windows) - - Pass to add_definitions() to have diagnostic - information about Boost's automatic linking - displayed during compilation - -Note that Boost Python components require a Python version suffix -(Boost 1.67 and later), e.g. ``python36`` or ``python27`` for the -versions built against Python 3.6 and 2.7, respectively. This also -applies to additional components using Python including -``mpi_python`` and ``numpy``. Earlier Boost releases may use -distribution-specific suffixes such as ``2``, ``3`` or ``2.7``. -These may also be used as suffixes, but note that they are not -portable. - -This module reads hints about search locations from variables:: - - BOOST_ROOT - Preferred installation prefix - (or BOOSTROOT) - BOOST_INCLUDEDIR - Preferred include directory e.g. /include - BOOST_LIBRARYDIR - Preferred library directory e.g. /lib - Boost_NO_SYSTEM_PATHS - Set to ON to disable searching in locations not - specified by these hint variables. Default is OFF. 
- Boost_ADDITIONAL_VERSIONS - - List of Boost versions not known to this module - (Boost install locations may contain the version) - -and saves search results persistently in CMake cache entries:: - - Boost_INCLUDE_DIR - Directory containing Boost headers - Boost_LIBRARY_DIR_RELEASE - Directory containing release Boost libraries - Boost_LIBRARY_DIR_DEBUG - Directory containing debug Boost libraries - Boost__LIBRARY_DEBUG - Component library debug variant - Boost__LIBRARY_RELEASE - Component library release variant - -The following :prop_tgt:`IMPORTED` targets are also defined:: - - Boost::headers - Target for header-only dependencies - (Boost include directory) - alias: Boost::boost - Boost:: - Target for specific component dependency - (shared or static library); is lower- - case - Boost::diagnostic_definitions - interface target to enable diagnostic - information about Boost's automatic linking - during compilation (adds BOOST_LIB_DIAGNOSTIC) - Boost::disable_autolinking - interface target to disable automatic - linking with MSVC (adds BOOST_ALL_NO_LIB) - Boost::dynamic_linking - interface target to enable dynamic linking - linking with MSVC (adds BOOST_ALL_DYN_LINK) - -Implicit dependencies such as ``Boost::filesystem`` requiring -``Boost::system`` will be automatically detected and satisfied, even -if system is not specified when using :command:`find_package` and if -``Boost::system`` is not added to :command:`target_link_libraries`. If using -``Boost::thread``, then ``Threads::Threads`` will also be added automatically. - -It is important to note that the imported targets behave differently -than variables created by this module: multiple calls to -:command:`find_package(Boost)` in the same directory or sub-directories with -different options (e.g. static or shared) will not override the -values of the targets created by the first call. - -Users may set these hints or results as ``CACHE`` entries. Projects -should not read these entries directly but instead use the above -result variables. Note that some hint names start in upper-case -"BOOST". One may specify these as environment variables if they are -not specified as CMake variables or cache entries. - -This module first searches for the ``Boost`` header files using the above -hint variables (excluding ``BOOST_LIBRARYDIR``) and saves the result in -``Boost_INCLUDE_DIR``. Then it searches for requested component libraries -using the above hints (excluding ``BOOST_INCLUDEDIR`` and -``Boost_ADDITIONAL_VERSIONS``), "lib" directories near ``Boost_INCLUDE_DIR``, -and the library name configuration settings below. It saves the -library directories in ``Boost_LIBRARY_DIR_DEBUG`` and -``Boost_LIBRARY_DIR_RELEASE`` and individual library -locations in ``Boost__LIBRARY_DEBUG`` and ``Boost__LIBRARY_RELEASE``. -When one changes settings used by previous searches in the same build -tree (excluding environment variables) this module discards previous -search results affected by the changes and searches again. - -Boost libraries come in many variants encoded in their file name. -Users or projects may tell this module which variant to find by -setting variables:: - - Boost_USE_DEBUG_LIBS - Set to ON or OFF to specify whether to search - and use the debug libraries. Default is ON. - Boost_USE_RELEASE_LIBS - Set to ON or OFF to specify whether to search - and use the release libraries. Default is ON. - Boost_USE_MULTITHREADED - Set to OFF to use the non-multithreaded - libraries ('mt' tag). Default is ON. 
- Boost_USE_STATIC_LIBS - Set to ON to force the use of the static - libraries. Default is OFF. - Boost_USE_STATIC_RUNTIME - Set to ON or OFF to specify whether to use - libraries linked statically to the C++ runtime - ('s' tag). Default is platform dependent. - Boost_USE_DEBUG_RUNTIME - Set to ON or OFF to specify whether to use - libraries linked to the MS debug C++ runtime - ('g' tag). Default is ON. - Boost_USE_DEBUG_PYTHON - Set to ON to use libraries compiled with a - debug Python build ('y' tag). Default is OFF. - Boost_USE_STLPORT - Set to ON to use libraries compiled with - STLPort ('p' tag). Default is OFF. - Boost_USE_STLPORT_DEPRECATED_NATIVE_IOSTREAMS - - Set to ON to use libraries compiled with - STLPort deprecated "native iostreams" - ('n' tag). Default is OFF. - Boost_COMPILER - Set to the compiler-specific library suffix - (e.g. "-gcc43"). Default is auto-computed - for the C++ compiler in use. A list may be - used if multiple compatible suffixes should - be tested for, in decreasing order of - preference. - Boost_ARCHITECTURE - Set to the architecture-specific library suffix - (e.g. "-x64"). Default is auto-computed for the - C++ compiler in use. - Boost_THREADAPI - Suffix for "thread" component library name, - such as "pthread" or "win32". Names with - and without this suffix will both be tried. - Boost_NAMESPACE - Alternate namespace used to build boost with - e.g. if set to "myboost", will search for - myboost_thread instead of boost_thread. - -Other variables one may set to control this module are:: - - Boost_DEBUG - Set to ON to enable debug output from FindBoost. - Please enable this before filing any bug report. - Boost_REALPATH - Set to ON to resolve symlinks for discovered - libraries to assist with packaging. For example, - the "system" component library may be resolved to - "/usr/lib/libboost_system.so.1.67.0" instead of - "/usr/lib/libboost_system.so". This does not - affect linking and should not be enabled unless - the user needs this information. - Boost_LIBRARY_DIR - Default value for Boost_LIBRARY_DIR_RELEASE and - Boost_LIBRARY_DIR_DEBUG. - -On Visual Studio and Borland compilers Boost headers request automatic -linking to corresponding libraries. This requires matching libraries -to be linked explicitly or available in the link library search path. -In this case setting ``Boost_USE_STATIC_LIBS`` to ``OFF`` may not achieve -dynamic linking. Boost automatic linking typically requests static -libraries with a few exceptions (such as ``Boost.Python``). Use:: - - add_definitions(${Boost_LIB_DIAGNOSTIC_DEFINITIONS}) - -to ask Boost to report information about automatic linking requests. 
- -Example to find Boost headers only:: - - find_package(Boost 1.36.0) - if(Boost_FOUND) - include_directories(${Boost_INCLUDE_DIRS}) - add_executable(foo foo.cc) - endif() - -Example to find Boost libraries and use imported targets:: - - find_package(Boost 1.56 REQUIRED COMPONENTS - date_time filesystem iostreams) - add_executable(foo foo.cc) - target_link_libraries(foo Boost::date_time Boost::filesystem - Boost::iostreams) - -Example to find Boost Python 3.6 libraries and use imported targets:: - - find_package(Boost 1.67 REQUIRED COMPONENTS - python36 numpy36) - add_executable(foo foo.cc) - target_link_libraries(foo Boost::python36 Boost::numpy36) - -Example to find Boost headers and some *static* (release only) libraries:: - - set(Boost_USE_STATIC_LIBS ON) # only find static libs - set(Boost_USE_DEBUG_LIBS OFF) # ignore debug libs and - set(Boost_USE_RELEASE_LIBS ON) # only find release libs - set(Boost_USE_MULTITHREADED ON) - set(Boost_USE_STATIC_RUNTIME OFF) - find_package(Boost 1.66.0 COMPONENTS date_time filesystem system ...) - if(Boost_FOUND) - include_directories(${Boost_INCLUDE_DIRS}) - add_executable(foo foo.cc) - target_link_libraries(foo ${Boost_LIBRARIES}) - endif() - -Boost CMake -^^^^^^^^^^^ - -If Boost was built using the boost-cmake project or from Boost 1.70.0 on -it provides a package configuration file for use with find_package's config mode. -This module looks for the package configuration file called -``BoostConfig.cmake`` or ``boost-config.cmake`` and stores the result in -``CACHE`` entry "Boost_DIR". If found, the package configuration file is loaded -and this module returns with no further action. See documentation of -the Boost CMake package configuration for details on what it provides. - -Set ``Boost_NO_BOOST_CMAKE`` to ``ON``, to disable the search for boost-cmake. -#]=======================================================================] - -# The FPHSA helper provides standard way of reporting final search results to -# the user including the version and component checks. - -# Patch for GTSAM: -#include(${CMAKE_CURRENT_LIST_DIR}/FindPackageHandleStandardArgs.cmake) -include(FindPackageHandleStandardArgs) - -# Save project's policies -cmake_policy(PUSH) -cmake_policy(SET CMP0057 NEW) # if IN_LIST - -function(_boost_get_existing_target component target_var) - set(names "${component}") - if(component MATCHES "^([a-z_]*)(python|numpy)([1-9])\\.?([0-9])?$") - # handle pythonXY and numpyXY versioned components and also python X.Y, mpi_python etc. - list(APPEND names - "${CMAKE_MATCH_1}${CMAKE_MATCH_2}" # python - "${CMAKE_MATCH_1}${CMAKE_MATCH_2}${CMAKE_MATCH_3}" # pythonX - "${CMAKE_MATCH_1}${CMAKE_MATCH_2}${CMAKE_MATCH_3}${CMAKE_MATCH_4}" #pythonXY - ) - endif() - # https://github.com/boost-cmake/boost-cmake uses boost::file_system etc. - # So handle similar constructions of target names - string(TOLOWER "${component}" lower_component) - list(APPEND names "${lower_component}") - foreach(prefix Boost boost) - foreach(name IN LISTS names) - if(TARGET "${prefix}::${name}") - # The target may be an INTERFACE library that wraps around a single other - # target for compatibility. Unwrap this layer so we can extract real info. 
- if("${name}" MATCHES "^(python|numpy|mpi_python)([1-9])([0-9])$") - set(name_nv "${CMAKE_MATCH_1}") - if(TARGET "${prefix}::${name_nv}") - get_property(type TARGET "${prefix}::${name}" PROPERTY TYPE) - if(type STREQUAL "INTERFACE_LIBRARY") - get_property(lib TARGET "${prefix}::${name}" PROPERTY INTERFACE_LINK_LIBRARIES) - if("${lib}" STREQUAL "${prefix}::${name_nv}") - set(${target_var} "${prefix}::${name_nv}" PARENT_SCOPE) - return() - endif() - endif() - endif() - endif() - set(${target_var} "${prefix}::${name}" PARENT_SCOPE) - return() - endif() - endforeach() - endforeach() - set(${target_var} "" PARENT_SCOPE) -endfunction() - -function(_boost_get_canonical_target_name component target_var) - string(TOLOWER "${component}" component) - if(component MATCHES "^([a-z_]*)(python|numpy)([1-9])\\.?([0-9])?$") - # handle pythonXY and numpyXY versioned components and also python X.Y, mpi_python etc. - set(${target_var} "Boost::${CMAKE_MATCH_1}${CMAKE_MATCH_2}" PARENT_SCOPE) - else() - set(${target_var} "Boost::${component}" PARENT_SCOPE) - endif() -endfunction() - -macro(_boost_set_in_parent_scope name value) - # Set a variable in parent scope and make it visibile in current scope - set(${name} "${value}" PARENT_SCOPE) - set(${name} "${value}") -endmacro() - -macro(_boost_set_if_unset name value) - if(NOT ${name}) - _boost_set_in_parent_scope(${name} "${value}") - endif() -endmacro() - -macro(_boost_set_cache_if_unset name value) - if(NOT ${name}) - set(${name} "${value}" CACHE STRING "" FORCE) - endif() -endmacro() - -macro(_boost_append_include_dir target) - get_target_property(inc "${target}" INTERFACE_INCLUDE_DIRECTORIES) - if(inc) - list(APPEND include_dirs "${inc}") - endif() -endmacro() - -function(_boost_set_legacy_variables_from_config) - # Set legacy variables for compatibility if not set - set(include_dirs "") - set(library_dirs "") - set(libraries "") - # Header targets Boost::headers or Boost::boost - foreach(comp headers boost) - _boost_get_existing_target(${comp} target) - if(target) - _boost_append_include_dir("${target}") - endif() - endforeach() - # Library targets - foreach(comp IN LISTS Boost_FIND_COMPONENTS) - string(TOUPPER ${comp} uppercomp) - # Overwrite if set - _boost_set_in_parent_scope(Boost_${uppercomp}_FOUND "${Boost_${comp}_FOUND}") - if(Boost_${comp}_FOUND) - _boost_get_existing_target(${comp} target) - if(NOT target) - if(Boost_DEBUG OR Boost_VERBOSE) - message(WARNING "Could not find imported target for required component '${comp}'. Legacy variables for this component might be missing. Refer to the documentation of your Boost installation for help on variables to use.") - endif() - continue() - endif() - _boost_append_include_dir("${target}") - _boost_set_if_unset(Boost_${uppercomp}_LIBRARY "${target}") - _boost_set_if_unset(Boost_${uppercomp}_LIBRARIES "${target}") # Very old legacy variable - list(APPEND libraries "${target}") - get_property(type TARGET "${target}" PROPERTY TYPE) - if(NOT type STREQUAL "INTERFACE_LIBRARY") - foreach(cfg RELEASE DEBUG) - get_target_property(lib ${target} IMPORTED_LOCATION_${cfg}) - if(lib) - get_filename_component(lib_dir "${lib}" DIRECTORY) - list(APPEND library_dirs ${lib_dir}) - _boost_set_cache_if_unset(Boost_${uppercomp}_LIBRARY_${cfg} "${lib}") - endif() - endforeach() - elseif(Boost_DEBUG OR Boost_VERBOSE) - # For projects using only the Boost::* targets this warning can be safely ignored. - message(WARNING "Imported target '${target}' for required component '${comp}' has no artifact. 
Legacy variables for this component might be missing. Refer to the documentation of your Boost installation for help on variables to use.") - endif() - _boost_get_canonical_target_name("${comp}" canonical_target) - if(NOT TARGET "${canonical_target}") - add_library("${canonical_target}" INTERFACE IMPORTED) - target_link_libraries("${canonical_target}" INTERFACE "${target}") - endif() - endif() - endforeach() - list(REMOVE_DUPLICATES include_dirs) - list(REMOVE_DUPLICATES library_dirs) - _boost_set_if_unset(Boost_INCLUDE_DIRS "${include_dirs}") - _boost_set_if_unset(Boost_LIBRARY_DIRS "${library_dirs}") - _boost_set_if_unset(Boost_LIBRARIES "${libraries}") - _boost_set_if_unset(Boost_VERSION_STRING "${Boost_VERSION_MAJOR}.${Boost_VERSION_MINOR}.${Boost_VERSION_PATCH}") - find_path(Boost_INCLUDE_DIR - NAMES boost/version.hpp boost/config.hpp - HINTS ${Boost_INCLUDE_DIRS} - NO_DEFAULT_PATH - ) - if(NOT Boost_VERSION_MACRO OR NOT Boost_LIB_VERSION) - set(version_file ${Boost_INCLUDE_DIR}/boost/version.hpp) - if(EXISTS "${version_file}") - file(STRINGS "${version_file}" contents REGEX "#define BOOST_(LIB_)?VERSION ") - if(contents MATCHES "#define BOOST_VERSION ([0-9]+)") - _boost_set_if_unset(Boost_VERSION_MACRO "${CMAKE_MATCH_1}") - endif() - if(contents MATCHES "#define BOOST_LIB_VERSION \"([0-9_]+)\"") - _boost_set_if_unset(Boost_LIB_VERSION "${CMAKE_MATCH_1}") - endif() - endif() - endif() - _boost_set_if_unset(Boost_MAJOR_VERSION ${Boost_VERSION_MAJOR}) - _boost_set_if_unset(Boost_MINOR_VERSION ${Boost_VERSION_MINOR}) - _boost_set_if_unset(Boost_SUBMINOR_VERSION ${Boost_VERSION_PATCH}) - if(WIN32) - _boost_set_if_unset(Boost_LIB_DIAGNOSTIC_DEFINITIONS "-DBOOST_LIB_DIAGNOSTIC") - endif() - if(NOT TARGET Boost::headers) - add_library(Boost::headers INTERFACE IMPORTED) - target_include_directories(Boost::headers INTERFACE ${Boost_INCLUDE_DIRS}) - endif() - # Legacy targets w/o functionality as all handled by defined targets - foreach(lib diagnostic_definitions disable_autolinking dynamic_linking) - if(NOT TARGET Boost::${lib}) - add_library(Boost::${lib} INTERFACE IMPORTED) - endif() - endforeach() - if(NOT TARGET Boost::boost) - add_library(Boost::boost INTERFACE IMPORTED) - target_link_libraries(Boost::boost INTERFACE Boost::headers) - endif() -endfunction() - -#------------------------------------------------------------------------------- -# Before we go searching, check whether a boost cmake package is available, unless -# the user specifically asked NOT to search for one. -# -# If Boost_DIR is set, this behaves as any find_package call would. If not, -# it looks at BOOST_ROOT and BOOSTROOT to find Boost. -# -if (NOT Boost_NO_BOOST_CMAKE) - # If Boost_DIR is not set, look for BOOSTROOT and BOOST_ROOT as alternatives, - # since these are more conventional for Boost. - if ("$ENV{Boost_DIR}" STREQUAL "") - if (NOT "$ENV{BOOST_ROOT}" STREQUAL "") - set(ENV{Boost_DIR} $ENV{BOOST_ROOT}) - elseif (NOT "$ENV{BOOSTROOT}" STREQUAL "") - set(ENV{Boost_DIR} $ENV{BOOSTROOT}) - endif() - endif() - - # Do the same find_package call but look specifically for the CMake version. - # Note that args are passed in the Boost_FIND_xxxxx variables, so there is no - # need to delegate them to this find_package call. - find_package(Boost QUIET NO_MODULE) - mark_as_advanced(Boost_DIR) - - # If we found a boost cmake package, then we're done. Print out what we found. - # Otherwise let the rest of the module try to find it. 
- if(Boost_FOUND) - # Convert component found variables to standard variables if required - # Necessary for legacy boost-cmake and 1.70 builtin BoostConfig - if(Boost_FIND_COMPONENTS) - foreach(_comp IN LISTS Boost_FIND_COMPONENTS) - if(DEFINED Boost_${_comp}_FOUND) - continue() - endif() - string(TOUPPER ${_comp} _uppercomp) - if(DEFINED Boost${_comp}_FOUND) # legacy boost-cmake project - set(Boost_${_comp}_FOUND ${Boost${_comp}_FOUND}) - elseif(DEFINED Boost_${_uppercomp}_FOUND) # Boost 1.70 - set(Boost_${_comp}_FOUND ${Boost_${_uppercomp}_FOUND}) - endif() - endforeach() - endif() - - find_package_handle_standard_args(Boost HANDLE_COMPONENTS CONFIG_MODE) - _boost_set_legacy_variables_from_config() - - # Restore project's policies - cmake_policy(POP) - return() - endif() -endif() - - -#------------------------------------------------------------------------------- -# FindBoost functions & macros -# - -# -# Print debug text if Boost_DEBUG is set. -# Call example: -# _Boost_DEBUG_PRINT("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" "debug message") -# -function(_Boost_DEBUG_PRINT file line text) - if(Boost_DEBUG) - message(STATUS "[ ${file}:${line} ] ${text}") - endif() -endfunction() - -# -# _Boost_DEBUG_PRINT_VAR(file line variable_name [ENVIRONMENT] -# [SOURCE "short explanation of origin of var value"]) -# -# ENVIRONMENT - look up environment variable instead of CMake variable -# -# Print variable name and its value if Boost_DEBUG is set. -# Call example: -# _Boost_DEBUG_PRINT_VAR("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" BOOST_ROOT) -# -function(_Boost_DEBUG_PRINT_VAR file line name) - if(Boost_DEBUG) - cmake_parse_arguments(_args "ENVIRONMENT" "SOURCE" "" ${ARGN}) - - unset(source) - if(_args_SOURCE) - set(source " (${_args_SOURCE})") - endif() - - if(_args_ENVIRONMENT) - if(DEFINED ENV{${name}}) - set(value "\"$ENV{${name}}\"") - else() - set(value "") - endif() - set(_name "ENV{${name}}") - else() - if(DEFINED "${name}") - set(value "\"${${name}}\"") - else() - set(value "") - endif() - set(_name "${name}") - endif() - - _Boost_DEBUG_PRINT("${file}" "${line}" "${_name} = ${value}${source}") - endif() -endfunction() - -############################################ -# -# Check the existence of the libraries. -# -############################################ -# This macro was taken directly from the FindQt4.cmake file that is included -# with the CMake distribution. This is NOT my work. All work was done by the -# original authors of the FindQt4.cmake file. Only minor modifications were -# made to remove references to Qt and make this file more generally applicable -# And ELSE/ENDIF pairs were removed for readability. 
-######################################################################### - -macro(_Boost_ADJUST_LIB_VARS basename) - if(Boost_INCLUDE_DIR ) - if(Boost_${basename}_LIBRARY_DEBUG AND Boost_${basename}_LIBRARY_RELEASE) - # if the generator is multi-config or if CMAKE_BUILD_TYPE is set for - # single-config generators, set optimized and debug libraries - get_property(_isMultiConfig GLOBAL PROPERTY GENERATOR_IS_MULTI_CONFIG) - if(_isMultiConfig OR CMAKE_BUILD_TYPE) - set(Boost_${basename}_LIBRARY optimized ${Boost_${basename}_LIBRARY_RELEASE} debug ${Boost_${basename}_LIBRARY_DEBUG}) - else() - # For single-config generators where CMAKE_BUILD_TYPE has no value, - # just use the release libraries - set(Boost_${basename}_LIBRARY ${Boost_${basename}_LIBRARY_RELEASE} ) - endif() - # FIXME: This probably should be set for both cases - set(Boost_${basename}_LIBRARIES optimized ${Boost_${basename}_LIBRARY_RELEASE} debug ${Boost_${basename}_LIBRARY_DEBUG}) - endif() - - # if only the release version was found, set the debug variable also to the release version - if(Boost_${basename}_LIBRARY_RELEASE AND NOT Boost_${basename}_LIBRARY_DEBUG) - set(Boost_${basename}_LIBRARY_DEBUG ${Boost_${basename}_LIBRARY_RELEASE}) - set(Boost_${basename}_LIBRARY ${Boost_${basename}_LIBRARY_RELEASE}) - set(Boost_${basename}_LIBRARIES ${Boost_${basename}_LIBRARY_RELEASE}) - endif() - - # if only the debug version was found, set the release variable also to the debug version - if(Boost_${basename}_LIBRARY_DEBUG AND NOT Boost_${basename}_LIBRARY_RELEASE) - set(Boost_${basename}_LIBRARY_RELEASE ${Boost_${basename}_LIBRARY_DEBUG}) - set(Boost_${basename}_LIBRARY ${Boost_${basename}_LIBRARY_DEBUG}) - set(Boost_${basename}_LIBRARIES ${Boost_${basename}_LIBRARY_DEBUG}) - endif() - - # If the debug & release library ends up being the same, omit the keywords - if("${Boost_${basename}_LIBRARY_RELEASE}" STREQUAL "${Boost_${basename}_LIBRARY_DEBUG}") - set(Boost_${basename}_LIBRARY ${Boost_${basename}_LIBRARY_RELEASE} ) - set(Boost_${basename}_LIBRARIES ${Boost_${basename}_LIBRARY_RELEASE} ) - endif() - - if(Boost_${basename}_LIBRARY AND Boost_${basename}_HEADER) - set(Boost_${basename}_FOUND ON) - if("x${basename}" STREQUAL "xTHREAD" AND NOT TARGET Threads::Threads) - string(APPEND Boost_ERROR_REASON_THREAD " (missing dependency: Threads)") - set(Boost_THREAD_FOUND OFF) - endif() - endif() - - endif() - # Make variables changeable to the advanced user - mark_as_advanced( - Boost_${basename}_LIBRARY_RELEASE - Boost_${basename}_LIBRARY_DEBUG - ) -endmacro() - -# Detect changes in used variables. -# Compares the current variable value with the last one. -# In short form: -# v != v_LAST -> CHANGED = 1 -# v is defined, v_LAST not -> CHANGED = 1 -# v is not defined, but v_LAST is -> CHANGED = 1 -# otherwise -> CHANGED = 0 -# CHANGED is returned in variable named ${changed_var} -macro(_Boost_CHANGE_DETECT changed_var) - set(${changed_var} 0) - foreach(v ${ARGN}) - if(DEFINED _Boost_COMPONENTS_SEARCHED) - if(${v}) - if(_${v}_LAST) - string(COMPARE NOTEQUAL "${${v}}" "${_${v}_LAST}" _${v}_CHANGED) - else() - set(_${v}_CHANGED 1) - endif() - elseif(_${v}_LAST) - set(_${v}_CHANGED 1) - endif() - if(_${v}_CHANGED) - set(${changed_var} 1) - endif() - else() - set(_${v}_CHANGED 0) - endif() - endforeach() -endmacro() - -# -# Find the given library (var). 
-# Use 'build_type' to support different lib paths for RELEASE or DEBUG builds -# -macro(_Boost_FIND_LIBRARY var build_type) - - find_library(${var} ${ARGN}) - - if(${var}) - # If this is the first library found then save Boost_LIBRARY_DIR_[RELEASE,DEBUG]. - if(NOT Boost_LIBRARY_DIR_${build_type}) - get_filename_component(_dir "${${var}}" PATH) - set(Boost_LIBRARY_DIR_${build_type} "${_dir}" CACHE PATH "Boost library directory ${build_type}" FORCE) - endif() - elseif(_Boost_FIND_LIBRARY_HINTS_FOR_COMPONENT) - # Try component-specific hints but do not save Boost_LIBRARY_DIR_[RELEASE,DEBUG]. - find_library(${var} HINTS ${_Boost_FIND_LIBRARY_HINTS_FOR_COMPONENT} ${ARGN}) - endif() - - # If Boost_LIBRARY_DIR_[RELEASE,DEBUG] is known then search only there. - if(Boost_LIBRARY_DIR_${build_type}) - set(_boost_LIBRARY_SEARCH_DIRS_${build_type} ${Boost_LIBRARY_DIR_${build_type}} NO_DEFAULT_PATH NO_CMAKE_FIND_ROOT_PATH) - _Boost_DEBUG_PRINT_VAR("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" - "Boost_LIBRARY_DIR_${build_type}") - _Boost_DEBUG_PRINT_VAR("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" - "_boost_LIBRARY_SEARCH_DIRS_${build_type}") - endif() -endmacro() - -#------------------------------------------------------------------------------- - -# Convert CMAKE_CXX_COMPILER_VERSION to boost compiler suffix version. -function(_Boost_COMPILER_DUMPVERSION _OUTPUT_VERSION _OUTPUT_VERSION_MAJOR _OUTPUT_VERSION_MINOR) - string(REGEX REPLACE "([0-9]+)\\.([0-9]+)(\\.[0-9]+)?" "\\1" - _boost_COMPILER_VERSION_MAJOR "${CMAKE_CXX_COMPILER_VERSION}") - string(REGEX REPLACE "([0-9]+)\\.([0-9]+)(\\.[0-9]+)?" "\\2" - _boost_COMPILER_VERSION_MINOR "${CMAKE_CXX_COMPILER_VERSION}") - - set(_boost_COMPILER_VERSION "${_boost_COMPILER_VERSION_MAJOR}${_boost_COMPILER_VERSION_MINOR}") - - set(${_OUTPUT_VERSION} ${_boost_COMPILER_VERSION} PARENT_SCOPE) - set(${_OUTPUT_VERSION_MAJOR} ${_boost_COMPILER_VERSION_MAJOR} PARENT_SCOPE) - set(${_OUTPUT_VERSION_MINOR} ${_boost_COMPILER_VERSION_MINOR} PARENT_SCOPE) -endfunction() - -# -# Take a list of libraries with "thread" in it -# and prepend duplicates with "thread_${Boost_THREADAPI}" -# at the front of the list -# -function(_Boost_PREPEND_LIST_WITH_THREADAPI _output) - set(_orig_libnames ${ARGN}) - string(REPLACE "thread" "thread_${Boost_THREADAPI}" _threadapi_libnames "${_orig_libnames}") - set(${_output} ${_threadapi_libnames} ${_orig_libnames} PARENT_SCOPE) -endfunction() - -# -# If a library is found, replace its cache entry with its REALPATH -# -function(_Boost_SWAP_WITH_REALPATH _library _docstring) - if(${_library}) - get_filename_component(_boost_filepathreal ${${_library}} REALPATH) - unset(${_library} CACHE) - set(${_library} ${_boost_filepathreal} CACHE FILEPATH "${_docstring}") - endif() -endfunction() - -function(_Boost_CHECK_SPELLING _var) - if(${_var}) - string(TOUPPER ${_var} _var_UC) - message(FATAL_ERROR "ERROR: ${_var} is not the correct spelling. The proper spelling is ${_var_UC}.") - endif() -endfunction() - -# Guesses Boost's compiler prefix used in built library names -# Returns the guess by setting the variable pointed to by _ret -function(_Boost_GUESS_COMPILER_PREFIX _ret) - if("x${CMAKE_CXX_COMPILER_ID}" STREQUAL "xIntel") - if(WIN32) - set (_boost_COMPILER "-iw") - else() - set (_boost_COMPILER "-il") - endif() - elseif (GHSMULTI) - set(_boost_COMPILER "-ghs") - elseif("x${CMAKE_CXX_COMPILER_ID}" STREQUAL "xMSVC" OR "x${CMAKE_CXX_SIMULATE_ID}" STREQUAL "xMSVC") - if(MSVC_TOOLSET_VERSION GREATER_EQUAL 150) - # Not yet known. 
- set(_boost_COMPILER "") - elseif(MSVC_TOOLSET_VERSION GREATER_EQUAL 140) - # MSVC toolset 14.x versions are forward compatible. - set(_boost_COMPILER "") - foreach(v 9 8 7 6 5 4 3 2 1 0) - if(MSVC_TOOLSET_VERSION GREATER_EQUAL 14${v}) - list(APPEND _boost_COMPILER "-vc14${v}") - endif() - endforeach() - elseif(MSVC_TOOLSET_VERSION GREATER_EQUAL 80) - set(_boost_COMPILER "-vc${MSVC_TOOLSET_VERSION}") - elseif(NOT CMAKE_CXX_COMPILER_VERSION VERSION_LESS 13.10) - set(_boost_COMPILER "-vc71") - elseif(NOT CMAKE_CXX_COMPILER_VERSION VERSION_LESS 13) # Good luck! - set(_boost_COMPILER "-vc7") # yes, this is correct - else() # VS 6.0 Good luck! - set(_boost_COMPILER "-vc6") # yes, this is correct - endif() - - if("x${CMAKE_CXX_COMPILER_ID}" STREQUAL "xClang") - string(REPLACE "." ";" VERSION_LIST "${CMAKE_CXX_COMPILER_VERSION}") - list(GET VERSION_LIST 0 CLANG_VERSION_MAJOR) - set(_boost_COMPILER "-clangw${CLANG_VERSION_MAJOR};${_boost_COMPILER}") - endif() - elseif (BORLAND) - set(_boost_COMPILER "-bcb") - elseif(CMAKE_CXX_COMPILER_ID STREQUAL "SunPro") - set(_boost_COMPILER "-sw") - elseif(CMAKE_CXX_COMPILER_ID STREQUAL "XL") - set(_boost_COMPILER "-xlc") - elseif (MINGW) - if(Boost_VERSION_STRING VERSION_LESS 1.34) - set(_boost_COMPILER "-mgw") # no GCC version encoding prior to 1.34 - else() - _Boost_COMPILER_DUMPVERSION(_boost_COMPILER_VERSION _boost_COMPILER_VERSION_MAJOR _boost_COMPILER_VERSION_MINOR) - set(_boost_COMPILER "-mgw${_boost_COMPILER_VERSION}") - endif() - elseif (UNIX) - _Boost_COMPILER_DUMPVERSION(_boost_COMPILER_VERSION _boost_COMPILER_VERSION_MAJOR _boost_COMPILER_VERSION_MINOR) - if(NOT Boost_VERSION_STRING VERSION_LESS 1.69.0) - # From GCC 5 and clang 4, versioning changes and minor becomes patch. - # For those compilers, patch is exclude from compiler tag in Boost 1.69+ library naming. - if(CMAKE_CXX_COMPILER_ID STREQUAL "GNU" AND _boost_COMPILER_VERSION_MAJOR VERSION_GREATER 4) - set(_boost_COMPILER_VERSION "${_boost_COMPILER_VERSION_MAJOR}") - elseif(CMAKE_CXX_COMPILER_ID STREQUAL "Clang" AND _boost_COMPILER_VERSION_MAJOR VERSION_GREATER 3) - set(_boost_COMPILER_VERSION "${_boost_COMPILER_VERSION_MAJOR}") - endif() - endif() - - if(CMAKE_CXX_COMPILER_ID STREQUAL "GNU") - if(Boost_VERSION_STRING VERSION_LESS 1.34) - set(_boost_COMPILER "-gcc") # no GCC version encoding prior to 1.34 - else() - # Determine which version of GCC we have. - if(APPLE) - if(Boost_VERSION_STRING VERSION_LESS 1.36.0) - # In Boost <= 1.35.0, there is no mangled compiler name for - # the macOS/Darwin version of GCC. - set(_boost_COMPILER "") - else() - # In Boost 1.36.0 and newer, the mangled compiler name used - # on macOS/Darwin is "xgcc". - set(_boost_COMPILER "-xgcc${_boost_COMPILER_VERSION}") - endif() - else() - set(_boost_COMPILER "-gcc${_boost_COMPILER_VERSION}") - endif() - endif() - elseif(CMAKE_CXX_COMPILER_ID STREQUAL "Clang") - # TODO: Find out any Boost version constraints vs clang support. - set(_boost_COMPILER "-clang${_boost_COMPILER_VERSION}") - endif() - else() - set(_boost_COMPILER "") - endif() - _Boost_DEBUG_PRINT_VAR("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" - "_boost_COMPILER" SOURCE "guessed") - set(${_ret} ${_boost_COMPILER} PARENT_SCOPE) -endfunction() - -# -# Get component dependencies. Requires the dependencies to have been -# defined for the Boost release version. 
-# -# component - the component to check -# _ret - list of library dependencies -# -function(_Boost_COMPONENT_DEPENDENCIES component _ret) - # Note: to add a new Boost release, run - # - # % cmake -DBOOST_DIR=/path/to/boost/source -P Utilities/Scripts/BoostScanDeps.cmake - # - # The output may be added in a new block below. If it's the same as - # the previous release, simply update the version range of the block - # for the previous release. Also check if any new components have - # been added, and add any new components to - # _Boost_COMPONENT_HEADERS. - # - # This information was originally generated by running - # BoostScanDeps.cmake against every boost release to date supported - # by FindBoost: - # - # % for version in /path/to/boost/sources/* - # do - # cmake -DBOOST_DIR=$version -P Utilities/Scripts/BoostScanDeps.cmake - # done - # - # The output was then updated by search and replace with these regexes: - # - # - Strip message(STATUS) prefix dashes - # s;^-- ;; - # - Indent - # s;^set(; set(;; - # - Add conditionals - # s;Scanning /path/to/boost/sources/boost_\(.*\)_\(.*\)_\(.*); elseif(NOT Boost_VERSION_STRING VERSION_LESS \1\.\2\.\3 AND Boost_VERSION_STRING VERSION_LESS xxxx); - # - # This results in the logic seen below, but will require the xxxx - # replacing with the following Boost release version (or the next - # minor version to be released, e.g. 1.59 was the latest at the time - # of writing, making 1.60 the next. Identical consecutive releases - # were then merged together by updating the end range of the first - # block and removing the following redundant blocks. - # - # Running the script against all historical releases should be - # required only if the BoostScanDeps.cmake script logic is changed. - # The addition of a new release should only require it to be run - # against the new release. 
- - # Handle Python version suffixes - if(component MATCHES "^(python|mpi_python|numpy)([0-9][0-9]?|[0-9]\\.[0-9])\$") - set(component "${CMAKE_MATCH_1}") - set(component_python_version "${CMAKE_MATCH_2}") - endif() - - set(_Boost_IMPORTED_TARGETS TRUE) - if(Boost_VERSION_STRING AND Boost_VERSION_STRING VERSION_LESS 1.33.0) - message(WARNING "Imported targets and dependency information not available for Boost version ${Boost_VERSION_STRING} (all versions older than 1.33)") - set(_Boost_IMPORTED_TARGETS FALSE) - elseif(NOT Boost_VERSION_STRING VERSION_LESS 1.33.0 AND Boost_VERSION_STRING VERSION_LESS 1.35.0) - set(_Boost_IOSTREAMS_DEPENDENCIES regex thread) - set(_Boost_REGEX_DEPENDENCIES thread) - set(_Boost_WAVE_DEPENDENCIES filesystem thread) - set(_Boost_WSERIALIZATION_DEPENDENCIES serialization) - elseif(NOT Boost_VERSION_STRING VERSION_LESS 1.35.0 AND Boost_VERSION_STRING VERSION_LESS 1.36.0) - set(_Boost_FILESYSTEM_DEPENDENCIES system) - set(_Boost_IOSTREAMS_DEPENDENCIES regex) - set(_Boost_MPI_DEPENDENCIES serialization) - set(_Boost_MPI_PYTHON_DEPENDENCIES python${component_python_version} mpi serialization) - set(_Boost_WAVE_DEPENDENCIES filesystem system thread) - set(_Boost_WSERIALIZATION_DEPENDENCIES serialization) - elseif(NOT Boost_VERSION_STRING VERSION_LESS 1.36.0 AND Boost_VERSION_STRING VERSION_LESS 1.38.0) - set(_Boost_FILESYSTEM_DEPENDENCIES system) - set(_Boost_IOSTREAMS_DEPENDENCIES regex) - set(_Boost_MATH_DEPENDENCIES math_c99 math_c99f math_c99l math_tr1 math_tr1f math_tr1l) - set(_Boost_MPI_DEPENDENCIES serialization) - set(_Boost_MPI_PYTHON_DEPENDENCIES python${component_python_version} mpi serialization) - set(_Boost_WAVE_DEPENDENCIES filesystem system thread) - set(_Boost_WSERIALIZATION_DEPENDENCIES serialization) - elseif(NOT Boost_VERSION_STRING VERSION_LESS 1.38.0 AND Boost_VERSION_STRING VERSION_LESS 1.43.0) - set(_Boost_FILESYSTEM_DEPENDENCIES system) - set(_Boost_IOSTREAMS_DEPENDENCIES regex) - set(_Boost_MATH_DEPENDENCIES math_c99 math_c99f math_c99l math_tr1 math_tr1f math_tr1l) - set(_Boost_MPI_DEPENDENCIES serialization) - set(_Boost_MPI_PYTHON_DEPENDENCIES python${component_python_version} mpi serialization) - set(_Boost_THREAD_DEPENDENCIES date_time) - set(_Boost_WAVE_DEPENDENCIES filesystem system thread date_time) - set(_Boost_WSERIALIZATION_DEPENDENCIES serialization) - elseif(NOT Boost_VERSION_STRING VERSION_LESS 1.43.0 AND Boost_VERSION_STRING VERSION_LESS 1.44.0) - set(_Boost_FILESYSTEM_DEPENDENCIES system) - set(_Boost_IOSTREAMS_DEPENDENCIES regex) - set(_Boost_MATH_DEPENDENCIES math_c99 math_c99f math_c99l math_tr1 math_tr1f math_tr1l random) - set(_Boost_MPI_DEPENDENCIES serialization) - set(_Boost_MPI_PYTHON_DEPENDENCIES python${component_python_version} mpi serialization) - set(_Boost_THREAD_DEPENDENCIES date_time) - set(_Boost_WAVE_DEPENDENCIES filesystem system thread date_time) - set(_Boost_WSERIALIZATION_DEPENDENCIES serialization) - elseif(NOT Boost_VERSION_STRING VERSION_LESS 1.44.0 AND Boost_VERSION_STRING VERSION_LESS 1.45.0) - set(_Boost_FILESYSTEM_DEPENDENCIES system) - set(_Boost_IOSTREAMS_DEPENDENCIES regex) - set(_Boost_MATH_DEPENDENCIES math_c99 math_c99f math_c99l math_tr1 math_tr1f math_tr1l random serialization) - set(_Boost_MPI_DEPENDENCIES serialization) - set(_Boost_MPI_PYTHON_DEPENDENCIES python${component_python_version} mpi serialization) - set(_Boost_THREAD_DEPENDENCIES date_time) - set(_Boost_WAVE_DEPENDENCIES serialization filesystem system thread date_time) - set(_Boost_WSERIALIZATION_DEPENDENCIES serialization) 
- elseif(NOT Boost_VERSION_STRING VERSION_LESS 1.45.0 AND Boost_VERSION_STRING VERSION_LESS 1.47.0) - set(_Boost_FILESYSTEM_DEPENDENCIES system) - set(_Boost_IOSTREAMS_DEPENDENCIES regex) - set(_Boost_MATH_DEPENDENCIES math_c99 math_c99f math_c99l math_tr1 math_tr1f math_tr1l random) - set(_Boost_MPI_DEPENDENCIES serialization) - set(_Boost_MPI_PYTHON_DEPENDENCIES python${component_python_version} mpi serialization) - set(_Boost_THREAD_DEPENDENCIES date_time) - set(_Boost_WAVE_DEPENDENCIES filesystem system serialization thread date_time) - set(_Boost_WSERIALIZATION_DEPENDENCIES serialization) - elseif(NOT Boost_VERSION_STRING VERSION_LESS 1.47.0 AND Boost_VERSION_STRING VERSION_LESS 1.48.0) - set(_Boost_CHRONO_DEPENDENCIES system) - set(_Boost_FILESYSTEM_DEPENDENCIES system) - set(_Boost_IOSTREAMS_DEPENDENCIES regex) - set(_Boost_MATH_DEPENDENCIES math_c99 math_c99f math_c99l math_tr1 math_tr1f math_tr1l random) - set(_Boost_MPI_DEPENDENCIES serialization) - set(_Boost_MPI_PYTHON_DEPENDENCIES python${component_python_version} mpi serialization) - set(_Boost_THREAD_DEPENDENCIES date_time) - set(_Boost_WAVE_DEPENDENCIES filesystem system serialization thread date_time) - set(_Boost_WSERIALIZATION_DEPENDENCIES serialization) - elseif(NOT Boost_VERSION_STRING VERSION_LESS 1.48.0 AND Boost_VERSION_STRING VERSION_LESS 1.50.0) - set(_Boost_CHRONO_DEPENDENCIES system) - set(_Boost_FILESYSTEM_DEPENDENCIES system) - set(_Boost_IOSTREAMS_DEPENDENCIES regex) - set(_Boost_MATH_DEPENDENCIES math_c99 math_c99f math_c99l math_tr1 math_tr1f math_tr1l random) - set(_Boost_MPI_DEPENDENCIES serialization) - set(_Boost_MPI_PYTHON_DEPENDENCIES python${component_python_version} mpi serialization) - set(_Boost_THREAD_DEPENDENCIES date_time) - set(_Boost_TIMER_DEPENDENCIES chrono system) - set(_Boost_WAVE_DEPENDENCIES filesystem system serialization thread date_time) - set(_Boost_WSERIALIZATION_DEPENDENCIES serialization) - elseif(NOT Boost_VERSION_STRING VERSION_LESS 1.50.0 AND Boost_VERSION_STRING VERSION_LESS 1.53.0) - set(_Boost_CHRONO_DEPENDENCIES system) - set(_Boost_FILESYSTEM_DEPENDENCIES system) - set(_Boost_IOSTREAMS_DEPENDENCIES regex) - set(_Boost_MATH_DEPENDENCIES math_c99 math_c99f math_c99l math_tr1 math_tr1f math_tr1l regex random) - set(_Boost_MPI_DEPENDENCIES serialization) - set(_Boost_MPI_PYTHON_DEPENDENCIES python${component_python_version} mpi serialization) - set(_Boost_THREAD_DEPENDENCIES chrono system date_time) - set(_Boost_TIMER_DEPENDENCIES chrono system) - set(_Boost_WAVE_DEPENDENCIES filesystem system serialization thread chrono date_time) - set(_Boost_WSERIALIZATION_DEPENDENCIES serialization) - elseif(NOT Boost_VERSION_STRING VERSION_LESS 1.53.0 AND Boost_VERSION_STRING VERSION_LESS 1.54.0) - set(_Boost_ATOMIC_DEPENDENCIES thread chrono system date_time) - set(_Boost_CHRONO_DEPENDENCIES system) - set(_Boost_FILESYSTEM_DEPENDENCIES system) - set(_Boost_IOSTREAMS_DEPENDENCIES regex) - set(_Boost_MATH_DEPENDENCIES math_c99 math_c99f math_c99l math_tr1 math_tr1f math_tr1l regex random) - set(_Boost_MPI_DEPENDENCIES serialization) - set(_Boost_MPI_PYTHON_DEPENDENCIES python${component_python_version} mpi serialization) - set(_Boost_THREAD_DEPENDENCIES chrono system date_time atomic) - set(_Boost_TIMER_DEPENDENCIES chrono system) - set(_Boost_WAVE_DEPENDENCIES filesystem system serialization thread chrono date_time) - set(_Boost_WSERIALIZATION_DEPENDENCIES serialization) - elseif(NOT Boost_VERSION_STRING VERSION_LESS 1.54.0 AND Boost_VERSION_STRING VERSION_LESS 1.55.0) - 
set(_Boost_ATOMIC_DEPENDENCIES thread chrono system date_time) - set(_Boost_CHRONO_DEPENDENCIES system) - set(_Boost_FILESYSTEM_DEPENDENCIES system) - set(_Boost_IOSTREAMS_DEPENDENCIES regex) - set(_Boost_LOG_DEPENDENCIES log_setup date_time system filesystem thread regex chrono) - set(_Boost_MATH_DEPENDENCIES math_c99 math_c99f math_c99l math_tr1 math_tr1f math_tr1l regex random) - set(_Boost_MPI_DEPENDENCIES serialization) - set(_Boost_MPI_PYTHON_DEPENDENCIES python${component_python_version} mpi serialization) - set(_Boost_THREAD_DEPENDENCIES chrono system date_time atomic) - set(_Boost_TIMER_DEPENDENCIES chrono system) - set(_Boost_WAVE_DEPENDENCIES filesystem system serialization thread chrono date_time atomic) - set(_Boost_WSERIALIZATION_DEPENDENCIES serialization) - elseif(NOT Boost_VERSION_STRING VERSION_LESS 1.55.0 AND Boost_VERSION_STRING VERSION_LESS 1.56.0) - set(_Boost_CHRONO_DEPENDENCIES system) - set(_Boost_COROUTINE_DEPENDENCIES context system) - set(_Boost_FILESYSTEM_DEPENDENCIES system) - set(_Boost_IOSTREAMS_DEPENDENCIES regex) - set(_Boost_LOG_DEPENDENCIES log_setup date_time system filesystem thread regex chrono) - set(_Boost_MATH_DEPENDENCIES math_c99 math_c99f math_c99l math_tr1 math_tr1f math_tr1l regex random) - set(_Boost_MPI_DEPENDENCIES serialization) - set(_Boost_MPI_PYTHON_DEPENDENCIES python${component_python_version} mpi serialization) - set(_Boost_THREAD_DEPENDENCIES chrono system date_time atomic) - set(_Boost_TIMER_DEPENDENCIES chrono system) - set(_Boost_WAVE_DEPENDENCIES filesystem system serialization thread chrono date_time atomic) - set(_Boost_WSERIALIZATION_DEPENDENCIES serialization) - elseif(NOT Boost_VERSION_STRING VERSION_LESS 1.56.0 AND Boost_VERSION_STRING VERSION_LESS 1.59.0) - set(_Boost_CHRONO_DEPENDENCIES system) - set(_Boost_COROUTINE_DEPENDENCIES context system) - set(_Boost_FILESYSTEM_DEPENDENCIES system) - set(_Boost_IOSTREAMS_DEPENDENCIES regex) - set(_Boost_LOG_DEPENDENCIES log_setup date_time system filesystem thread regex chrono) - set(_Boost_MATH_DEPENDENCIES math_c99 math_c99f math_c99l math_tr1 math_tr1f math_tr1l atomic) - set(_Boost_MPI_DEPENDENCIES serialization) - set(_Boost_MPI_PYTHON_DEPENDENCIES python${component_python_version} mpi serialization) - set(_Boost_RANDOM_DEPENDENCIES system) - set(_Boost_THREAD_DEPENDENCIES chrono system date_time atomic) - set(_Boost_TIMER_DEPENDENCIES chrono system) - set(_Boost_WAVE_DEPENDENCIES filesystem system serialization thread chrono date_time atomic) - set(_Boost_WSERIALIZATION_DEPENDENCIES serialization) - elseif(NOT Boost_VERSION_STRING VERSION_LESS 1.59.0 AND Boost_VERSION_STRING VERSION_LESS 1.60.0) - set(_Boost_CHRONO_DEPENDENCIES system) - set(_Boost_COROUTINE_DEPENDENCIES context system) - set(_Boost_FILESYSTEM_DEPENDENCIES system) - set(_Boost_IOSTREAMS_DEPENDENCIES regex) - set(_Boost_LOG_DEPENDENCIES log_setup date_time system filesystem thread regex chrono atomic) - set(_Boost_MATH_DEPENDENCIES math_c99 math_c99f math_c99l math_tr1 math_tr1f math_tr1l atomic) - set(_Boost_MPI_DEPENDENCIES serialization) - set(_Boost_MPI_PYTHON_DEPENDENCIES python${component_python_version} mpi serialization) - set(_Boost_RANDOM_DEPENDENCIES system) - set(_Boost_THREAD_DEPENDENCIES chrono system date_time atomic) - set(_Boost_TIMER_DEPENDENCIES chrono system) - set(_Boost_WAVE_DEPENDENCIES filesystem system serialization thread chrono date_time atomic) - set(_Boost_WSERIALIZATION_DEPENDENCIES serialization) - elseif(NOT Boost_VERSION_STRING VERSION_LESS 1.60.0 AND Boost_VERSION_STRING 
VERSION_LESS 1.61.0) - set(_Boost_CHRONO_DEPENDENCIES system) - set(_Boost_COROUTINE_DEPENDENCIES context system) - set(_Boost_FILESYSTEM_DEPENDENCIES system) - set(_Boost_IOSTREAMS_DEPENDENCIES regex) - set(_Boost_LOG_DEPENDENCIES date_time log_setup system filesystem thread regex chrono atomic) - set(_Boost_MATH_DEPENDENCIES math_c99 math_c99f math_c99l math_tr1 math_tr1f math_tr1l atomic) - set(_Boost_MPI_DEPENDENCIES serialization) - set(_Boost_MPI_PYTHON_DEPENDENCIES python${component_python_version} mpi serialization) - set(_Boost_RANDOM_DEPENDENCIES system) - set(_Boost_THREAD_DEPENDENCIES chrono system date_time atomic) - set(_Boost_TIMER_DEPENDENCIES chrono system) - set(_Boost_WAVE_DEPENDENCIES filesystem system serialization thread chrono date_time atomic) - set(_Boost_WSERIALIZATION_DEPENDENCIES serialization) - elseif(NOT Boost_VERSION_STRING VERSION_LESS 1.61.0 AND Boost_VERSION_STRING VERSION_LESS 1.62.0) - set(_Boost_CHRONO_DEPENDENCIES system) - set(_Boost_CONTEXT_DEPENDENCIES thread chrono system date_time) - set(_Boost_COROUTINE_DEPENDENCIES context system) - set(_Boost_FILESYSTEM_DEPENDENCIES system) - set(_Boost_IOSTREAMS_DEPENDENCIES regex) - set(_Boost_LOG_DEPENDENCIES date_time log_setup system filesystem thread regex chrono atomic) - set(_Boost_MATH_DEPENDENCIES math_c99 math_c99f math_c99l math_tr1 math_tr1f math_tr1l atomic) - set(_Boost_MPI_DEPENDENCIES serialization) - set(_Boost_MPI_PYTHON_DEPENDENCIES python${component_python_version} mpi serialization) - set(_Boost_RANDOM_DEPENDENCIES system) - set(_Boost_THREAD_DEPENDENCIES chrono system date_time atomic) - set(_Boost_WAVE_DEPENDENCIES filesystem system serialization thread chrono date_time atomic) - set(_Boost_WSERIALIZATION_DEPENDENCIES serialization) - elseif(NOT Boost_VERSION_STRING VERSION_LESS 1.62.0 AND Boost_VERSION_STRING VERSION_LESS 1.63.0) - set(_Boost_CHRONO_DEPENDENCIES system) - set(_Boost_CONTEXT_DEPENDENCIES thread chrono system date_time) - set(_Boost_COROUTINE_DEPENDENCIES context system) - set(_Boost_FIBER_DEPENDENCIES context thread chrono system date_time) - set(_Boost_FILESYSTEM_DEPENDENCIES system) - set(_Boost_IOSTREAMS_DEPENDENCIES regex) - set(_Boost_LOG_DEPENDENCIES date_time log_setup system filesystem thread regex chrono atomic) - set(_Boost_MATH_DEPENDENCIES math_c99 math_c99f math_c99l math_tr1 math_tr1f math_tr1l atomic) - set(_Boost_MPI_DEPENDENCIES serialization) - set(_Boost_MPI_PYTHON_DEPENDENCIES python${component_python_version} mpi serialization) - set(_Boost_RANDOM_DEPENDENCIES system) - set(_Boost_THREAD_DEPENDENCIES chrono system date_time atomic) - set(_Boost_WAVE_DEPENDENCIES filesystem system serialization thread chrono date_time atomic) - set(_Boost_WSERIALIZATION_DEPENDENCIES serialization) - elseif(NOT Boost_VERSION_STRING VERSION_LESS 1.63.0 AND Boost_VERSION_STRING VERSION_LESS 1.65.0) - set(_Boost_CHRONO_DEPENDENCIES system) - set(_Boost_CONTEXT_DEPENDENCIES thread chrono system date_time) - set(_Boost_COROUTINE_DEPENDENCIES context system) - set(_Boost_COROUTINE2_DEPENDENCIES context fiber thread chrono system date_time) - set(_Boost_FIBER_DEPENDENCIES context thread chrono system date_time) - set(_Boost_FILESYSTEM_DEPENDENCIES system) - set(_Boost_IOSTREAMS_DEPENDENCIES regex) - set(_Boost_LOG_DEPENDENCIES date_time log_setup system filesystem thread regex chrono atomic) - set(_Boost_MATH_DEPENDENCIES math_c99 math_c99f math_c99l math_tr1 math_tr1f math_tr1l atomic) - set(_Boost_MPI_DEPENDENCIES serialization) - set(_Boost_MPI_PYTHON_DEPENDENCIES 
python${component_python_version} mpi serialization) - set(_Boost_RANDOM_DEPENDENCIES system) - set(_Boost_THREAD_DEPENDENCIES chrono system date_time atomic) - set(_Boost_WAVE_DEPENDENCIES filesystem system serialization thread chrono date_time atomic) - set(_Boost_WSERIALIZATION_DEPENDENCIES serialization) - elseif(NOT Boost_VERSION_STRING VERSION_LESS 1.65.0 AND Boost_VERSION_STRING VERSION_LESS 1.67.0) - set(_Boost_CHRONO_DEPENDENCIES system) - set(_Boost_CONTEXT_DEPENDENCIES thread chrono system date_time) - set(_Boost_COROUTINE_DEPENDENCIES context system) - set(_Boost_FIBER_DEPENDENCIES context thread chrono system date_time) - set(_Boost_FILESYSTEM_DEPENDENCIES system) - set(_Boost_IOSTREAMS_DEPENDENCIES regex) - set(_Boost_LOG_DEPENDENCIES date_time log_setup system filesystem thread regex chrono atomic) - set(_Boost_MATH_DEPENDENCIES math_c99 math_c99f math_c99l math_tr1 math_tr1f math_tr1l atomic) - set(_Boost_MPI_DEPENDENCIES serialization) - set(_Boost_MPI_PYTHON_DEPENDENCIES python${component_python_version} mpi serialization) - set(_Boost_NUMPY_DEPENDENCIES python${component_python_version}) - set(_Boost_RANDOM_DEPENDENCIES system) - set(_Boost_THREAD_DEPENDENCIES chrono system date_time atomic) - set(_Boost_TIMER_DEPENDENCIES chrono system) - set(_Boost_WAVE_DEPENDENCIES filesystem system serialization thread chrono date_time atomic) - set(_Boost_WSERIALIZATION_DEPENDENCIES serialization) - elseif(NOT Boost_VERSION_STRING VERSION_LESS 1.67.0 AND Boost_VERSION_STRING VERSION_LESS 1.68.0) - set(_Boost_CHRONO_DEPENDENCIES system) - set(_Boost_CONTEXT_DEPENDENCIES thread chrono system date_time) - set(_Boost_COROUTINE_DEPENDENCIES context system) - set(_Boost_FIBER_DEPENDENCIES context thread chrono system date_time) - set(_Boost_FILESYSTEM_DEPENDENCIES system) - set(_Boost_IOSTREAMS_DEPENDENCIES regex) - set(_Boost_LOG_DEPENDENCIES date_time log_setup system filesystem thread regex chrono atomic) - set(_Boost_MATH_DEPENDENCIES math_c99 math_c99f math_c99l math_tr1 math_tr1f math_tr1l atomic) - set(_Boost_MPI_DEPENDENCIES serialization) - set(_Boost_MPI_PYTHON_DEPENDENCIES python${component_python_version} mpi serialization) - set(_Boost_NUMPY_DEPENDENCIES python${component_python_version}) - set(_Boost_RANDOM_DEPENDENCIES system) - set(_Boost_THREAD_DEPENDENCIES chrono system date_time atomic) - set(_Boost_TIMER_DEPENDENCIES chrono system) - set(_Boost_WAVE_DEPENDENCIES filesystem system serialization thread chrono date_time atomic) - set(_Boost_WSERIALIZATION_DEPENDENCIES serialization) - elseif(NOT Boost_VERSION_STRING VERSION_LESS 1.68.0 AND Boost_VERSION_STRING VERSION_LESS 1.69.0) - set(_Boost_CHRONO_DEPENDENCIES system) - set(_Boost_CONTEXT_DEPENDENCIES thread chrono system date_time) - set(_Boost_CONTRACT_DEPENDENCIES thread chrono system date_time) - set(_Boost_COROUTINE_DEPENDENCIES context system) - set(_Boost_FIBER_DEPENDENCIES context thread chrono system date_time) - set(_Boost_FILESYSTEM_DEPENDENCIES system) - set(_Boost_IOSTREAMS_DEPENDENCIES regex) - set(_Boost_LOG_DEPENDENCIES date_time log_setup system filesystem thread regex chrono atomic) - set(_Boost_MATH_DEPENDENCIES math_c99 math_c99f math_c99l math_tr1 math_tr1f math_tr1l atomic) - set(_Boost_MPI_DEPENDENCIES serialization) - set(_Boost_MPI_PYTHON_DEPENDENCIES python${component_python_version} mpi serialization) - set(_Boost_NUMPY_DEPENDENCIES python${component_python_version}) - set(_Boost_RANDOM_DEPENDENCIES system) - set(_Boost_THREAD_DEPENDENCIES chrono system date_time atomic) - 
set(_Boost_TIMER_DEPENDENCIES chrono system) - set(_Boost_WAVE_DEPENDENCIES filesystem system serialization thread chrono date_time atomic) - set(_Boost_WSERIALIZATION_DEPENDENCIES serialization) - elseif(NOT Boost_VERSION_STRING VERSION_LESS 1.69.0 AND Boost_VERSION_STRING VERSION_LESS 1.70.0) - set(_Boost_CONTRACT_DEPENDENCIES thread chrono date_time) - set(_Boost_COROUTINE_DEPENDENCIES context) - set(_Boost_FIBER_DEPENDENCIES context) - set(_Boost_IOSTREAMS_DEPENDENCIES regex) - set(_Boost_LOG_DEPENDENCIES date_time log_setup filesystem thread regex chrono atomic) - set(_Boost_MATH_DEPENDENCIES math_c99 math_c99f math_c99l math_tr1 math_tr1f math_tr1l atomic) - set(_Boost_MPI_DEPENDENCIES serialization) - set(_Boost_MPI_PYTHON_DEPENDENCIES python${component_python_version} mpi serialization) - set(_Boost_NUMPY_DEPENDENCIES python${component_python_version}) - set(_Boost_THREAD_DEPENDENCIES chrono date_time atomic) - set(_Boost_TIMER_DEPENDENCIES chrono system) - set(_Boost_WAVE_DEPENDENCIES filesystem serialization thread chrono date_time atomic) - set(_Boost_WSERIALIZATION_DEPENDENCIES serialization) - elseif(NOT Boost_VERSION_STRING VERSION_LESS 1.70.0) - set(_Boost_CONTRACT_DEPENDENCIES thread chrono date_time) - set(_Boost_COROUTINE_DEPENDENCIES context) - set(_Boost_FIBER_DEPENDENCIES context) - set(_Boost_IOSTREAMS_DEPENDENCIES regex) - set(_Boost_LOG_DEPENDENCIES date_time log_setup filesystem thread regex chrono atomic) - set(_Boost_MATH_DEPENDENCIES math_c99 math_c99f math_c99l math_tr1 math_tr1f math_tr1l atomic) - set(_Boost_MPI_DEPENDENCIES serialization) - set(_Boost_MPI_PYTHON_DEPENDENCIES python${component_python_version} mpi serialization) - set(_Boost_NUMPY_DEPENDENCIES python${component_python_version}) - set(_Boost_THREAD_DEPENDENCIES chrono date_time atomic) - set(_Boost_TIMER_DEPENDENCIES chrono) - set(_Boost_WAVE_DEPENDENCIES filesystem serialization thread chrono date_time atomic) - set(_Boost_WSERIALIZATION_DEPENDENCIES serialization) - if(NOT Boost_VERSION_STRING VERSION_LESS 1.72.0) - message(WARNING "New Boost version may have incorrect or missing dependencies and imported targets") - endif() - endif() - - string(TOUPPER ${component} uppercomponent) - set(${_ret} ${_Boost_${uppercomponent}_DEPENDENCIES} PARENT_SCOPE) - set(_Boost_IMPORTED_TARGETS ${_Boost_IMPORTED_TARGETS} PARENT_SCOPE) - - string(REGEX REPLACE ";" " " _boost_DEPS_STRING "${_Boost_${uppercomponent}_DEPENDENCIES}") - if (NOT _boost_DEPS_STRING) - set(_boost_DEPS_STRING "(none)") - endif() - # message(STATUS "Dependencies for Boost::${component}: ${_boost_DEPS_STRING}") -endfunction() - -# -# Get component headers. This is the primary header (or headers) for -# a given component, and is used to check that the headers are present -# as well as the library itself as an extra sanity check of the build -# environment. -# -# component - the component to check -# _hdrs -# -function(_Boost_COMPONENT_HEADERS component _hdrs) - # Handle Python version suffixes - if(component MATCHES "^(python|mpi_python|numpy)([0-9][0-9]?|[0-9]\\.[0-9])\$") - set(component "${CMAKE_MATCH_1}") - set(component_python_version "${CMAKE_MATCH_2}") - endif() - - # Note: new boost components will require adding here. The header - # must be present in all versions of Boost providing a library. 
- set(_Boost_ATOMIC_HEADERS "boost/atomic.hpp") - set(_Boost_CHRONO_HEADERS "boost/chrono.hpp") - set(_Boost_CONTAINER_HEADERS "boost/container/container_fwd.hpp") - set(_Boost_CONTRACT_HEADERS "boost/contract.hpp") - if(Boost_VERSION_STRING VERSION_LESS 1.61.0) - set(_Boost_CONTEXT_HEADERS "boost/context/all.hpp") - else() - set(_Boost_CONTEXT_HEADERS "boost/context/detail/fcontext.hpp") - endif() - set(_Boost_COROUTINE_HEADERS "boost/coroutine/all.hpp") - set(_Boost_DATE_TIME_HEADERS "boost/date_time/date.hpp") - set(_Boost_EXCEPTION_HEADERS "boost/exception/exception.hpp") - set(_Boost_FIBER_HEADERS "boost/fiber/all.hpp") - set(_Boost_FILESYSTEM_HEADERS "boost/filesystem/path.hpp") - set(_Boost_GRAPH_HEADERS "boost/graph/adjacency_list.hpp") - set(_Boost_GRAPH_PARALLEL_HEADERS "boost/graph/adjacency_list.hpp") - set(_Boost_IOSTREAMS_HEADERS "boost/iostreams/stream.hpp") - set(_Boost_LOCALE_HEADERS "boost/locale.hpp") - set(_Boost_LOG_HEADERS "boost/log/core.hpp") - set(_Boost_LOG_SETUP_HEADERS "boost/log/detail/setup_config.hpp") - set(_Boost_MATH_HEADERS "boost/math_fwd.hpp") - set(_Boost_MATH_C99_HEADERS "boost/math/tr1.hpp") - set(_Boost_MATH_C99F_HEADERS "boost/math/tr1.hpp") - set(_Boost_MATH_C99L_HEADERS "boost/math/tr1.hpp") - set(_Boost_MATH_TR1_HEADERS "boost/math/tr1.hpp") - set(_Boost_MATH_TR1F_HEADERS "boost/math/tr1.hpp") - set(_Boost_MATH_TR1L_HEADERS "boost/math/tr1.hpp") - set(_Boost_MPI_HEADERS "boost/mpi.hpp") - set(_Boost_MPI_PYTHON_HEADERS "boost/mpi/python/config.hpp") - set(_Boost_NUMPY_HEADERS "boost/python/numpy.hpp") - set(_Boost_PRG_EXEC_MONITOR_HEADERS "boost/test/prg_exec_monitor.hpp") - set(_Boost_PROGRAM_OPTIONS_HEADERS "boost/program_options.hpp") - set(_Boost_PYTHON_HEADERS "boost/python.hpp") - set(_Boost_RANDOM_HEADERS "boost/random.hpp") - set(_Boost_REGEX_HEADERS "boost/regex.hpp") - set(_Boost_SERIALIZATION_HEADERS "boost/serialization/serialization.hpp") - set(_Boost_SIGNALS_HEADERS "boost/signals.hpp") - set(_Boost_STACKTRACE_ADDR2LINE_HEADERS "boost/stacktrace.hpp") - set(_Boost_STACKTRACE_BACKTRACE_HEADERS "boost/stacktrace.hpp") - set(_Boost_STACKTRACE_BASIC_HEADERS "boost/stacktrace.hpp") - set(_Boost_STACKTRACE_NOOP_HEADERS "boost/stacktrace.hpp") - set(_Boost_STACKTRACE_WINDBG_CACHED_HEADERS "boost/stacktrace.hpp") - set(_Boost_STACKTRACE_WINDBG_HEADERS "boost/stacktrace.hpp") - set(_Boost_SYSTEM_HEADERS "boost/system/config.hpp") - set(_Boost_TEST_EXEC_MONITOR_HEADERS "boost/test/test_exec_monitor.hpp") - set(_Boost_THREAD_HEADERS "boost/thread.hpp") - set(_Boost_TIMER_HEADERS "boost/timer.hpp") - set(_Boost_TYPE_ERASURE_HEADERS "boost/type_erasure/config.hpp") - set(_Boost_UNIT_TEST_FRAMEWORK_HEADERS "boost/test/framework.hpp") - set(_Boost_WAVE_HEADERS "boost/wave.hpp") - set(_Boost_WSERIALIZATION_HEADERS "boost/archive/text_wiarchive.hpp") - if(WIN32) - set(_Boost_BZIP2_HEADERS "boost/iostreams/filter/bzip2.hpp") - set(_Boost_ZLIB_HEADERS "boost/iostreams/filter/zlib.hpp") - endif() - - string(TOUPPER ${component} uppercomponent) - set(${_hdrs} ${_Boost_${uppercomponent}_HEADERS} PARENT_SCOPE) - - string(REGEX REPLACE ";" " " _boost_HDRS_STRING "${_Boost_${uppercomponent}_HEADERS}") - if (NOT _boost_HDRS_STRING) - set(_boost_HDRS_STRING "(none)") - endif() - # message(STATUS "Headers for Boost::${component}: ${_boost_HDRS_STRING}") -endfunction() - -# -# Determine if any missing dependencies require adding to the component list. 
-# -# Sets _Boost_${COMPONENT}_DEPENDENCIES for each required component, -# plus _Boost_IMPORTED_TARGETS (TRUE if imported targets should be -# defined; FALSE if dependency information is unavailable). -# -# componentvar - the component list variable name -# extravar - the indirect dependency list variable name -# -# -function(_Boost_MISSING_DEPENDENCIES componentvar extravar) - # _boost_unprocessed_components - list of components requiring processing - # _boost_processed_components - components already processed (or currently being processed) - # _boost_new_components - new components discovered for future processing - # - list(APPEND _boost_unprocessed_components ${${componentvar}}) - - while(_boost_unprocessed_components) - list(APPEND _boost_processed_components ${_boost_unprocessed_components}) - foreach(component ${_boost_unprocessed_components}) - string(TOUPPER ${component} uppercomponent) - set(${_ret} ${_Boost_${uppercomponent}_DEPENDENCIES} PARENT_SCOPE) - _Boost_COMPONENT_DEPENDENCIES("${component}" _Boost_${uppercomponent}_DEPENDENCIES) - set(_Boost_${uppercomponent}_DEPENDENCIES ${_Boost_${uppercomponent}_DEPENDENCIES} PARENT_SCOPE) - set(_Boost_IMPORTED_TARGETS ${_Boost_IMPORTED_TARGETS} PARENT_SCOPE) - foreach(componentdep ${_Boost_${uppercomponent}_DEPENDENCIES}) - if (NOT ("${componentdep}" IN_LIST _boost_processed_components OR "${componentdep}" IN_LIST _boost_new_components)) - list(APPEND _boost_new_components ${componentdep}) - endif() - endforeach() - endforeach() - set(_boost_unprocessed_components ${_boost_new_components}) - unset(_boost_new_components) - endwhile() - set(_boost_extra_components ${_boost_processed_components}) - if(_boost_extra_components AND ${componentvar}) - list(REMOVE_ITEM _boost_extra_components ${${componentvar}}) - endif() - set(${componentvar} ${_boost_processed_components} PARENT_SCOPE) - set(${extravar} ${_boost_extra_components} PARENT_SCOPE) -endfunction() - -# -# Some boost libraries may require particular set of compler features. -# The very first one was `boost::fiber` introduced in Boost 1.62. -# One can check required compiler features of it in -# - `${Boost_ROOT}/libs/fiber/build/Jamfile.v2`; -# - `${Boost_ROOT}/libs/context/build/Jamfile.v2`. -# -# TODO (Re)Check compiler features on (every?) release ??? -# One may use the following command to get the files to check: -# -# $ find . -name Jamfile.v2 | grep build | xargs grep -l cxx1 -# -function(_Boost_COMPILER_FEATURES component _ret) - # Boost >= 1.62 - if(NOT Boost_VERSION_STRING VERSION_LESS 1.62.0) - set(_Boost_FIBER_COMPILER_FEATURES - cxx_alias_templates - cxx_auto_type - cxx_constexpr - cxx_defaulted_functions - cxx_final - cxx_lambdas - cxx_noexcept - cxx_nullptr - cxx_rvalue_references - cxx_thread_local - cxx_variadic_templates - ) - # Compiler feature for `context` same as for `fiber`. - set(_Boost_CONTEXT_COMPILER_FEATURES ${_Boost_FIBER_COMPILER_FEATURES}) - endif() - - # Boost Contract library available in >= 1.67 - if(NOT Boost_VERSION_STRING VERSION_LESS 1.67.0) - # From `libs/contract/build/boost_contract_build.jam` - set(_Boost_CONTRACT_COMPILER_FEATURES - cxx_lambdas - cxx_variadic_templates - ) - endif() - - string(TOUPPER ${component} uppercomponent) - set(${_ret} ${_Boost_${uppercomponent}_COMPILER_FEATURES} PARENT_SCOPE) -endfunction() - -# -# Update library search directory hint variable with paths used by prebuilt boost binaries. 
-# -# Prebuilt windows binaries (https://sourceforge.net/projects/boost/files/boost-binaries/) -# have library directories named using MSVC compiler version and architecture. -# This function would append corresponding directories if MSVC is a current compiler, -# so having `BOOST_ROOT` would be enough to specify to find everything. -# -function(_Boost_UPDATE_WINDOWS_LIBRARY_SEARCH_DIRS_WITH_PREBUILT_PATHS componentlibvar basedir) - if("x${CMAKE_CXX_COMPILER_ID}" STREQUAL "xMSVC") - if(CMAKE_SIZEOF_VOID_P EQUAL 8) - set(_arch_suffix 64) - else() - set(_arch_suffix 32) - endif() - if(MSVC_TOOLSET_VERSION GREATER_EQUAL 150) - # Not yet known. - elseif(MSVC_TOOLSET_VERSION GREATER_EQUAL 140) - # MSVC toolset 14.x versions are forward compatible. - foreach(v 9 8 7 6 5 4 3 2 1 0) - if(MSVC_TOOLSET_VERSION GREATER_EQUAL 14${v}) - list(APPEND ${componentlibvar} ${basedir}/lib${_arch_suffix}-msvc-14.${v}) - endif() - endforeach() - elseif(MSVC_TOOLSET_VERSION GREATER_EQUAL 80) - math(EXPR _toolset_major_version "${MSVC_TOOLSET_VERSION} / 10") - list(APPEND ${componentlibvar} ${basedir}/lib${_arch_suffix}-msvc-${_toolset_major_version}.0) - endif() - set(${componentlibvar} ${${componentlibvar}} PARENT_SCOPE) - endif() -endfunction() - -# -# End functions/macros -# -#------------------------------------------------------------------------------- - -#------------------------------------------------------------------------------- -# main. -#------------------------------------------------------------------------------- - - -# If the user sets Boost_LIBRARY_DIR, use it as the default for both -# configurations. -if(NOT Boost_LIBRARY_DIR_RELEASE AND Boost_LIBRARY_DIR) - set(Boost_LIBRARY_DIR_RELEASE "${Boost_LIBRARY_DIR}") -endif() -if(NOT Boost_LIBRARY_DIR_DEBUG AND Boost_LIBRARY_DIR) - set(Boost_LIBRARY_DIR_DEBUG "${Boost_LIBRARY_DIR}") -endif() - -if(NOT DEFINED Boost_USE_DEBUG_LIBS) - set(Boost_USE_DEBUG_LIBS TRUE) -endif() -if(NOT DEFINED Boost_USE_RELEASE_LIBS) - set(Boost_USE_RELEASE_LIBS TRUE) -endif() -if(NOT DEFINED Boost_USE_MULTITHREADED) - set(Boost_USE_MULTITHREADED TRUE) -endif() -if(NOT DEFINED Boost_USE_DEBUG_RUNTIME) - set(Boost_USE_DEBUG_RUNTIME TRUE) -endif() - -# Check the version of Boost against the requested version. -if(Boost_FIND_VERSION AND NOT Boost_FIND_VERSION_MINOR) - message(SEND_ERROR "When requesting a specific version of Boost, you must provide at least the major and minor version numbers, e.g., 1.34") -endif() - -if(Boost_FIND_VERSION_EXACT) - # The version may appear in a directory with or without the patch - # level, even when the patch level is non-zero. - set(_boost_TEST_VERSIONS - "${Boost_FIND_VERSION_MAJOR}.${Boost_FIND_VERSION_MINOR}.${Boost_FIND_VERSION_PATCH}" - "${Boost_FIND_VERSION_MAJOR}.${Boost_FIND_VERSION_MINOR}") -else() - # The user has not requested an exact version. Among known - # versions, find those that are acceptable to the user request. - # - # Note: When adding a new Boost release, also update the dependency - # information in _Boost_COMPONENT_DEPENDENCIES and - # _Boost_COMPONENT_HEADERS. See the instructions at the top of - # _Boost_COMPONENT_DEPENDENCIES. 
- set(_Boost_KNOWN_VERSIONS ${Boost_ADDITIONAL_VERSIONS} - "1.71.0" "1.71" "1.70.0" "1.70" "1.69.0" "1.69" - "1.68.0" "1.68" "1.67.0" "1.67" "1.66.0" "1.66" "1.65.1" "1.65.0" "1.65" - "1.64.0" "1.64" "1.63.0" "1.63" "1.62.0" "1.62" "1.61.0" "1.61" "1.60.0" "1.60" - "1.59.0" "1.59" "1.58.0" "1.58" "1.57.0" "1.57" "1.56.0" "1.56" "1.55.0" "1.55" - "1.54.0" "1.54" "1.53.0" "1.53" "1.52.0" "1.52" "1.51.0" "1.51" - "1.50.0" "1.50" "1.49.0" "1.49" "1.48.0" "1.48" "1.47.0" "1.47" "1.46.1" - "1.46.0" "1.46" "1.45.0" "1.45" "1.44.0" "1.44" "1.43.0" "1.43" "1.42.0" "1.42" - "1.41.0" "1.41" "1.40.0" "1.40" "1.39.0" "1.39" "1.38.0" "1.38" "1.37.0" "1.37" - "1.36.1" "1.36.0" "1.36" "1.35.1" "1.35.0" "1.35" "1.34.1" "1.34.0" - "1.34" "1.33.1" "1.33.0" "1.33") - - set(_boost_TEST_VERSIONS) - if(Boost_FIND_VERSION) - set(_Boost_FIND_VERSION_SHORT "${Boost_FIND_VERSION_MAJOR}.${Boost_FIND_VERSION_MINOR}") - # Select acceptable versions. - foreach(version ${_Boost_KNOWN_VERSIONS}) - if(NOT "${version}" VERSION_LESS "${Boost_FIND_VERSION}") - # This version is high enough. - list(APPEND _boost_TEST_VERSIONS "${version}") - elseif("${version}.99" VERSION_EQUAL "${_Boost_FIND_VERSION_SHORT}.99") - # This version is a short-form for the requested version with - # the patch level dropped. - list(APPEND _boost_TEST_VERSIONS "${version}") - endif() - endforeach() - else() - # Any version is acceptable. - set(_boost_TEST_VERSIONS "${_Boost_KNOWN_VERSIONS}") - endif() -endif() - -_Boost_DEBUG_PRINT_VAR("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" "_boost_TEST_VERSIONS") -_Boost_DEBUG_PRINT_VAR("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" "Boost_USE_MULTITHREADED") -_Boost_DEBUG_PRINT_VAR("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" "Boost_USE_STATIC_LIBS") -_Boost_DEBUG_PRINT_VAR("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" "Boost_USE_STATIC_RUNTIME") -_Boost_DEBUG_PRINT_VAR("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" "Boost_ADDITIONAL_VERSIONS") -_Boost_DEBUG_PRINT_VAR("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" "Boost_NO_SYSTEM_PATHS") - -# Supply Boost_LIB_DIAGNOSTIC_DEFINITIONS as a convenience target. It -# will only contain any interface definitions on WIN32, but is created -# on all platforms to keep end user code free from platform dependent -# code. Also provide convenience targets to disable autolinking and -# enable dynamic linking. -if(NOT TARGET Boost::diagnostic_definitions) - add_library(Boost::diagnostic_definitions INTERFACE IMPORTED) - add_library(Boost::disable_autolinking INTERFACE IMPORTED) - add_library(Boost::dynamic_linking INTERFACE IMPORTED) - set_target_properties(Boost::dynamic_linking PROPERTIES - INTERFACE_COMPILE_DEFINITIONS "BOOST_ALL_DYN_LINK") -endif() -if(WIN32) - # In windows, automatic linking is performed, so you do not have - # to specify the libraries. If you are linking to a dynamic - # runtime, then you can choose to link to either a static or a - # dynamic Boost library, the default is to do a static link. You - # can alter this for a specific library "whatever" by defining - # BOOST_WHATEVER_DYN_LINK to force Boost library "whatever" to be - # linked dynamically. Alternatively you can force all Boost - # libraries to dynamic link by defining BOOST_ALL_DYN_LINK. - - # This feature can be disabled for Boost library "whatever" by - # defining BOOST_WHATEVER_NO_LIB, or for all of Boost by defining - # BOOST_ALL_NO_LIB. 
- - # If you want to observe which libraries are being linked against - # then defining BOOST_LIB_DIAGNOSTIC will cause the auto-linking - # code to emit a #pragma message each time a library is selected - # for linking. - set(Boost_LIB_DIAGNOSTIC_DEFINITIONS "-DBOOST_LIB_DIAGNOSTIC") - set_target_properties(Boost::diagnostic_definitions PROPERTIES - INTERFACE_COMPILE_DEFINITIONS "BOOST_LIB_DIAGNOSTIC") - set_target_properties(Boost::disable_autolinking PROPERTIES - INTERFACE_COMPILE_DEFINITIONS "BOOST_ALL_NO_LIB") -endif() - -# Patch for GTSAM: -if (POLICY CMP0074) - cmake_policy(GET CMP0074 _Boost_CMP0074) -endif() - -if(NOT "x${_Boost_CMP0074}x" STREQUAL "xNEWx") - _Boost_CHECK_SPELLING(Boost_ROOT) -endif() -unset(_Boost_CMP0074) -_Boost_CHECK_SPELLING(Boost_LIBRARYDIR) -_Boost_CHECK_SPELLING(Boost_INCLUDEDIR) - -# Collect environment variable inputs as hints. Do not consider changes. -foreach(v BOOSTROOT BOOST_ROOT BOOST_INCLUDEDIR BOOST_LIBRARYDIR) - set(_env $ENV{${v}}) - if(_env) - file(TO_CMAKE_PATH "${_env}" _ENV_${v}) - else() - set(_ENV_${v} "") - endif() -endforeach() -if(NOT _ENV_BOOST_ROOT AND _ENV_BOOSTROOT) - set(_ENV_BOOST_ROOT "${_ENV_BOOSTROOT}") -endif() - -# Collect inputs and cached results. Detect changes since the last run. -if(NOT BOOST_ROOT AND BOOSTROOT) - set(BOOST_ROOT "${BOOSTROOT}") -endif() -set(_Boost_VARS_DIR - BOOST_ROOT - Boost_NO_SYSTEM_PATHS - ) - -_Boost_DEBUG_PRINT_VAR("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" "BOOST_ROOT") -_Boost_DEBUG_PRINT_VAR("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" "BOOST_ROOT" ENVIRONMENT) -_Boost_DEBUG_PRINT_VAR("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" "BOOST_INCLUDEDIR") -_Boost_DEBUG_PRINT_VAR("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" "BOOST_INCLUDEDIR" ENVIRONMENT) -_Boost_DEBUG_PRINT_VAR("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" "BOOST_LIBRARYDIR") -_Boost_DEBUG_PRINT_VAR("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" "BOOST_LIBRARYDIR" ENVIRONMENT) - -# ------------------------------------------------------------------------ -# Search for Boost include DIR -# ------------------------------------------------------------------------ - -set(_Boost_VARS_INC BOOST_INCLUDEDIR Boost_INCLUDE_DIR Boost_ADDITIONAL_VERSIONS) -_Boost_CHANGE_DETECT(_Boost_CHANGE_INCDIR ${_Boost_VARS_DIR} ${_Boost_VARS_INC}) -# Clear Boost_INCLUDE_DIR if it did not change but other input affecting the -# location did. We will find a new one based on the new inputs. -if(_Boost_CHANGE_INCDIR AND NOT _Boost_INCLUDE_DIR_CHANGED) - unset(Boost_INCLUDE_DIR CACHE) -endif() - -if(NOT Boost_INCLUDE_DIR) - set(_boost_INCLUDE_SEARCH_DIRS "") - if(BOOST_INCLUDEDIR) - list(APPEND _boost_INCLUDE_SEARCH_DIRS ${BOOST_INCLUDEDIR}) - elseif(_ENV_BOOST_INCLUDEDIR) - list(APPEND _boost_INCLUDE_SEARCH_DIRS ${_ENV_BOOST_INCLUDEDIR}) - endif() - - if( BOOST_ROOT ) - list(APPEND _boost_INCLUDE_SEARCH_DIRS ${BOOST_ROOT}/include ${BOOST_ROOT}) - elseif( _ENV_BOOST_ROOT ) - list(APPEND _boost_INCLUDE_SEARCH_DIRS ${_ENV_BOOST_ROOT}/include ${_ENV_BOOST_ROOT}) - endif() - - if( Boost_NO_SYSTEM_PATHS) - list(APPEND _boost_INCLUDE_SEARCH_DIRS NO_CMAKE_SYSTEM_PATH NO_SYSTEM_ENVIRONMENT_PATH) - else() - if("x${CMAKE_CXX_COMPILER_ID}" STREQUAL "xMSVC") - foreach(ver ${_boost_TEST_VERSIONS}) - string(REPLACE "." 
"_" ver "${ver}") - list(APPEND _boost_INCLUDE_SEARCH_DIRS PATHS "C:/local/boost_${ver}") - endforeach() - endif() - list(APPEND _boost_INCLUDE_SEARCH_DIRS PATHS - C:/boost/include - C:/boost - /sw/local/include - ) - endif() - - # Try to find Boost by stepping backwards through the Boost versions - # we know about. - # Build a list of path suffixes for each version. - set(_boost_PATH_SUFFIXES) - foreach(_boost_VER ${_boost_TEST_VERSIONS}) - # Add in a path suffix, based on the required version, ideally - # we could read this from version.hpp, but for that to work we'd - # need to know the include dir already - set(_boost_BOOSTIFIED_VERSION) - - # Transform 1.35 => 1_35 and 1.36.0 => 1_36_0 - if(_boost_VER MATCHES "([0-9]+)\\.([0-9]+)\\.([0-9]+)") - set(_boost_BOOSTIFIED_VERSION - "${CMAKE_MATCH_1}_${CMAKE_MATCH_2}_${CMAKE_MATCH_3}") - elseif(_boost_VER MATCHES "([0-9]+)\\.([0-9]+)") - set(_boost_BOOSTIFIED_VERSION - "${CMAKE_MATCH_1}_${CMAKE_MATCH_2}") - endif() - - list(APPEND _boost_PATH_SUFFIXES - "boost-${_boost_BOOSTIFIED_VERSION}" - "boost_${_boost_BOOSTIFIED_VERSION}" - "boost/boost-${_boost_BOOSTIFIED_VERSION}" - "boost/boost_${_boost_BOOSTIFIED_VERSION}" - ) - - endforeach() - - _Boost_DEBUG_PRINT_VAR("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" "_boost_INCLUDE_SEARCH_DIRS") - _Boost_DEBUG_PRINT_VAR("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" "_boost_PATH_SUFFIXES") - - # Look for a standard boost header file. - find_path(Boost_INCLUDE_DIR - NAMES boost/config.hpp - HINTS ${_boost_INCLUDE_SEARCH_DIRS} - PATH_SUFFIXES ${_boost_PATH_SUFFIXES} - ) -endif() - -# ------------------------------------------------------------------------ -# Extract version information from version.hpp -# ------------------------------------------------------------------------ - -if(Boost_INCLUDE_DIR) - _Boost_DEBUG_PRINT("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" - "location of version.hpp: ${Boost_INCLUDE_DIR}/boost/version.hpp") - - # Extract Boost_VERSION_MACRO and Boost_LIB_VERSION from version.hpp - set(Boost_VERSION_MACRO 0) - set(Boost_LIB_VERSION "") - file(STRINGS "${Boost_INCLUDE_DIR}/boost/version.hpp" _boost_VERSION_HPP_CONTENTS REGEX "#define BOOST_(LIB_)?VERSION ") - if("${_boost_VERSION_HPP_CONTENTS}" MATCHES "#define BOOST_VERSION ([0-9]+)") - set(Boost_VERSION_MACRO "${CMAKE_MATCH_1}") - endif() - if("${_boost_VERSION_HPP_CONTENTS}" MATCHES "#define BOOST_LIB_VERSION \"([0-9_]+)\"") - set(Boost_LIB_VERSION "${CMAKE_MATCH_1}") - endif() - unset(_boost_VERSION_HPP_CONTENTS) - - # Calculate version components - math(EXPR Boost_VERSION_MAJOR "${Boost_VERSION_MACRO} / 100000") - math(EXPR Boost_VERSION_MINOR "${Boost_VERSION_MACRO} / 100 % 1000") - math(EXPR Boost_VERSION_PATCH "${Boost_VERSION_MACRO} % 100") - set(Boost_VERSION_COUNT 3) - - # Define alias variables for backwards compat. 
- set(Boost_MAJOR_VERSION ${Boost_VERSION_MAJOR}) - set(Boost_MINOR_VERSION ${Boost_VERSION_MINOR}) - set(Boost_SUBMINOR_VERSION ${Boost_VERSION_PATCH}) - - # Define Boost version in x.y.z format - set(Boost_VERSION_STRING "${Boost_VERSION_MAJOR}.${Boost_VERSION_MINOR}.${Boost_VERSION_PATCH}") - - # Define final Boost_VERSION - if (POLICY CMP0093) - cmake_policy(GET CMP0093 _Boost_CMP0093 - PARENT_SCOPE # undocumented, do not use outside of CMake - ) - endif() - - if("x${_Boost_CMP0093}x" STREQUAL "xNEWx") - set(Boost_VERSION ${Boost_VERSION_STRING}) - else() - set(Boost_VERSION ${Boost_VERSION_MACRO}) - endif() - unset(_Boost_CMP0093) - - _Boost_DEBUG_PRINT_VAR("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" "Boost_VERSION") - _Boost_DEBUG_PRINT_VAR("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" "Boost_VERSION_STRING") - _Boost_DEBUG_PRINT_VAR("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" "Boost_VERSION_MACRO") - _Boost_DEBUG_PRINT_VAR("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" "Boost_VERSION_MAJOR") - _Boost_DEBUG_PRINT_VAR("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" "Boost_VERSION_MINOR") - _Boost_DEBUG_PRINT_VAR("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" "Boost_VERSION_PATCH") - _Boost_DEBUG_PRINT_VAR("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" "Boost_VERSION_COUNT") -endif() - -# ------------------------------------------------------------------------ -# Prefix initialization -# ------------------------------------------------------------------------ - -set(Boost_LIB_PREFIX "") -if ( (GHSMULTI AND Boost_USE_STATIC_LIBS) OR - (WIN32 AND Boost_USE_STATIC_LIBS AND NOT CYGWIN) ) - set(Boost_LIB_PREFIX "lib") -endif() - -if ( NOT Boost_NAMESPACE ) - set(Boost_NAMESPACE "boost") -endif() - -_Boost_DEBUG_PRINT_VAR("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" "Boost_LIB_PREFIX") -_Boost_DEBUG_PRINT_VAR("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" "Boost_NAMESPACE") - -# ------------------------------------------------------------------------ -# Suffix initialization and compiler suffix detection. -# ------------------------------------------------------------------------ - -set(_Boost_VARS_NAME - Boost_NAMESPACE - Boost_COMPILER - Boost_THREADAPI - Boost_USE_DEBUG_PYTHON - Boost_USE_MULTITHREADED - Boost_USE_STATIC_LIBS - Boost_USE_STATIC_RUNTIME - Boost_USE_STLPORT - Boost_USE_STLPORT_DEPRECATED_NATIVE_IOSTREAMS - ) -_Boost_CHANGE_DETECT(_Boost_CHANGE_LIBNAME ${_Boost_VARS_NAME}) - -# Setting some more suffixes for the library -if (Boost_COMPILER) - set(_boost_COMPILER ${Boost_COMPILER}) - _Boost_DEBUG_PRINT_VAR("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" - "_boost_COMPILER" SOURCE "user-specified via Boost_COMPILER") -else() - # Attempt to guess the compiler suffix - # NOTE: this is not perfect yet, if you experience any issues - # please report them and use the Boost_COMPILER variable - # to work around the problems. 
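As a minimal sketch of the workaround mentioned in the note above (all paths, versions, and component names here are illustrative assumptions, not part of the patch), the Boost_COMPILER and BOOST_ROOT hints read by this module can be preset before find_package(Boost):

    # Hypothetical values; adjust to the actual toolchain and Boost install.
    set(BOOST_ROOT "/opt/boost" CACHE PATH "Boost installation prefix")
    set(Boost_COMPILER "-gcc9" CACHE STRING "Compiler suffix used in Boost library names")
    find_package(Boost 1.65 REQUIRED COMPONENTS filesystem serialization thread)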
- _Boost_GUESS_COMPILER_PREFIX(_boost_COMPILER) -endif() - -set (_boost_MULTITHREADED "-mt") -if( NOT Boost_USE_MULTITHREADED ) - set (_boost_MULTITHREADED "") -endif() -_Boost_DEBUG_PRINT_VAR("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" "_boost_MULTITHREADED") - -#====================== -# Systematically build up the Boost ABI tag for the 'tagged' and 'versioned' layouts -# http://boost.org/doc/libs/1_66_0/more/getting_started/windows.html#library-naming -# http://boost.org/doc/libs/1_66_0/boost/config/auto_link.hpp -# http://boost.org/doc/libs/1_66_0/tools/build/src/tools/common.jam -# http://boost.org/doc/libs/1_66_0/boostcpp.jam -set( _boost_RELEASE_ABI_TAG "-") -set( _boost_DEBUG_ABI_TAG "-") -# Key Use this library when: -# s linking statically to the C++ standard library and -# compiler runtime support libraries. -if(Boost_USE_STATIC_RUNTIME) - set( _boost_RELEASE_ABI_TAG "${_boost_RELEASE_ABI_TAG}s") - set( _boost_DEBUG_ABI_TAG "${_boost_DEBUG_ABI_TAG}s") -endif() -# g using debug versions of the standard and runtime -# support libraries -if(WIN32 AND Boost_USE_DEBUG_RUNTIME) - if("x${CMAKE_CXX_COMPILER_ID}" STREQUAL "xMSVC" - OR "x${CMAKE_CXX_COMPILER_ID}" STREQUAL "xClang" - OR "x${CMAKE_CXX_COMPILER_ID}" STREQUAL "xIntel") - string(APPEND _boost_DEBUG_ABI_TAG "g") - endif() -endif() -# y using special debug build of python -if(Boost_USE_DEBUG_PYTHON) - string(APPEND _boost_DEBUG_ABI_TAG "y") -endif() -# d using a debug version of your code -string(APPEND _boost_DEBUG_ABI_TAG "d") -# p using the STLport standard library rather than the -# default one supplied with your compiler -if(Boost_USE_STLPORT) - string(APPEND _boost_RELEASE_ABI_TAG "p") - string(APPEND _boost_DEBUG_ABI_TAG "p") -endif() -# n using the STLport deprecated "native iostreams" feature -# removed from the documentation in 1.43.0 but still present in -# boost/config/auto_link.hpp -if(Boost_USE_STLPORT_DEPRECATED_NATIVE_IOSTREAMS) - string(APPEND _boost_RELEASE_ABI_TAG "n") - string(APPEND _boost_DEBUG_ABI_TAG "n") -endif() - -# -x86 Architecture and address model tag -# First character is the architecture, then word-size, either 32 or 64 -# Only used in 'versioned' layout, added in Boost 1.66.0 -if(DEFINED Boost_ARCHITECTURE) - set(_boost_ARCHITECTURE_TAG "${Boost_ARCHITECTURE}") - _Boost_DEBUG_PRINT_VAR("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" - "_boost_ARCHITECTURE_TAG" SOURCE "user-specified via Boost_ARCHITECTURE") -else() - set(_boost_ARCHITECTURE_TAG "") - # {CMAKE_CXX_COMPILER_ARCHITECTURE_ID} is not currently set for all compilers - if(NOT "x${CMAKE_CXX_COMPILER_ARCHITECTURE_ID}" STREQUAL "x" AND NOT Boost_VERSION_STRING VERSION_LESS 1.66.0) - string(APPEND _boost_ARCHITECTURE_TAG "-") - # This needs to be kept in-sync with the section of CMakePlatformId.h.in - # inside 'defined(_WIN32) && defined(_MSC_VER)' - if(CMAKE_CXX_COMPILER_ARCHITECTURE_ID STREQUAL "IA64") - string(APPEND _boost_ARCHITECTURE_TAG "i") - elseif(CMAKE_CXX_COMPILER_ARCHITECTURE_ID STREQUAL "X86" - OR CMAKE_CXX_COMPILER_ARCHITECTURE_ID STREQUAL "x64") - string(APPEND _boost_ARCHITECTURE_TAG "x") - elseif(CMAKE_CXX_COMPILER_ARCHITECTURE_ID MATCHES "^ARM") - string(APPEND _boost_ARCHITECTURE_TAG "a") - elseif(CMAKE_CXX_COMPILER_ARCHITECTURE_ID STREQUAL "MIPS") - string(APPEND _boost_ARCHITECTURE_TAG "m") - endif() - - if(CMAKE_SIZEOF_VOID_P EQUAL 8) - string(APPEND _boost_ARCHITECTURE_TAG "64") - else() - string(APPEND _boost_ARCHITECTURE_TAG "32") - endif() - endif() - 
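The prefix, compiler, threading, ABI, and architecture tags assembled above compose the 'versioned' library names this module searches for; a purely illustrative decomposition (assuming MSVC 14.2, 64-bit, debug, static, Boost 1.71) is:

    # Illustration only: libboost_thread-vc142-mt-gd-x64-1_71.lib breaks down as
    #   "lib" prefix (static library on Windows), "boost_thread" namespace + component,
    #   "-vc142" compiler tag, "-mt" multithreaded, "-gd" ABI tag (g: debug runtime,
    #   d: debug code), "-x64" architecture/address model, "1_71" Boost_LIB_VERSION.
    set(_example_versioned_name "libboost_thread-vc142-mt-gd-x64-1_71.lib")
    message(STATUS "Example versioned-layout name: ${_example_versioned_name}")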
_Boost_DEBUG_PRINT_VAR("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" - "_boost_ARCHITECTURE_TAG" SOURCE "detected") -endif() - -_Boost_DEBUG_PRINT_VAR("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" "_boost_RELEASE_ABI_TAG") -_Boost_DEBUG_PRINT_VAR("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" "_boost_DEBUG_ABI_TAG") - -# ------------------------------------------------------------------------ -# Begin finding boost libraries -# ------------------------------------------------------------------------ - -set(_Boost_VARS_LIB "") -foreach(c DEBUG RELEASE) - set(_Boost_VARS_LIB_${c} BOOST_LIBRARYDIR Boost_LIBRARY_DIR_${c}) - list(APPEND _Boost_VARS_LIB ${_Boost_VARS_LIB_${c}}) - _Boost_CHANGE_DETECT(_Boost_CHANGE_LIBDIR_${c} ${_Boost_VARS_DIR} ${_Boost_VARS_LIB_${c}} Boost_INCLUDE_DIR) - # Clear Boost_LIBRARY_DIR_${c} if it did not change but other input affecting the - # location did. We will find a new one based on the new inputs. - if(_Boost_CHANGE_LIBDIR_${c} AND NOT _Boost_LIBRARY_DIR_${c}_CHANGED) - unset(Boost_LIBRARY_DIR_${c} CACHE) - endif() - - # If Boost_LIBRARY_DIR_[RELEASE,DEBUG] is set, prefer its value. - if(Boost_LIBRARY_DIR_${c}) - set(_boost_LIBRARY_SEARCH_DIRS_${c} ${Boost_LIBRARY_DIR_${c}} NO_DEFAULT_PATH NO_CMAKE_FIND_ROOT_PATH) - else() - set(_boost_LIBRARY_SEARCH_DIRS_${c} "") - if(BOOST_LIBRARYDIR) - list(APPEND _boost_LIBRARY_SEARCH_DIRS_${c} ${BOOST_LIBRARYDIR}) - elseif(_ENV_BOOST_LIBRARYDIR) - list(APPEND _boost_LIBRARY_SEARCH_DIRS_${c} ${_ENV_BOOST_LIBRARYDIR}) - endif() - - if(BOOST_ROOT) - list(APPEND _boost_LIBRARY_SEARCH_DIRS_${c} ${BOOST_ROOT}/lib ${BOOST_ROOT}/stage/lib) - _Boost_UPDATE_WINDOWS_LIBRARY_SEARCH_DIRS_WITH_PREBUILT_PATHS(_boost_LIBRARY_SEARCH_DIRS_${c} "${BOOST_ROOT}") - elseif(_ENV_BOOST_ROOT) - list(APPEND _boost_LIBRARY_SEARCH_DIRS_${c} ${_ENV_BOOST_ROOT}/lib ${_ENV_BOOST_ROOT}/stage/lib) - _Boost_UPDATE_WINDOWS_LIBRARY_SEARCH_DIRS_WITH_PREBUILT_PATHS(_boost_LIBRARY_SEARCH_DIRS_${c} "${_ENV_BOOST_ROOT}") - endif() - - list(APPEND _boost_LIBRARY_SEARCH_DIRS_${c} - ${Boost_INCLUDE_DIR}/lib - ${Boost_INCLUDE_DIR}/../lib - ${Boost_INCLUDE_DIR}/stage/lib - ) - _Boost_UPDATE_WINDOWS_LIBRARY_SEARCH_DIRS_WITH_PREBUILT_PATHS(_boost_LIBRARY_SEARCH_DIRS_${c} "${Boost_INCLUDE_DIR}/..") - _Boost_UPDATE_WINDOWS_LIBRARY_SEARCH_DIRS_WITH_PREBUILT_PATHS(_boost_LIBRARY_SEARCH_DIRS_${c} "${Boost_INCLUDE_DIR}") - if( Boost_NO_SYSTEM_PATHS ) - list(APPEND _boost_LIBRARY_SEARCH_DIRS_${c} NO_CMAKE_SYSTEM_PATH NO_SYSTEM_ENVIRONMENT_PATH) - else() - foreach(ver ${_boost_TEST_VERSIONS}) - string(REPLACE "." 
"_" ver "${ver}") - _Boost_UPDATE_WINDOWS_LIBRARY_SEARCH_DIRS_WITH_PREBUILT_PATHS(_boost_LIBRARY_SEARCH_DIRS_${c} "C:/local/boost_${ver}") - endforeach() - _Boost_UPDATE_WINDOWS_LIBRARY_SEARCH_DIRS_WITH_PREBUILT_PATHS(_boost_LIBRARY_SEARCH_DIRS_${c} "C:/boost") - list(APPEND _boost_LIBRARY_SEARCH_DIRS_${c} PATHS - C:/boost/lib - C:/boost - /sw/local/lib - ) - endif() - endif() -endforeach() - -_Boost_DEBUG_PRINT_VAR("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" "_boost_LIBRARY_SEARCH_DIRS_RELEASE") -_Boost_DEBUG_PRINT_VAR("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" "_boost_LIBRARY_SEARCH_DIRS_DEBUG") - -# Support preference of static libs by adjusting CMAKE_FIND_LIBRARY_SUFFIXES -if( Boost_USE_STATIC_LIBS ) - set( _boost_ORIG_CMAKE_FIND_LIBRARY_SUFFIXES ${CMAKE_FIND_LIBRARY_SUFFIXES}) - if(WIN32) - list(INSERT CMAKE_FIND_LIBRARY_SUFFIXES 0 .lib .a) - else() - set(CMAKE_FIND_LIBRARY_SUFFIXES .a) - endif() -endif() - -# We want to use the tag inline below without risking double dashes -if(_boost_RELEASE_ABI_TAG) - if(${_boost_RELEASE_ABI_TAG} STREQUAL "-") - set(_boost_RELEASE_ABI_TAG "") - endif() -endif() -if(_boost_DEBUG_ABI_TAG) - if(${_boost_DEBUG_ABI_TAG} STREQUAL "-") - set(_boost_DEBUG_ABI_TAG "") - endif() -endif() - -# The previous behavior of FindBoost when Boost_USE_STATIC_LIBS was enabled -# on WIN32 was to: -# 1. Search for static libs compiled against a SHARED C++ standard runtime library (use if found) -# 2. Search for static libs compiled against a STATIC C++ standard runtime library (use if found) -# We maintain this behavior since changing it could break people's builds. -# To disable the ambiguous behavior, the user need only -# set Boost_USE_STATIC_RUNTIME either ON or OFF. -set(_boost_STATIC_RUNTIME_WORKAROUND false) -if(WIN32 AND Boost_USE_STATIC_LIBS) - if(NOT DEFINED Boost_USE_STATIC_RUNTIME) - set(_boost_STATIC_RUNTIME_WORKAROUND TRUE) - endif() -endif() - -# On versions < 1.35, remove the System library from the considered list -# since it wasn't added until 1.35. -if(Boost_VERSION_STRING AND Boost_FIND_COMPONENTS) - if(Boost_VERSION_STRING VERSION_LESS 1.35.0) - list(REMOVE_ITEM Boost_FIND_COMPONENTS system) - endif() -endif() - -# Additional components may be required via component dependencies. -# Add any missing components to the list. -_Boost_MISSING_DEPENDENCIES(Boost_FIND_COMPONENTS _Boost_EXTRA_FIND_COMPONENTS) - -# If thread is required, get the thread libs as a dependency -if("thread" IN_LIST Boost_FIND_COMPONENTS) - if(Boost_FIND_QUIETLY) - set(_Boost_find_quiet QUIET) - else() - set(_Boost_find_quiet "") - endif() - find_package(Threads ${_Boost_find_quiet}) - unset(_Boost_find_quiet) -endif() - -# If the user changed any of our control inputs flush previous results. -if(_Boost_CHANGE_LIBDIR_DEBUG OR _Boost_CHANGE_LIBDIR_RELEASE OR _Boost_CHANGE_LIBNAME) - foreach(COMPONENT ${_Boost_COMPONENTS_SEARCHED}) - string(TOUPPER ${COMPONENT} UPPERCOMPONENT) - foreach(c DEBUG RELEASE) - set(_var Boost_${UPPERCOMPONENT}_LIBRARY_${c}) - unset(${_var} CACHE) - set(${_var} "${_var}-NOTFOUND") - endforeach() - endforeach() - set(_Boost_COMPONENTS_SEARCHED "") -endif() - -foreach(COMPONENT ${Boost_FIND_COMPONENTS}) - string(TOUPPER ${COMPONENT} UPPERCOMPONENT) - - set( _boost_docstring_release "Boost ${COMPONENT} library (release)") - set( _boost_docstring_debug "Boost ${COMPONENT} library (debug)") - - # Compute component-specific hints. 
- set(_Boost_FIND_LIBRARY_HINTS_FOR_COMPONENT "") - if(${COMPONENT} STREQUAL "mpi" OR ${COMPONENT} STREQUAL "mpi_python" OR - ${COMPONENT} STREQUAL "graph_parallel") - foreach(lib ${MPI_CXX_LIBRARIES} ${MPI_C_LIBRARIES}) - if(IS_ABSOLUTE "${lib}") - get_filename_component(libdir "${lib}" PATH) - string(REPLACE "\\" "/" libdir "${libdir}") - list(APPEND _Boost_FIND_LIBRARY_HINTS_FOR_COMPONENT ${libdir}) - endif() - endforeach() - endif() - - # Handle Python version suffixes - unset(COMPONENT_PYTHON_VERSION_MAJOR) - unset(COMPONENT_PYTHON_VERSION_MINOR) - if(${COMPONENT} MATCHES "^(python|mpi_python|numpy)([0-9])\$") - set(COMPONENT_UNVERSIONED "${CMAKE_MATCH_1}") - set(COMPONENT_PYTHON_VERSION_MAJOR "${CMAKE_MATCH_2}") - elseif(${COMPONENT} MATCHES "^(python|mpi_python|numpy)([0-9])\\.?([0-9])\$") - set(COMPONENT_UNVERSIONED "${CMAKE_MATCH_1}") - set(COMPONENT_PYTHON_VERSION_MAJOR "${CMAKE_MATCH_2}") - set(COMPONENT_PYTHON_VERSION_MINOR "${CMAKE_MATCH_3}") - endif() - - unset(_Boost_FIND_LIBRARY_HINTS_FOR_COMPONENT_NAME) - if (COMPONENT_PYTHON_VERSION_MINOR) - # Boost >= 1.67 - list(APPEND _Boost_FIND_LIBRARY_HINTS_FOR_COMPONENT_NAME "${COMPONENT_UNVERSIONED}${COMPONENT_PYTHON_VERSION_MAJOR}${COMPONENT_PYTHON_VERSION_MINOR}") - # Debian/Ubuntu (Some versions omit the 2 and/or 3 from the suffix) - list(APPEND _Boost_FIND_LIBRARY_HINTS_FOR_COMPONENT_NAME "${COMPONENT_UNVERSIONED}${COMPONENT_PYTHON_VERSION_MAJOR}-py${COMPONENT_PYTHON_VERSION_MAJOR}${COMPONENT_PYTHON_VERSION_MINOR}") - list(APPEND _Boost_FIND_LIBRARY_HINTS_FOR_COMPONENT_NAME "${COMPONENT_UNVERSIONED}-py${COMPONENT_PYTHON_VERSION_MAJOR}${COMPONENT_PYTHON_VERSION_MINOR}") - # Gentoo - list(APPEND _Boost_FIND_LIBRARY_HINTS_FOR_COMPONENT_NAME "${COMPONENT_UNVERSIONED}-${COMPONENT_PYTHON_VERSION_MAJOR}.${COMPONENT_PYTHON_VERSION_MINOR}") - # RPMs - list(APPEND _Boost_FIND_LIBRARY_HINTS_FOR_COMPONENT_NAME "${COMPONENT_UNVERSIONED}-${COMPONENT_PYTHON_VERSION_MAJOR}${COMPONENT_PYTHON_VERSION_MINOR}") - endif() - if (COMPONENT_PYTHON_VERSION_MAJOR AND NOT COMPONENT_PYTHON_VERSION_MINOR) - # Boost < 1.67 - list(APPEND _Boost_FIND_LIBRARY_HINTS_FOR_COMPONENT_NAME "${COMPONENT_UNVERSIONED}${COMPONENT_PYTHON_VERSION_MAJOR}") - endif() - - # Consolidate and report component-specific hints. - if(_Boost_FIND_LIBRARY_HINTS_FOR_COMPONENT_NAME) - list(REMOVE_DUPLICATES _Boost_FIND_LIBRARY_HINTS_FOR_COMPONENT_NAME) - _Boost_DEBUG_PRINT("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" - "Component-specific library search names for ${COMPONENT_NAME}: ${_Boost_FIND_LIBRARY_HINTS_FOR_COMPONENT_NAME}") - endif() - if(_Boost_FIND_LIBRARY_HINTS_FOR_COMPONENT) - list(REMOVE_DUPLICATES _Boost_FIND_LIBRARY_HINTS_FOR_COMPONENT) - _Boost_DEBUG_PRINT("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" - "Component-specific library search paths for ${COMPONENT}: ${_Boost_FIND_LIBRARY_HINTS_FOR_COMPONENT}") - endif() - - # - # Find headers - # - _Boost_COMPONENT_HEADERS("${COMPONENT}" Boost_${UPPERCOMPONENT}_HEADER_NAME) - # Look for a standard boost header file. 
- if(Boost_${UPPERCOMPONENT}_HEADER_NAME) - if(EXISTS "${Boost_INCLUDE_DIR}/${Boost_${UPPERCOMPONENT}_HEADER_NAME}") - set(Boost_${UPPERCOMPONENT}_HEADER ON) - else() - set(Boost_${UPPERCOMPONENT}_HEADER OFF) - endif() - else() - set(Boost_${UPPERCOMPONENT}_HEADER ON) - message(WARNING "No header defined for ${COMPONENT}; skipping header check") - endif() - - # - # Find RELEASE libraries - # - unset(_boost_RELEASE_NAMES) - foreach(component IN LISTS _Boost_FIND_LIBRARY_HINTS_FOR_COMPONENT_NAME COMPONENT) - foreach(compiler IN LISTS _boost_COMPILER) - list(APPEND _boost_RELEASE_NAMES - ${Boost_LIB_PREFIX}${Boost_NAMESPACE}_${component}${compiler}${_boost_MULTITHREADED}${_boost_RELEASE_ABI_TAG}${_boost_ARCHITECTURE_TAG}-${Boost_LIB_VERSION} - ${Boost_LIB_PREFIX}${Boost_NAMESPACE}_${component}${compiler}${_boost_MULTITHREADED}${_boost_RELEASE_ABI_TAG}${_boost_ARCHITECTURE_TAG} - ${Boost_LIB_PREFIX}${Boost_NAMESPACE}_${component}${compiler}${_boost_MULTITHREADED}${_boost_RELEASE_ABI_TAG} ) - endforeach() - list(APPEND _boost_RELEASE_NAMES - ${Boost_LIB_PREFIX}${Boost_NAMESPACE}_${component}${_boost_MULTITHREADED}${_boost_RELEASE_ABI_TAG}${_boost_ARCHITECTURE_TAG}-${Boost_LIB_VERSION} - ${Boost_LIB_PREFIX}${Boost_NAMESPACE}_${component}${_boost_MULTITHREADED}${_boost_RELEASE_ABI_TAG}${_boost_ARCHITECTURE_TAG} - ${Boost_LIB_PREFIX}${Boost_NAMESPACE}_${component}${_boost_MULTITHREADED}${_boost_RELEASE_ABI_TAG} - ${Boost_LIB_PREFIX}${Boost_NAMESPACE}_${component}${_boost_MULTITHREADED} - ${Boost_LIB_PREFIX}${Boost_NAMESPACE}_${component} ) - if(_boost_STATIC_RUNTIME_WORKAROUND) - set(_boost_RELEASE_STATIC_ABI_TAG "-s${_boost_RELEASE_ABI_TAG}") - foreach(compiler IN LISTS _boost_COMPILER) - list(APPEND _boost_RELEASE_NAMES - ${Boost_LIB_PREFIX}${Boost_NAMESPACE}_${component}${compiler}${_boost_MULTITHREADED}${_boost_RELEASE_STATIC_ABI_TAG}${_boost_ARCHITECTURE_TAG}-${Boost_LIB_VERSION} - ${Boost_LIB_PREFIX}${Boost_NAMESPACE}_${component}${compiler}${_boost_MULTITHREADED}${_boost_RELEASE_STATIC_ABI_TAG}${_boost_ARCHITECTURE_TAG} - ${Boost_LIB_PREFIX}${Boost_NAMESPACE}_${component}${compiler}${_boost_MULTITHREADED}${_boost_RELEASE_STATIC_ABI_TAG} ) - endforeach() - list(APPEND _boost_RELEASE_NAMES - ${Boost_LIB_PREFIX}${Boost_NAMESPACE}_${component}${_boost_MULTITHREADED}${_boost_RELEASE_STATIC_ABI_TAG}${_boost_ARCHITECTURE_TAG}-${Boost_LIB_VERSION} - ${Boost_LIB_PREFIX}${Boost_NAMESPACE}_${component}${_boost_MULTITHREADED}${_boost_RELEASE_STATIC_ABI_TAG}${_boost_ARCHITECTURE_TAG} - ${Boost_LIB_PREFIX}${Boost_NAMESPACE}_${component}${_boost_MULTITHREADED}${_boost_RELEASE_STATIC_ABI_TAG} ) - endif() - endforeach() - if(Boost_THREADAPI AND ${COMPONENT} STREQUAL "thread") - _Boost_PREPEND_LIST_WITH_THREADAPI(_boost_RELEASE_NAMES ${_boost_RELEASE_NAMES}) - endif() - _Boost_DEBUG_PRINT("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" - "Searching for ${UPPERCOMPONENT}_LIBRARY_RELEASE: ${_boost_RELEASE_NAMES}") - - # if Boost_LIBRARY_DIR_RELEASE is not defined, - # but Boost_LIBRARY_DIR_DEBUG is, look there first for RELEASE libs - if(NOT Boost_LIBRARY_DIR_RELEASE AND Boost_LIBRARY_DIR_DEBUG) - list(INSERT _boost_LIBRARY_SEARCH_DIRS_RELEASE 0 ${Boost_LIBRARY_DIR_DEBUG}) - endif() - - # Avoid passing backslashes to _Boost_FIND_LIBRARY due to macro re-parsing. 
- string(REPLACE "\\" "/" _boost_LIBRARY_SEARCH_DIRS_tmp "${_boost_LIBRARY_SEARCH_DIRS_RELEASE}") - - if(Boost_USE_RELEASE_LIBS) - _Boost_FIND_LIBRARY(Boost_${UPPERCOMPONENT}_LIBRARY_RELEASE RELEASE - NAMES ${_boost_RELEASE_NAMES} - HINTS ${_boost_LIBRARY_SEARCH_DIRS_tmp} - NAMES_PER_DIR - DOC "${_boost_docstring_release}" - ) - endif() - - # - # Find DEBUG libraries - # - unset(_boost_DEBUG_NAMES) - foreach(component IN LISTS _Boost_FIND_LIBRARY_HINTS_FOR_COMPONENT_NAME COMPONENT) - foreach(compiler IN LISTS _boost_COMPILER) - list(APPEND _boost_DEBUG_NAMES - ${Boost_LIB_PREFIX}${Boost_NAMESPACE}_${component}${compiler}${_boost_MULTITHREADED}${_boost_DEBUG_ABI_TAG}${_boost_ARCHITECTURE_TAG}-${Boost_LIB_VERSION} - ${Boost_LIB_PREFIX}${Boost_NAMESPACE}_${component}${compiler}${_boost_MULTITHREADED}${_boost_DEBUG_ABI_TAG}${_boost_ARCHITECTURE_TAG} - ${Boost_LIB_PREFIX}${Boost_NAMESPACE}_${component}${compiler}${_boost_MULTITHREADED}${_boost_DEBUG_ABI_TAG} ) - endforeach() - list(APPEND _boost_DEBUG_NAMES - ${Boost_LIB_PREFIX}${Boost_NAMESPACE}_${component}${_boost_MULTITHREADED}${_boost_DEBUG_ABI_TAG}${_boost_ARCHITECTURE_TAG}-${Boost_LIB_VERSION} - ${Boost_LIB_PREFIX}${Boost_NAMESPACE}_${component}${_boost_MULTITHREADED}${_boost_DEBUG_ABI_TAG}${_boost_ARCHITECTURE_TAG} - ${Boost_LIB_PREFIX}${Boost_NAMESPACE}_${component}${_boost_MULTITHREADED}${_boost_DEBUG_ABI_TAG} - ${Boost_LIB_PREFIX}${Boost_NAMESPACE}_${component}${_boost_MULTITHREADED} - ${Boost_LIB_PREFIX}${Boost_NAMESPACE}_${component} ) - if(_boost_STATIC_RUNTIME_WORKAROUND) - set(_boost_DEBUG_STATIC_ABI_TAG "-s${_boost_DEBUG_ABI_TAG}") - foreach(compiler IN LISTS _boost_COMPILER) - list(APPEND _boost_DEBUG_NAMES - ${Boost_LIB_PREFIX}${Boost_NAMESPACE}_${component}${compiler}${_boost_MULTITHREADED}${_boost_DEBUG_STATIC_ABI_TAG}${_boost_ARCHITECTURE_TAG}-${Boost_LIB_VERSION} - ${Boost_LIB_PREFIX}${Boost_NAMESPACE}_${component}${compiler}${_boost_MULTITHREADED}${_boost_DEBUG_STATIC_ABI_TAG}${_boost_ARCHITECTURE_TAG} - ${Boost_LIB_PREFIX}${Boost_NAMESPACE}_${component}${compiler}${_boost_MULTITHREADED}${_boost_DEBUG_STATIC_ABI_TAG} ) - endforeach() - list(APPEND _boost_DEBUG_NAMES - ${Boost_LIB_PREFIX}${Boost_NAMESPACE}_${component}${_boost_MULTITHREADED}${_boost_DEBUG_STATIC_ABI_TAG}${_boost_ARCHITECTURE_TAG}-${Boost_LIB_VERSION} - ${Boost_LIB_PREFIX}${Boost_NAMESPACE}_${component}${_boost_MULTITHREADED}${_boost_DEBUG_STATIC_ABI_TAG}${_boost_ARCHITECTURE_TAG} - ${Boost_LIB_PREFIX}${Boost_NAMESPACE}_${component}${_boost_MULTITHREADED}${_boost_DEBUG_STATIC_ABI_TAG} ) - endif() - endforeach() - if(Boost_THREADAPI AND ${COMPONENT} STREQUAL "thread") - _Boost_PREPEND_LIST_WITH_THREADAPI(_boost_DEBUG_NAMES ${_boost_DEBUG_NAMES}) - endif() - _Boost_DEBUG_PRINT("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" - "Searching for ${UPPERCOMPONENT}_LIBRARY_DEBUG: ${_boost_DEBUG_NAMES}") - - # if Boost_LIBRARY_DIR_DEBUG is not defined, - # but Boost_LIBRARY_DIR_RELEASE is, look there first for DEBUG libs - if(NOT Boost_LIBRARY_DIR_DEBUG AND Boost_LIBRARY_DIR_RELEASE) - list(INSERT _boost_LIBRARY_SEARCH_DIRS_DEBUG 0 ${Boost_LIBRARY_DIR_RELEASE}) - endif() - - # Avoid passing backslashes to _Boost_FIND_LIBRARY due to macro re-parsing. 
- string(REPLACE "\\" "/" _boost_LIBRARY_SEARCH_DIRS_tmp "${_boost_LIBRARY_SEARCH_DIRS_DEBUG}") - - if(Boost_USE_DEBUG_LIBS) - _Boost_FIND_LIBRARY(Boost_${UPPERCOMPONENT}_LIBRARY_DEBUG DEBUG - NAMES ${_boost_DEBUG_NAMES} - HINTS ${_boost_LIBRARY_SEARCH_DIRS_tmp} - NAMES_PER_DIR - DOC "${_boost_docstring_debug}" - ) - endif () - - if(Boost_REALPATH) - _Boost_SWAP_WITH_REALPATH(Boost_${UPPERCOMPONENT}_LIBRARY_RELEASE "${_boost_docstring_release}") - _Boost_SWAP_WITH_REALPATH(Boost_${UPPERCOMPONENT}_LIBRARY_DEBUG "${_boost_docstring_debug}" ) - endif() - - _Boost_ADJUST_LIB_VARS(${UPPERCOMPONENT}) - - # Check if component requires some compiler features - _Boost_COMPILER_FEATURES(${COMPONENT} _Boost_${UPPERCOMPONENT}_COMPILER_FEATURES) - -endforeach() - -# Restore the original find library ordering -if( Boost_USE_STATIC_LIBS ) - set(CMAKE_FIND_LIBRARY_SUFFIXES ${_boost_ORIG_CMAKE_FIND_LIBRARY_SUFFIXES}) -endif() - -# ------------------------------------------------------------------------ -# End finding boost libraries -# ------------------------------------------------------------------------ - -set(Boost_INCLUDE_DIRS ${Boost_INCLUDE_DIR}) -set(Boost_LIBRARY_DIRS) -if(Boost_LIBRARY_DIR_RELEASE) - list(APPEND Boost_LIBRARY_DIRS ${Boost_LIBRARY_DIR_RELEASE}) -endif() -if(Boost_LIBRARY_DIR_DEBUG) - list(APPEND Boost_LIBRARY_DIRS ${Boost_LIBRARY_DIR_DEBUG}) -endif() -if(Boost_LIBRARY_DIRS) - list(REMOVE_DUPLICATES Boost_LIBRARY_DIRS) -endif() - -# ------------------------------------------------------------------------ -# Call FPHSA helper, see https://cmake.org/cmake/help/latest/module/FindPackageHandleStandardArgs.html -# ------------------------------------------------------------------------ - -# Define aliases as needed by the component handler in the FPHSA helper below -foreach(_comp IN LISTS Boost_FIND_COMPONENTS) - string(TOUPPER ${_comp} _uppercomp) - if(DEFINED Boost_${_uppercomp}_FOUND) - set(Boost_${_comp}_FOUND ${Boost_${_uppercomp}_FOUND}) - endif() -endforeach() - -find_package_handle_standard_args(Boost - REQUIRED_VARS Boost_INCLUDE_DIR - VERSION_VAR Boost_VERSION_STRING - HANDLE_COMPONENTS) - -if(Boost_FOUND) - if( NOT Boost_LIBRARY_DIRS ) - # Compatibility Code for backwards compatibility with CMake - # 2.4's FindBoost module. - - # Look for the boost library path. - # Note that the user may not have installed any libraries - # so it is quite possible the Boost_LIBRARY_DIRS may not exist. - set(_boost_LIB_DIR ${Boost_INCLUDE_DIR}) - - if("${_boost_LIB_DIR}" MATCHES "boost-[0-9]+") - get_filename_component(_boost_LIB_DIR ${_boost_LIB_DIR} PATH) - endif() - - if("${_boost_LIB_DIR}" MATCHES "/include$") - # Strip off the trailing "/include" in the path. - get_filename_component(_boost_LIB_DIR ${_boost_LIB_DIR} PATH) - endif() - - if(EXISTS "${_boost_LIB_DIR}/lib") - string(APPEND _boost_LIB_DIR /lib) - elseif(EXISTS "${_boost_LIB_DIR}/stage/lib") - string(APPEND _boost_LIB_DIR "/stage/lib") - else() - set(_boost_LIB_DIR "") - endif() - - if(_boost_LIB_DIR AND EXISTS "${_boost_LIB_DIR}") - set(Boost_LIBRARY_DIRS ${_boost_LIB_DIR}) - endif() - - endif() -else() - # Boost headers were not found so no components were found. 
- foreach(COMPONENT ${Boost_FIND_COMPONENTS}) - string(TOUPPER ${COMPONENT} UPPERCOMPONENT) - set(Boost_${UPPERCOMPONENT}_FOUND 0) - endforeach() -endif() - -# ------------------------------------------------------------------------ -# Add imported targets -# ------------------------------------------------------------------------ - -if(Boost_FOUND) - # The builtin CMake package in Boost 1.70+ introduces a new name - # for the header-only lib, let's provide the same UI in module mode - if(NOT TARGET Boost::headers) - add_library(Boost::headers INTERFACE IMPORTED) - if(Boost_INCLUDE_DIRS) - set_target_properties(Boost::headers PROPERTIES - INTERFACE_INCLUDE_DIRECTORIES "${Boost_INCLUDE_DIRS}") - endif() - endif() - - # Define the old target name for header-only libraries for backwards - # compat. - if(NOT TARGET Boost::boost) - add_library(Boost::boost INTERFACE IMPORTED) - set_target_properties(Boost::boost - PROPERTIES INTERFACE_LINK_LIBRARIES Boost::headers) - endif() - - foreach(COMPONENT ${Boost_FIND_COMPONENTS}) - if(_Boost_IMPORTED_TARGETS AND NOT TARGET Boost::${COMPONENT}) - string(TOUPPER ${COMPONENT} UPPERCOMPONENT) - if(Boost_${UPPERCOMPONENT}_FOUND) - if(Boost_USE_STATIC_LIBS) - add_library(Boost::${COMPONENT} STATIC IMPORTED) - else() - # Even if Boost_USE_STATIC_LIBS is OFF, we might have static - # libraries as a result. - add_library(Boost::${COMPONENT} UNKNOWN IMPORTED) - endif() - if(Boost_INCLUDE_DIRS) - set_target_properties(Boost::${COMPONENT} PROPERTIES - INTERFACE_INCLUDE_DIRECTORIES "${Boost_INCLUDE_DIRS}") - endif() - if(EXISTS "${Boost_${UPPERCOMPONENT}_LIBRARY}") - set_target_properties(Boost::${COMPONENT} PROPERTIES - IMPORTED_LINK_INTERFACE_LANGUAGES "CXX" - IMPORTED_LOCATION "${Boost_${UPPERCOMPONENT}_LIBRARY}") - endif() - if(EXISTS "${Boost_${UPPERCOMPONENT}_LIBRARY_RELEASE}") - set_property(TARGET Boost::${COMPONENT} APPEND PROPERTY - IMPORTED_CONFIGURATIONS RELEASE) - set_target_properties(Boost::${COMPONENT} PROPERTIES - IMPORTED_LINK_INTERFACE_LANGUAGES_RELEASE "CXX" - IMPORTED_LOCATION_RELEASE "${Boost_${UPPERCOMPONENT}_LIBRARY_RELEASE}") - endif() - if(EXISTS "${Boost_${UPPERCOMPONENT}_LIBRARY_DEBUG}") - set_property(TARGET Boost::${COMPONENT} APPEND PROPERTY - IMPORTED_CONFIGURATIONS DEBUG) - set_target_properties(Boost::${COMPONENT} PROPERTIES - IMPORTED_LINK_INTERFACE_LANGUAGES_DEBUG "CXX" - IMPORTED_LOCATION_DEBUG "${Boost_${UPPERCOMPONENT}_LIBRARY_DEBUG}") - endif() - if(_Boost_${UPPERCOMPONENT}_DEPENDENCIES) - unset(_Boost_${UPPERCOMPONENT}_TARGET_DEPENDENCIES) - foreach(dep ${_Boost_${UPPERCOMPONENT}_DEPENDENCIES}) - list(APPEND _Boost_${UPPERCOMPONENT}_TARGET_DEPENDENCIES Boost::${dep}) - endforeach() - if(COMPONENT STREQUAL "thread") - list(APPEND _Boost_${UPPERCOMPONENT}_TARGET_DEPENDENCIES Threads::Threads) - endif() - set_target_properties(Boost::${COMPONENT} PROPERTIES - INTERFACE_LINK_LIBRARIES "${_Boost_${UPPERCOMPONENT}_TARGET_DEPENDENCIES}") - endif() - if(_Boost_${UPPERCOMPONENT}_COMPILER_FEATURES) - set_target_properties(Boost::${COMPONENT} PROPERTIES - INTERFACE_COMPILE_FEATURES "${_Boost_${UPPERCOMPONENT}_COMPILER_FEATURES}") - endif() - endif() - endif() - endforeach() -endif() - -# ------------------------------------------------------------------------ -# Finalize -# ------------------------------------------------------------------------ - -# Report Boost_LIBRARIES -set(Boost_LIBRARIES "") -foreach(_comp IN LISTS Boost_FIND_COMPONENTS) - string(TOUPPER ${_comp} _uppercomp) - if(Boost_${_uppercomp}_FOUND) - list(APPEND 
Boost_LIBRARIES ${Boost_${_uppercomp}_LIBRARY})
-    if(_comp STREQUAL "thread")
-      list(APPEND Boost_LIBRARIES ${CMAKE_THREAD_LIBS_INIT})
-    endif()
-  endif()
-endforeach()
-
-# Configure display of cache entries in GUI.
-foreach(v BOOSTROOT BOOST_ROOT ${_Boost_VARS_INC} ${_Boost_VARS_LIB})
-  get_property(_type CACHE ${v} PROPERTY TYPE)
-  if(_type)
-    set_property(CACHE ${v} PROPERTY ADVANCED 1)
-    if("x${_type}" STREQUAL "xUNINITIALIZED")
-      if("x${v}" STREQUAL "xBoost_ADDITIONAL_VERSIONS")
-        set_property(CACHE ${v} PROPERTY TYPE STRING)
-      else()
-        set_property(CACHE ${v} PROPERTY TYPE PATH)
-      endif()
-    endif()
-  endif()
-endforeach()
-
-# Record last used values of input variables so we can
-# detect on the next run if the user changed them.
-foreach(v
-    ${_Boost_VARS_INC} ${_Boost_VARS_LIB}
-    ${_Boost_VARS_DIR} ${_Boost_VARS_NAME}
-    )
-  if(DEFINED ${v})
-    set(_${v}_LAST "${${v}}" CACHE INTERNAL "Last used ${v} value.")
-  else()
-    unset(_${v}_LAST CACHE)
-  endif()
-endforeach()
-
-# Maintain a persistent list of components requested anywhere since
-# the last flush.
-set(_Boost_COMPONENTS_SEARCHED "${_Boost_COMPONENTS_SEARCHED}")
-list(APPEND _Boost_COMPONENTS_SEARCHED ${Boost_FIND_COMPONENTS})
-list(REMOVE_DUPLICATES _Boost_COMPONENTS_SEARCHED)
-list(SORT _Boost_COMPONENTS_SEARCHED)
-set(_Boost_COMPONENTS_SEARCHED "${_Boost_COMPONENTS_SEARCHED}"
-  CACHE INTERNAL "Components requested for this build tree.")
-
-# Restore project's policies
-cmake_policy(POP)
diff --git a/cmake/GtsamBuildTypes.cmake b/cmake/GtsamBuildTypes.cmake
index 9058807ad..e63fbf1dd 100644
--- a/cmake/GtsamBuildTypes.cmake
+++ b/cmake/GtsamBuildTypes.cmake
@@ -190,7 +190,7 @@ if(${CMAKE_CXX_COMPILER_ID} STREQUAL "Clang")
 endif()
 
 if (NOT MSVC)
-  option(GTSAM_BUILD_WITH_MARCH_NATIVE "Enable/Disable building with all instructions supported by native architecture (binary may not be portable!)" ON)
+  option(GTSAM_BUILD_WITH_MARCH_NATIVE "Enable/Disable building with all instructions supported by native architecture (binary may not be portable!)" OFF)
   if(GTSAM_BUILD_WITH_MARCH_NATIVE AND (APPLE AND NOT CMAKE_SYSTEM_PROCESSOR STREQUAL "arm64"))
     # Add as public flag so all dependant projects also use it, as required
     # by Eigen to avoid crashes due to SIMD vectorization:
diff --git a/cmake/HandleEigen.cmake b/cmake/HandleEigen.cmake
index b21d16885..c49eb4f8e 100644
--- a/cmake/HandleEigen.cmake
+++ b/cmake/HandleEigen.cmake
@@ -1,7 +1,10 @@
 ###############################################################################
 # Option for using system Eigen or GTSAM-bundled Eigen
-
-option(GTSAM_USE_SYSTEM_EIGEN "Find and use system-installed Eigen. If 'off', use the one bundled with GTSAM" OFF)
+# Default: Use system's Eigen if found automatically:
+find_package(Eigen3 QUIET)
+set(USE_SYSTEM_EIGEN_INITIAL_VALUE ${Eigen3_FOUND})
+option(GTSAM_USE_SYSTEM_EIGEN "Find and use system-installed Eigen. If 'off', use the one bundled with GTSAM" ${USE_SYSTEM_EIGEN_INITIAL_VALUE})
+unset(USE_SYSTEM_EIGEN_INITIAL_VALUE)
 
 if(NOT GTSAM_USE_SYSTEM_EIGEN)
   # This option only makes sense if using the embedded copy of Eigen, it is
@@ -11,7 +14,7 @@ endif()
 
 # Switch for using system Eigen or GTSAM-bundled Eigen
 if(GTSAM_USE_SYSTEM_EIGEN)
-  find_package(Eigen3 REQUIRED)
+  find_package(Eigen3 REQUIRED) # need to find again as REQUIRED
 
   # Use generic Eigen include paths e.g.
set(GTSAM_EIGEN_INCLUDE_FOR_INSTALL "${EIGEN3_INCLUDE_DIR}") diff --git a/cmake/HandlePrintConfiguration.cmake b/cmake/HandlePrintConfiguration.cmake index 43ee5b57b..04d27c27f 100644 --- a/cmake/HandlePrintConfiguration.cmake +++ b/cmake/HandlePrintConfiguration.cmake @@ -33,6 +33,7 @@ print_build_options_for_target(gtsam) print_config("Use System Eigen" "${GTSAM_USE_SYSTEM_EIGEN} (Using version: ${GTSAM_EIGEN_VERSION})") print_config("Use System Metis" "${GTSAM_USE_SYSTEM_METIS}") +print_config("Using Boost version" "${Boost_VERSION}") if(GTSAM_USE_TBB) print_config("Use Intel TBB" "Yes (Version: ${TBB_VERSION})") diff --git a/doc/ImuFactor.lyx b/doc/ImuFactor.lyx index 0922a3e9c..c335e6949 100644 --- a/doc/ImuFactor.lyx +++ b/doc/ImuFactor.lyx @@ -1,7 +1,9 @@ -#LyX 2.0 created this file. For more info see http://www.lyx.org/ -\lyxformat 413 +#LyX 2.3 created this file. For more info see http://www.lyx.org/ +\lyxformat 544 \begin_document \begin_header +\save_transient_properties true +\origin unavailable \textclass article \use_default_options true \maintain_unincluded_children false @@ -9,16 +11,18 @@ \language_package default \inputencoding auto \fontencoding global -\font_roman default -\font_sans default -\font_typewriter default +\font_roman "default" "default" +\font_sans "default" "default" +\font_typewriter "default" "default" +\font_math "auto" "auto" \font_default_family default \use_non_tex_fonts false \font_sc false \font_osf false -\font_sf_scale 100 -\font_tt_scale 100 - +\font_sf_scale 100 100 +\font_tt_scale 100 100 +\use_microtype false +\use_dash_ligatures true \graphics default \default_output_format default \output_sync 0 @@ -29,16 +33,26 @@ \use_hyperref false \papersize default \use_geometry true -\use_amsmath 1 -\use_esint 1 -\use_mhchem 1 -\use_mathdots 1 +\use_package amsmath 1 +\use_package amssymb 1 +\use_package cancel 1 +\use_package esint 1 +\use_package mathdots 1 +\use_package mathtools 1 +\use_package mhchem 1 +\use_package stackrel 1 +\use_package stmaryrd 1 +\use_package undertilde 1 \cite_engine basic +\cite_engine_type default +\biblio_style plain \use_bibtopic false \use_indices false \paperorientation portrait \suppress_date false +\justification true \use_refstyle 1 +\use_minted 0 \index Index \shortcut idx \color #008000 @@ -51,7 +65,10 @@ \tocdepth 3 \paragraph_separation indent \paragraph_indentation default -\quotes_language english +\is_math_indent 0 +\math_numbering_side default +\quotes_style english +\dynamic_quotes 0 \papercolumns 1 \papersides 1 \paperpagestyle default @@ -65,11 +82,11 @@ \begin_body \begin_layout Title -The new IMU Factor +The New IMU Factor \end_layout \begin_layout Author -Frank Dellaert +Frank Dellaert & Varun Agrawal \end_layout \begin_layout Standard @@ -91,6 +108,282 @@ filename "macros.lyx" \end_layout +\begin_layout Standard +\begin_inset FormulaMacro +\newcommand{\Rnine}{\mathfrak{\mathbb{R}^{9}}} +{\mathfrak{\mathbb{R}^{9}}} +\end_inset + + +\end_layout + +\begin_layout Standard +\begin_inset FormulaMacro +\newcommand{\Rninethree}{\mathfrak{\mathbb{R}^{9\times3}}} +{\mathfrak{\mathbb{R}^{9\times3}}} +\end_inset + + +\end_layout + +\begin_layout Standard +\begin_inset FormulaMacro +\newcommand{\Rninesix}{\mathfrak{\mathbb{R}^{9\times6}}} +{\mathfrak{\mathbb{R}^{9\times6}}} +\end_inset + + +\end_layout + +\begin_layout Standard +\begin_inset FormulaMacro +\newcommand{\Rninenine}{\mathfrak{\mathbb{R}^{9\times9}}} +{\mathfrak{\mathbb{R}^{9\times9}}} +\end_inset + + +\end_layout + +\begin_layout Subsubsection* +IMU 
Factor +\end_layout + +\begin_layout Standard +The IMU factor has 2 variants: +\end_layout + +\begin_layout Enumerate +ImuFactor is a 5-way factor between the previous pose and velocity, the + current pose and velocity, and the current IMU bias. +\end_layout + +\begin_layout Enumerate +ImuFactor2 is a 3-way factor between the previous NavState, the current + NavState and the current IMU bias. +\end_layout + +\begin_layout Standard +Both variants take a PreintegratedMeasurements object which encodes all + the IMU measurements between the previous timestep and the current timestep. +\end_layout + +\begin_layout Standard +There are also 2 variants of this class: +\end_layout + +\begin_layout Enumerate +Manifold Preintegration: This version keeps track of the incremental NavState + +\begin_inset Formula $\Delta X_{ij}$ +\end_inset + + with respect to the previous NavState, on the NavState manifold itself. + It also keeps track of the +\begin_inset Formula $\Rninesix$ +\end_inset + + Jacobian of +\begin_inset Formula $\Delta X_{ij}$ +\end_inset + + w.r.t. + the bias. + This corresponds to Forster et. + al. +\begin_inset CommandInset citation +LatexCommand cite +key "Forster15rss" +literal "false" + +\end_inset + + +\end_layout + +\begin_layout Enumerate +Tangent Preintegration: This version keeps track of the incremental NavState + in the NavState tangent space instead. + This is a +\begin_inset Formula $\Rnine$ +\end_inset + + vector +\emph on +preintegrated_ +\emph default +. + It also keeps track of the +\begin_inset Formula $\Rninesix$ +\end_inset + + jacobian of the +\emph on +preintegrated_ +\emph default + w.r.t. + the bias. + +\end_layout + +\begin_layout Standard +The main function of a factor is to calculate an error. + This is done exactly the same in both variants: +\begin_inset Formula +\begin{equation} +e(X_{i},X_{j})=X_{j}\ominus\widehat{X_{j}}\label{eq:imu-factor-error} +\end{equation} + +\end_inset + +where the predicted NavState +\begin_inset Formula $\widehat{X_{j}}$ +\end_inset + + at time +\begin_inset Formula $t_{j}$ +\end_inset + + is a function of the NavState +\begin_inset Formula $X_{i}$ +\end_inset + + at time +\begin_inset Formula $t_{i}$ +\end_inset + + and the preintegrated measurements +\begin_inset Formula $PIM$ +\end_inset + +: +\begin_inset Formula +\[ +\widehat{X_{j}}=f(X_{i},PIM) +\] + +\end_inset + +The noise model associated with this factor is assumed to be zero-mean Gaussian + with a +\begin_inset Formula $9\times9$ +\end_inset + + covariance matrix +\begin_inset Formula $\Sigma_{ij}$ +\end_inset + +, which is defined in the tangent space +\begin_inset Formula $T_{X_{j}}\mathcal{N}$ +\end_inset + + of the NavState manifold at the NavState +\begin_inset Formula $X_{j}$ +\end_inset + +. + This (discrete-time) covariance matrix is computed in the preintegrated + measurement class, of which there are two variants as discussed above. +\end_layout + +\begin_layout Subsubsection* +Combined IMU Factor +\end_layout + +\begin_layout Standard +The IMU factor above requires that bias drift over time be modeled as a + separate stochastic process (using a BetweenFactor for example), a crucial + aspect given that the preintegrated measurements depend on these bias values + and are thus correlated. + For this reason, we provide another type of IMU factor which we term the + Combined IMU Factor. 
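Before turning to the Combined IMU Factor, a minimal C++ sketch of how the plain ImuFactor variant described above is assembled may help. It follows the API exercised in examples/ImuFactorsExample.cpp elsewhere in this patch; the key symbols X/V/B, the helper function, and the noise sigmas are illustrative placeholders rather than recommended settings.

#include <vector>

#include <gtsam/inference/Symbol.h>
#include <gtsam/navigation/CombinedImuFactor.h>
#include <gtsam/navigation/ImuFactor.h>
#include <gtsam/nonlinear/NonlinearFactorGraph.h>

using namespace gtsam;
using symbol_shorthand::X;  // pose keys
using symbol_shorthand::V;  // velocity keys
using symbol_shorthand::B;  // IMU bias keys

void addImuFactor(NonlinearFactorGraph& graph, size_t i,
                  const std::vector<Vector3>& measuredAccs,
                  const std::vector<Vector3>& measuredOmegas, double dt) {
  // Preintegration parameters; gravity constant and sigmas copied loosely
  // from imuParams() in the example, purely as placeholders.
  auto p = PreintegratedCombinedMeasurements::Params::MakeSharedD(0.0);
  p->accelerometerCovariance = I_3x3 * 1e-3;
  p->gyroscopeCovariance = I_3x3 * 1e-4;
  p->integrationCovariance = I_3x3 * 1e-8;

  // Accumulate all IMU measurements between timesteps i and i+1.
  imuBias::ConstantBias biasHat;  // assumed zero here
  PreintegratedImuMeasurements pim(p, biasHat);
  for (size_t k = 0; k < measuredAccs.size(); ++k)
    pim.integrateMeasurement(measuredAccs[k], measuredOmegas[k], dt);

  // ImuFactor: 5-way factor on pose_i, vel_i, pose_j, vel_j and bias_i.
  graph.add(ImuFactor(X(i), V(i), X(i + 1), V(i + 1), B(i), pim));
}

The Combined IMU Factor is constructed the same way, except that it additionally connects the bias variables at both timesteps, e.g. CombinedImuFactor(X(i), V(i), X(i+1), V(i+1), B(i), B(i+1), pimCombined), and therefore models bias evolution inside the factor, as described next.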
+ This factor similarly has 2 variants: +\end_layout + +\begin_layout Enumerate +CombinedImuFactor is a 6-way factor between the previous pose, velocity + and IMU bias and the current pose, velocity and IMU bias. +\end_layout + +\begin_layout Enumerate +CombinedImuFactor2 is a 4-way factor between the previous NavState and IMU + bias and the current NavState and IMU bias. +\end_layout + +\begin_layout Standard +Since the Combined IMU Factor has a larger state variable due to the inclusion + of IMU biases, the noise model associated with this factor is assumed to + be a zero mean Gaussian with a +\begin_inset Formula $15\times15$ +\end_inset + + covariance matrix +\begin_inset Formula $\Sigma$ +\end_inset + +, similarly defined on the tangent space of the NavState manifold. +\end_layout + +\begin_layout Subsubsection* +Covariance Matrices +\end_layout + +\begin_layout Standard +For IMU preintegration, it is important to propagate the uncertainty accurately + as well. + As such, we detail the various covariance matrices used in the preintegration + step. +\end_layout + +\begin_layout Itemize +Gyroscope Covariance +\begin_inset Formula $Q_{\omega}$ +\end_inset + +: Measurement uncertainty of the gyroscope. +\end_layout + +\begin_layout Itemize +Gyroscope Bias Covariance +\begin_inset Formula $Q_{\Delta b^{\omega}}$ +\end_inset + + : The covariance associated with the gyroscope bias random walk. +\end_layout + +\begin_layout Itemize +Accelerometer Covariance +\begin_inset Formula $Q_{acc}$ +\end_inset + + : Measurement uncertainty of the accelerometer. +\end_layout + +\begin_layout Itemize +Accelerometer Bias Covariance +\begin_inset Formula $Q_{\Delta b^{acc}}$ +\end_inset + + : The covariance associated with the accelerometer bias random walk. +\end_layout + +\begin_layout Itemize +Integration Covariance +\begin_inset Formula $Q_{int}$ +\end_inset + + : This is the uncertainty due to modeling errors in the integration from + acceleration to velocity and position. +\end_layout + +\begin_layout Itemize +Initial Bias Estimate Covariance +\begin_inset Formula $Q_{init}$ +\end_inset + + : This is the uncertainty associated with the estimation of the bias (since + we jointly estimate the bias as well). +\end_layout + \begin_layout Subsubsection* Navigation States \end_layout @@ -244,7 +537,7 @@ X(t)=\left\{ R_{0},P_{0}+V_{0}t,V_{0}\right\} then the differential equation describing the trajectory is \begin_inset Formula \[ -\dot{X}(t)=\left[0_{3x3},V_{0},0_{3x1}\right],\,\,\,\,\, X(0)=\left\{ R_{0},P_{0},V_{0}\right\} +\dot{X}(t)=\left[0_{3x3},V_{0},0_{3x1}\right],\,\,\,\,\,X(0)=\left\{ R_{0},P_{0},V_{0}\right\} \] \end_inset @@ -285,7 +578,15 @@ acceleration \end_inset in the body frame. 
- We know (from Murray84book) that the derivative of + We know (from +\begin_inset CommandInset citation +LatexCommand cite +key "Murray94book" +literal "false" + +\end_inset + +) that the derivative of \begin_inset Formula $R$ \end_inset @@ -592,6 +893,7 @@ Lie Group Methods \begin_inset CommandInset citation LatexCommand cite key "Iserles00an" +literal "true" \end_inset @@ -602,7 +904,7 @@ key "Iserles00an" , \begin_inset Formula \begin{equation} -\dot{R}(t)=F(R,t),\,\,\,\, R(0)=R_{0}\label{eq:diffSo3} +\dot{R}(t)=F(R,t),\,\,\,\,R(0)=R_{0}\label{eq:diffSo3} \end{equation} \end_inset @@ -707,15 +1009,6 @@ In other words, the vector field Retractions \end_layout -\begin_layout Standard -\begin_inset FormulaMacro -\newcommand{\Rnine}{\mathfrak{\mathbb{R}^{9}}} -{\mathfrak{\mathbb{R}^{9}}} -\end_inset - - -\end_layout - \begin_layout Standard Note that the use of the exponential map in local coordinate mappings is not obligatory, even in the context of Lie groups. @@ -947,8 +1240,8 @@ Or, as another way to state this, if we solve the differential equations \begin_inset Formula \begin{eqnarray*} \dot{\theta}(t) & = & H(\theta)^{-1}\,\omega^{b}(t)\\ -\dot{p}(t) & = & R_{0}^{T}\, V_{0}+v(t)\\ -\dot{v}(t) & = & R_{0}^{T}\, g+R_{b}^{0}(t)a^{b}(t) +\dot{p}(t) & = & R_{0}^{T}\,V_{0}+v(t)\\ +\dot{v}(t) & = & R_{0}^{T}\,g+R_{b}^{0}(t)a^{b}(t) \end{eqnarray*} \end_inset @@ -1000,7 +1293,15 @@ In the IMU factor, we need to predict the NavState needs to be known in order to compensate properly for the initial velocity and rotated gravity vector. - Hence, the idea of Lupton was to split up + Hence, the idea of Lupton +\begin_inset CommandInset citation +LatexCommand cite +key "Lupton12tro" +literal "false" + +\end_inset + + was to split up \begin_inset Formula $v(t)$ \end_inset @@ -1015,7 +1316,7 @@ v(t)=v_{g}(t)+v_{a}(t) evolving as \begin_inset Formula \begin{eqnarray*} -\dot{v}_{g}(t) & = & R_{i}^{T}\, g\\ +\dot{v}_{g}(t) & = & R_{i}^{T}\,g\\ \dot{v}_{a}(t) & = & R_{b}^{i}(t)a^{b}(t) \end{eqnarray*} @@ -1041,7 +1342,7 @@ p(t)=p_{i}(t)+p_{g}(t)+p_{v}(t) evolving as \begin_inset Formula \begin{eqnarray*} -\dot{p}_{i}(t) & = & R_{i}^{T}\, V_{i}\\ +\dot{p}_{i}(t) & = & R_{i}^{T}\,V_{i}\\ \dot{p}_{g}(t) & = & v_{g}(t)=R_{i}^{T}gt\\ \dot{p}_{v}(t) & = & v_{a}(t) \end{eqnarray*} @@ -1057,8 +1358,11 @@ p_{g}(t) & = & R_{i}^{T}\frac{gt^{2}}{2} \end_inset -The recipe for the IMU factor is then, in summary. - Solve the ordinary differential equations +The recipe for the IMU factor is then, in summary: +\end_layout + +\begin_layout Enumerate +Solve the ordinary differential equations \begin_inset Formula \begin{eqnarray*} \dot{\theta}(t) & = & H(\theta(t))^{-1}\,\omega^{b}(t)\\ @@ -1077,7 +1381,10 @@ starting from zero, up to time \end_inset at all times. 
- Form the local coordinate vector as +\end_layout + +\begin_layout Enumerate +Form the local coordinate vector as \begin_inset Formula \[ \zeta(t_{ij})=\left[\theta(t_{ij}),p(t_{ij}),v(t_{ij})\right]=\left[\theta(t_{ij}),R_{i}^{T}V_{i}t_{ij}+R_{i}^{T}\frac{gt_{ij}^{2}}{2}+p_{v}(t_{ij}),R_{i}^{T}gt_{ij}+v_{a}(t_{ij})\right] @@ -1085,6 +1392,10 @@ starting from zero, up to time \end_inset + +\end_layout + +\begin_layout Enumerate Predict the NavState \begin_inset Formula $X_{j}$ \end_inset @@ -1096,7 +1407,7 @@ Predict the NavState from \begin_inset Formula \[ -X_{j}=\mathcal{R}_{X_{i}}(\zeta(t_{ij}))=\left\{ \Phi_{R_{0}}\left(\theta(t_{ij})\right),P_{i}+V_{i}t_{ij}+\frac{gt_{ij}^{2}}{2}+R_{i}\, p_{v}(t_{ij}),V_{i}+gt_{ij}+R_{i}\, v_{a}(t_{ij})\right\} +X_{j}=\mathcal{R}_{X_{i}}(\zeta(t_{ij}))=\left\{ \Phi_{R_{0}}\left(\theta(t_{ij})\right),P_{i}+V_{i}t_{ij}+\frac{gt_{ij}^{2}}{2}+R_{i}\,p_{v}(t_{ij}),V_{i}+gt_{ij}+R_{i}\,v_{a}(t_{ij})\right\} \] \end_inset @@ -1179,11 +1490,59 @@ where we defined the rotation matrix \end_layout \begin_layout Subsubsection* -Noise Propagation +Noise Modeling \end_layout \begin_layout Standard -Even when we assume uncorrelated noise on +Given the above solutions to the differential equations, we add noise modeling + to account for the various sources of error in the system +\end_layout + +\begin_layout Standard +\begin_inset Formula +\begin{eqnarray} +\theta_{k+1} & = & \theta_{k}+H(\theta_{k})^{-1}\,(\omega_{k}^{b}+\epsilon_{k}^{\omega}-b_{k}^{\omega}-\epsilon_{init}^{\omega})\Delta_{t}\nonumber \\ +p_{k+1} & = & p_{k}+v_{k}\Delta_{t}+R_{k}(a_{k}^{b}+\epsilon_{k}^{a}-b_{k}^{a}-\epsilon_{init}^{a})\frac{\Delta_{t}^{2}}{2}+\epsilon_{k}^{int}\label{eq:preintegration}\\ +v_{k+1} & = & v_{k}+R_{k}(a_{k}^{b}+\epsilon_{k}^{a}-b_{k}^{a}-\epsilon_{init}^{a})\Delta_{t}\nonumber \\ +b_{k+1}^{a} & = & b_{k}^{a}+\epsilon_{k}^{b^{a}}\nonumber \\ +b_{k+1}^{\omega} & = & b_{k}^{\omega}+\epsilon_{k}^{b^{\omega}}\nonumber +\end{eqnarray} + +\end_inset + + +\end_layout + +\begin_layout Standard +which we can write compactly as, +\end_layout + +\begin_layout Standard +\begin_inset Formula +\begin{eqnarray} +\theta_{k+1} & = & f_{\theta}(\theta_{k},b_{k}^{w},\epsilon_{k}^{\omega},\epsilon_{init}^{b^{\omega}})\label{eq:compact-preintegration}\\ +p_{k+1} & = & f_{p}(p_{k},v_{k},\theta_{k},b_{k}^{a},\epsilon_{k}^{a},\epsilon_{init}^{a},\epsilon_{k}^{int})\nonumber \\ +v_{k+1} & = & f_{v}(v_{k,}\theta_{k,}b_{k}^{a},\epsilon_{k}^{a},\epsilon_{init}^{a})\nonumber \\ +b_{k+1}^{a} & = & f_{b^{a}}(b_{k}^{a},\epsilon_{k}^{b^{a}})\nonumber \\ +b_{k+1}^{\omega} & = & f_{b^{\omega}}(b_{k}^{\omega},\epsilon_{k}^{b^{\omega}})\nonumber +\end{eqnarray} + +\end_inset + + +\end_layout + +\begin_layout Subsubsection* +Noise Propagation in IMU Factor +\end_layout + +\begin_layout Standard +We wish to compute the ImuFactor covariance matrix +\begin_inset Formula $\Sigma_{ij}$ +\end_inset + +. + Even when we assume uncorrelated noise on \begin_inset Formula $\omega^{b}$ \end_inset @@ -1201,11 +1560,12 @@ Even when we assume uncorrelated noise on \end_inset appear in multiple places. - To model the noise propagation, let us define + To model the noise propagation, let us define the preintegrated navigation + state \begin_inset Formula $\zeta_{k}=[\theta_{k},p_{k},v_{k}]$ \end_inset - and rewrite Eqns. +, as a 9D vector on tangent space at and rewrite Eqns. 
( \begin_inset CommandInset ref LatexCommand ref @@ -1239,7 +1599,7 @@ Then the noise on propagates as \begin_inset Formula \begin{equation} -\Sigma_{k+1}=A_{k}\Sigma_{k}A_{k}^{T}+B_{k}\Sigma_{\eta}^{ad}B_{k}+C_{k}\Sigma_{\eta}^{gd}C_{k}\label{eq:prop} +\Sigma_{k+1}=A_{k}\Sigma_{k}A_{k}^{T}+B_{k}\Sigma_{\eta}^{ad}B_{k}^{T}+C_{k}\Sigma_{\eta}^{gd}C_{k}^{T}\label{eq:prop} \end{equation} \end_inset @@ -1280,6 +1640,42 @@ where \begin_inset Formula $\omega^{b}$ \end_inset +. + Note that +\begin_inset Formula $\Sigma_{k},$ +\end_inset + + +\begin_inset Formula $\Sigma_{\eta}^{ad}$ +\end_inset + +, and +\begin_inset Formula $\Sigma_{\eta}^{gd}$ +\end_inset + + are discrete time covariances with +\begin_inset Formula $\Sigma_{\eta}^{ad}$ +\end_inset + +, and +\begin_inset Formula $\Sigma_{\eta}^{gd}$ +\end_inset + +divided by +\begin_inset Formula $\Delta_{t}$ +\end_inset + +. + Please see the section on Covariance Discretization +\begin_inset CommandInset ref +LatexCommand vpageref +reference "subsec:Covariance-Discretization" +plural "false" +caps "false" +noprefix "false" + +\end_inset + . \end_layout @@ -1295,7 +1691,7 @@ We start with the noise propagation on \begin_layout Standard \begin_inset Formula \[ -\deriv{\theta_{k+1}}{\theta_{k}}=I_{3x3}+\deriv{H(\theta_{k})^{-1}\omega_{k}^{b}}{\theta_{k}}\Delta_{t} +\deriv{\theta_{k+1}}{\theta_{k}}=I_{3\times3}+\deriv{H(\theta_{k})^{-1}\omega_{k}^{b}}{\theta_{k}}\Delta_{t} \] \end_inset @@ -1307,7 +1703,7 @@ It can be shown that for small we have \begin_inset Formula \[ -\deriv{H(\theta_{k})^{-1}\omega_{k}^{b}}{\theta_{k}}\approx-\frac{1}{2}\Skew{\omega_{k}^{b}}\mbox{ and hence }\deriv{\theta_{k+1}}{\theta_{k}}=I_{3x3}-\frac{\Delta t}{2}\Skew{\omega_{k}^{b}} +\deriv{H(\theta_{k})^{-1}\omega_{k}^{b}}{\theta_{k}}\approx-\frac{1}{2}\Skew{\omega_{k}^{b}}\mbox{ and hence }\deriv{\theta_{k+1}}{\theta_{k}}=I_{3\times3}-\frac{\Delta_{t}}{2}\Skew{\omega_{k}^{b}} \] \end_inset @@ -1357,9 +1753,9 @@ Putting all this together, we finally obtain \begin_inset Formula \[ A_{k}\approx\left[\begin{array}{ccc} -I_{3\times3}-\frac{\Delta_{t}}{2}\Skew{\omega_{k}^{b}}\\ +I_{3\times3}-\frac{\Delta_{t}}{2}\Skew{\omega_{k}^{b}} & 0_{3\times3} & 0_{3\times3}\\ R_{k}\Skew{-a_{k}^{b}}H(\theta_{k})\frac{\Delta_{t}}{2}^{2} & I_{3\times3} & I_{3\times3}\Delta_{t}\\ -R_{k}\Skew{-a_{k}^{b}}H(\theta_{k})\Delta_{t} & & I_{3\times3} +R_{k}\Skew{-a_{k}^{b}}H(\theta_{k})\Delta_{t} & 0_{3\times3} & I_{3\times3} \end{array}\right] \] @@ -1372,7 +1768,7 @@ B_{k}=\left[\begin{array}{c} 0_{3\times3}\\ R_{k}\frac{\Delta_{t}}{2}^{2}\\ R_{k}\Delta_{t} -\end{array}\right],\,\,\,\, C_{k}=\left[\begin{array}{c} +\end{array}\right],\,\,\,\,C_{k}=\left[\begin{array}{c} H(\theta_{k})^{-1}\Delta_{t}\\ 0_{3\times3}\\ 0_{3\times3} @@ -1384,9 +1780,393 @@ H(\theta_{k})^{-1}\Delta_{t}\\ \end_layout +\begin_layout Subsubsection* +Noise Propagation in Combined IMU Factor +\end_layout + +\begin_layout Standard +We can similarly account for bias drift over time, as is commonly seen in + commercial grade IMUs. +\end_layout + +\begin_layout Standard +We expand the state vector as +\begin_inset Formula $\zeta_{k}=[\theta_{k},p_{k},v_{k},b_{k}^{a},b_{k}^{\omega}]$ +\end_inset + + to include the bias terms and define the augmented noise vector +\begin_inset Formula $\epsilon=[\epsilon_{k}^{\omega},\epsilon_{k}^{a},\epsilon_{k}^{b^{a}},\epsilon_{k}^{b^{\omega}},\epsilon_{k}^{int},\epsilon_{init}^{b^{a}},\epsilon_{init}^{b^{\omega}}]$ +\end_inset + +. 
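To make the 9x9 propagation derived above for the plain ImuFactor concrete, here is a small Eigen sketch of a single discrete step of the propagation equation Sigma_{k+1} = A_k Sigma_k A_k^T + B_k Sigma_eta^{ad} B_k^T + C_k Sigma_eta^{gd} C_k^T. The free function and the assumption that A_k, B_k and C_k arrive precomputed are purely illustrative; as noted earlier, the actual covariance update is carried out inside the preintegrated measurement classes.

#include <Eigen/Dense>

using Eigen::Matrix3d;
using Mat9 = Eigen::Matrix<double, 9, 9>;
using Mat9x3 = Eigen::Matrix<double, 9, 3>;

// One discrete covariance propagation step for zeta_k = [theta_k, p_k, v_k]:
//   Sigma_{k+1} = A_k Sigma_k A_k^T + B_k Sigma_ad B_k^T + C_k Sigma_gd C_k^T
// Sigma_ad and Sigma_gd are the discrete-time accelerometer and gyroscope
// noise covariances (the continuous ones divided by dt, see the
// Covariance Discretization section below).
Mat9 propagateCovariance(const Mat9& Sigma_k, const Mat9& A_k,
                         const Mat9x3& B_k, const Mat9x3& C_k,
                         const Matrix3d& Sigma_ad, const Matrix3d& Sigma_gd) {
  return A_k * Sigma_k * A_k.transpose() +
         B_k * Sigma_ad * B_k.transpose() +
         C_k * Sigma_gd * C_k.transpose();
}

The augmented 15-dimensional state of the Combined IMU Factor defined above propagates in exactly the same way, only with larger matrices.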
+ This gives the noise propagation equation as +\end_layout + +\begin_layout Standard +\begin_inset Formula +\begin{equation} +\Sigma_{k+1}=F_{k}\Sigma_{k}F_{k}^{T}+G_{k}Q_{k}G_{k}^{T}\label{eq:prop-combined} +\end{equation} + +\end_inset + + +\end_layout + +\begin_layout Standard +where +\begin_inset Formula $F_{k}$ +\end_inset + + is the +\begin_inset Formula $15\times15$ +\end_inset + + derivative of +\begin_inset Formula $f$ +\end_inset + + wrpt this new +\begin_inset Formula $\zeta$ +\end_inset + +, and +\begin_inset Formula $G_{k}$ +\end_inset + + is the +\begin_inset Formula $15\times21$ +\end_inset + + matrix for first order uncertainty propagation. + +\begin_inset Formula $Q_{k}$ +\end_inset + + defines the uncertainty of +\begin_inset Formula $\eta$ +\end_inset + +. + The top-left +\begin_inset Formula $9\times9$ +\end_inset + + of +\begin_inset Formula $F_{k}$ +\end_inset + + is the same as +\begin_inset Formula $A_{k}$ +\end_inset + +, thus we only have the jacobians wrpt the biases left to account for. +\end_layout + +\begin_layout Standard +Conveniently, the jacobians of the pose and velocity wrpt the biases are + already computed in the +\emph on +ImuFactor +\emph default +derivation as matrices +\begin_inset Formula $B_{k}$ +\end_inset + + and +\begin_inset Formula $C_{k}$ +\end_inset + +, while they are identity matrices wrpt the biases themselves. + Thus, we can easily plug-in the values from the previous section to give + us the final result +\end_layout + +\begin_layout Standard +\begin_inset Formula +\[ +F_{k}\approx\left[\begin{array}{ccccc} +I_{3\times3}-\frac{\Delta_{t}}{2}\Skew{\omega_{k}^{b}} & 0_{3\times3} & 0_{3\times3} & 0_{3\times3} & H(\theta_{k})^{-1}\Delta_{t}\\ +R_{k}\Skew{-a_{k}^{b}}H(\theta_{k})\frac{\Delta_{t}}{2}^{2} & I_{3\times3} & I_{3\times3}\Delta_{t} & R_{k}\frac{\Delta_{t}}{2}^{2} & 0_{3\times3}\\ +R_{k}\Skew{-a_{k}^{b}}H(\theta_{k})\Delta_{t} & 0_{3\times3} & I_{3\times3} & R_{k}\Delta_{t} & 0_{3\times3}\\ +0_{3\times3} & 0_{3\times3} & 0_{3\times3} & I_{3\times3} & 0_{3\times3}\\ +0_{3\times3} & 0_{3\times3} & 0_{3\times3} & 0_{3\times3} & I_{3\times3} +\end{array}\right] +\] + +\end_inset + + +\end_layout + +\begin_layout Standard +Similarly for +\begin_inset Formula $Q_{k},$ +\end_inset + +we get +\end_layout + +\begin_layout Standard +\begin_inset Formula +\[ +Q_{k}=\left[\begin{array}{ccccccc} +\Sigma^{\omega}\\ + & \Sigma^{a}\\ + & & \Sigma^{b^{a}}\\ + & & & \Sigma^{b^{\omega}}\\ + & & & & \Sigma^{int}\\ + & & & & & \Sigma^{init_{11}} & \Sigma^{init_{12}}\\ + & & & & & \Sigma^{init_{21}} & \Sigma^{init_{22}} +\end{array}\right] +\] + +\end_inset + + +\end_layout + +\begin_layout Standard +and for +\begin_inset Formula $G_{k}$ +\end_inset + + we get +\end_layout + +\begin_layout Standard +\begin_inset Formula +\[ +G_{k}=\left[\begin{array}{ccccccc} +\deriv{\theta}{\epsilon^{\omega}} & \deriv{\theta}{\epsilon^{a}} & \deriv{\theta}{\epsilon^{b^{a}}} & \deriv{\theta}{\epsilon^{b^{\omega}}} & \deriv{\theta}{\epsilon^{int}} & \deriv{\theta}{\epsilon_{init}^{b^{a}}} & \deriv{\theta}{\epsilon_{init}^{b^{\omega}}}\\ +\deriv p{\epsilon^{\omega}} & \deriv p{\epsilon^{a}} & \deriv p{\epsilon^{b^{a}}} & \deriv p{\epsilon^{b^{\omega}}} & \deriv p{\epsilon^{int}} & \deriv p{\epsilon_{init}^{b^{a}}} & \deriv p{\epsilon_{init}^{b^{\omega}}}\\ +\deriv v{\epsilon^{\omega}} & \deriv v{\epsilon^{a}} & \deriv v{\epsilon^{b^{a}}} & \deriv v{\epsilon^{b^{\omega}}} & \deriv v{\epsilon^{int}} & \deriv v{\epsilon_{init}^{b^{a}}} & \deriv 
v{\epsilon_{init}^{b^{\omega}}}\\ +\deriv{b^{a}}{\epsilon^{\omega}} & \deriv{b^{a}}{\epsilon^{a}} & \deriv{b^{a}}{\epsilon^{b^{a}}} & \deriv{b^{a}}{\epsilon^{b^{\omega}}} & \deriv{b^{a}}{\epsilon^{int}} & \deriv{b^{a}}{\epsilon_{init}^{b^{a}}} & \deriv{b^{a}}{\epsilon_{init}^{b^{\omega}}}\\ +\deriv{b^{\omega}}{\epsilon^{\omega}} & \deriv{b^{\omega}}{\epsilon^{a}} & \deriv{b^{\omega}}{\epsilon^{b^{a}}} & \deriv{b^{\omega}}{\epsilon^{b^{\omega}}} & \deriv{b^{\omega}}{\epsilon^{int}} & \deriv{b^{\omega}}{\epsilon_{init}^{b^{a}}} & \deriv{b^{\omega}}{\epsilon_{init}^{b^{\omega}}} +\end{array}\right]=\left[\begin{array}{ccccccc} +\deriv{\theta}{\epsilon^{\omega}} & 0 & 0 & 0 & 0 & 0 & \deriv{\theta}{\eta_{init}^{b^{\omega}}}\\ +0 & \deriv p{\epsilon^{a}} & 0 & 0 & \deriv p{\epsilon^{int}} & \deriv p{\eta_{init}^{b^{a}}} & 0\\ +0 & \deriv v{\epsilon^{a}} & 0 & 0 & 0 & \deriv v{\eta_{init}^{b^{a}}} & 0\\ +0 & 0 & I_{3\times3} & 0 & 0 & 0 & 0\\ +0 & 0 & 0 & I_{3\times3} & 0 & 0 & 0 +\end{array}\right] +\] + +\end_inset + + +\end_layout + +\begin_layout Standard +We can perform the block-wise computation of +\begin_inset Formula $G_{k}Q_{k}G_{k}^{T}$ +\end_inset + + +\end_layout + +\begin_layout Standard +\begin_inset Formula +\[ +G_{k}Q_{k}G_{k}^{T}=\left[\begin{array}{ccccccc} +\deriv{\theta}{\epsilon^{\omega}} & 0 & 0 & 0 & 0 & 0 & \deriv{\theta}{\eta_{init}^{b^{\omega}}}\\ +0 & \deriv p{\epsilon^{a}} & 0 & 0 & \deriv p{\epsilon^{int}} & \deriv p{\eta_{init}^{b^{a}}} & 0\\ +0 & \deriv v{\epsilon^{a}} & 0 & 0 & 0 & \deriv v{\eta_{init}^{b^{a}}} & 0\\ +0 & 0 & I_{3\times3} & 0 & 0 & 0 & 0\\ +0 & 0 & 0 & I_{3\times3} & 0 & 0 & 0 +\end{array}\right]\left[\begin{array}{ccccccc} +\Sigma^{\omega}\\ + & \Sigma^{a}\\ + & & \Sigma^{b^{a}}\\ + & & & \Sigma^{b^{\omega}}\\ + & & & & \Sigma^{int}\\ + & & & & & \Sigma^{init_{11}} & \Sigma^{init_{12}}\\ + & & & & & \Sigma^{init_{21}} & \Sigma^{init_{22}} +\end{array}\right]G_{k}^{T} +\] + +\end_inset + + +\end_layout + +\begin_layout Standard +\begin_inset Formula +\[ +G_{k}Q_{k}G_{k}^{T}=\left[\begin{array}{ccccccc} +\deriv{\theta}{\epsilon^{\omega}}\Sigma^{\omega} & 0 & 0 & 0 & 0 & \deriv{\theta}{\eta_{init}^{b^{\omega}}}\Sigma^{init_{21}} & \deriv{\theta}{\eta_{init}^{b^{\omega}}}\Sigma^{init_{22}}\\ +0 & \deriv p{\epsilon^{a}}\Sigma^{a} & 0 & 0 & \deriv p{\epsilon^{int}}\Sigma^{int} & \deriv p{\eta_{init}^{b^{a}}}\Sigma^{init_{11}} & \deriv p{\eta_{init}^{b^{a}}}\Sigma^{init_{12}}\\ +0 & \deriv v{\epsilon^{a}}\Sigma^{a} & 0 & 0 & 0 & \deriv v{\eta_{init}^{b^{a}}}\Sigma^{init_{11}} & \deriv v{\eta_{init}^{b^{a}}}\Sigma^{init_{12}}\\ +0 & 0 & \Sigma^{b^{a}} & 0 & 0 & 0 & 0\\ +0 & 0 & 0 & \Sigma^{b^{\omega}} & 0 & 0 & 0 +\end{array}\right]G_{k}^{T} +\] + +\end_inset + + +\end_layout + +\begin_layout Standard +\begin_inset Formula +\begin{multline*} +G_{k}Q_{k}G_{k}^{T}=\left[\begin{array}{ccccccc} +\deriv{\theta}{\epsilon^{\omega}}\Sigma^{\omega} & 0 & 0 & 0 & 0 & \deriv{\theta}{\eta_{init}^{b^{\omega}}}\Sigma^{init_{21}} & \deriv{\theta}{\eta_{init}^{b^{\omega}}}\Sigma^{init_{22}}\\ +0 & \deriv p{\epsilon^{a}}\Sigma^{a} & 0 & 0 & \deriv p{\epsilon^{int}}\Sigma^{int} & \deriv p{\eta_{init}^{b^{a}}}\Sigma^{init_{11}} & \deriv p{\eta_{init}^{b^{a}}}\Sigma^{init_{12}}\\ +0 & \deriv v{\epsilon^{a}}\Sigma^{a} & 0 & 0 & 0 & \deriv v{\eta_{init}^{b^{a}}}\Sigma^{init_{11}} & \deriv v{\eta_{init}^{b^{a}}}\Sigma^{init_{12}}\\ +0 & 0 & \Sigma^{b^{a}} & 0 & 0 & 0 & 0\\ +0 & 0 & 0 & \Sigma^{b^{\omega}} & 0 & 0 & 0 +\end{array}\right]\\ +\left[\begin{array}{ccccc} 
+\deriv{\theta}{\epsilon^{\omega}}^{T} & 0 & 0 & 0 & 0\\ +0 & \deriv p{\epsilon^{a}}^{T} & \deriv v{\epsilon^{a}}^{T} & 0 & 0\\ +0 & 0 & 0 & I_{3\times3} & 0\\ +0 & 0 & 0 & 0 & I_{3\times3}\\ +0 & \deriv p{\epsilon^{int}}^{T} & 0 & 0 & 0\\ +0 & \deriv p{\eta_{init}^{b^{a}}}^{T} & \deriv v{\eta_{init}^{b^{a}}}^{T} & 0 & 0\\ +\deriv{\theta}{\eta_{init}^{b^{\omega}}}^{T} & 0 & 0 & 0 & 0 +\end{array}\right] +\end{multline*} + +\end_inset + + +\end_layout + +\begin_layout Standard +\begin_inset Formula +\begin{multline*} +=\\ +\left[\begin{array}{ccccc} +\deriv{\theta}{\epsilon^{\omega}}\Sigma^{\omega}\deriv{\theta}{\epsilon^{\omega}}^{T}+\deriv{\theta}{\eta_{init}^{b^{\omega}}}\Sigma^{init_{22}}\deriv{\theta}{\eta_{init}^{b^{\omega}}}^{T} & \deriv{\theta}{\eta_{init}^{b^{\omega}}}\Sigma^{init_{21}}\deriv p{\eta_{init}^{b^{a}}}^{T} & \deriv{\theta}{\eta_{init}^{b^{\omega}}}\Sigma^{init_{21}}\deriv v{\eta_{init}^{b^{a}}}^{T} & 0 & 0\\ +\deriv p{\eta_{init}^{b^{a}}}\Sigma^{init_{12}}\deriv{\theta}{\eta_{init}^{b^{\omega}}}^{T} & \deriv p{\epsilon^{a}}\Sigma^{a}\deriv p{\epsilon^{a}}^{T}+\deriv p{\epsilon^{int}}\Sigma^{int}\deriv p{\epsilon^{int}}^{T}\\ + & +\deriv p{\eta_{init}^{b^{a}}}\Sigma^{init_{11}}\deriv p{\eta_{init}^{b^{a}}}^{T} & \deriv p{\epsilon^{a}}\Sigma^{a}\deriv v{\epsilon^{a}}^{T}+\deriv p{\eta_{init}^{b^{a}}}\Sigma^{init_{11}}\deriv v{\eta_{init}^{b^{a}}}^{T} & 0 & 0\\ +\deriv v{\eta_{init}^{b^{a}}}\Sigma^{init_{12}}\deriv{\theta}{\eta_{init}^{b^{\omega}}}^{T} & \deriv v{\epsilon^{a}}\Sigma^{a}\deriv p{\epsilon^{a}}^{T}+\deriv v{\eta_{init}^{b^{a}}}\Sigma^{init_{11}}\deriv p{\eta_{init}^{b^{a}}}^{T} & \deriv v{\epsilon^{a}}\Sigma^{a}\deriv v{\epsilon^{a}}^{T}+\deriv v{\eta_{init}^{b^{a}}}\Sigma^{init_{11}}\deriv v{\eta_{init}^{b^{a}}}^{T} & 0 & 0\\ +0 & 0 & 0 & \Sigma^{b^{a}} & 0\\ +0 & 0 & 0 & 0 & \Sigma^{b^{\omega}} +\end{array}\right] +\end{multline*} + +\end_inset + + +\end_layout + +\begin_layout Standard +which we can break into 3 matrices for clarity, representing the main diagonal + and off-diagonal elements +\end_layout + +\begin_layout Standard +\begin_inset Formula +\begin{multline*} +=\\ +\left[\begin{array}{ccccc} +\deriv{\theta}{\epsilon^{\omega}}\Sigma^{\omega}\deriv{\theta}{\epsilon^{\omega}}^{T} & 0 & 0 & 0 & 0\\ +0 & \deriv p{\epsilon^{a}}\Sigma^{a}\deriv p{\epsilon^{a}}^{T} & 0 & 0 & 0\\ +0 & 0 & \deriv v{\epsilon^{a}}\Sigma^{a}\deriv v{\epsilon^{a}}^{T} & 0 & 0\\ +0 & 0 & 0 & \Sigma^{b^{a}} & 0\\ +0 & 0 & 0 & 0 & \Sigma^{b^{\omega}} +\end{array}\right]+\\ +\left[\begin{array}{ccccc} +\deriv{\theta}{\eta_{init}^{b^{\omega}}}\Sigma^{init_{22}}\deriv{\theta}{\eta_{init}^{b^{\omega}}}^{T} & 0 & 0 & 0 & 0\\ +0 & \deriv p{\epsilon^{int}}\Sigma^{int}\deriv p{\epsilon^{int}}^{T}+\deriv p{\eta_{init}^{b^{a}}}\Sigma^{init_{11}}\deriv p{\eta_{init}^{b^{a}}}^{T} & 0 & 0 & 0\\ +0 & 0 & \deriv v{\eta_{init}^{b^{a}}}\Sigma^{init_{11}}\deriv v{\eta_{init}^{b^{a}}}^{T} & 0 & 0\\ +0 & 0 & 0 & 0 & 0\\ +0 & 0 & 0 & 0 & 0 +\end{array}\right]+\\ +\left[\begin{array}{ccccc} +0 & \deriv{\theta}{\eta_{init}^{b^{\omega}}}\Sigma^{init_{21}}\deriv p{\eta_{init}^{b^{a}}}^{T} & \deriv{\theta}{\eta_{init}^{b^{\omega}}}\Sigma^{init_{21}}\deriv v{\eta_{init}^{b^{a}}}^{T} & 0 & 0\\ +\deriv p{\eta_{init}^{b^{a}}}\Sigma^{init_{12}}\deriv{\theta}{\eta_{init}^{b^{\omega}}}^{T} & 0 & \deriv p{\epsilon^{a}}\Sigma^{a}\deriv v{\epsilon^{a}}^{T}+\deriv p{\eta_{init}^{b^{a}}}\Sigma^{init_{11}}\deriv v{\eta_{init}^{b^{a}}}^{T} & 0 & 0\\ +\deriv 
v{\eta_{init}^{b^{a}}}\Sigma^{init_{12}}\deriv{\theta}{\eta_{init}^{b^{\omega}}}^{T} & \deriv v{\epsilon^{a}}\Sigma^{a}\deriv p{\epsilon^{a}}^{T}+\deriv v{\eta_{init}^{b^{a}}}\Sigma^{init_{11}}\deriv p{\eta_{init}^{b^{a}}}^{T} & 0 & 0 & 0\\ +0 & 0 & 0 & 0 & 0\\ +0 & 0 & 0 & 0 & 0 +\end{array}\right] +\end{multline*} + +\end_inset + + +\end_layout + +\begin_layout Subsubsection* +Covariance Discretization +\begin_inset CommandInset label +LatexCommand label +name "subsec:Covariance-Discretization" + +\end_inset + + +\end_layout + +\begin_layout Standard +So far, all the covariances are assumed to be continuous since the state + and measurement models are considered to be continuous-time stochastic + processes. + However, we sample measurements in a discrete-time fashion, necessitating + the need to convert the covariances to their discrete time equivalents. +\end_layout + +\begin_layout Standard +The IMU is modeled as a first order Gauss-Markov process, with a measurement + noise and a process noise. + Following +\begin_inset CommandInset citation +LatexCommand cite +after "Alg. 1 Page 57" +key "Nikolic16thesis" +literal "false" + +\end_inset + + and +\begin_inset CommandInset citation +LatexCommand cite +after "Eqns 129-130" +key "Trawny05report_IndirectKF" +literal "false" + +\end_inset + +, the measurement noises +\begin_inset Formula $[\epsilon^{a},\epsilon^{\omega},\epsilon_{init}]$ +\end_inset + + are simply scaled by +\begin_inset Formula $\frac{1}{\Delta t}$ +\end_inset + +, and the process noises +\begin_inset Formula $[\epsilon^{int},\epsilon^{b^{a}},\epsilon^{b^{\omega}}]$ +\end_inset + + are scaled by +\begin_inset Formula $\Delta t$ +\end_inset + + where +\begin_inset Formula $\Delta t$ +\end_inset + + is the time interval between 2 consecutive samples. + For a thorough explanation of the discretization process, please refer + to +\begin_inset CommandInset citation +LatexCommand cite +after "Section 8.1" +key "Simon06book" +literal "false" + +\end_inset + +. +\end_layout + \begin_layout Standard \begin_inset CommandInset bibtex LatexCommand bibtex +btprint "btPrintCited" bibfiles "refs" options "plain" diff --git a/doc/ImuFactor.pdf b/doc/ImuFactor.pdf index 0b13c1f59..1823cbc4a 100644 Binary files a/doc/ImuFactor.pdf and b/doc/ImuFactor.pdf differ diff --git a/doc/PreintegratedIMUJacobians.pdf b/doc/PreintegratedIMUJacobians.pdf new file mode 100644 index 000000000..02616ee07 Binary files /dev/null and b/doc/PreintegratedIMUJacobians.pdf differ diff --git a/doc/refs.bib b/doc/refs.bib index 414773483..ec42fb032 100644 --- a/doc/refs.bib +++ b/doc/refs.bib @@ -1,26 +1,72 @@ +%% This BibTeX bibliography file was created using BibDesk. 
+%% https://bibdesk.sourceforge.io/ + +%% Created for Varun Agrawal at 2021-09-27 17:39:09 -0400 + + +%% Saved with string encoding Unicode (UTF-8) + + + +@article{Lupton12tro, + author = {Lupton, Todd and Sukkarieh, Salah}, + date-added = {2021-09-27 17:38:56 -0400}, + date-modified = {2021-09-27 17:39:09 -0400}, + doi = {10.1109/TRO.2011.2170332}, + journal = {IEEE Transactions on Robotics}, + number = {1}, + pages = {61-76}, + title = {Visual-Inertial-Aided Navigation for High-Dynamic Motion in Built Environments Without Initial Conditions}, + volume = {28}, + year = {2012}, + Bdsk-Url-1 = {https://doi.org/10.1109/TRO.2011.2170332}} + +@inproceedings{Forster15rss, + author = {Christian Forster and Luca Carlone and Frank Dellaert and Davide Scaramuzza}, + booktitle = {Robotics: Science and Systems}, + date-added = {2021-09-26 20:44:41 -0400}, + date-modified = {2021-09-26 20:45:03 -0400}, + title = {IMU Preintegration on Manifold for Efficient Visual-Inertial Maximum-a-Posteriori Estimation}, + year = {2015}} + @article{Iserles00an, - title = {Lie-group methods}, - author = {Iserles, Arieh and Munthe-Kaas, Hans Z and - N{\o}rsett, Syvert P and Zanna, Antonella}, - journal = {Acta Numerica 2000}, - volume = {9}, - pages = {215--365}, - year = {2000}, - publisher = {Cambridge Univ Press} -} + author = {Iserles, Arieh and Munthe-Kaas, Hans Z and N{\o}rsett, Syvert P and Zanna, Antonella}, + journal = {Acta Numerica 2000}, + pages = {215--365}, + publisher = {Cambridge Univ Press}, + title = {Lie-group methods}, + volume = {9}, + year = {2000}} @book{Murray94book, - title = {A mathematical introduction to robotic manipulation}, - author = {Murray, Richard M and Li, Zexiang and Sastry, S - Shankar and Sastry, S Shankara}, - year = {1994}, - publisher = {CRC press} -} + author = {Murray, Richard M and Li, Zexiang and Sastry, S Shankar and Sastry, S Shankara}, + publisher = {CRC press}, + title = {A mathematical introduction to robotic manipulation}, + year = {1994}} @book{Spivak65book, - title = {Calculus on manifolds}, - author = {Spivak, Michael}, - volume = {1}, - year = {1965}, - publisher = {WA Benjamin New York} -} \ No newline at end of file + author = {Spivak, Michael}, + publisher = {WA Benjamin New York}, + title = {Calculus on manifolds}, + volume = {1}, + year = {1965}} + +@phdthesis{Nikolic16thesis, + title={Characterisation, calibration, and design of visual-inertial sensor systems for robot navigation}, + author={Nikolic, Janosch}, + year={2016}, + school={ETH Zurich} +} + +@book{Simon06book, + title={Optimal state estimation: Kalman, H infinity, and nonlinear approaches}, + author={Simon, Dan}, + year={2006}, + publisher={John Wiley \& Sons} +} + +@inproceedings{Trawny05report_IndirectKF, + title={Indirect Kalman Filter for 3 D Attitude Estimation}, + author={Nikolas Trawny and Stergios I. 
Roumeliotis}, + year={2005} +} diff --git a/examples/CombinedImuFactorsExample.cpp b/examples/CombinedImuFactorsExample.cpp index 9211a4d5f..e0396ee81 100644 --- a/examples/CombinedImuFactorsExample.cpp +++ b/examples/CombinedImuFactorsExample.cpp @@ -60,13 +60,14 @@ namespace po = boost::program_options; po::variables_map parseOptions(int argc, char* argv[]) { po::options_description desc; - desc.add_options()("help,h", "produce help message")( - "data_csv_path", po::value()->default_value("imuAndGPSdata.csv"), - "path to the CSV file with the IMU data")( - "output_filename", - po::value()->default_value("imuFactorExampleResults.csv"), - "path to the result file to use")("use_isam", po::bool_switch(), - "use ISAM as the optimizer"); + desc.add_options()("help,h", "produce help message") // help message + ("data_csv_path", po::value()->default_value("imuAndGPSdata.csv"), + "path to the CSV file with the IMU data") // path to the data file + ("output_filename", + po::value()->default_value("imuFactorExampleResults.csv"), + "path to the result file to use") // filename to save results to + ("use_isam", po::bool_switch(), + "use ISAM as the optimizer"); // flag for ISAM optimizer po::variables_map vm; po::store(po::parse_command_line(argc, argv, desc), vm); @@ -106,7 +107,7 @@ boost::shared_ptr imuParams() { I_3x3 * 1e-8; // error committed in integrating position from velocities Matrix33 bias_acc_cov = I_3x3 * pow(accel_bias_rw_sigma, 2); Matrix33 bias_omega_cov = I_3x3 * pow(gyro_bias_rw_sigma, 2); - Matrix66 bias_acc_omega_int = + Matrix66 bias_acc_omega_init = I_6x6 * 1e-5; // error in the bias used for preintegration auto p = PreintegratedCombinedMeasurements::Params::MakeSharedD(0.0); @@ -122,7 +123,7 @@ boost::shared_ptr imuParams() { // PreintegrationCombinedMeasurements params: p->biasAccCovariance = bias_acc_cov; // acc bias in continuous p->biasOmegaCovariance = bias_omega_cov; // gyro bias in continuous - p->biasAccOmegaInt = bias_acc_omega_int; + p->biasAccOmegaInt = bias_acc_omega_init; return p; } diff --git a/examples/ImuFactorsExample.cpp b/examples/ImuFactorsExample.cpp index 38ee4c7c7..c17631864 100644 --- a/examples/ImuFactorsExample.cpp +++ b/examples/ImuFactorsExample.cpp @@ -94,7 +94,7 @@ boost::shared_ptr imuParams() { I_3x3 * 1e-8; // error committed in integrating position from velocities Matrix33 bias_acc_cov = I_3x3 * pow(accel_bias_rw_sigma, 2); Matrix33 bias_omega_cov = I_3x3 * pow(gyro_bias_rw_sigma, 2); - Matrix66 bias_acc_omega_int = + Matrix66 bias_acc_omega_init = I_6x6 * 1e-5; // error in the bias used for preintegration auto p = PreintegratedCombinedMeasurements::Params::MakeSharedD(0.0); @@ -110,7 +110,7 @@ boost::shared_ptr imuParams() { // PreintegrationCombinedMeasurements params: p->biasAccCovariance = bias_acc_cov; // acc bias in continuous p->biasOmegaCovariance = bias_omega_cov; // gyro bias in continuous - p->biasAccOmegaInt = bias_acc_omega_int; + p->biasAccOmegaInt = bias_acc_omega_init; return p; } diff --git a/examples/README.md b/examples/README.md index 5a72736e0..924a79521 100644 --- a/examples/README.md +++ b/examples/README.md @@ -33,7 +33,6 @@ The following examples illustrate some concepts from Georgia Tech's research pap ## 2D Pose SLAM * **LocalizationExample.cpp**: modeling robot motion -* **LocalizationExample2.cpp**: example with GPS like measurements * **Pose2SLAMExample**: A 2D Pose SLAM example using the predefined typedefs in gtsam/slam/pose2SLAM.h * **Pose2SLAMExample_advanced**: same, but uses an Optimizer object * 
**Pose2SLAMwSPCG**: solve a simple 3 by 3 grid of Pose2 SLAM problem by using easy SPCG interface diff --git a/gtsam/base/Group.h b/gtsam/base/Group.h index 39541416e..89fa8248c 100644 --- a/gtsam/base/Group.h +++ b/gtsam/base/Group.h @@ -95,7 +95,7 @@ template struct MultiplicativeGroupTraits { typedef group_tag structure_category; typedef multiplicative_group_tag group_flavor; - static Class Identity() { return Class::identity(); } + static Class Identity() { return Class::Identity(); } static Class Compose(const Class &g, const Class & h) { return g * h;} static Class Between(const Class &g, const Class & h) { return g.inverse() * h;} static Class Inverse(const Class &g) { return g.inverse();} @@ -111,7 +111,7 @@ template struct AdditiveGroupTraits { typedef group_tag structure_category; typedef additive_group_tag group_flavor; - static Class Identity() { return Class::identity(); } + static Class Identity() { return Class::Identity(); } static Class Compose(const Class &g, const Class & h) { return g + h;} static Class Between(const Class &g, const Class & h) { return h - g;} static Class Inverse(const Class &g) { return -g;} @@ -147,7 +147,7 @@ public: DirectProduct(const G& g, const H& h):std::pair(g,h) {} // identity - static DirectProduct identity() { return DirectProduct(); } + static DirectProduct Identity() { return DirectProduct(); } DirectProduct operator*(const DirectProduct& other) const { return DirectProduct(traits::Compose(this->first, other.first), @@ -181,7 +181,7 @@ public: DirectSum(const G& g, const H& h):std::pair(g,h) {} // identity - static DirectSum identity() { return DirectSum(); } + static DirectSum Identity() { return DirectSum(); } DirectSum operator+(const DirectSum& other) const { return DirectSum(g()+other.g(), h()+other.h()); diff --git a/gtsam/base/Lie.h b/gtsam/base/Lie.h index cb8e7d017..c4eba5deb 100644 --- a/gtsam/base/Lie.h +++ b/gtsam/base/Lie.h @@ -177,7 +177,7 @@ struct LieGroupTraits: GetDimensionImpl { /// @name Group /// @{ typedef multiplicative_group_tag group_flavor; - static Class Identity() { return Class::identity();} + static Class Identity() { return Class::Identity();} /// @} /// @name Manifold diff --git a/gtsam/base/ProductLieGroup.h b/gtsam/base/ProductLieGroup.h index a2dcf738e..6689c11d6 100644 --- a/gtsam/base/ProductLieGroup.h +++ b/gtsam/base/ProductLieGroup.h @@ -48,7 +48,7 @@ public: /// @name Group /// @{ typedef multiplicative_group_tag group_flavor; - static ProductLieGroup identity() {return ProductLieGroup();} + static ProductLieGroup Identity() {return ProductLieGroup();} ProductLieGroup operator*(const ProductLieGroup& other) const { return ProductLieGroup(traits::Compose(this->first,other.first), diff --git a/gtsam/base/Vector.h b/gtsam/base/Vector.h index 9cb2aa165..f7923ff88 100644 --- a/gtsam/base/Vector.h +++ b/gtsam/base/Vector.h @@ -60,6 +60,7 @@ GTSAM_MAKE_VECTOR_DEFS(9) GTSAM_MAKE_VECTOR_DEFS(10) GTSAM_MAKE_VECTOR_DEFS(11) GTSAM_MAKE_VECTOR_DEFS(12) +GTSAM_MAKE_VECTOR_DEFS(15) typedef Eigen::VectorBlock SubVector; typedef Eigen::VectorBlock ConstSubVector; diff --git a/gtsam/base/VectorSpace.h b/gtsam/base/VectorSpace.h index 43644b5c4..f7809f596 100644 --- a/gtsam/base/VectorSpace.h +++ b/gtsam/base/VectorSpace.h @@ -169,7 +169,7 @@ struct HasVectorSpacePrereqs { Vector v; BOOST_CONCEPT_USAGE(HasVectorSpacePrereqs) { - p = Class::identity(); // identity + p = Class::Identity(); // identity q = p + p; // addition q = p - p; // subtraction v = p.vector(); // conversion to vector @@ -192,7 +192,7 @@ struct 
VectorSpaceTraits: VectorSpaceImpl { /// @name Group /// @{ typedef additive_group_tag group_flavor; - static Class Identity() { return Class::identity();} + static Class Identity() { return Class::Identity();} /// @} /// @name Manifold diff --git a/gtsam/base/tests/testGroup.cpp b/gtsam/base/tests/testGroup.cpp index 976dee697..7e8cb7821 100644 --- a/gtsam/base/tests/testGroup.cpp +++ b/gtsam/base/tests/testGroup.cpp @@ -29,7 +29,7 @@ class Symmetric: private Eigen::PermutationMatrix { Eigen::PermutationMatrix(P) { } public: - static Symmetric identity() { return Symmetric(); } + static Symmetric Identity() { return Symmetric(); } Symmetric() { Eigen::PermutationMatrix::setIdentity(); } diff --git a/gtsam/base/treeTraversal-inst.h b/gtsam/base/treeTraversal-inst.h index 30cec3b9a..be45a248e 100644 --- a/gtsam/base/treeTraversal-inst.h +++ b/gtsam/base/treeTraversal-inst.h @@ -221,6 +221,6 @@ void PrintForest(const FOREST& forest, std::string str, PrintForestVisitorPre visitor(keyFormatter); DepthFirstForest(forest, str, visitor); } -} +} // namespace treeTraversal -} +} // namespace gtsam diff --git a/gtsam/basis/ParameterMatrix.h b/gtsam/basis/ParameterMatrix.h index eddcbfeae..3f830ec51 100644 --- a/gtsam/basis/ParameterMatrix.h +++ b/gtsam/basis/ParameterMatrix.h @@ -189,9 +189,9 @@ class ParameterMatrix { * NOTE: The size at compile time is unknown so this identity is zero * length and thus not valid. */ - inline static ParameterMatrix identity() { + inline static ParameterMatrix Identity() { // throw std::runtime_error( - // "ParameterMatrix::identity(): Don't use this function"); + // "ParameterMatrix::Identity(): Don't use this function"); return ParameterMatrix(0); } diff --git a/gtsam/basis/basis.i b/gtsam/basis/basis.i index a6c9d87ee..0cd87ba65 100644 --- a/gtsam/basis/basis.i +++ b/gtsam/basis/basis.i @@ -137,7 +137,7 @@ class FitBasis { static gtsam::GaussianFactorGraph::shared_ptr LinearGraph( const std::map& sequence, const gtsam::noiseModel::Base* model, size_t N); - This::Parameters parameters() const; + gtsam::This::Parameters parameters() const; }; } // namespace gtsam diff --git a/gtsam/basis/tests/testParameterMatrix.cpp b/gtsam/basis/tests/testParameterMatrix.cpp index ec62e8eea..ae2e8e111 100644 --- a/gtsam/basis/tests/testParameterMatrix.cpp +++ b/gtsam/basis/tests/testParameterMatrix.cpp @@ -133,7 +133,7 @@ TEST(ParameterMatrix, VectorSpace) { // vector EXPECT(assert_equal(Vector::Ones(M * N), params.vector())); // identity - EXPECT(assert_equal(ParameterMatrix::identity(), + EXPECT(assert_equal(ParameterMatrix::Identity(), ParameterMatrix(Matrix::Zero(M, 0)))); } diff --git a/gtsam/discrete/DiscreteKey.cpp b/gtsam/discrete/DiscreteKey.cpp index 121d61103..06ed2ca3b 100644 --- a/gtsam/discrete/DiscreteKey.cpp +++ b/gtsam/discrete/DiscreteKey.cpp @@ -48,4 +48,25 @@ namespace gtsam { return keys & key2; } + void DiscreteKeys::print(const std::string& s, + const KeyFormatter& keyFormatter) const { + for (auto&& dkey : *this) { + std::cout << DefaultKeyFormatter(dkey.first) << " " << dkey.second + << std::endl; + } + } + + bool DiscreteKeys::equals(const DiscreteKeys& other, double tol) const { + if (this->size() != other.size()) { + return false; + } + + for (size_t i = 0; i < this->size(); i++) { + if (this->at(i).first != other.at(i).first || + this->at(i).second != other.at(i).second) { + return false; + } + } + return true; + } } diff --git a/gtsam/discrete/DiscreteKey.h b/gtsam/discrete/DiscreteKey.h index dea00074d..8e0802d83 100644 --- 
a/gtsam/discrete/DiscreteKey.h +++ b/gtsam/discrete/DiscreteKey.h @@ -21,6 +21,7 @@ #include #include +#include #include #include #include @@ -69,8 +70,30 @@ namespace gtsam { push_back(key); return *this; } + + /// Print the keys and cardinalities. + void print(const std::string& s = "", + const KeyFormatter& keyFormatter = DefaultKeyFormatter) const; + + /// Check equality to another DiscreteKeys object. + bool equals(const DiscreteKeys& other, double tol = 0) const; + + /** Serialization function */ + friend class boost::serialization::access; + template + void serialize(ARCHIVE& ar, const unsigned int /*version*/) { + ar& boost::serialization::make_nvp( + "DiscreteKeys", + boost::serialization::base_object>(*this)); + } + }; // DiscreteKeys /// Create a list from two keys GTSAM_EXPORT DiscreteKeys operator&(const DiscreteKey& key1, const DiscreteKey& key2); -} + + // traits + template <> + struct traits : public Testable {}; + + } // namespace gtsam diff --git a/gtsam/discrete/discrete.i b/gtsam/discrete/discrete.i index b3a12a8d5..fa98f36fa 100644 --- a/gtsam/discrete/discrete.i +++ b/gtsam/discrete/discrete.i @@ -219,6 +219,16 @@ class DiscreteBayesTree { }; #include + +class DiscreteLookupTable : gtsam::DiscreteConditional{ + DiscreteLookupTable(size_t nFrontals, const gtsam::DiscreteKeys& keys, + const gtsam::DecisionTreeFactor::ADT& potentials); + void print( + const std::string& s = "Discrete Lookup Table: ", + const gtsam::KeyFormatter& keyFormatter = gtsam::DefaultKeyFormatter) const; + size_t argmax(const gtsam::DiscreteValues& parentsValues) const; +}; + class DiscreteLookupDAG { DiscreteLookupDAG(); void push_back(const gtsam::DiscreteLookupTable* table); diff --git a/gtsam/discrete/tests/testDiscreteFactor.cpp b/gtsam/discrete/tests/testDiscreteFactor.cpp index 8681cf7eb..db0491c9d 100644 --- a/gtsam/discrete/tests/testDiscreteFactor.cpp +++ b/gtsam/discrete/tests/testDiscreteFactor.cpp @@ -16,14 +16,29 @@ * @author Duy-Nguyen Ta */ -#include -#include #include +#include +#include +#include + #include using namespace boost::assign; using namespace std; using namespace gtsam; +using namespace gtsam::serializationTestHelpers; + +/* ************************************************************************* */ +TEST(DisreteKeys, Serialization) { + DiscreteKeys keys; + keys& DiscreteKey(0, 2); + keys& DiscreteKey(1, 3); + keys& DiscreteKey(2, 4); + + EXPECT(equalsObj(keys)); + EXPECT(equalsXML(keys)); + EXPECT(equalsBinary(keys)); +} /* ************************************************************************* */ int main() { @@ -31,4 +46,3 @@ int main() { return TestRegistry::runAllTests(tr); } /* ************************************************************************* */ - diff --git a/gtsam/geometry/Cyclic.h b/gtsam/geometry/Cyclic.h index 065cd0140..a503a6072 100644 --- a/gtsam/geometry/Cyclic.h +++ b/gtsam/geometry/Cyclic.h @@ -38,7 +38,7 @@ public: /// Default constructor yields identity Cyclic():i_(0) { } - static Cyclic identity() { return Cyclic();} + static Cyclic Identity() { return Cyclic();} /// Cast to size_t operator size_t() const { diff --git a/gtsam/geometry/PinholeCamera.h b/gtsam/geometry/PinholeCamera.h index c20e90819..e5a8eec7a 100644 --- a/gtsam/geometry/PinholeCamera.h +++ b/gtsam/geometry/PinholeCamera.h @@ -213,7 +213,7 @@ public: } /// for Canonical - static PinholeCamera identity() { + static PinholeCamera Identity() { return PinholeCamera(); // assumes that the default constructor is valid } diff --git a/gtsam/geometry/PinholePose.h 
b/gtsam/geometry/PinholePose.h index 7b92be5d5..ce393e45f 100644 --- a/gtsam/geometry/PinholePose.h +++ b/gtsam/geometry/PinholePose.h @@ -412,7 +412,7 @@ public: } /// for Canonical - static PinholePose identity() { + static PinholePose Identity() { return PinholePose(); // assumes that the default constructor is valid } diff --git a/gtsam/geometry/Pose2.h b/gtsam/geometry/Pose2.h index 466c5a42a..008e6525d 100644 --- a/gtsam/geometry/Pose2.h +++ b/gtsam/geometry/Pose2.h @@ -119,7 +119,7 @@ public: /// @{ /// identity for group operation - inline static Pose2 identity() { return Pose2(); } + inline static Pose2 Identity() { return Pose2(); } /// inverse GTSAM_EXPORT Pose2 inverse() const; diff --git a/gtsam/geometry/Pose3.h b/gtsam/geometry/Pose3.h index 33fb55250..6dec7f74f 100644 --- a/gtsam/geometry/Pose3.h +++ b/gtsam/geometry/Pose3.h @@ -103,7 +103,7 @@ public: /// @{ /// identity for group operation - static Pose3 identity() { + static Pose3 Identity() { return Pose3(); } diff --git a/gtsam/geometry/Rot2.h b/gtsam/geometry/Rot2.h index 2690ca248..be8d11cda 100644 --- a/gtsam/geometry/Rot2.h +++ b/gtsam/geometry/Rot2.h @@ -107,8 +107,8 @@ namespace gtsam { /// @name Group /// @{ - /** identity */ - inline static Rot2 identity() { return Rot2(); } + /** Identity */ + inline static Rot2 Identity() { return Rot2(); } /** The inverse rotation - negative angle */ Rot2 inverse() const { return Rot2(c_, -s_);} diff --git a/gtsam/geometry/Rot3.h b/gtsam/geometry/Rot3.h index 01ca7778c..800ce4cdf 100644 --- a/gtsam/geometry/Rot3.h +++ b/gtsam/geometry/Rot3.h @@ -297,7 +297,7 @@ class GTSAM_EXPORT Rot3 : public LieGroup { /// @{ /// identity rotation for group operation - inline static Rot3 identity() { + inline static Rot3 Identity() { return Rot3(); } diff --git a/gtsam/geometry/SOn.h b/gtsam/geometry/SOn.h index af0e7a3cf..7d1e6db7a 100644 --- a/gtsam/geometry/SOn.h +++ b/gtsam/geometry/SOn.h @@ -178,13 +178,13 @@ class SO : public LieGroup, internal::DimensionSO(N)> { /// SO identity for N >= 2 template > - static SO identity() { + static SO Identity() { return SO(); } /// SO identity for N == Eigen::Dynamic template > - static SO identity(size_t n = 0) { + static SO Identity(size_t n = 0) { return SO(n); } diff --git a/gtsam/geometry/Similarity2.cpp b/gtsam/geometry/Similarity2.cpp index 4ed3351f8..4b8cfd581 100644 --- a/gtsam/geometry/Similarity2.cpp +++ b/gtsam/geometry/Similarity2.cpp @@ -134,7 +134,7 @@ void Similarity2::print(const std::string& s) const { << std::endl; } -Similarity2 Similarity2::identity() { return Similarity2(); } +Similarity2 Similarity2::Identity() { return Similarity2(); } Similarity2 Similarity2::operator*(const Similarity2& S) const { return Similarity2(R_ * S.R_, ((1.0 / S.s_) * t_) + R_ * S.t_, s_ * S.s_); diff --git a/gtsam/geometry/Similarity2.h b/gtsam/geometry/Similarity2.h index 05f10d149..e72cda484 100644 --- a/gtsam/geometry/Similarity2.h +++ b/gtsam/geometry/Similarity2.h @@ -83,7 +83,7 @@ class GTSAM_EXPORT Similarity2 : public LieGroup { /// @{ /// Return an identity transform - static Similarity2 identity(); + static Similarity2 Identity(); /// Composition Similarity2 operator*(const Similarity2& S) const; diff --git a/gtsam/geometry/Similarity3.cpp b/gtsam/geometry/Similarity3.cpp index 7fde974c5..146161796 100644 --- a/gtsam/geometry/Similarity3.cpp +++ b/gtsam/geometry/Similarity3.cpp @@ -122,7 +122,7 @@ void Similarity3::print(const std::string& s) const { std::cout << "t: " << translation().transpose() << " s: " << scale() << std::endl; } 
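Since the identity() to Identity() rename in this hunk is repeated across all the group types touched by this patch (Pose2, Pose3, Rot2, Rot3, SO(n), Similarity2/3, the camera classes, and the wrapper interface files), a before/after sketch of its effect on downstream code may be useful; the variable names are illustrative only. Note that the traits<T>::Identity() entry point in gtsam/base/Lie.h was already capitalized, so only the member-function spelling changes.

#include <gtsam/geometry/Pose3.h>
#include <gtsam/geometry/Similarity3.h>

using namespace gtsam;

void identityRenameExample() {
  // Before this patch:
  //   Pose3 origin = Pose3::identity();
  //   Similarity3 sim = Similarity3::identity();
  // After this patch:
  Pose3 origin = Pose3::Identity();
  Similarity3 sim = Similarity3::Identity();
  (void)origin;
  (void)sim;
}

Based on the hunks shown here, the lowercase spelling is replaced rather than kept as an alias, so downstream callers need the corresponding one-character update when picking up this change.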
-Similarity3 Similarity3::identity() { +Similarity3 Similarity3::Identity() { return Similarity3(); } Similarity3 Similarity3::operator*(const Similarity3& S) const { diff --git a/gtsam/geometry/Similarity3.h b/gtsam/geometry/Similarity3.h index 845d4c810..76d2bf536 100644 --- a/gtsam/geometry/Similarity3.h +++ b/gtsam/geometry/Similarity3.h @@ -84,7 +84,7 @@ class GTSAM_EXPORT Similarity3 : public LieGroup { /// @{ /// Return an identity transform - static Similarity3 identity(); + static Similarity3 Identity(); /// Composition Similarity3 operator*(const Similarity3& S) const; diff --git a/gtsam/geometry/SphericalCamera.h b/gtsam/geometry/SphericalCamera.h index 4880423d3..adcbe46a5 100644 --- a/gtsam/geometry/SphericalCamera.h +++ b/gtsam/geometry/SphericalCamera.h @@ -88,7 +88,7 @@ class GTSAM_EXPORT SphericalCamera { /// Default constructor SphericalCamera() - : pose_(Pose3::identity()), emptyCal_(boost::make_shared()) {} + : pose_(Pose3()), emptyCal_(boost::make_shared()) {} /// Constructor with pose explicit SphericalCamera(const Pose3& pose) @@ -198,9 +198,9 @@ class GTSAM_EXPORT SphericalCamera { } /// for Canonical - static SphericalCamera identity() { + static SphericalCamera Identity() { return SphericalCamera( - Pose3::identity()); // assumes that the default constructor is valid + Pose3::Identity()); // assumes that the default constructor is valid } /// for Linear Triangulation diff --git a/gtsam/geometry/StereoPoint2.h b/gtsam/geometry/StereoPoint2.h index 9eef01577..8c9197fd2 100644 --- a/gtsam/geometry/StereoPoint2.h +++ b/gtsam/geometry/StereoPoint2.h @@ -71,7 +71,7 @@ public: /// @{ /// identity - inline static StereoPoint2 identity() { + inline static StereoPoint2 Identity() { return StereoPoint2(); } diff --git a/gtsam/geometry/geometry.i b/gtsam/geometry/geometry.i index 4ebff6c03..988727b98 100644 --- a/gtsam/geometry/geometry.i +++ b/gtsam/geometry/geometry.i @@ -16,7 +16,7 @@ class Point2 { bool equals(const gtsam::Point2& point, double tol) const; // Group - static gtsam::Point2 identity(); + static gtsam::Point2 Identity(); // Standard Interface double x() const; @@ -73,7 +73,7 @@ class StereoPoint2 { bool equals(const gtsam::StereoPoint2& point, double tol) const; // Group - static gtsam::StereoPoint2 identity(); + static gtsam::StereoPoint2 Identity(); gtsam::StereoPoint2 inverse() const; gtsam::StereoPoint2 compose(const gtsam::StereoPoint2& p2) const; gtsam::StereoPoint2 between(const gtsam::StereoPoint2& p2) const; @@ -115,7 +115,7 @@ class Point3 { bool equals(const gtsam::Point3& p, double tol) const; // Group - static gtsam::Point3 identity(); + static gtsam::Point3 Identity(); // Standard Interface Vector vector() const; @@ -149,7 +149,7 @@ class Rot2 { bool equals(const gtsam::Rot2& rot, double tol) const; // Group - static gtsam::Rot2 identity(); + static gtsam::Rot2 Identity(); gtsam::Rot2 inverse(); gtsam::Rot2 compose(const gtsam::Rot2& p2) const; gtsam::Rot2 between(const gtsam::Rot2& p2) const; @@ -198,7 +198,7 @@ class SO3 { bool equals(const gtsam::SO3& other, double tol) const; // Group - static gtsam::SO3 identity(); + static gtsam::SO3 Identity(); gtsam::SO3 inverse() const; gtsam::SO3 between(const gtsam::SO3& R) const; gtsam::SO3 compose(const gtsam::SO3& R) const; @@ -228,7 +228,7 @@ class SO4 { bool equals(const gtsam::SO4& other, double tol) const; // Group - static gtsam::SO4 identity(); + static gtsam::SO4 Identity(); gtsam::SO4 inverse() const; gtsam::SO4 between(const gtsam::SO4& Q) const; gtsam::SO4 compose(const gtsam::SO4& Q) 
const; @@ -258,7 +258,7 @@ class SOn { bool equals(const gtsam::SOn& other, double tol) const; // Group - static gtsam::SOn identity(); + static gtsam::SOn Identity(); gtsam::SOn inverse() const; gtsam::SOn between(const gtsam::SOn& Q) const; gtsam::SOn compose(const gtsam::SOn& Q) const; @@ -322,7 +322,7 @@ class Rot3 { bool equals(const gtsam::Rot3& rot, double tol) const; // Group - static gtsam::Rot3 identity(); + static gtsam::Rot3 Identity(); gtsam::Rot3 inverse() const; gtsam::Rot3 compose(const gtsam::Rot3& p2) const; gtsam::Rot3 between(const gtsam::Rot3& p2) const; @@ -380,7 +380,7 @@ class Pose2 { bool equals(const gtsam::Pose2& pose, double tol) const; // Group - static gtsam::Pose2 identity(); + static gtsam::Pose2 Identity(); gtsam::Pose2 inverse() const; gtsam::Pose2 compose(const gtsam::Pose2& p2) const; gtsam::Pose2 between(const gtsam::Pose2& p2) const; @@ -444,7 +444,7 @@ class Pose3 { bool equals(const gtsam::Pose3& pose, double tol) const; // Group - static gtsam::Pose3 identity(); + static gtsam::Pose3 Identity(); gtsam::Pose3 inverse() const; gtsam::Pose3 inverse(Eigen::Ref H) const; gtsam::Pose3 compose(const gtsam::Pose3& pose) const; @@ -1126,10 +1126,10 @@ class TriangulationResult { Status status; TriangulationResult(const gtsam::Point3& p); const gtsam::Point3& get() const; - static TriangulationResult Degenerate(); - static TriangulationResult Outlier(); - static TriangulationResult FarPoint(); - static TriangulationResult BehindCamera(); + static gtsam::TriangulationResult Degenerate(); + static gtsam::TriangulationResult Outlier(); + static gtsam::TriangulationResult FarPoint(); + static gtsam::TriangulationResult BehindCamera(); bool valid() const; bool degenerate() const; bool outlier() const; diff --git a/gtsam/geometry/tests/testPose2.cpp b/gtsam/geometry/tests/testPose2.cpp index de779cc75..88c00a2e9 100644 --- a/gtsam/geometry/tests/testPose2.cpp +++ b/gtsam/geometry/tests/testPose2.cpp @@ -902,7 +902,7 @@ TEST(Pose2 , TransformCovariance3) { /* ************************************************************************* */ TEST(Pose2, Print) { - Pose2 pose(Rot2::identity(), Point2(1, 2)); + Pose2 pose(Rot2::Identity(), Point2(1, 2)); // Generate the expected output string s = "Planar Pose"; diff --git a/gtsam/geometry/tests/testPose3.cpp b/gtsam/geometry/tests/testPose3.cpp index e1d3d5ea2..78a4af799 100644 --- a/gtsam/geometry/tests/testPose3.cpp +++ b/gtsam/geometry/tests/testPose3.cpp @@ -1133,7 +1133,7 @@ Pose3 testing_interpolate(const Pose3& t1, const Pose3& t2, double gamma) { retu TEST(Pose3, interpolateJacobians) { { - Pose3 X = Pose3::identity(); + Pose3 X = Pose3::Identity(); Pose3 Y(Rot3::Rz(M_PI_2), Point3(1, 0, 0)); double t = 0.5; Pose3 expectedPoseInterp(Rot3::Rz(M_PI_4), Point3(0.5, -0.207107, 0)); // note: different from test above: this is full Pose3 interpolation @@ -1147,10 +1147,10 @@ TEST(Pose3, interpolateJacobians) { EXPECT(assert_equal(expectedJacobianY,actualJacobianY,1e-6)); } { - Pose3 X = Pose3::identity(); - Pose3 Y(Rot3::identity(), Point3(1, 0, 0)); + Pose3 X = Pose3::Identity(); + Pose3 Y(Rot3::Identity(), Point3(1, 0, 0)); double t = 0.3; - Pose3 expectedPoseInterp(Rot3::identity(), Point3(0.3, 0, 0)); + Pose3 expectedPoseInterp(Rot3::Identity(), Point3(0.3, 0, 0)); Matrix actualJacobianX, actualJacobianY; EXPECT(assert_equal(expectedPoseInterp, interpolate(X, Y, t, actualJacobianX, actualJacobianY), 1e-5)); @@ -1161,7 +1161,7 @@ TEST(Pose3, interpolateJacobians) { 
EXPECT(assert_equal(expectedJacobianY,actualJacobianY,1e-6)); } { - Pose3 X = Pose3::identity(); + Pose3 X = Pose3::Identity(); Pose3 Y(Rot3::Rz(M_PI_2), Point3(0, 0, 0)); double t = 0.5; Pose3 expectedPoseInterp(Rot3::Rz(M_PI_4), Point3(0, 0, 0)); @@ -1204,7 +1204,7 @@ TEST(Pose3, Create) { /* ************************************************************************* */ TEST(Pose3, Print) { - Pose3 pose(Rot3::identity(), Point3(1, 2, 3)); + Pose3 pose(Rot3::Identity(), Point3(1, 2, 3)); // Generate the expected output std::string expected = "R: [\n\t1, 0, 0;\n\t0, 1, 0;\n\t0, 0, 1\n]\nt: 1 2 3\n"; diff --git a/gtsam/geometry/tests/testSimilarity2.cpp b/gtsam/geometry/tests/testSimilarity2.cpp index dd4fd0efd..ca041fc7b 100644 --- a/gtsam/geometry/tests/testSimilarity2.cpp +++ b/gtsam/geometry/tests/testSimilarity2.cpp @@ -33,8 +33,6 @@ static const Point2 P(0.2, 0.7); static const Rot2 R = Rot2::fromAngle(0.3); static const double s = 4; -const double degree = M_PI / 180; - //****************************************************************************** TEST(Similarity2, Concepts) { BOOST_CONCEPT_ASSERT((IsGroup)); diff --git a/gtsam/geometry/tests/testSimilarity3.cpp b/gtsam/geometry/tests/testSimilarity3.cpp index 7a134f6ef..fad24f743 100644 --- a/gtsam/geometry/tests/testSimilarity3.cpp +++ b/gtsam/geometry/tests/testSimilarity3.cpp @@ -203,11 +203,11 @@ TEST(Similarity3, ExpLogMap) { Vector7 zeros; zeros << 0, 0, 0, 0, 0, 0, 0; - Vector7 logIdentity = Similarity3::Logmap(Similarity3::identity()); + Vector7 logIdentity = Similarity3::Logmap(Similarity3::Identity()); EXPECT(assert_equal(zeros, logIdentity)); Similarity3 expZero = Similarity3::Expmap(zeros); - Similarity3 ident = Similarity3::identity(); + Similarity3 ident = Similarity3::Identity(); EXPECT(assert_equal(expZero, ident)); // Compare to matrix exponential, using expm in Lie.h @@ -391,7 +391,7 @@ TEST(Similarity3, Optimization) { NonlinearFactorGraph graph; graph.addPrior(key, prior, model); - // Create initial estimate with identity transform + // Create initial estimate with Identity transform Values initial; initial.insert(key, Similarity3()); diff --git a/gtsam/gtsam.i b/gtsam/gtsam.i index d4e959c3d..00b4d05f8 100644 --- a/gtsam/gtsam.i +++ b/gtsam/gtsam.i @@ -66,7 +66,7 @@ class KeySet { void serialize() const; }; -// Actually a vector +// Actually a vector, needed for Matlab class KeyVector { KeyVector(); KeyVector(const gtsam::KeyVector& other); diff --git a/gtsam/hybrid/GaussianMixture.cpp b/gtsam/hybrid/GaussianMixture.cpp index b04800d21..6816dfbf6 100644 --- a/gtsam/hybrid/GaussianMixture.cpp +++ b/gtsam/hybrid/GaussianMixture.cpp @@ -119,11 +119,36 @@ void GaussianMixture::print(const std::string &s, "", [&](Key k) { return formatter(k); }, [&](const GaussianConditional::shared_ptr &gf) -> std::string { RedirectCout rd; - if (!gf->empty()) + if (gf && !gf->empty()) gf->print("", formatter); else return {"nullptr"}; return rd.str(); }); } + +/* *******************************************************************************/ +void GaussianMixture::prune(const DecisionTreeFactor &decisionTree) { + // Functional which loops over all assignments and create a set of + // GaussianConditionals + auto pruner = [&decisionTree]( + const Assignment &choices, + const GaussianConditional::shared_ptr &conditional) + -> GaussianConditional::shared_ptr { + // typecast so we can use this to get probability value + DiscreteValues values(choices); + + if (decisionTree(values) == 0.0) { + // empty aka null pointer + 
boost::shared_ptr null; + return null; + } else { + return conditional; + } + }; + + auto pruned_conditionals = conditionals_.apply(pruner); + conditionals_.root_ = pruned_conditionals.root_; +} + } // namespace gtsam diff --git a/gtsam/hybrid/GaussianMixture.h b/gtsam/hybrid/GaussianMixture.h index fc1eb0f06..75deb4d55 100644 --- a/gtsam/hybrid/GaussianMixture.h +++ b/gtsam/hybrid/GaussianMixture.h @@ -21,6 +21,7 @@ #include #include +#include #include #include #include @@ -30,11 +31,14 @@ namespace gtsam { /** * @brief A conditional of gaussian mixtures indexed by discrete variables, as - * part of a Bayes Network. + * part of a Bayes Network. This is the result of the elimination of a + * continuous variable in a hybrid scheme, such that the remaining variables are + * discrete+continuous. * - * Represents the conditional density P(X | M, Z) where X is a continuous random - * variable, M is the selection of discrete variables corresponding to a subset - * of the Gaussian variables and Z is parent of this node + * Represents the conditional density P(X | M, Z) where X is the set of + * continuous random variables, M is the selection of discrete variables + * corresponding to a subset of the Gaussian variables and Z is parent of this + * node . * * The probability P(x|y,z,...) is proportional to * \f$ \sum_i k_i \exp - \frac{1}{2} |R_i x - (d_i - S_i y - T_i z - ...)|^2 \f$ @@ -118,7 +122,7 @@ class GTSAM_EXPORT GaussianMixture /// Test equality with base HybridFactor bool equals(const HybridFactor &lf, double tol = 1e-9) const override; - /* print utility */ + /// Print utility void print( const std::string &s = "GaussianMixture\n", const KeyFormatter &formatter = DefaultKeyFormatter) const override; @@ -128,6 +132,15 @@ class GTSAM_EXPORT GaussianMixture /// Getter for the underlying Conditionals DecisionTree const Conditionals &conditionals(); + /** + * @brief Prune the decision tree of Gaussian factors as per the discrete + * `decisionTree`. + * + * @param decisionTree A pruned decision tree of discrete keys where the + * leaves are probabilities. + */ + void prune(const DecisionTreeFactor &decisionTree); + /** * @brief Merge the Gaussian Factor Graphs in `this` and `sum` while * maintaining the decision tree structure. 
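The new `prune` entry point added above is easier to follow with a short usage sketch. The following is illustrative only and not part of the patch: `mixture` is assumed to be an existing GaussianMixture, `discreteProbs` a DecisionTreeFactor over the same discrete keys, and `maxNrLeaves` a size_t budget (DecisionTreeFactor::prune is used the same way in HybridGaussianISAM::prune later in this patch).

    // Keep only the most probable discrete assignments, then drop the Gaussian
    // conditionals whose assignments now have zero probability.
    gtsam::DecisionTreeFactor prunedProbs = discreteProbs.prune(maxNrLeaves);
    mixture.prune(prunedProbs);  // pruned leaves become nullptr conditionals
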
diff --git a/gtsam/hybrid/GaussianMixtureFactor.cpp b/gtsam/hybrid/GaussianMixtureFactor.cpp index 8f832d8ea..9b5be188a 100644 --- a/gtsam/hybrid/GaussianMixtureFactor.cpp +++ b/gtsam/hybrid/GaussianMixtureFactor.cpp @@ -51,7 +51,7 @@ GaussianMixtureFactor GaussianMixtureFactor::FromFactors( void GaussianMixtureFactor::print(const std::string &s, const KeyFormatter &formatter) const { HybridFactor::print(s, formatter); - std::cout << "]{\n"; + std::cout << "{\n"; factors_.print( "", [&](Key k) { return formatter(k); }, [&](const GaussianFactor::shared_ptr &gf) -> std::string { diff --git a/gtsam/hybrid/GaussianMixtureFactor.h b/gtsam/hybrid/GaussianMixtureFactor.h index 6c90ee6a7..a0a51af55 100644 --- a/gtsam/hybrid/GaussianMixtureFactor.h +++ b/gtsam/hybrid/GaussianMixtureFactor.h @@ -22,7 +22,7 @@ #include #include -#include +#include #include namespace gtsam { @@ -108,7 +108,7 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor { bool equals(const HybridFactor &lf, double tol = 1e-9) const override; void print( - const std::string &s = "HybridFactor\n", + const std::string &s = "GaussianMixtureFactor\n", const KeyFormatter &formatter = DefaultKeyFormatter) const override; /// @} diff --git a/gtsam/hybrid/HybridBayesNet.cpp b/gtsam/hybrid/HybridBayesNet.cpp index 1292711d8..bca50a902 100644 --- a/gtsam/hybrid/HybridBayesNet.cpp +++ b/gtsam/hybrid/HybridBayesNet.cpp @@ -10,7 +10,166 @@ * @file HybridBayesNet.cpp * @brief A bayes net of Gaussian Conditionals indexed by discrete keys. * @author Fan Jiang + * @author Varun Agrawal + * @author Shangjie Xue * @date January 2022 */ +#include +#include #include +#include + +namespace gtsam { + +/* ************************************************************************* */ +/// Return the DiscreteKey vector as a set. +static std::set DiscreteKeysAsSet(const DiscreteKeys &dkeys) { + std::set s; + s.insert(dkeys.begin(), dkeys.end()); + return s; +} + +/* ************************************************************************* */ +HybridBayesNet HybridBayesNet::prune( + const DecisionTreeFactor::shared_ptr &discreteFactor) const { + /* To Prune, we visitWith every leaf in the GaussianMixture. + * For each leaf, using the assignment we can check the discrete decision tree + * for 0.0 probability, then just set the leaf to a nullptr. + * + * We can later check the GaussianMixture for just nullptrs. + */ + + HybridBayesNet prunedBayesNetFragment; + + // Functional which loops over all assignments and create a set of + // GaussianConditionals + auto pruner = [&](const Assignment &choices, + const GaussianConditional::shared_ptr &conditional) + -> GaussianConditional::shared_ptr { + // typecast so we can use this to get probability value + DiscreteValues values(choices); + + if ((*discreteFactor)(values) == 0.0) { + // empty aka null pointer + boost::shared_ptr null; + return null; + } else { + return conditional; + } + }; + + // Go through all the conditionals in the + // Bayes Net and prune them as per discreteFactor. + for (size_t i = 0; i < this->size(); i++) { + HybridConditional::shared_ptr conditional = this->at(i); + + GaussianMixture::shared_ptr gaussianMixture = + boost::dynamic_pointer_cast(conditional->inner()); + + if (gaussianMixture) { + // We may have mixtures with less discrete keys than discreteFactor so we + // skip those since the label assignment does not exist. 
+ auto gmKeySet = DiscreteKeysAsSet(gaussianMixture->discreteKeys()); + auto dfKeySet = DiscreteKeysAsSet(discreteFactor->discreteKeys()); + if (gmKeySet != dfKeySet) { + // Add the gaussianMixture which doesn't have to be pruned. + prunedBayesNetFragment.push_back( + boost::make_shared(gaussianMixture)); + continue; + } + + // Run the pruning to get a new, pruned tree + GaussianMixture::Conditionals prunedTree = + gaussianMixture->conditionals().apply(pruner); + + DiscreteKeys discreteKeys = gaussianMixture->discreteKeys(); + // reverse keys to get a natural ordering + std::reverse(discreteKeys.begin(), discreteKeys.end()); + + // Convert from boost::iterator_range to KeyVector + // so we can pass it to constructor. + KeyVector frontals(gaussianMixture->frontals().begin(), + gaussianMixture->frontals().end()), + parents(gaussianMixture->parents().begin(), + gaussianMixture->parents().end()); + + // Create the new gaussian mixture and add it to the bayes net. + auto prunedGaussianMixture = boost::make_shared( + frontals, parents, discreteKeys, prunedTree); + + // Type-erase and add to the pruned Bayes Net fragment. + prunedBayesNetFragment.push_back( + boost::make_shared(prunedGaussianMixture)); + + } else { + // Add the non-GaussianMixture conditional + prunedBayesNetFragment.push_back(conditional); + } + } + + return prunedBayesNetFragment; +} + +/* ************************************************************************* */ +GaussianMixture::shared_ptr HybridBayesNet::atMixture(size_t i) const { + return factors_.at(i)->asMixture(); +} + +/* ************************************************************************* */ +GaussianConditional::shared_ptr HybridBayesNet::atGaussian(size_t i) const { + return factors_.at(i)->asGaussian(); +} + +/* ************************************************************************* */ +DiscreteConditional::shared_ptr HybridBayesNet::atDiscrete(size_t i) const { + return factors_.at(i)->asDiscreteConditional(); +} + +/* ************************************************************************* */ +GaussianBayesNet HybridBayesNet::choose( + const DiscreteValues &assignment) const { + GaussianBayesNet gbn; + for (size_t idx = 0; idx < size(); idx++) { + if (factors_.at(idx)->isHybrid()) { + // If factor is hybrid, select based on assignment. + GaussianMixture gm = *this->atMixture(idx); + gbn.push_back(gm(assignment)); + + } else if (factors_.at(idx)->isContinuous()) { + // If continuous only, add gaussian conditional. + gbn.push_back((this->atGaussian(idx))); + + } else if (factors_.at(idx)->isDiscrete()) { + // If factor at `idx` is discrete-only, we simply continue. + continue; + } + } + + return gbn; +} + +/* *******************************************************************************/ +HybridValues HybridBayesNet::optimize() const { + // Solve for the MPE + DiscreteBayesNet discrete_bn; + for (auto &conditional : factors_) { + if (conditional->isDiscrete()) { + discrete_bn.push_back(conditional->asDiscreteConditional()); + } + } + + DiscreteValues mpe = DiscreteFactorGraph(discrete_bn).optimize(); + + // Given the MPE, compute the optimal continuous values. 
+ GaussianBayesNet gbn = this->choose(mpe); + return HybridValues(mpe, gbn.optimize()); +} + +/* *******************************************************************************/ +VectorValues HybridBayesNet::optimize(const DiscreteValues &assignment) const { + GaussianBayesNet gbn = this->choose(assignment); + return gbn.optimize(); +} + +} // namespace gtsam diff --git a/gtsam/hybrid/HybridBayesNet.h b/gtsam/hybrid/HybridBayesNet.h index 43eead280..e84103a50 100644 --- a/gtsam/hybrid/HybridBayesNet.h +++ b/gtsam/hybrid/HybridBayesNet.h @@ -17,8 +17,12 @@ #pragma once +#include +#include #include +#include #include +#include namespace gtsam { @@ -34,8 +38,94 @@ class GTSAM_EXPORT HybridBayesNet : public BayesNet { using shared_ptr = boost::shared_ptr; using sharedConditional = boost::shared_ptr; + /// @name Standard Constructors + /// @{ + /** Construct empty bayes net */ HybridBayesNet() = default; + + /// @} + /// @name Testable + /// @{ + + /** Check equality */ + bool equals(const This &bn, double tol = 1e-9) const { + return Base::equals(bn, tol); + } + + /// print graph + void print( + const std::string &s = "", + const KeyFormatter &formatter = DefaultKeyFormatter) const override { + Base::print(s, formatter); + } + + /// @} + /// @name Standard Interface + /// @{ + + /// Add HybridConditional to Bayes Net + using Base::add; + + /// Add a discrete conditional to the Bayes Net. + void add(const DiscreteKey &key, const std::string &table) { + push_back( + HybridConditional(boost::make_shared(key, table))); + } + + /// Get a specific Gaussian mixture by index `i`. + GaussianMixture::shared_ptr atMixture(size_t i) const; + + /// Get a specific Gaussian conditional by index `i`. + GaussianConditional::shared_ptr atGaussian(size_t i) const; + + /// Get a specific discrete conditional by index `i`. + DiscreteConditional::shared_ptr atDiscrete(size_t i) const; + + /** + * @brief Get the Gaussian Bayes Net which corresponds to a specific discrete + * value assignment. + * + * @param assignment The discrete value assignment for the discrete keys. + * @return GaussianBayesNet + */ + GaussianBayesNet choose(const DiscreteValues &assignment) const; + + /** + * @brief Solve the HybridBayesNet by first computing the MPE of all the + * discrete variables and then optimizing the continuous variables based on + * the MPE assignment. + * + * @return HybridValues + */ + HybridValues optimize() const; + + /** + * @brief Given the discrete assignment, return the optimized estimate for the + * selected Gaussian BayesNet. + * + * @param assignment An assignment of discrete values. + * @return Values + */ + VectorValues optimize(const DiscreteValues &assignment) const; + + /// Prune the Hybrid Bayes Net given the discrete decision tree. 
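// Illustrative usage of the interface above and of `prune` declared just below
// (a sketch, not part of the patch; `hbn` is assumed to be a HybridBayesNet
// returned by hybrid elimination and `prunedProbs` a pruned
// DecisionTreeFactor::shared_ptr):
//
//   HybridValues best = hbn.optimize();                  // MPE discrete assignment + continuous solve
//   VectorValues delta = hbn.optimize(best.discrete());  // continuous solve for a fixed assignment
//   GaussianBayesNet gbn = hbn.choose(best.discrete());  // Gaussian Bayes net for that assignment
//   HybridBayesNet pruned = hbn.prune(prunedProbs);      // drop zero-probability Gaussian branches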
+ HybridBayesNet prune( + const DecisionTreeFactor::shared_ptr &discreteFactor) const; + + /// @} + + private: + /** Serialization function */ + friend class boost::serialization::access; + template + void serialize(ARCHIVE &ar, const unsigned int /*version*/) { + ar &BOOST_SERIALIZATION_BASE_OBJECT_NVP(Base); + } }; +/// traits +template <> +struct traits : public Testable {}; + } // namespace gtsam diff --git a/gtsam/hybrid/HybridBayesTree.cpp b/gtsam/hybrid/HybridBayesTree.cpp index d65270f91..3fa4d6268 100644 --- a/gtsam/hybrid/HybridBayesTree.cpp +++ b/gtsam/hybrid/HybridBayesTree.cpp @@ -18,10 +18,13 @@ */ #include +#include +#include #include #include #include #include +#include namespace gtsam { @@ -35,4 +38,110 @@ bool HybridBayesTree::equals(const This& other, double tol) const { return Base::equals(other, tol); } +/* ************************************************************************* */ +HybridValues HybridBayesTree::optimize() const { + DiscreteBayesNet dbn; + DiscreteValues mpe; + + auto root = roots_.at(0); + // Access the clique and get the underlying hybrid conditional + HybridConditional::shared_ptr root_conditional = root->conditional(); + + // The root should be discrete only, we compute the MPE + if (root_conditional->isDiscrete()) { + dbn.push_back(root_conditional->asDiscreteConditional()); + mpe = DiscreteFactorGraph(dbn).optimize(); + } else { + throw std::runtime_error( + "HybridBayesTree root is not discrete-only. Please check elimination " + "ordering or use continuous factor graph."); + } + + VectorValues values = optimize(mpe); + return HybridValues(mpe, values); +} + +/* ************************************************************************* */ +/** + * @brief Helper class for Depth First Forest traversal on the HybridBayesTree. + * + * When traversing the tree, the pre-order visitor will receive an instance of + * this class with the parent clique data. + */ +struct HybridAssignmentData { + const DiscreteValues assignment_; + GaussianBayesTree::sharedNode parentClique_; + // The gaussian bayes tree that will be recursively created. + GaussianBayesTree* gaussianbayesTree_; + + /** + * @brief Construct a new Hybrid Assignment Data object. + * + * @param assignment The MPE assignment for the optimal Gaussian cliques. + * @param parentClique The clique from the parent node of the current node. + * @param gbt The Gaussian Bayes Tree being generated during tree traversal. + */ + HybridAssignmentData(const DiscreteValues& assignment, + const GaussianBayesTree::sharedNode& parentClique, + GaussianBayesTree* gbt) + : assignment_(assignment), + parentClique_(parentClique), + gaussianbayesTree_(gbt) {} + + /** + * @brief A function used during tree traversal that operators on each node + * before visiting the node's children. + * + * @param node The current node being visited. + * @param parentData The HybridAssignmentData from the parent node. 
+ * @return HybridAssignmentData + */ + static HybridAssignmentData AssignmentPreOrderVisitor( + const HybridBayesTree::sharedNode& node, + HybridAssignmentData& parentData) { + // Extract the gaussian conditional from the Hybrid clique + HybridConditional::shared_ptr hybrid_conditional = node->conditional(); + GaussianConditional::shared_ptr conditional; + if (hybrid_conditional->isHybrid()) { + conditional = (*hybrid_conditional->asMixture())(parentData.assignment_); + } else if (hybrid_conditional->isContinuous()) { + conditional = hybrid_conditional->asGaussian(); + } else { + // Discrete only conditional, so we set to empty gaussian conditional + conditional = boost::make_shared(); + } + + // Create the GaussianClique for the current node + auto clique = boost::make_shared(conditional); + // Add the current clique to the GaussianBayesTree. + parentData.gaussianbayesTree_->addClique(clique, parentData.parentClique_); + + // Create new HybridAssignmentData where the current node is the parent + // This will be passed down to the children nodes + HybridAssignmentData data(parentData.assignment_, clique, + parentData.gaussianbayesTree_); + return data; + } +}; + +/* ************************************************************************* + */ +VectorValues HybridBayesTree::optimize(const DiscreteValues& assignment) const { + GaussianBayesTree gbt; + HybridAssignmentData rootData(assignment, 0, &gbt); + { + treeTraversal::no_op visitorPost; + // Limits OpenMP threads since we're mixing TBB and OpenMP + TbbOpenMPMixedScope threadLimiter; + treeTraversal::DepthFirstForestParallel( + *this, rootData, HybridAssignmentData::AssignmentPreOrderVisitor, + visitorPost); + } + + VectorValues result = gbt.optimize(); + + // Return the optimized bayes net result. + return result; +} + } // namespace gtsam diff --git a/gtsam/hybrid/HybridBayesTree.h b/gtsam/hybrid/HybridBayesTree.h index 0b89ca8c4..3fa344d4d 100644 --- a/gtsam/hybrid/HybridBayesTree.h +++ b/gtsam/hybrid/HybridBayesTree.h @@ -70,13 +70,46 @@ class GTSAM_EXPORT HybridBayesTree : public BayesTree { /** Check equality */ bool equals(const This& other, double tol = 1e-9) const; + /** + * @brief Optimize the hybrid Bayes tree by computing the MPE for the current + * set of discrete variables and using it to compute the best continuous + * update delta. + * + * @return HybridValues + */ + HybridValues optimize() const; + + /** + * @brief Recursively optimize the BayesTree to produce a vector solution. + * + * @param assignment The discrete values assignment to select the Gaussian + * mixtures. + * @return VectorValues + */ + VectorValues optimize(const DiscreteValues& assignment) const; + /// @} + + private: + /** Serialization function */ + friend class boost::serialization::access; + template + void serialize(ARCHIVE& ar, const unsigned int /*version*/) { + ar& BOOST_SERIALIZATION_BASE_OBJECT_NVP(Base); + } }; +/// traits +template <> +struct traits : public Testable {}; + /** * @brief Class for Hybrid Bayes tree orphan subtrees. * - * This does special stuff for the hybrid case + * This object stores parent keys in our base type factor so that + * eliminating those parent keys will pull this subtree into the + * elimination. + * This does special stuff for the hybrid case. 
* * @tparam CLIQUE */ diff --git a/gtsam/hybrid/HybridConditional.h b/gtsam/hybrid/HybridConditional.h index 3ba5da393..b43bb9945 100644 --- a/gtsam/hybrid/HybridConditional.h +++ b/gtsam/hybrid/HybridConditional.h @@ -69,7 +69,7 @@ class GTSAM_EXPORT HybridConditional BaseConditional; ///< Typedef to our conditional base class protected: - // Type-erased pointer to the inner type + /// Type-erased pointer to the inner type boost::shared_ptr inner_; public: @@ -127,8 +127,7 @@ class GTSAM_EXPORT HybridConditional * @param gaussianMixture Gaussian Mixture Conditional used to create the * HybridConditional. */ - HybridConditional( - boost::shared_ptr gaussianMixture); + HybridConditional(boost::shared_ptr gaussianMixture); /** * @brief Return HybridConditional as a GaussianMixture @@ -140,6 +139,17 @@ class GTSAM_EXPORT HybridConditional return boost::static_pointer_cast(inner_); } + /** + * @brief Return HybridConditional as a GaussianConditional + * + * @return GaussianConditional::shared_ptr + */ + GaussianConditional::shared_ptr asGaussian() { + if (!isContinuous()) + throw std::invalid_argument("Not a continuous conditional"); + return boost::static_pointer_cast(inner_); + } + /** * @brief Return conditional as a DiscreteConditional * @@ -168,10 +178,19 @@ class GTSAM_EXPORT HybridConditional /// Get the type-erased pointer to the inner type boost::shared_ptr inner() { return inner_; } -}; // DiscreteConditional + private: + /** Serialization function */ + friend class boost::serialization::access; + template + void serialize(Archive& ar, const unsigned int /*version*/) { + ar& BOOST_SERIALIZATION_BASE_OBJECT_NVP(BaseFactor); + ar& BOOST_SERIALIZATION_BASE_OBJECT_NVP(BaseConditional); + } + +}; // HybridConditional // traits template <> -struct traits : public Testable {}; +struct traits : public Testable {}; } // namespace gtsam diff --git a/gtsam/hybrid/HybridFactor.cpp b/gtsam/hybrid/HybridFactor.cpp index 8df2d524f..e9d64b283 100644 --- a/gtsam/hybrid/HybridFactor.cpp +++ b/gtsam/hybrid/HybridFactor.cpp @@ -52,7 +52,6 @@ DiscreteKeys CollectDiscreteKeys(const DiscreteKeys &key1, HybridFactor::HybridFactor(const KeyVector &keys) : Base(keys), isContinuous_(true), - nrContinuous_(keys.size()), continuousKeys_(keys) {} /* ************************************************************************ */ @@ -62,7 +61,6 @@ HybridFactor::HybridFactor(const KeyVector &continuousKeys, isDiscrete_((continuousKeys.size() == 0) && (discreteKeys.size() != 0)), isContinuous_((continuousKeys.size() != 0) && (discreteKeys.size() == 0)), isHybrid_((continuousKeys.size() != 0) && (discreteKeys.size() != 0)), - nrContinuous_(continuousKeys.size()), discreteKeys_(discreteKeys), continuousKeys_(continuousKeys) {} @@ -78,7 +76,8 @@ bool HybridFactor::equals(const HybridFactor &lf, double tol) const { const This *e = dynamic_cast(&lf); return e != nullptr && Base::equals(*e, tol) && isDiscrete_ == e->isDiscrete_ && isContinuous_ == e->isContinuous_ && - isHybrid_ == e->isHybrid_ && nrContinuous_ == e->nrContinuous_; + isHybrid_ == e->isHybrid_ && continuousKeys_ == e->continuousKeys_ && + discreteKeys_ == e->discreteKeys_; } /* ************************************************************************ */ @@ -88,17 +87,23 @@ void HybridFactor::print(const std::string &s, if (isContinuous_) std::cout << "Continuous "; if (isDiscrete_) std::cout << "Discrete "; if (isHybrid_) std::cout << "Hybrid "; - for (size_t c=0; c 0 ? 
"; " : ""); } } - for(auto && discreteKey: discreteKeys_) { - std::cout << formatter(discreteKey.first) << " "; + for (size_t d = 0; d < discreteKeys_.size(); d++) { + std::cout << formatter(discreteKeys_.at(d).first); + if (d < discreteKeys_.size() - 1) { + std::cout << " "; + } + } + std::cout << "]"; } } // namespace gtsam diff --git a/gtsam/hybrid/HybridFactor.h b/gtsam/hybrid/HybridFactor.h index 3e91e2487..165dfde29 100644 --- a/gtsam/hybrid/HybridFactor.h +++ b/gtsam/hybrid/HybridFactor.h @@ -47,11 +47,9 @@ class GTSAM_EXPORT HybridFactor : public Factor { bool isContinuous_ = false; bool isHybrid_ = false; - size_t nrContinuous_ = 0; - protected: + // Set of DiscreteKeys for this factor. DiscreteKeys discreteKeys_; - /// Record continuous keys for book-keeping KeyVector continuousKeys_; @@ -120,12 +118,28 @@ class GTSAM_EXPORT HybridFactor : public Factor { bool isHybrid() const { return isHybrid_; } /// Return the number of continuous variables in this factor. - size_t nrContinuous() const { return nrContinuous_; } + size_t nrContinuous() const { return continuousKeys_.size(); } - /// Return vector of discrete keys. - DiscreteKeys discreteKeys() const { return discreteKeys_; } + /// Return the discrete keys for this factor. + const DiscreteKeys &discreteKeys() const { return discreteKeys_; } + + /// Return only the continuous keys for this factor. + const KeyVector &continuousKeys() const { return continuousKeys_; } /// @} + + private: + /** Serialization function */ + friend class boost::serialization::access; + template + void serialize(ARCHIVE &ar, const unsigned int /*version*/) { + ar &BOOST_SERIALIZATION_BASE_OBJECT_NVP(Base); + ar &BOOST_SERIALIZATION_NVP(isDiscrete_); + ar &BOOST_SERIALIZATION_NVP(isContinuous_); + ar &BOOST_SERIALIZATION_NVP(isHybrid_); + ar &BOOST_SERIALIZATION_NVP(discreteKeys_); + ar &BOOST_SERIALIZATION_NVP(continuousKeys_); + } }; // HybridFactor diff --git a/gtsam/hybrid/HybridFactorGraph.h b/gtsam/hybrid/HybridFactorGraph.h new file mode 100644 index 000000000..fc730f0c9 --- /dev/null +++ b/gtsam/hybrid/HybridFactorGraph.h @@ -0,0 +1,140 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file HybridFactorGraph.h + * @brief Hybrid factor graph base class that uses type erasure + * @author Varun Agrawal + * @date May 28, 2022 + */ + +#pragma once + +#include +#include +#include +#include +#include + +#include +namespace gtsam { + +using SharedFactor = boost::shared_ptr; + +/** + * Hybrid Factor Graph + * ----------------------- + * This is the base hybrid factor graph. + * Everything inside needs to be hybrid factor or hybrid conditional. + */ +class HybridFactorGraph : public FactorGraph { + public: + using Base = FactorGraph; + using This = HybridFactorGraph; ///< this class + using shared_ptr = boost::shared_ptr; ///< shared_ptr to This + + using Values = gtsam::Values; ///< backwards compatibility + using Indices = KeyVector; ///> map from keys to values + + protected: + /// Check if FACTOR type is derived from DiscreteFactor. + template + using IsDiscrete = typename std::enable_if< + std::is_base_of::value>::type; + + /// Check if FACTOR type is derived from HybridFactor. 
+ template + using IsHybrid = typename std::enable_if< + std::is_base_of::value>::type; + + public: + /// @name Constructors + /// @{ + + /// Default constructor + HybridFactorGraph() = default; + + /** + * Implicit copy/downcast constructor to override explicit template container + * constructor. In BayesTree this is used for: + * `cachedSeparatorMarginal_.reset(*separatorMarginal)` + * */ + template + HybridFactorGraph(const FactorGraph& graph) : Base(graph) {} + + /// @} + + // Allow use of selected FactorGraph methods: + using Base::empty; + using Base::reserve; + using Base::size; + using Base::operator[]; + using Base::add; + using Base::push_back; + using Base::resize; + + /** + * Add a discrete factor *pointer* to the internal discrete graph + * @param discreteFactor - boost::shared_ptr to the factor to add + */ + template + IsDiscrete push_discrete( + const boost::shared_ptr& discreteFactor) { + Base::push_back(boost::make_shared(discreteFactor)); + } + + /** + * Add a discrete-continuous (Hybrid) factor *pointer* to the graph + * @param hybridFactor - boost::shared_ptr to the factor to add + */ + template + IsHybrid push_hybrid(const boost::shared_ptr& hybridFactor) { + Base::push_back(hybridFactor); + } + + /// delete emplace_shared. + template + void emplace_shared(Args&&... args) = delete; + + /// Construct a factor and add (shared pointer to it) to factor graph. + template + IsDiscrete emplace_discrete(Args&&... args) { + auto factor = boost::allocate_shared( + Eigen::aligned_allocator(), std::forward(args)...); + push_discrete(factor); + } + + /// Construct a factor and add (shared pointer to it) to factor graph. + template + IsHybrid emplace_hybrid(Args&&... args) { + auto factor = boost::allocate_shared( + Eigen::aligned_allocator(), std::forward(args)...); + push_hybrid(factor); + } + + /** + * @brief Add a single factor shared pointer to the hybrid factor graph. + * Dynamically handles the factor type and assigns it to the correct + * underlying container. + * + * @param sharedFactor The factor to add to this factor graph. + */ + void push_back(const SharedFactor& sharedFactor) { + if (auto p = boost::dynamic_pointer_cast(sharedFactor)) { + push_discrete(p); + } + if (auto p = boost::dynamic_pointer_cast(sharedFactor)) { + push_hybrid(p); + } + } +}; + +} // namespace gtsam diff --git a/gtsam/hybrid/HybridGaussianFactor.h b/gtsam/hybrid/HybridGaussianFactor.h index 2a92c717c..e645e63e5 100644 --- a/gtsam/hybrid/HybridGaussianFactor.h +++ b/gtsam/hybrid/HybridGaussianFactor.h @@ -37,6 +37,8 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { using This = HybridGaussianFactor; using shared_ptr = boost::shared_ptr; + HybridGaussianFactor() = default; + // Explicit conversion from a shared ptr of GF explicit HybridGaussianFactor(GaussianFactor::shared_ptr other); @@ -52,7 +54,7 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { /// GTSAM print utility. 
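// Type-erasure sketch for the conversion constructor above (illustrative, not
// part of the patch; the key, matrices and noise model are placeholders):
//
//   auto jf = boost::make_shared<JacobianFactor>(x1, A, b, model);
//   HybridGaussianFactor wrapped(jf);  // stored alongside discrete and hybrid factors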
void print( - const std::string &s = "HybridFactor\n", + const std::string &s = "HybridGaussianFactor\n", const KeyFormatter &formatter = DefaultKeyFormatter) const override; /// @} diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index 88730cae9..09b592bd6 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -98,6 +98,12 @@ GaussianMixtureFactor::Sum sumFrontals( } else if (f->isContinuous()) { deferredFactors.push_back( boost::dynamic_pointer_cast(f)->inner()); + + } else if (f->isDiscrete()) { + // Don't do anything for discrete-only factors + // since we want to eliminate continuous values only. + continue; + } else { // We need to handle the case where the object is actually an // BayesTreeOrphanWrapper! @@ -106,8 +112,8 @@ GaussianMixtureFactor::Sum sumFrontals( if (!orphan) { auto &fr = *f; throw std::invalid_argument( - std::string("factor is discrete in continuous elimination") + - typeid(fr).name()); + std::string("factor is discrete in continuous elimination ") + + demangle(typeid(fr).name())); } } } @@ -129,9 +135,9 @@ continuousElimination(const HybridGaussianFactorGraph &factors, for (auto &fp : factors) { if (auto ptr = boost::dynamic_pointer_cast(fp)) { gfg.push_back(ptr->inner()); - } else if (auto p = - boost::static_pointer_cast(fp)->inner()) { - gfg.push_back(boost::static_pointer_cast(p)); + } else if (auto ptr = boost::static_pointer_cast(fp)) { + gfg.push_back( + boost::static_pointer_cast(ptr->inner())); } else { // It is an orphan wrapped conditional } @@ -147,18 +153,20 @@ std::pair discreteElimination(const HybridGaussianFactorGraph &factors, const Ordering &frontalKeys) { DiscreteFactorGraph dfg; - for (auto &fp : factors) { - if (auto ptr = boost::dynamic_pointer_cast(fp)) { - dfg.push_back(ptr->inner()); - } else if (auto p = - boost::static_pointer_cast(fp)->inner()) { - dfg.push_back(boost::static_pointer_cast(p)); + + for (auto &factor : factors) { + if (auto p = boost::dynamic_pointer_cast(factor)) { + dfg.push_back(p->inner()); + } else if (auto p = boost::static_pointer_cast(factor)) { + auto discrete_conditional = + boost::static_pointer_cast(p->inner()); + dfg.push_back(discrete_conditional); } else { // It is an orphan wrapper } } - auto result = EliminateDiscrete(dfg, frontalKeys); + auto result = EliminateForMPE(dfg, frontalKeys); return {boost::make_shared(result.first), boost::make_shared(result.second)}; @@ -178,6 +186,19 @@ hybridElimination(const HybridGaussianFactorGraph &factors, // sum out frontals, this is the factor on the separator GaussianMixtureFactor::Sum sum = sumFrontals(factors); + // If a tree leaf contains nullptr, + // convert that leaf to an empty GaussianFactorGraph. + // Needed since the DecisionTree will otherwise create + // a GFG with a single (null) factor. + auto emptyGaussian = [](const GaussianFactorGraph &gfg) { + bool hasNull = + std::any_of(gfg.begin(), gfg.end(), + [](const GaussianFactor::shared_ptr &ptr) { return !ptr; }); + + return hasNull ? 
GaussianFactorGraph() : gfg; + }; + sum = GaussianMixtureFactor::Sum(sum, emptyGaussian); + using EliminationPair = GaussianFactorGraph::EliminationResult; KeyVector keysOfEliminated; // Not the ordering @@ -189,7 +210,10 @@ hybridElimination(const HybridGaussianFactorGraph &factors, if (graph.empty()) { return {nullptr, nullptr}; } - auto result = EliminatePreferCholesky(graph, frontalKeys); + std::pair, + boost::shared_ptr> + result = EliminatePreferCholesky(graph, frontalKeys); + if (keysOfEliminated.empty()) { keysOfEliminated = result.first->keys(); // Initialize the keysOfEliminated to be the @@ -222,6 +246,7 @@ hybridElimination(const HybridGaussianFactorGraph &factors, return exp(-factor->error(empty_values)); }; DecisionTree fdt(separatorFactors, factorError); + auto discreteFactor = boost::make_shared(discreteSeparator, fdt); @@ -229,14 +254,27 @@ hybridElimination(const HybridGaussianFactorGraph &factors, boost::make_shared(discreteFactor)}; } else { - // Create a resulting DCGaussianMixture on the separator. + // Create a resulting GaussianMixtureFactor on the separator. auto factor = boost::make_shared( KeyVector(continuousSeparator.begin(), continuousSeparator.end()), discreteSeparator, separatorFactors); return {boost::make_shared(conditional), factor}; } } -/* ************************************************************************ */ +/* ************************************************************************ + * Function to eliminate variables **under the following assumptions**: + * 1. When the ordering is fully continuous, and the graph only contains + * continuous and hybrid factors + * 2. When the ordering is fully discrete, and the graph only contains discrete + * factors + * + * Any usage outside of this is considered incorrect. + * + * \warning This function is not meant to be used with arbitrary hybrid factor + * graphs. For example, if there exists continuous parents, and one tries to + * eliminate a discrete variable (as specified in the ordering), the result will + * be INCORRECT and there will be NO error raised. 
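// In practice this function is reached through the standard elimination
// machinery rather than called directly. A sketch of the intended pattern
// (not part of the patch; `graph` is assumed to be a HybridGaussianFactorGraph):
//
//   Ordering ordering = graph.getHybridOrdering();   // continuous keys first, discrete keys last
//   HybridBayesNet::shared_ptr hbn = graph.eliminateSequential(ordering);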
+ */ std::pair // EliminateHybrid(const HybridGaussianFactorGraph &factors, const Ordering &frontalKeys) { @@ -366,4 +404,41 @@ void HybridGaussianFactorGraph::add(DecisionTreeFactor::shared_ptr factor) { FactorGraph::add(boost::make_shared(factor)); } +/* ************************************************************************ */ +const KeySet HybridGaussianFactorGraph::getDiscreteKeys() const { + KeySet discrete_keys; + for (auto &factor : factors_) { + for (const DiscreteKey &k : factor->discreteKeys()) { + discrete_keys.insert(k.first); + } + } + return discrete_keys; +} + +/* ************************************************************************ */ +const KeySet HybridGaussianFactorGraph::getContinuousKeys() const { + KeySet keys; + for (auto &factor : factors_) { + for (const Key &key : factor->continuousKeys()) { + keys.insert(key); + } + } + return keys; +} + +/* ************************************************************************ */ +const Ordering HybridGaussianFactorGraph::getHybridOrdering() const { + KeySet discrete_keys = getDiscreteKeys(); + for (auto &factor : factors_) { + for (const DiscreteKey &k : factor->discreteKeys()) { + discrete_keys.insert(k.first); + } + } + + const VariableIndex index(factors_); + Ordering ordering = Ordering::ColamdConstrainedLast( + index, KeyVector(discrete_keys.begin(), discrete_keys.end()), true); + return ordering; +} + } // namespace gtsam diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.h b/gtsam/hybrid/HybridGaussianFactorGraph.h index 0188aa652..ad5cde09b 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.h +++ b/gtsam/hybrid/HybridGaussianFactorGraph.h @@ -19,9 +19,12 @@ #pragma once #include +#include +#include #include #include #include +#include namespace gtsam { @@ -53,10 +56,9 @@ struct EliminationTraits { typedef HybridBayesNet BayesNetType; ///< Type of Bayes net from sequential elimination typedef HybridEliminationTree - EliminationTreeType; ///< Type of elimination tree - typedef HybridBayesTree BayesTreeType; ///< Type of Bayes tree - typedef HybridJunctionTree - JunctionTreeType; ///< Type of Junction tree + EliminationTreeType; ///< Type of elimination tree + typedef HybridBayesTree BayesTreeType; ///< Type of Bayes tree + typedef HybridJunctionTree JunctionTreeType; ///< Type of Junction tree /// The default dense elimination function static std::pair, boost::shared_ptr > @@ -72,10 +74,16 @@ struct EliminationTraits { * Everything inside needs to be hybrid factor or hybrid conditional. */ class GTSAM_EXPORT HybridGaussianFactorGraph - : public FactorGraph, + : public HybridFactorGraph, public EliminateableFactorGraph { + protected: + /// Check if FACTOR type is derived from GaussianFactor. + template + using IsGaussian = typename std::enable_if< + std::is_base_of::value>::type; + public: - using Base = FactorGraph; + using Base = HybridFactorGraph; using This = HybridGaussianFactorGraph; ///< this class using BaseEliminateable = EliminateableFactorGraph; ///< for elimination @@ -100,7 +108,13 @@ class GTSAM_EXPORT HybridGaussianFactorGraph /// @} - using FactorGraph::add; + using Base::empty; + using Base::reserve; + using Base::size; + using Base::operator[]; + using Base::add; + using Base::push_back; + using Base::resize; /// Add a Jacobian factor to the factor graph. void add(JacobianFactor&& factor); @@ -113,6 +127,53 @@ class GTSAM_EXPORT HybridGaussianFactorGraph /// Add a DecisionTreeFactor as a shared ptr. 
void add(boost::shared_ptr factor); + + /** + * Add a gaussian factor *pointer* to the internal gaussian factor graph + * @param gaussianFactor - boost::shared_ptr to the factor to add + */ + template + IsGaussian push_gaussian( + const boost::shared_ptr& gaussianFactor) { + Base::push_back(boost::make_shared(gaussianFactor)); + } + + /// Construct a factor and add (shared pointer to it) to factor graph. + template + IsGaussian emplace_gaussian(Args&&... args) { + auto factor = boost::allocate_shared( + Eigen::aligned_allocator(), std::forward(args)...); + push_gaussian(factor); + } + + /** + * @brief Add a single factor shared pointer to the hybrid factor graph. + * Dynamically handles the factor type and assigns it to the correct + * underlying container. + * + * @param sharedFactor The factor to add to this factor graph. + */ + void push_back(const SharedFactor& sharedFactor) { + if (auto p = boost::dynamic_pointer_cast(sharedFactor)) { + push_gaussian(p); + } else { + Base::push_back(sharedFactor); + } + } + + /// Get all the discrete keys in the factor graph. + const KeySet getDiscreteKeys() const; + + /// Get all the continuous keys in the factor graph. + const KeySet getContinuousKeys() const; + + /** + * @brief Return a Colamd constrained ordering where the discrete keys are + * eliminated after the continuous keys. + * + * @return const Ordering + */ + const Ordering getHybridOrdering() const; }; } // namespace gtsam diff --git a/gtsam/hybrid/HybridGaussianISAM.cpp b/gtsam/hybrid/HybridGaussianISAM.cpp index 7783a88dd..23a95c021 100644 --- a/gtsam/hybrid/HybridGaussianISAM.cpp +++ b/gtsam/hybrid/HybridGaussianISAM.cpp @@ -41,6 +41,7 @@ HybridGaussianISAM::HybridGaussianISAM(const HybridBayesTree& bayesTree) void HybridGaussianISAM::updateInternal( const HybridGaussianFactorGraph& newFactors, HybridBayesTree::Cliques* orphans, + const boost::optional& ordering, const HybridBayesTree::Eliminate& function) { // Remove the contaminated part of the Bayes tree BayesNetType bn; @@ -74,16 +75,21 @@ void HybridGaussianISAM::updateInternal( std::copy(allDiscrete.begin(), allDiscrete.end(), std::back_inserter(newKeysDiscreteLast)); - // KeyVector new - // Get an ordering where the new keys are eliminated last const VariableIndex index(factors); - const Ordering ordering = Ordering::ColamdConstrainedLast( - index, KeyVector(newKeysDiscreteLast.begin(), newKeysDiscreteLast.end()), - true); + Ordering elimination_ordering; + if (ordering) { + elimination_ordering = *ordering; + } else { + elimination_ordering = Ordering::ColamdConstrainedLast( + index, + KeyVector(newKeysDiscreteLast.begin(), newKeysDiscreteLast.end()), + true); + } // eliminate all factors (top, added, orphans) into a new Bayes tree - auto bayesTree = factors.eliminateMultifrontal(ordering, function, index); + HybridBayesTree::shared_ptr bayesTree = + factors.eliminateMultifrontal(elimination_ordering, function, index); // Re-add into Bayes tree data structures this->roots_.insert(this->roots_.end(), bayesTree->roots().begin(), @@ -93,9 +99,61 @@ void HybridGaussianISAM::updateInternal( /* ************************************************************************* */ void HybridGaussianISAM::update(const HybridGaussianFactorGraph& newFactors, + const boost::optional& ordering, const HybridBayesTree::Eliminate& function) { Cliques orphans; - this->updateInternal(newFactors, &orphans, function); + this->updateInternal(newFactors, &orphans, ordering, function); +} + +/* 
************************************************************************* */ +/** + * @brief Check if `b` is a subset of `a`. + * Non-const since they need to be sorted. + * + * @param a KeyVector + * @param b KeyVector + * @return True if the keys of b is a subset of a, else false. + */ +bool IsSubset(KeyVector a, KeyVector b) { + std::sort(a.begin(), a.end()); + std::sort(b.begin(), b.end()); + return std::includes(a.begin(), a.end(), b.begin(), b.end()); +} + +/* ************************************************************************* */ +void HybridGaussianISAM::prune(const Key& root, const size_t maxNrLeaves) { + auto decisionTree = boost::dynamic_pointer_cast( + this->clique(root)->conditional()->inner()); + DecisionTreeFactor prunedDiscreteFactor = decisionTree->prune(maxNrLeaves); + decisionTree->root_ = prunedDiscreteFactor.root_; + + std::vector prunedKeys; + for (auto&& clique : nodes()) { + // The cliques can be repeated for each frontal so we record it in + // prunedKeys and check if we have already pruned a particular clique. + if (std::find(prunedKeys.begin(), prunedKeys.end(), clique.first) != + prunedKeys.end()) { + continue; + } + + // Add all the keys of the current clique to be pruned to prunedKeys + for (auto&& key : clique.second->conditional()->frontals()) { + prunedKeys.push_back(key); + } + + // Convert parents() to a KeyVector for comparison + KeyVector parents; + for (auto&& parent : clique.second->conditional()->parents()) { + parents.push_back(parent); + } + + if (IsSubset(parents, decisionTree->keys())) { + auto gaussianMixture = boost::dynamic_pointer_cast( + clique.second->conditional()->inner()); + + gaussianMixture->prune(prunedDiscreteFactor); + } + } } } // namespace gtsam diff --git a/gtsam/hybrid/HybridGaussianISAM.h b/gtsam/hybrid/HybridGaussianISAM.h index d5b6271da..4fc206582 100644 --- a/gtsam/hybrid/HybridGaussianISAM.h +++ b/gtsam/hybrid/HybridGaussianISAM.h @@ -48,6 +48,7 @@ class GTSAM_EXPORT HybridGaussianISAM : public ISAM { void updateInternal( const HybridGaussianFactorGraph& newFactors, HybridBayesTree::Cliques* orphans, + const boost::optional& ordering = boost::none, const HybridBayesTree::Eliminate& function = HybridBayesTree::EliminationTraitsType::DefaultEliminate); @@ -59,8 +60,17 @@ class GTSAM_EXPORT HybridGaussianISAM : public ISAM { * @param function Elimination function. */ void update(const HybridGaussianFactorGraph& newFactors, + const boost::optional& ordering = boost::none, const HybridBayesTree::Eliminate& function = HybridBayesTree::EliminationTraitsType::DefaultEliminate); + + /** + * @brief + * + * @param root The root key in the discrete conditional decision tree. 
+ * @param maxNumberLeaves + */ + void prune(const Key& root, const size_t maxNumberLeaves); }; /// traits diff --git a/gtsam/hybrid/HybridJunctionTree.cpp b/gtsam/hybrid/HybridJunctionTree.cpp index 7725742cf..422c200a4 100644 --- a/gtsam/hybrid/HybridJunctionTree.cpp +++ b/gtsam/hybrid/HybridJunctionTree.cpp @@ -31,9 +31,7 @@ template class EliminatableClusterTree; struct HybridConstructorTraversalData { - typedef - typename JunctionTree::Node - Node; + typedef HybridJunctionTree::Node Node; typedef typename JunctionTree::sharedNode sharedNode; @@ -62,6 +60,7 @@ struct HybridConstructorTraversalData { data.junctionTreeNode = boost::make_shared(node->key, node->factors); parentData.junctionTreeNode->addChild(data.junctionTreeNode); + // Add all the discrete keys in the hybrid factors to the current data for (HybridFactor::shared_ptr& f : node->factors) { for (auto& k : f->discreteKeys()) { data.discreteKeys.insert(k.first); @@ -72,8 +71,8 @@ struct HybridConstructorTraversalData { } // Post-order visitor function - static void ConstructorTraversalVisitorPostAlg2( - const boost::shared_ptr& ETreeNode, + static void ConstructorTraversalVisitorPost( + const boost::shared_ptr& node, const HybridConstructorTraversalData& data) { // In this post-order visitor, we combine the symbolic elimination results // from the elimination tree children and symbolically eliminate the current @@ -86,15 +85,15 @@ struct HybridConstructorTraversalData { // Do symbolic elimination for this node SymbolicFactors symbolicFactors; - symbolicFactors.reserve(ETreeNode->factors.size() + + symbolicFactors.reserve(node->factors.size() + data.childSymbolicFactors.size()); // Add ETree node factors - symbolicFactors += ETreeNode->factors; + symbolicFactors += node->factors; // Add symbolic factors passed up from children symbolicFactors += data.childSymbolicFactors; Ordering keyAsOrdering; - keyAsOrdering.push_back(ETreeNode->key); + keyAsOrdering.push_back(node->key); SymbolicConditional::shared_ptr conditional; SymbolicFactor::shared_ptr separatorFactor; boost::tie(conditional, separatorFactor) = @@ -105,19 +104,19 @@ struct HybridConstructorTraversalData { data.parentData->childSymbolicFactors.push_back(separatorFactor); data.parentData->discreteKeys.merge(data.discreteKeys); - sharedNode node = data.junctionTreeNode; + sharedNode jt_node = data.junctionTreeNode; const FastVector& childConditionals = data.childSymbolicConditionals; - node->problemSize_ = (int)(conditional->size() * symbolicFactors.size()); + jt_node->problemSize_ = (int)(conditional->size() * symbolicFactors.size()); // Merge our children if they are in our clique - if our conditional has // exactly one fewer parent than our child's conditional. 
const size_t nrParents = conditional->nrParents(); - const size_t nrChildren = node->nrChildren(); + const size_t nrChildren = jt_node->nrChildren(); assert(childConditionals.size() == nrChildren); // decide which children to merge, as index into children - std::vector nrChildrenFrontals = node->nrFrontalsOfChildren(); + std::vector nrChildrenFrontals = jt_node->nrFrontalsOfChildren(); std::vector merge(nrChildren, false); size_t nrFrontals = 1; for (size_t i = 0; i < nrChildren; i++) { @@ -137,7 +136,7 @@ struct HybridConstructorTraversalData { } // now really merge - node->mergeChildren(merge); + jt_node->mergeChildren(merge); } }; @@ -161,7 +160,7 @@ HybridJunctionTree::HybridJunctionTree( // the junction tree roots treeTraversal::DepthFirstForest(eliminationTree, rootData, Data::ConstructorTraversalVisitorPre, - Data::ConstructorTraversalVisitorPostAlg2); + Data::ConstructorTraversalVisitorPost); // Assign roots from the dummy node this->addChildrenAsRoots(rootData.junctionTreeNode); diff --git a/gtsam/hybrid/HybridNonlinearFactor.cpp b/gtsam/hybrid/HybridNonlinearFactor.cpp new file mode 100644 index 000000000..5a1833d39 --- /dev/null +++ b/gtsam/hybrid/HybridNonlinearFactor.cpp @@ -0,0 +1,41 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file HybridNonlinearFactor.cpp + * @date May 28, 2022 + * @author Varun Agrawal + */ + +#include + +#include + +namespace gtsam { + +/* ************************************************************************* */ +HybridNonlinearFactor::HybridNonlinearFactor( + const NonlinearFactor::shared_ptr &other) + : Base(other->keys()), inner_(other) {} + +/* ************************************************************************* */ +bool HybridNonlinearFactor::equals(const HybridFactor &lf, double tol) const { + return Base::equals(lf, tol); +} + +/* ************************************************************************* */ +void HybridNonlinearFactor::print(const std::string &s, + const KeyFormatter &formatter) const { + HybridFactor::print(s, formatter); + inner_->print("\n", formatter); +}; + +} // namespace gtsam diff --git a/gtsam/hybrid/HybridNonlinearFactor.h b/gtsam/hybrid/HybridNonlinearFactor.h new file mode 100644 index 000000000..7776347b3 --- /dev/null +++ b/gtsam/hybrid/HybridNonlinearFactor.h @@ -0,0 +1,62 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file HybridNonlinearFactor.h + * @date May 28, 2022 + * @author Varun Agrawal + */ + +#pragma once + +#include +#include +#include + +namespace gtsam { + +/** + * A HybridNonlinearFactor is a layer over NonlinearFactor so that we do not + * have a diamond inheritance. 
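// A wrapping sketch (illustrative, not part of the patch; the key, prior
// value, noise model and `values` are placeholders):
//
//   auto prior = boost::make_shared<PriorFactor<Pose2>>(X(0), Pose2(), priorNoise);
//   HybridNonlinearFactor wrapped(prior);         // type-erased nonlinear factor
//   auto linearized = wrapped.linearize(values);  // -> HybridGaussianFactor::shared_ptr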
+ */ +class HybridNonlinearFactor : public HybridFactor { + private: + NonlinearFactor::shared_ptr inner_; + + public: + using Base = HybridFactor; + using This = HybridNonlinearFactor; + using shared_ptr = boost::shared_ptr; + + // Explicit conversion from a shared ptr of NonlinearFactor + explicit HybridNonlinearFactor(const NonlinearFactor::shared_ptr &other); + + /// @name Testable + /// @{ + + /// Check equality. + virtual bool equals(const HybridFactor &lf, double tol) const override; + + /// GTSAM print utility. + void print( + const std::string &s = "HybridNonlinearFactor\n", + const KeyFormatter &formatter = DefaultKeyFormatter) const override; + + /// @} + + NonlinearFactor::shared_ptr inner() const { return inner_; } + + /// Linearize to a HybridGaussianFactor at the linearization point `c`. + boost::shared_ptr linearize(const Values &c) const { + return boost::make_shared(inner_->linearize(c)); + } +}; +} // namespace gtsam diff --git a/gtsam/hybrid/HybridNonlinearFactorGraph.cpp b/gtsam/hybrid/HybridNonlinearFactorGraph.cpp new file mode 100644 index 000000000..a4218593b --- /dev/null +++ b/gtsam/hybrid/HybridNonlinearFactorGraph.cpp @@ -0,0 +1,100 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file HybridNonlinearFactorGraph.cpp + * @brief Nonlinear hybrid factor graph that uses type erasure + * @author Varun Agrawal + * @date May 28, 2022 + */ + +#include + +namespace gtsam { + +/* ************************************************************************* */ +void HybridNonlinearFactorGraph::add( + boost::shared_ptr factor) { + FactorGraph::add(boost::make_shared(factor)); +} + +/* ************************************************************************* */ +void HybridNonlinearFactorGraph::add( + boost::shared_ptr factor) { + FactorGraph::add(boost::make_shared(factor)); +} + +/* ************************************************************************* */ +void HybridNonlinearFactorGraph::print(const std::string& s, + const KeyFormatter& keyFormatter) const { + // Base::print(str, keyFormatter); + std::cout << (s.empty() ? "" : s + " ") << std::endl; + std::cout << "size: " << size() << std::endl; + for (size_t i = 0; i < factors_.size(); i++) { + std::stringstream ss; + ss << "factor " << i << ": "; + if (factors_[i]) { + factors_[i]->print(ss.str(), keyFormatter); + std::cout << std::endl; + } + } +} + +/* ************************************************************************* */ +HybridGaussianFactorGraph HybridNonlinearFactorGraph::linearize( + const Values& continuousValues) const { + // create an empty linear FG + HybridGaussianFactorGraph linearFG; + + linearFG.reserve(size()); + + // linearize all hybrid factors + for (auto&& factor : factors_) { + // First check if it is a valid factor + if (factor) { + // Check if the factor is a hybrid factor. + // It can be either a nonlinear MixtureFactor or a linear + // GaussianMixtureFactor. 
+ if (factor->isHybrid()) { + // Check if it is a nonlinear mixture factor + if (auto nlmf = boost::dynamic_pointer_cast(factor)) { + linearFG.push_back(nlmf->linearize(continuousValues)); + } else { + linearFG.push_back(factor); + } + + // Now check if the factor is a continuous only factor. + } else if (factor->isContinuous()) { + // In this case, we check if factor->inner() is nonlinear since + // HybridFactors wrap over continuous factors. + auto nlhf = boost::dynamic_pointer_cast(factor); + if (auto nlf = + boost::dynamic_pointer_cast(nlhf->inner())) { + auto hgf = boost::make_shared( + nlf->linearize(continuousValues)); + linearFG.push_back(hgf); + } else { + linearFG.push_back(factor); + } + // Finally if nothing else, we are discrete-only which doesn't need + // lineariztion. + } else { + linearFG.push_back(factor); + } + + } else { + linearFG.push_back(GaussianFactor::shared_ptr()); + } + } + return linearFG; +} + +} // namespace gtsam diff --git a/gtsam/hybrid/HybridNonlinearFactorGraph.h b/gtsam/hybrid/HybridNonlinearFactorGraph.h new file mode 100644 index 000000000..7a19c7755 --- /dev/null +++ b/gtsam/hybrid/HybridNonlinearFactorGraph.h @@ -0,0 +1,137 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file HybridNonlinearFactorGraph.h + * @brief Nonlinear hybrid factor graph that uses type erasure + * @author Varun Agrawal + * @date May 28, 2022 + */ + +#pragma once + +#include +#include +#include +#include +#include +#include +#include + +#include +namespace gtsam { + +/** + * Nonlinear Hybrid Factor Graph + * ----------------------- + * This is the non-linear version of a hybrid factor graph. + * Everything inside needs to be hybrid factor or hybrid conditional. + */ +class GTSAM_EXPORT HybridNonlinearFactorGraph : public HybridFactorGraph { + protected: + /// Check if FACTOR type is derived from NonlinearFactor. + template + using IsNonlinear = typename std::enable_if< + std::is_base_of::value>::type; + + public: + using Base = HybridFactorGraph; + using This = HybridNonlinearFactorGraph; ///< this class + using shared_ptr = boost::shared_ptr; ///< shared_ptr to This + + using Values = gtsam::Values; ///< backwards compatibility + using Indices = KeyVector; ///> map from keys to values + + /// @name Constructors + /// @{ + + HybridNonlinearFactorGraph() = default; + + /** + * Implicit copy/downcast constructor to override explicit template container + * constructor. In BayesTree this is used for: + * `cachedSeparatorMarginal_.reset(*separatorMarginal)` + * */ + template + HybridNonlinearFactorGraph(const FactorGraph& graph) + : Base(graph) {} + + /// @} + + // Allow use of selected FactorGraph methods: + using Base::empty; + using Base::reserve; + using Base::size; + using Base::operator[]; + using Base::add; + using Base::resize; + + /** + * Add a nonlinear factor *pointer* to the internal nonlinear factor graph + * @param nonlinearFactor - boost::shared_ptr to the factor to add + */ + template + IsNonlinear push_nonlinear( + const boost::shared_ptr& nonlinearFactor) { + Base::push_back(boost::make_shared(nonlinearFactor)); + } + + /// Construct a factor and add (shared pointer to it) to factor graph. 
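// End-to-end sketch for this class, using `emplace_nonlinear` declared next
// and `linearize` declared further below (illustrative only, not part of the
// patch; keys, measurements, noise models, `mixtureFactor` and `initial` are
// placeholders):
//
//   HybridNonlinearFactorGraph graph;
//   graph.emplace_nonlinear<PriorFactor<Pose2>>(X(0), Pose2(), priorNoise);
//   graph.emplace_nonlinear<BetweenFactor<Pose2>>(X(0), X(1), odometry, odomNoise);
//   graph.push_back(mixtureFactor);  // a hybrid factor, routed via push_hybrid
//
//   HybridGaussianFactorGraph linearized = graph.linearize(initial);
//   HybridBayesNet::shared_ptr hbn =
//       linearized.eliminateSequential(linearized.getHybridOrdering());
//   HybridValues solution = hbn->optimize();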
+ template + IsNonlinear emplace_nonlinear(Args&&... args) { + auto factor = boost::allocate_shared( + Eigen::aligned_allocator(), std::forward(args)...); + push_nonlinear(factor); + } + + /** + * @brief Add a single factor shared pointer to the hybrid factor graph. + * Dynamically handles the factor type and assigns it to the correct + * underlying container. + * + * @tparam FACTOR The factor type template + * @param sharedFactor The factor to add to this factor graph. + */ + template + void push_back(const boost::shared_ptr& sharedFactor) { + if (auto p = boost::dynamic_pointer_cast(sharedFactor)) { + push_nonlinear(p); + } else { + Base::push_back(sharedFactor); + } + } + + /// Add a nonlinear factor as a shared ptr. + void add(boost::shared_ptr factor); + + /// Add a discrete factor as a shared ptr. + void add(boost::shared_ptr factor); + + /// Print the factor graph. + void print( + const std::string& s = "HybridNonlinearFactorGraph", + const KeyFormatter& keyFormatter = DefaultKeyFormatter) const override; + + /** + * @brief Linearize all the continuous factors in the + * HybridNonlinearFactorGraph. + * + * @param continuousValues: Dictionary of continuous values. + * @return HybridGaussianFactorGraph::shared_ptr + */ + HybridGaussianFactorGraph linearize(const Values& continuousValues) const; +}; + +template <> +struct traits + : public Testable {}; + +} // namespace gtsam diff --git a/gtsam/hybrid/HybridValues.h b/gtsam/hybrid/HybridValues.h new file mode 100644 index 000000000..4928f9384 --- /dev/null +++ b/gtsam/hybrid/HybridValues.h @@ -0,0 +1,145 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file HybridValues.h + * @date Jul 28, 2022 + * @author Shangjie Xue + */ + +#pragma once + +#include +#include +#include +#include +#include +#include + +#include +#include +#include + +namespace gtsam { + +/** + * HybridValues represents a collection of DiscreteValues and VectorValues. + * It is typically used to store the variables of a HybridGaussianFactorGraph. + * Optimizing a HybridGaussianBayesNet returns this class. + */ +class GTSAM_EXPORT HybridValues { + private: + // DiscreteValue stored the discrete components of the HybridValues. + DiscreteValues discrete_; + + // VectorValue stored the continuous components of the HybridValues. + VectorValues continuous_; + + public: + /// @name Standard Constructors + /// @{ + + /// Default constructor creates an empty HybridValues. + HybridValues() = default; + + /// Construct from DiscreteValues and VectorValues. 
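/// A small sketch of intended use (the keys, via symbol_shorthand M and X,
/// are illustrative only):
///   DiscreteValues dv;
///   dv[M(0)] = 1;
///   VectorValues cv;
///   cv.insert(X(0), Vector1::Zero());
///   HybridValues hv(dv, cv);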
+ HybridValues(const DiscreteValues& dv, const VectorValues& cv) + : discrete_(dv), continuous_(cv){}; + + /// @} + /// @name Testable + /// @{ + + /// print required by Testable for unit testing + void print(const std::string& s = "HybridValues", + const KeyFormatter& keyFormatter = DefaultKeyFormatter) const { + std::cout << s << ": \n"; + discrete_.print(" Discrete", keyFormatter); // print discrete components + continuous_.print(" Continuous", + keyFormatter); // print continuous components + }; + + /// equals required by Testable for unit testing + bool equals(const HybridValues& other, double tol = 1e-9) const { + return discrete_.equals(other.discrete_, tol) && + continuous_.equals(other.continuous_, tol); + } + + /// @} + /// @name Interface + /// @{ + + /// Return the discrete MPE assignment + DiscreteValues discrete() const { return discrete_; } + + /// Return the delta update for the continuous vectors + VectorValues continuous() const { return continuous_; } + + /// Check whether a variable with key \c j exists in DiscreteValue. + bool existsDiscrete(Key j) { return (discrete_.find(j) != discrete_.end()); }; + + /// Check whether a variable with key \c j exists in VectorValue. + bool existsVector(Key j) { return continuous_.exists(j); }; + + /// Check whether a variable with key \c j exists. + bool exists(Key j) { return existsDiscrete(j) || existsVector(j); }; + + /** Insert a discrete \c value with key \c j. Replaces the existing value if + * the key \c j is already used. + * @param value The vector to be inserted. + * @param j The index with which the value will be associated. */ + void insert(Key j, int value) { discrete_[j] = value; }; + + /** Insert a vector \c value with key \c j. Throws an invalid_argument + * exception if the key \c j is already used. + * @param value The vector to be inserted. + * @param j The index with which the value will be associated. */ + void insert(Key j, const Vector& value) { continuous_.insert(j, value); } + + // TODO(Shangjie)- update() and insert_or_assign() , similar to Values.h + + /** + * Read/write access to the discrete value with key \c j, throws + * std::out_of_range if \c j does not exist. + */ + size_t& atDiscrete(Key j) { return discrete_.at(j); }; + + /** + * Read/write access to the vector value with key \c j, throws + * std::out_of_range if \c j does not exist. + */ + Vector& at(Key j) { return continuous_.at(j); }; + + /// @name Wrapper support + /// @{ + + /** + * @brief Output as a html table. + * + * @param keyFormatter function that formats keys. + * @return string html output. + */ + std::string html( + const KeyFormatter& keyFormatter = DefaultKeyFormatter) const { + std::stringstream ss; + ss << this->discrete_.html(keyFormatter); + ss << this->continuous_.html(keyFormatter); + return ss.str(); + }; + + /// @} +}; + +// traits +template <> +struct traits : public Testable {}; + +} // namespace gtsam diff --git a/gtsam/hybrid/MixtureFactor.h b/gtsam/hybrid/MixtureFactor.h new file mode 100644 index 000000000..5e7337d0c --- /dev/null +++ b/gtsam/hybrid/MixtureFactor.h @@ -0,0 +1,256 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. 
(see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file MixtureFactor.h + * @brief Nonlinear Mixture factor of continuous and discrete. + * @author Kevin Doherty, kdoherty@mit.edu + * @author Varun Agrawal + * @date December 2021 + */ + +#pragma once + +#include +#include +#include +#include +#include + +#include +#include +#include +#include +#include + +namespace gtsam { + +/** + * @brief Implementation of a discrete conditional mixture factor. + * + * Implements a joint discrete-continuous factor where the discrete variable + * serves to "select" a mixture component corresponding to a NonlinearFactor + * type of measurement. + * + * This class stores all factors as HybridFactors which can then be typecast to + * one of (NonlinearFactor, GaussianFactor) which can then be checked to perform + * the correct operation. + */ +class MixtureFactor : public HybridFactor { + public: + using Base = HybridFactor; + using This = MixtureFactor; + using shared_ptr = boost::shared_ptr; + using sharedFactor = boost::shared_ptr; + + /** + * @brief typedef for DecisionTree which has Keys as node labels and + * NonlinearFactor as leaf nodes. + */ + using Factors = DecisionTree; + + private: + /// Decision tree of Gaussian factors indexed by discrete keys. + Factors factors_; + bool normalized_; + + public: + MixtureFactor() = default; + + /** + * @brief Construct from Decision tree. + * + * @param keys Vector of keys for continuous factors. + * @param discreteKeys Vector of discrete keys. + * @param factors Decision tree with of shared factors. + * @param normalized Flag indicating if the factor error is already + * normalized. + */ + MixtureFactor(const KeyVector& keys, const DiscreteKeys& discreteKeys, + const Factors& factors, bool normalized = false) + : Base(keys, discreteKeys), factors_(factors), normalized_(normalized) {} + + /** + * @brief Convenience constructor that generates the underlying factor + * decision tree for us. + * + * Here it is important that the vector of factors has the correct number of + * elements based on the number of discrete keys and the cardinality of the + * keys, so that the decision tree is constructed appropriately. + * + * @tparam FACTOR The type of the factor shared pointers being passed in. Will + * be typecast to NonlinearFactor shared pointers. + * @param keys Vector of keys for continuous factors. + * @param discreteKeys Vector of discrete keys. + * @param factors Vector of shared pointers to factors. + * @param normalized Flag indicating if the factor error is already + * normalized. + */ + template + MixtureFactor(const KeyVector& keys, const DiscreteKeys& discreteKeys, + const std::vector>& factors, + bool normalized = false) + : Base(keys, discreteKeys), normalized_(normalized) { + std::vector nonlinear_factors; + KeySet continuous_keys_set(keys.begin(), keys.end()); + KeySet factor_keys_set; + for (auto&& f : factors) { + // Insert all factor continuous keys in the continuous keys set. 
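// (Every component factor's keys are accumulated into factor_keys_set; after
// the loop this set must match the continuous keys passed to the constructor,
// otherwise a runtime_error is thrown below.)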
+ std::copy(f->keys().begin(), f->keys().end(), + std::inserter(factor_keys_set, factor_keys_set.end())); + + nonlinear_factors.push_back( + boost::dynamic_pointer_cast(f)); + } + factors_ = Factors(discreteKeys, nonlinear_factors); + + if (continuous_keys_set != factor_keys_set) { + throw std::runtime_error( + "The specified continuous keys and the keys in the factors don't " + "match!"); + } + } + + ~MixtureFactor() = default; + + /** + * @brief Compute error of factor given both continuous and discrete values. + * + * @param continuousVals The continuous Values. + * @param discreteVals The discrete Values. + * @return double The error of this factor. + */ + double error(const Values& continuousVals, + const DiscreteValues& discreteVals) const { + // Retrieve the factor corresponding to the assignment in discreteVals. + auto factor = factors_(discreteVals); + // Compute the error for the selected factor + const double factorError = factor->error(continuousVals); + if (normalized_) return factorError; + return factorError + + this->nonlinearFactorLogNormalizingConstant(factor, continuousVals); + } + + size_t dim() const { + // TODO(Varun) + throw std::runtime_error("MixtureFactor::dim not implemented."); + } + + /// Testable + /// @{ + + /// print to stdout + void print( + const std::string& s = "MixtureFactor", + const KeyFormatter& keyFormatter = DefaultKeyFormatter) const override { + std::cout << (s.empty() ? "" : s + " "); + Base::print("", keyFormatter); + std::cout << "\nMixtureFactor\n"; + auto valueFormatter = [](const sharedFactor& v) { + if (v) { + return (boost::format("Nonlinear factor on %d keys") % v->size()).str(); + } else { + return std::string("nullptr"); + } + }; + factors_.print("", keyFormatter, valueFormatter); + } + + /// Check equality + bool equals(const HybridFactor& other, double tol = 1e-9) const override { + // We attempt a dynamic cast from HybridFactor to MixtureFactor. If it + // fails, return false. + if (!dynamic_cast(&other)) return false; + + // If the cast is successful, we'll properly construct a MixtureFactor + // object from `other` + const MixtureFactor& f(static_cast(other)); + + // Ensure that this MixtureFactor and `f` have the same `factors_`. + auto compare = [tol](const sharedFactor& a, const sharedFactor& b) { + return traits::Equals(*a, *b, tol); + }; + if (!factors_.equals(f.factors_, compare)) return false; + + // If everything above passes, and the keys_, discreteKeys_ and normalized_ + // member variables are identical, return true. + return (std::equal(keys_.begin(), keys_.end(), f.keys().begin()) && + (discreteKeys_ == f.discreteKeys_) && + (normalized_ == f.normalized_)); + } + + /// @} + + /// Linearize specific nonlinear factors based on the assignment in + /// discreteValues. + GaussianFactor::shared_ptr linearize( + const Values& continuousVals, const DiscreteValues& discreteVals) const { + auto factor = factors_(discreteVals); + return factor->linearize(continuousVals); + } + + /// Linearize all the continuous factors to get a GaussianMixtureFactor. 
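/// Every leaf of the factor decision tree is linearized at the same continuous
/// values, so the result is a GaussianMixtureFactor over the same continuous
/// and discrete keys. Sketch, assuming a MixtureFactor `mixture` and a Values
/// `values` holding linearization points for all continuous keys:
///   boost::shared_ptr<GaussianMixtureFactor> gmf = mixture.linearize(values);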
+ boost::shared_ptr linearize( + const Values& continuousVals) const { + // functional to linearize each factor in the decision tree + auto linearizeDT = [continuousVals](const sharedFactor& factor) { + return factor->linearize(continuousVals); + }; + + DecisionTree linearized_factors( + factors_, linearizeDT); + + return boost::make_shared( + continuousKeys_, discreteKeys_, linearized_factors); + } + + /** + * If the component factors are not already normalized, we want to compute + * their normalizing constants so that the resulting joint distribution is + * appropriately computed. Remember, this is the _negative_ normalizing + * constant for the measurement likelihood (since we are minimizing the + * _negative_ log-likelihood). + */ + double nonlinearFactorLogNormalizingConstant(const sharedFactor& factor, + const Values& values) const { + // Information matrix (inverse covariance matrix) for the factor. + Matrix infoMat; + + // If this is a NoiseModelFactor, we'll use its noiseModel to + // otherwise noiseModelFactor will be nullptr + if (auto noiseModelFactor = + boost::dynamic_pointer_cast(factor)) { + // If dynamic cast to NoiseModelFactor succeeded, see if the noise model + // is Gaussian + auto noiseModel = noiseModelFactor->noiseModel(); + + auto gaussianNoiseModel = + boost::dynamic_pointer_cast(noiseModel); + if (gaussianNoiseModel) { + // If the noise model is Gaussian, retrieve the information matrix + infoMat = gaussianNoiseModel->information(); + } else { + // If the factor is not a Gaussian factor, we'll linearize it to get + // something with a normalized noise model + // TODO(kevin): does this make sense to do? I think maybe not in + // general? Should we just yell at the user? + auto gaussianFactor = factor->linearize(values); + infoMat = gaussianFactor->information(); + } + } + + // Compute the (negative) log of the normalizing constant + return -(factor->dim() * log(2.0 * M_PI) / 2.0) - + (log(infoMat.determinant()) / 2.0); + } +}; + +} // namespace gtsam diff --git a/gtsam/hybrid/hybrid.i b/gtsam/hybrid/hybrid.i index bbe1e2400..86029a48a 100644 --- a/gtsam/hybrid/hybrid.i +++ b/gtsam/hybrid/hybrid.i @@ -4,6 +4,22 @@ namespace gtsam { +#include +class HybridValues { + gtsam::DiscreteValues discrete() const; + gtsam::VectorValues continuous() const; + HybridValues(); + HybridValues(const gtsam::DiscreteValues &dv, const gtsam::VectorValues &cv); + void print(string s = "HybridValues", + const gtsam::KeyFormatter& keyFormatter = + gtsam::DefaultKeyFormatter) const; + bool equals(const gtsam::HybridValues& other, double tol) const; + void insert(gtsam::Key j, int value); + void insert(gtsam::Key j, const gtsam::Vector& value); + size_t& atDiscrete(gtsam::Key j); + gtsam::Vector& at(gtsam::Key j); +}; + #include virtual class HybridFactor { void print(string s = "HybridFactor\n", @@ -23,7 +39,17 @@ virtual class HybridConditional { bool equals(const gtsam::HybridConditional& other, double tol = 1e-9) const; size_t nrFrontals() const; size_t nrParents() const; - Factor* inner(); + gtsam::Factor* inner(); +}; + +#include +virtual class HybridDiscreteFactor { + HybridDiscreteFactor(gtsam::DecisionTreeFactor dtf); + void print(string s = "HybridDiscreteFactor\n", + const gtsam::KeyFormatter& keyFormatter = + gtsam::DefaultKeyFormatter) const; + bool equals(const gtsam::HybridDiscreteFactor& other, double tol = 1e-9) const; + gtsam::Factor* inner(); }; #include @@ -73,6 +99,8 @@ class HybridBayesTree { bool empty() const; const HybridBayesTreeClique* operator[](size_t j) 
const; + gtsam::HybridValues optimize() const; + string dot(const gtsam::KeyFormatter& keyFormatter = gtsam::DefaultKeyFormatter) const; }; @@ -84,6 +112,7 @@ class HybridBayesNet { size_t size() const; gtsam::KeySet keys() const; const gtsam::HybridConditional* at(size_t i) const; + gtsam::HybridValues optimize() const; void print(string s = "HybridBayesNet\n", const gtsam::KeyFormatter& keyFormatter = gtsam::DefaultKeyFormatter) const; @@ -115,6 +144,7 @@ class HybridGaussianFactorGraph { void add(gtsam::JacobianFactor* factor); bool empty() const; + void remove(size_t i); size_t size() const; gtsam::KeySet keys() const; const gtsam::HybridFactor* at(size_t i) const; @@ -142,4 +172,50 @@ class HybridGaussianFactorGraph { const gtsam::DotWriter& writer = gtsam::DotWriter()) const; }; +#include +class HybridNonlinearFactorGraph { + HybridNonlinearFactorGraph(); + HybridNonlinearFactorGraph(const gtsam::HybridNonlinearFactorGraph& graph); + void push_back(gtsam::HybridFactor* factor); + void push_back(gtsam::NonlinearFactor* factor); + void push_back(gtsam::HybridDiscreteFactor* factor); + void add(gtsam::NonlinearFactor* factor); + void add(gtsam::DiscreteFactor* factor); + gtsam::HybridGaussianFactorGraph linearize(const gtsam::Values& continuousValues) const; + + bool empty() const; + void remove(size_t i); + size_t size() const; + gtsam::KeySet keys() const; + const gtsam::HybridFactor* at(size_t i) const; + + void print(string s = "HybridNonlinearFactorGraph\n", + const gtsam::KeyFormatter& keyFormatter = + gtsam::DefaultKeyFormatter) const; +}; + +#include +class MixtureFactor : gtsam::HybridFactor { + MixtureFactor(const gtsam::KeyVector& keys, const gtsam::DiscreteKeys& discreteKeys, + const gtsam::DecisionTree& factors, bool normalized = false); + + template + MixtureFactor(const gtsam::KeyVector& keys, const gtsam::DiscreteKeys& discreteKeys, + const std::vector& factors, + bool normalized = false); + + double error(const gtsam::Values& continuousVals, + const gtsam::DiscreteValues& discreteVals) const; + + double nonlinearFactorLogNormalizingConstant(const gtsam::NonlinearFactor* factor, + const gtsam::Values& values) const; + + GaussianMixtureFactor* linearize( + const gtsam::Values& continuousVals) const; + + void print(string s = "MixtureFactor\n", + const gtsam::KeyFormatter& keyFormatter = + gtsam::DefaultKeyFormatter) const; +}; + } // namespace gtsam diff --git a/gtsam/hybrid/tests/Switching.h b/gtsam/hybrid/tests/Switching.h index c081b8e87..ecd90e234 100644 --- a/gtsam/hybrid/tests/Switching.h +++ b/gtsam/hybrid/tests/Switching.h @@ -18,20 +18,39 @@ #include #include +#include #include #include +#include +#include #include #include +#include +#include +#include + +#include #pragma once -using gtsam::symbol_shorthand::C; -using gtsam::symbol_shorthand::X; - namespace gtsam { + +using symbol_shorthand::C; +using symbol_shorthand::M; +using symbol_shorthand::X; + +/** + * @brief Create a switching system chain. A switching system is a continuous + * system which depends on a discrete mode at each time step of the chain. + * + * @param n The number of chain elements. + * @param keyFunc The functional to help specify the continuous key. + * @param dKeyFunc The functional to help specify the discrete key. 
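 * For example, makeSwitchingChain(5) builds a chain over keyFunc(1)..keyFunc(5)
 * whose step-to-step factors are switched by the discrete modes
 * dKeyFunc(1)..dKeyFunc(4).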
+ * @return HybridGaussianFactorGraph::shared_ptr + */ inline HybridGaussianFactorGraph::shared_ptr makeSwitchingChain( size_t n, std::function keyFunc = X, - std::function dKeyFunc = C) { + std::function dKeyFunc = M) { HybridGaussianFactorGraph hfg; hfg.add(JacobianFactor(keyFunc(1), I_3x3, Z_3x1)); @@ -54,9 +73,19 @@ inline HybridGaussianFactorGraph::shared_ptr makeSwitchingChain( return boost::make_shared(std::move(hfg)); } +/** + * @brief Return the ordering as a binary tree such that all parent nodes are + * above their children. + * + * This will result in a nested dissection Bayes tree after elimination. + * + * @param input The original ordering. + * @return std::pair> + */ inline std::pair> makeBinaryOrdering( std::vector &input) { KeyVector new_order; + std::vector levels(input.size()); std::function::iterator, std::vector::iterator, int)> @@ -79,8 +108,101 @@ inline std::pair> makeBinaryOrdering( bsg(input.begin(), input.end(), 0); std::reverse(new_order.begin(), new_order.end()); - // std::reverse(levels.begin(), levels.end()); + return {new_order, levels}; } +/* *************************************************************************** + */ +using MotionModel = BetweenFactor; +// using MotionMixture = MixtureFactor; + +// Test fixture with switching network. +struct Switching { + size_t K; + DiscreteKeys modes; + HybridNonlinearFactorGraph nonlinearFactorGraph; + HybridGaussianFactorGraph linearizedFactorGraph; + Values linearizationPoint; + + /// Create with given number of time steps. + Switching(size_t K, double between_sigma = 1.0, double prior_sigma = 0.1) + : K(K) { + // Create DiscreteKeys for binary K modes, modes[0] will not be used. + for (size_t k = 0; k <= K; k++) { + modes.emplace_back(M(k), 2); + } + + // Create hybrid factor graph. + // Add a prior on X(1). + auto prior = boost::make_shared>( + X(1), 0, noiseModel::Isotropic::Sigma(1, prior_sigma)); + nonlinearFactorGraph.push_nonlinear(prior); + + // Add "motion models". + for (size_t k = 1; k < K; k++) { + KeyVector keys = {X(k), X(k + 1)}; + auto motion_models = motionModels(k, between_sigma); + std::vector components; + for (auto &&f : motion_models) { + components.push_back(boost::dynamic_pointer_cast(f)); + } + nonlinearFactorGraph.emplace_hybrid( + keys, DiscreteKeys{modes[k]}, components); + } + + // Add measurement factors + auto measurement_noise = noiseModel::Isotropic::Sigma(1, prior_sigma); + for (size_t k = 2; k <= K; k++) { + nonlinearFactorGraph.emplace_nonlinear>( + X(k), 1.0 * (k - 1), measurement_noise); + } + + // Add "mode chain" + addModeChain(&nonlinearFactorGraph); + + // Create the linearization point. 
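// (X(k) is deliberately initialized to k, one more than the value k-1 implied
// by the measurement factors, so the optimal delta in the tests below is -1
// for every continuous variable.)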
+ for (size_t k = 1; k <= K; k++) { + linearizationPoint.insert(X(k), static_cast(k)); + } + + linearizedFactorGraph = nonlinearFactorGraph.linearize(linearizationPoint); + } + + // Create motion models for a given time step + static std::vector motionModels(size_t k, + double sigma = 1.0) { + auto noise_model = noiseModel::Isotropic::Sigma(1, sigma); + auto still = + boost::make_shared(X(k), X(k + 1), 0.0, noise_model), + moving = + boost::make_shared(X(k), X(k + 1), 1.0, noise_model); + return {still, moving}; + } + + // Add "mode chain" to HybridNonlinearFactorGraph + void addModeChain(HybridNonlinearFactorGraph *fg) { + auto prior = boost::make_shared(modes[1], "1/1"); + fg->push_discrete(prior); + for (size_t k = 1; k < K - 1; k++) { + auto parents = {modes[k]}; + auto conditional = boost::make_shared( + modes[k + 1], parents, "1/2 3/2"); + fg->push_discrete(conditional); + } + } + + // Add "mode chain" to HybridGaussianFactorGraph + void addModeChain(HybridGaussianFactorGraph *fg) { + auto prior = boost::make_shared(modes[1], "1/1"); + fg->push_discrete(prior); + for (size_t k = 1; k < K - 1; k++) { + auto parents = {modes[k]}; + auto conditional = boost::make_shared( + modes[k + 1], parents, "1/2 3/2"); + fg->push_discrete(conditional); + } + } +}; + } // namespace gtsam diff --git a/gtsam/hybrid/tests/testGaussianHybridFactorGraph.cpp b/gtsam/hybrid/tests/testGaussianHybridFactorGraph.cpp index 552bb18f5..40da42412 100644 --- a/gtsam/hybrid/tests/testGaussianHybridFactorGraph.cpp +++ b/gtsam/hybrid/tests/testGaussianHybridFactorGraph.cpp @@ -17,6 +17,7 @@ #include #include +#include #include #include #include @@ -30,6 +31,7 @@ #include #include #include +#include #include #include #include @@ -52,32 +54,36 @@ using namespace boost::assign; using namespace std; using namespace gtsam; -using gtsam::symbol_shorthand::C; using gtsam::symbol_shorthand::D; +using gtsam::symbol_shorthand::M; using gtsam::symbol_shorthand::X; using gtsam::symbol_shorthand::Y; /* ************************************************************************* */ -TEST(HybridGaussianFactorGraph, creation) { - HybridConditional test; +TEST(HybridGaussianFactorGraph, Creation) { + HybridConditional conditional; HybridGaussianFactorGraph hfg; - hfg.add(HybridGaussianFactor(JacobianFactor(0, I_3x3, Z_3x1))); + hfg.add(HybridGaussianFactor(JacobianFactor(X(0), I_3x3, Z_3x1))); - GaussianMixture clgc( - {X(0)}, {X(1)}, DiscreteKeys(DiscreteKey{C(0), 2}), - GaussianMixture::Conditionals( - C(0), - boost::make_shared(X(0), Z_3x1, I_3x3, X(1), - I_3x3), - boost::make_shared(X(0), Vector3::Ones(), I_3x3, - X(1), I_3x3))); - GTSAM_PRINT(clgc); + // Define a gaussian mixture conditional P(x0|x1, c0) and add it to the factor + // graph + GaussianMixture gm({X(0)}, {X(1)}, DiscreteKeys(DiscreteKey{M(0), 2}), + GaussianMixture::Conditionals( + M(0), + boost::make_shared( + X(0), Z_3x1, I_3x3, X(1), I_3x3), + boost::make_shared( + X(0), Vector3::Ones(), I_3x3, X(1), I_3x3))); + hfg.add(gm); + + EXPECT_LONGS_EQUAL(2, hfg.size()); } /* ************************************************************************* */ -TEST(HybridGaussianFactorGraph, eliminate) { +TEST(HybridGaussianFactorGraph, EliminateSequential) { + // Test elimination of a single variable. 
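// (With only a Gaussian prior on key 0 in the graph, sequential elimination
// should yield a trivial hybrid Bayes net with a single Gaussian conditional.)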
HybridGaussianFactorGraph hfg; hfg.add(HybridGaussianFactor(JacobianFactor(0, I_3x3, Z_3x1))); @@ -88,13 +94,15 @@ TEST(HybridGaussianFactorGraph, eliminate) { } /* ************************************************************************* */ -TEST(HybridGaussianFactorGraph, eliminateMultifrontal) { +TEST(HybridGaussianFactorGraph, EliminateMultifrontal) { + // Test multifrontal elimination HybridGaussianFactorGraph hfg; - DiscreteKey c(C(1), 2); + DiscreteKey m(M(1), 2); + // Add priors on x0 and c1 hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1)); - hfg.add(HybridDiscreteFactor(DecisionTreeFactor(c, {2, 8}))); + hfg.add(HybridDiscreteFactor(DecisionTreeFactor(m, {2, 8}))); Ordering ordering; ordering.push_back(X(0)); @@ -108,117 +116,111 @@ TEST(HybridGaussianFactorGraph, eliminateMultifrontal) { TEST(HybridGaussianFactorGraph, eliminateFullSequentialEqualChance) { HybridGaussianFactorGraph hfg; - DiscreteKey c1(C(1), 2); + DiscreteKey m1(M(1), 2); + // Add prior on x0 hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1)); + // Add factor between x0 and x1 hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1)); + // Add a gaussian mixture factor Ï•(x1, c1) DecisionTree dt( - C(1), boost::make_shared(X(1), I_3x3, Z_3x1), + M(1), boost::make_shared(X(1), I_3x3, Z_3x1), boost::make_shared(X(1), I_3x3, Vector3::Ones())); - hfg.add(GaussianMixtureFactor({X(1)}, {c1}, dt)); + hfg.add(GaussianMixtureFactor({X(1)}, {m1}, dt)); auto result = - hfg.eliminateSequential(Ordering::ColamdConstrainedLast(hfg, {C(1)})); + hfg.eliminateSequential(Ordering::ColamdConstrainedLast(hfg, {M(1)})); auto dc = result->at(2)->asDiscreteConditional(); DiscreteValues dv; - dv[C(1)] = 0; - EXPECT_DOUBLES_EQUAL(0.6225, dc->operator()(dv), 1e-3); + dv[M(1)] = 0; + EXPECT_DOUBLES_EQUAL(1, dc->operator()(dv), 1e-3); } /* ************************************************************************* */ TEST(HybridGaussianFactorGraph, eliminateFullSequentialSimple) { HybridGaussianFactorGraph hfg; - DiscreteKey c1(C(1), 2); + DiscreteKey m1(M(1), 2); + // Add prior on x0 hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1)); + // Add factor between x0 and x1 hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1)); - DecisionTree dt( - C(1), boost::make_shared(X(1), I_3x3, Z_3x1), - boost::make_shared(X(1), I_3x3, Vector3::Ones())); + std::vector factors = { + boost::make_shared(X(1), I_3x3, Z_3x1), + boost::make_shared(X(1), I_3x3, Vector3::Ones())}; + hfg.add(GaussianMixtureFactor({X(1)}, {m1}, factors)); - hfg.add(GaussianMixtureFactor({X(1)}, {c1}, dt)); - // hfg.add(GaussianMixtureFactor({X(0)}, {c1}, dt)); - hfg.add(HybridDiscreteFactor(DecisionTreeFactor(c1, {2, 8}))); - hfg.add(HybridDiscreteFactor( - DecisionTreeFactor({{C(1), 2}, {C(2), 2}}, "1 2 3 4"))); - // hfg.add(HybridDiscreteFactor(DecisionTreeFactor({{C(2), 2}, {C(3), 2}}, "1 - // 2 3 4"))); hfg.add(HybridDiscreteFactor(DecisionTreeFactor({{C(3), 2}, - // {C(1), 2}}, "1 2 2 1"))); + // Discrete probability table for c1 + hfg.add(DecisionTreeFactor(m1, {2, 8})); + // Joint discrete probability table for c1, c2 + hfg.add(DecisionTreeFactor({{M(1), 2}, {M(2), 2}}, "1 2 3 4")); - auto result = hfg.eliminateSequential( - Ordering::ColamdConstrainedLast(hfg, {C(1), C(2)})); + HybridBayesNet::shared_ptr result = hfg.eliminateSequential( + Ordering::ColamdConstrainedLast(hfg, {M(1), M(2)})); - GTSAM_PRINT(*result); + // There are 4 variables (2 continuous + 2 discrete) in the bayes net. 
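// (Sequential elimination emits one HybridConditional per eliminated variable,
// so the resulting Bayes net has one entry each for x0, x1, m1 and m2.)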
+ EXPECT_LONGS_EQUAL(4, result->size()); } /* ************************************************************************* */ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalSimple) { HybridGaussianFactorGraph hfg; - DiscreteKey c1(C(1), 2); + DiscreteKey m1(M(1), 2); hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1)); hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1)); - // DecisionTree dt( - // C(1), boost::make_shared(X(1), I_3x3, Z_3x1), - // boost::make_shared(X(1), I_3x3, Vector3::Ones())); - - // hfg.add(GaussianMixtureFactor({X(1)}, {c1}, dt)); hfg.add(GaussianMixtureFactor::FromFactors( - {X(1)}, {{C(1), 2}}, + {X(1)}, {{M(1), 2}}, {boost::make_shared(X(1), I_3x3, Z_3x1), boost::make_shared(X(1), I_3x3, Vector3::Ones())})); - // hfg.add(GaussianMixtureFactor({X(0)}, {c1}, dt)); - hfg.add(DecisionTreeFactor(c1, {2, 8})); - hfg.add(DecisionTreeFactor({{C(1), 2}, {C(2), 2}}, "1 2 3 4")); - // hfg.add(HybridDiscreteFactor(DecisionTreeFactor({{C(2), 2}, {C(3), 2}}, "1 - // 2 3 4"))); hfg.add(HybridDiscreteFactor(DecisionTreeFactor({{C(3), 2}, - // {C(1), 2}}, "1 2 2 1"))); + hfg.add(DecisionTreeFactor(m1, {2, 8})); + hfg.add(DecisionTreeFactor({{M(1), 2}, {M(2), 2}}, "1 2 3 4")); - auto result = hfg.eliminateMultifrontal( - Ordering::ColamdConstrainedLast(hfg, {C(1), C(2)})); + HybridBayesTree::shared_ptr result = + hfg.eliminateMultifrontal(hfg.getHybridOrdering()); - GTSAM_PRINT(*result); - GTSAM_PRINT(*result->marginalFactor(C(2))); + // The bayes tree should have 3 cliques + EXPECT_LONGS_EQUAL(3, result->size()); + // GTSAM_PRINT(*result); + // GTSAM_PRINT(*result->marginalFactor(M(2))); } /* ************************************************************************* */ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalCLG) { HybridGaussianFactorGraph hfg; - DiscreteKey c(C(1), 2); + DiscreteKey m(M(1), 2); + // Prior on x0 hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1)); + // Factor between x0-x1 hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1)); + // Decision tree with different modes on x1 DecisionTree dt( - C(1), boost::make_shared(X(1), I_3x3, Z_3x1), + M(1), boost::make_shared(X(1), I_3x3, Z_3x1), boost::make_shared(X(1), I_3x3, Vector3::Ones())); - hfg.add(GaussianMixtureFactor({X(1)}, {c}, dt)); - hfg.add(HybridDiscreteFactor(DecisionTreeFactor(c, {2, 8}))); - // hfg.add(HybridDiscreteFactor(DecisionTreeFactor({{C(1), 2}, {C(2), 2}}, "1 - // 2 3 4"))); + // Hybrid factor P(x1|c1) + hfg.add(GaussianMixtureFactor({X(1)}, {m}, dt)); + // Prior factor on c1 + hfg.add(HybridDiscreteFactor(DecisionTreeFactor(m, {2, 8}))); - auto ordering_full = Ordering::ColamdConstrainedLast(hfg, {C(1)}); + // Get a constrained ordering keeping c1 last + auto ordering_full = hfg.getHybridOrdering(); + // Returns a Hybrid Bayes Tree with distribution P(x0|x1)P(x1|c1)P(c1) HybridBayesTree::shared_ptr hbt = hfg.eliminateMultifrontal(ordering_full); - GTSAM_PRINT(*hbt); - /* - Explanation: the Junction tree will need to reeliminate to get to the marginal - on X(1), which is not possible because it involves eliminating discrete before - continuous. The solution to this, however, is in Murphy02. TLDR is that this - is 1. expensive and 2. inexact. neverless it is doable. And I believe that we - should do this. 
- */ + EXPECT_LONGS_EQUAL(3, hbt->size()); } /* ************************************************************************* */ @@ -233,66 +235,72 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalTwoClique) { hfg.add(JacobianFactor(X(1), I_3x3, X(2), -I_3x3, Z_3x1)); { - // DecisionTree dt( - // C(0), boost::make_shared(X(0), I_3x3, Z_3x1), - // boost::make_shared(X(0), I_3x3, Vector3::Ones())); - hfg.add(GaussianMixtureFactor::FromFactors( - {X(0)}, {{C(0), 2}}, + {X(0)}, {{M(0), 2}}, {boost::make_shared(X(0), I_3x3, Z_3x1), boost::make_shared(X(0), I_3x3, Vector3::Ones())})); DecisionTree dt1( - C(1), boost::make_shared(X(2), I_3x3, Z_3x1), + M(1), boost::make_shared(X(2), I_3x3, Z_3x1), boost::make_shared(X(2), I_3x3, Vector3::Ones())); - hfg.add(GaussianMixtureFactor({X(2)}, {{C(1), 2}}, dt1)); + hfg.add(GaussianMixtureFactor({X(2)}, {{M(1), 2}}, dt1)); } - // hfg.add(HybridDiscreteFactor(DecisionTreeFactor(c, {2, 8}))); hfg.add(HybridDiscreteFactor( - DecisionTreeFactor({{C(1), 2}, {C(2), 2}}, "1 2 3 4"))); + DecisionTreeFactor({{M(1), 2}, {M(2), 2}}, "1 2 3 4"))); hfg.add(JacobianFactor(X(3), I_3x3, X(4), -I_3x3, Z_3x1)); hfg.add(JacobianFactor(X(4), I_3x3, X(5), -I_3x3, Z_3x1)); { DecisionTree dt( - C(3), boost::make_shared(X(3), I_3x3, Z_3x1), + M(3), boost::make_shared(X(3), I_3x3, Z_3x1), boost::make_shared(X(3), I_3x3, Vector3::Ones())); - hfg.add(GaussianMixtureFactor({X(3)}, {{C(3), 2}}, dt)); + hfg.add(GaussianMixtureFactor({X(3)}, {{M(3), 2}}, dt)); DecisionTree dt1( - C(2), boost::make_shared(X(5), I_3x3, Z_3x1), + M(2), boost::make_shared(X(5), I_3x3, Z_3x1), boost::make_shared(X(5), I_3x3, Vector3::Ones())); - hfg.add(GaussianMixtureFactor({X(5)}, {{C(2), 2}}, dt1)); + hfg.add(GaussianMixtureFactor({X(5)}, {{M(2), 2}}, dt1)); } auto ordering_full = - Ordering::ColamdConstrainedLast(hfg, {C(0), C(1), C(2), C(3)}); - - GTSAM_PRINT(ordering_full); + Ordering::ColamdConstrainedLast(hfg, {M(0), M(1), M(2), M(3)}); HybridBayesTree::shared_ptr hbt; HybridGaussianFactorGraph::shared_ptr remaining; std::tie(hbt, remaining) = hfg.eliminatePartialMultifrontal(ordering_full); - GTSAM_PRINT(*hbt); + // 9 cliques in the bayes tree and 0 remaining variables to eliminate. + EXPECT_LONGS_EQUAL(9, hbt->size()); + EXPECT_LONGS_EQUAL(0, remaining->size()); - GTSAM_PRINT(*remaining); - - hbt->dot(std::cout); /* - Explanation: the Junction tree will need to reeliminate to get to the marginal - on X(1), which is not possible because it involves eliminating discrete before - continuous. The solution to this, however, is in Murphy02. TLDR is that this - is 1. expensive and 2. inexact. neverless it is doable. And I believe that we - should do this. + (Fan) Explanation: the Junction tree will need to reeliminate to get to the + marginal on X(1), which is not possible because it involves eliminating + discrete before continuous. The solution to this, however, is in Murphy02. + TLDR is that this is 1. expensive and 2. inexact. nevertheless it is doable. + And I believe that we should do this. 
*/ } +void dotPrint(const HybridGaussianFactorGraph::shared_ptr &hfg, + const HybridBayesTree::shared_ptr &hbt, + const Ordering &ordering) { + DotWriter dw; + dw.positionHints['c'] = 2; + dw.positionHints['x'] = 1; + std::cout << hfg->dot(DefaultKeyFormatter, dw); + std::cout << "\n"; + hbt->dot(std::cout); + + std::cout << "\n"; + std::cout << hfg->eliminateSequential(ordering)->dot(DefaultKeyFormatter, dw); +} + /* ************************************************************************* */ // TODO(fan): make a graph like Varun's paper one TEST(HybridGaussianFactorGraph, Switching) { @@ -303,13 +311,13 @@ TEST(HybridGaussianFactorGraph, Switching) { // X(3), X(7) // X(2), X(8) // X(1), X(4), X(6), X(9) - // C(5) will be the center, C(1-4), C(6-8) - // C(3), C(7) - // C(1), C(4), C(2), C(6), C(8) + // M(5) will be the center, M(1-4), M(6-8) + // M(3), M(7) + // M(1), M(4), M(2), M(6), M(8) // auto ordering_full = // Ordering(KeyVector{X(1), X(4), X(2), X(6), X(9), X(8), X(3), X(7), // X(5), - // C(1), C(4), C(2), C(6), C(8), C(3), C(7), C(5)}); + // M(1), M(4), M(2), M(6), M(8), M(3), M(7), M(5)}); KeyVector ordering; { @@ -326,79 +334,32 @@ TEST(HybridGaussianFactorGraph, Switching) { for (auto &l : lvls) { l = -l; } - std::copy(lvls.begin(), lvls.end(), - std::ostream_iterator(std::cout, ",")); - std::cout << "\n"; } { std::vector naturalC(N - 1); std::iota(naturalC.begin(), naturalC.end(), 1); std::vector ordC; std::transform(naturalC.begin(), naturalC.end(), std::back_inserter(ordC), - [](int x) { return C(x); }); + [](int x) { return M(x); }); KeyVector ndC; std::vector lvls; // std::copy(ordC.begin(), ordC.end(), std::back_inserter(ordering)); std::tie(ndC, lvls) = makeBinaryOrdering(ordC); std::copy(ndC.begin(), ndC.end(), std::back_inserter(ordering)); - std::copy(lvls.begin(), lvls.end(), - std::ostream_iterator(std::cout, ",")); } auto ordering_full = Ordering(ordering); - // auto ordering_full = - // Ordering(); - - // for (int i = 1; i <= 9; i++) { - // ordering_full.push_back(X(i)); - // } - - // for (int i = 1; i < 9; i++) { - // ordering_full.push_back(C(i)); - // } - - // auto ordering_full = - // Ordering(KeyVector{X(1), X(4), X(2), X(6), X(9), X(8), X(3), X(7), - // X(5), - // C(1), C(2), C(3), C(4), C(5), C(6), C(7), C(8)}); - // GTSAM_PRINT(*hfg); - GTSAM_PRINT(ordering_full); + // GTSAM_PRINT(ordering_full); HybridBayesTree::shared_ptr hbt; HybridGaussianFactorGraph::shared_ptr remaining; std::tie(hbt, remaining) = hfg->eliminatePartialMultifrontal(ordering_full); - // GTSAM_PRINT(*hbt); - - // GTSAM_PRINT(*remaining); - - { - DotWriter dw; - dw.positionHints['c'] = 2; - dw.positionHints['x'] = 1; - std::cout << hfg->dot(DefaultKeyFormatter, dw); - std::cout << "\n"; - hbt->dot(std::cout); - } - - { - DotWriter dw; - // dw.positionHints['c'] = 2; - // dw.positionHints['x'] = 1; - std::cout << "\n"; - std::cout << hfg->eliminateSequential(ordering_full) - ->dot(DefaultKeyFormatter, dw); - } - /* - Explanation: the Junction tree will need to reeliminate to get to the marginal - on X(1), which is not possible because it involves eliminating discrete before - continuous. The solution to this, however, is in Murphy02. TLDR is that this - is 1. expensive and 2. inexact. neverless it is doable. And I believe that we - should do this. - */ - hbt->marginalFactor(C(11))->print("HBT: "); + // 12 cliques in the bayes tree and 0 remaining variables to eliminate. 
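// (eliminatePartialMultifrontal returns the Bayes tree over the eliminated
// variables together with a factor graph of whatever was left out; since the
// ordering above covers every variable, that remainder is empty here.)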
+ EXPECT_LONGS_EQUAL(12, hbt->size()); + EXPECT_LONGS_EQUAL(0, remaining->size()); } /* ************************************************************************* */ @@ -411,13 +372,13 @@ TEST(HybridGaussianFactorGraph, SwitchingISAM) { // X(3), X(7) // X(2), X(8) // X(1), X(4), X(6), X(9) - // C(5) will be the center, C(1-4), C(6-8) - // C(3), C(7) - // C(1), C(4), C(2), C(6), C(8) + // M(5) will be the center, M(1-4), M(6-8) + // M(3), M(7) + // M(1), M(4), M(2), M(6), M(8) // auto ordering_full = // Ordering(KeyVector{X(1), X(4), X(2), X(6), X(9), X(8), X(3), X(7), // X(5), - // C(1), C(4), C(2), C(6), C(8), C(3), C(7), C(5)}); + // M(1), M(4), M(2), M(6), M(8), M(3), M(7), M(5)}); KeyVector ordering; { @@ -434,67 +395,37 @@ TEST(HybridGaussianFactorGraph, SwitchingISAM) { for (auto &l : lvls) { l = -l; } - std::copy(lvls.begin(), lvls.end(), - std::ostream_iterator(std::cout, ",")); - std::cout << "\n"; } { std::vector naturalC(N - 1); std::iota(naturalC.begin(), naturalC.end(), 1); std::vector ordC; std::transform(naturalC.begin(), naturalC.end(), std::back_inserter(ordC), - [](int x) { return C(x); }); + [](int x) { return M(x); }); KeyVector ndC; std::vector lvls; // std::copy(ordC.begin(), ordC.end(), std::back_inserter(ordering)); std::tie(ndC, lvls) = makeBinaryOrdering(ordC); std::copy(ndC.begin(), ndC.end(), std::back_inserter(ordering)); - std::copy(lvls.begin(), lvls.end(), - std::ostream_iterator(std::cout, ",")); } auto ordering_full = Ordering(ordering); - // GTSAM_PRINT(*hfg); - GTSAM_PRINT(ordering_full); - HybridBayesTree::shared_ptr hbt; HybridGaussianFactorGraph::shared_ptr remaining; std::tie(hbt, remaining) = hfg->eliminatePartialMultifrontal(ordering_full); - // GTSAM_PRINT(*hbt); - - // GTSAM_PRINT(*remaining); - - { - DotWriter dw; - dw.positionHints['c'] = 2; - dw.positionHints['x'] = 1; - std::cout << hfg->dot(DefaultKeyFormatter, dw); - std::cout << "\n"; - hbt->dot(std::cout); - } - - { - DotWriter dw; - // dw.positionHints['c'] = 2; - // dw.positionHints['x'] = 1; - std::cout << "\n"; - std::cout << hfg->eliminateSequential(ordering_full) - ->dot(DefaultKeyFormatter, dw); - } - auto new_fg = makeSwitchingChain(12); auto isam = HybridGaussianISAM(*hbt); - { - HybridGaussianFactorGraph factorGraph; - factorGraph.push_back(new_fg->at(new_fg->size() - 2)); - factorGraph.push_back(new_fg->at(new_fg->size() - 1)); - isam.update(factorGraph); - std::cout << isam.dot(); - isam.marginalFactor(C(11))->print(); - } + // Run an ISAM update. 
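// (The two most recent factors of the longer 12-step chain built above are
// collected into a small graph and passed to isam.update(), which only
// re-eliminates the part of the Bayes tree affected by the new factors.)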
+ HybridGaussianFactorGraph factorGraph; + factorGraph.push_back(new_fg->at(new_fg->size() - 2)); + factorGraph.push_back(new_fg->at(new_fg->size() - 1)); + isam.update(factorGraph); + + // ISAM should have 12 factors after the last update + EXPECT_LONGS_EQUAL(12, isam.size()); } /* ************************************************************************* */ @@ -517,23 +448,8 @@ TEST(HybridGaussianFactorGraph, SwitchingTwoVar) { ordX.emplace_back(Y(i)); } - // { - // KeyVector ndX; - // std::vector lvls; - // std::tie(ndX, lvls) = makeBinaryOrdering(naturalX); - // std::copy(ndX.begin(), ndX.end(), std::back_inserter(ordering)); - // std::copy(lvls.begin(), lvls.end(), - // std::ostream_iterator(std::cout, ",")); - // std::cout << "\n"; - - // for (size_t i = 0; i < N; i++) { - // ordX.emplace_back(X(ndX[i])); - // ordX.emplace_back(Y(ndX[i])); - // } - // } - for (size_t i = 1; i <= N - 1; i++) { - ordX.emplace_back(C(i)); + ordX.emplace_back(M(i)); } for (size_t i = 1; i <= N - 1; i++) { ordX.emplace_back(D(i)); @@ -545,8 +461,8 @@ TEST(HybridGaussianFactorGraph, SwitchingTwoVar) { dw.positionHints['c'] = 0; dw.positionHints['d'] = 3; dw.positionHints['y'] = 2; - std::cout << hfg->dot(DefaultKeyFormatter, dw); - std::cout << "\n"; + // std::cout << hfg->dot(DefaultKeyFormatter, dw); + // std::cout << "\n"; } { @@ -555,10 +471,10 @@ TEST(HybridGaussianFactorGraph, SwitchingTwoVar) { // dw.positionHints['c'] = 0; // dw.positionHints['d'] = 3; dw.positionHints['x'] = 1; - std::cout << "\n"; + // std::cout << "\n"; // std::cout << hfg->eliminateSequential(Ordering(ordX)) // ->dot(DefaultKeyFormatter, dw); - hfg->eliminateMultifrontal(Ordering(ordX))->dot(std::cout); + // hfg->eliminateMultifrontal(Ordering(ordX))->dot(std::cout); } Ordering ordering_partial; @@ -566,25 +482,45 @@ TEST(HybridGaussianFactorGraph, SwitchingTwoVar) { ordering_partial.emplace_back(X(i)); ordering_partial.emplace_back(Y(i)); } - { - HybridBayesNet::shared_ptr hbn; - HybridGaussianFactorGraph::shared_ptr remaining; - std::tie(hbn, remaining) = - hfg->eliminatePartialSequential(ordering_partial); + HybridBayesNet::shared_ptr hbn; + HybridGaussianFactorGraph::shared_ptr remaining; + std::tie(hbn, remaining) = hfg->eliminatePartialSequential(ordering_partial); - // remaining->print(); - { - DotWriter dw; - dw.positionHints['x'] = 1; - dw.positionHints['c'] = 0; - dw.positionHints['d'] = 3; - dw.positionHints['y'] = 2; - std::cout << remaining->dot(DefaultKeyFormatter, dw); - std::cout << "\n"; - } + EXPECT_LONGS_EQUAL(14, hbn->size()); + EXPECT_LONGS_EQUAL(11, remaining->size()); + + { + DotWriter dw; + dw.positionHints['x'] = 1; + dw.positionHints['c'] = 0; + dw.positionHints['d'] = 3; + dw.positionHints['y'] = 2; + // std::cout << remaining->dot(DefaultKeyFormatter, dw); + // std::cout << "\n"; } } +TEST(HybridGaussianFactorGraph, optimize) { + HybridGaussianFactorGraph hfg; + + DiscreteKey c1(C(1), 2); + + hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1)); + hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1)); + + DecisionTree dt( + C(1), boost::make_shared(X(1), I_3x3, Z_3x1), + boost::make_shared(X(1), I_3x3, Vector3::Ones())); + + hfg.add(GaussianMixtureFactor({X(1)}, {c1}, dt)); + + auto result = + hfg.eliminateSequential(Ordering::ColamdConstrainedLast(hfg, {C(1)})); + + HybridValues hv = result->optimize(); + + EXPECT(assert_equal(hv.atDiscrete(C(1)), int(0))); +} /* ************************************************************************* */ int main() { TestResult tr; diff --git 
a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp index 36477218b..cb9068c30 100644 --- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp @@ -74,7 +74,7 @@ TEST(GaussianMixtureFactor, Sum) { // Check that number of keys is 3 EXPECT_LONGS_EQUAL(3, mixtureFactorA.keys().size()); - // Check that number of discrete keys is 1 // TODO(Frank): should not exist? + // Check that number of discrete keys is 1 EXPECT_LONGS_EQUAL(1, mixtureFactorA.discreteKeys().size()); // Create sum of two mixture factors: it will be a decision tree now on both @@ -104,7 +104,7 @@ TEST(GaussianMixtureFactor, Printing) { GaussianMixtureFactor mixtureFactor({X(1), X(2)}, {m1}, factors); std::string expected = - R"(Hybrid x1 x2; 1 ]{ + R"(Hybrid [x1 x2; 1]{ Choice(1) 0 Leaf : A[x1] = [ diff --git a/gtsam/hybrid/tests/testHybridBayesNet.cpp b/gtsam/hybrid/tests/testHybridBayesNet.cpp new file mode 100644 index 000000000..0c15ee83d --- /dev/null +++ b/gtsam/hybrid/tests/testHybridBayesNet.cpp @@ -0,0 +1,189 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file testHybridBayesNet.cpp + * @brief Unit tests for HybridBayesNet + * @author Varun Agrawal + * @author Fan Jiang + * @author Frank Dellaert + * @date December 2021 + */ + +#include +#include +#include +#include + +#include "Switching.h" + +// Include for test suite +#include + +using namespace std; +using namespace gtsam; +using namespace gtsam::serializationTestHelpers; + +using noiseModel::Isotropic; +using symbol_shorthand::M; +using symbol_shorthand::X; + +static const DiscreteKey Asia(0, 2); + +/* ****************************************************************************/ +// Test creation +TEST(HybridBayesNet, Creation) { + HybridBayesNet bayesNet; + + bayesNet.add(Asia, "99/1"); + + DiscreteConditional expected(Asia, "99/1"); + + CHECK(bayesNet.atDiscrete(0)); + auto& df = *bayesNet.atDiscrete(0); + EXPECT(df.equals(expected)); +} + +/* ****************************************************************************/ +// Test choosing an assignment of conditionals +TEST(HybridBayesNet, Choose) { + Switching s(4); + + Ordering ordering; + for (auto&& kvp : s.linearizationPoint) { + ordering += kvp.key; + } + + HybridBayesNet::shared_ptr hybridBayesNet; + HybridGaussianFactorGraph::shared_ptr remainingFactorGraph; + std::tie(hybridBayesNet, remainingFactorGraph) = + s.linearizedFactorGraph.eliminatePartialSequential(ordering); + + DiscreteValues assignment; + assignment[M(1)] = 1; + assignment[M(2)] = 1; + assignment[M(3)] = 0; + + GaussianBayesNet gbn = hybridBayesNet->choose(assignment); + + EXPECT_LONGS_EQUAL(4, gbn.size()); + + EXPECT(assert_equal(*(*boost::dynamic_pointer_cast( + hybridBayesNet->atMixture(0)))(assignment), + *gbn.at(0))); + EXPECT(assert_equal(*(*boost::dynamic_pointer_cast( + hybridBayesNet->atMixture(1)))(assignment), + *gbn.at(1))); + EXPECT(assert_equal(*(*boost::dynamic_pointer_cast( + hybridBayesNet->atMixture(2)))(assignment), + *gbn.at(2))); + EXPECT(assert_equal(*(*boost::dynamic_pointer_cast( + hybridBayesNet->atMixture(3)))(assignment), + *gbn.at(3))); +} + +/* 
****************************************************************************/ +// Test bayes net optimize +TEST(HybridBayesNet, OptimizeAssignment) { + Switching s(4); + + Ordering ordering; + for (auto&& kvp : s.linearizationPoint) { + ordering += kvp.key; + } + + HybridBayesNet::shared_ptr hybridBayesNet; + HybridGaussianFactorGraph::shared_ptr remainingFactorGraph; + std::tie(hybridBayesNet, remainingFactorGraph) = + s.linearizedFactorGraph.eliminatePartialSequential(ordering); + + DiscreteValues assignment; + assignment[M(1)] = 1; + assignment[M(2)] = 1; + assignment[M(3)] = 1; + + VectorValues delta = hybridBayesNet->optimize(assignment); + + // The linearization point has the same value as the key index, + // e.g. X(1) = 1, X(2) = 2, + // but the factors specify X(k) = k-1, so delta should be -1. + VectorValues expected_delta; + expected_delta.insert(make_pair(X(1), -Vector1::Ones())); + expected_delta.insert(make_pair(X(2), -Vector1::Ones())); + expected_delta.insert(make_pair(X(3), -Vector1::Ones())); + expected_delta.insert(make_pair(X(4), -Vector1::Ones())); + + EXPECT(assert_equal(expected_delta, delta)); +} + +/* ****************************************************************************/ +// Test bayes net optimize +TEST(HybridBayesNet, Optimize) { + Switching s(4); + + Ordering hybridOrdering = s.linearizedFactorGraph.getHybridOrdering(); + HybridBayesNet::shared_ptr hybridBayesNet = + s.linearizedFactorGraph.eliminateSequential(hybridOrdering); + + HybridValues delta = hybridBayesNet->optimize(); + + DiscreteValues expectedAssignment; + expectedAssignment[M(1)] = 1; + expectedAssignment[M(2)] = 0; + expectedAssignment[M(3)] = 1; + EXPECT(assert_equal(expectedAssignment, delta.discrete())); + + VectorValues expectedValues; + expectedValues.insert(X(1), -0.999904 * Vector1::Ones()); + expectedValues.insert(X(2), -0.99029 * Vector1::Ones()); + expectedValues.insert(X(3), -1.00971 * Vector1::Ones()); + expectedValues.insert(X(4), -1.0001 * Vector1::Ones()); + + EXPECT(assert_equal(expectedValues, delta.continuous(), 1e-5)); +} + +/* ****************************************************************************/ +// Test bayes net multifrontal optimize +TEST(HybridBayesNet, OptimizeMultifrontal) { + Switching s(4); + + Ordering hybridOrdering = s.linearizedFactorGraph.getHybridOrdering(); + HybridBayesTree::shared_ptr hybridBayesTree = + s.linearizedFactorGraph.eliminateMultifrontal(hybridOrdering); + HybridValues delta = hybridBayesTree->optimize(); + + VectorValues expectedValues; + expectedValues.insert(X(1), -0.999904 * Vector1::Ones()); + expectedValues.insert(X(2), -0.99029 * Vector1::Ones()); + expectedValues.insert(X(3), -1.00971 * Vector1::Ones()); + expectedValues.insert(X(4), -1.0001 * Vector1::Ones()); + + EXPECT(assert_equal(expectedValues, delta.continuous(), 1e-5)); +} + +/* ****************************************************************************/ +// Test HybridBayesNet serialization. 
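// (Round-trips the Bayes net through the text, XML and binary
// boost::serialization archives via the equalsObj/equalsXML/equalsBinary
// helpers from serializationTestHelpers.)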
+TEST(HybridBayesNet, Serialization) { + Switching s(4); + Ordering ordering = s.linearizedFactorGraph.getHybridOrdering(); + HybridBayesNet hbn = *(s.linearizedFactorGraph.eliminateSequential(ordering)); + + EXPECT(equalsObj(hbn)); + EXPECT(equalsXML(hbn)); + EXPECT(equalsBinary(hbn)); +} + +/* ************************************************************************* */ +int main() { + TestResult tr; + return TestRegistry::runAllTests(tr); +} +/* ************************************************************************* */ diff --git a/gtsam/hybrid/tests/testHybridBayesTree.cpp b/gtsam/hybrid/tests/testHybridBayesTree.cpp new file mode 100644 index 000000000..0908b8cb5 --- /dev/null +++ b/gtsam/hybrid/tests/testHybridBayesTree.cpp @@ -0,0 +1,166 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file testHybridBayesTree.cpp + * @brief Unit tests for HybridBayesTree + * @author Varun Agrawal + * @date August 2022 + */ + +#include +#include +#include +#include + +#include "Switching.h" + +// Include for test suite +#include + +using namespace std; +using namespace gtsam; +using noiseModel::Isotropic; +using symbol_shorthand::M; +using symbol_shorthand::X; + +/* ****************************************************************************/ +// Test for optimizing a HybridBayesTree with a given assignment. +TEST(HybridBayesTree, OptimizeAssignment) { + Switching s(4); + + HybridGaussianISAM isam; + HybridGaussianFactorGraph graph1; + + // Add the 3 hybrid factors, x1-x2, x2-x3, x3-x4 + for (size_t i = 1; i < 4; i++) { + graph1.push_back(s.linearizedFactorGraph.at(i)); + } + + // Add the Gaussian factors, 1 prior on X(1), + // 3 measurements on X(2), X(3), X(4) + graph1.push_back(s.linearizedFactorGraph.at(0)); + for (size_t i = 4; i <= 7; i++) { + graph1.push_back(s.linearizedFactorGraph.at(i)); + } + + // Add the discrete factors + for (size_t i = 7; i <= 9; i++) { + graph1.push_back(s.linearizedFactorGraph.at(i)); + } + + isam.update(graph1); + + DiscreteValues assignment; + assignment[M(1)] = 1; + assignment[M(2)] = 1; + assignment[M(3)] = 1; + + VectorValues delta = isam.optimize(assignment); + + // The linearization point has the same value as the key index, + // e.g. X(1) = 1, X(2) = 2, + // but the factors specify X(k) = k-1, so delta should be -1. + VectorValues expected_delta; + expected_delta.insert(make_pair(X(1), -Vector1::Ones())); + expected_delta.insert(make_pair(X(2), -Vector1::Ones())); + expected_delta.insert(make_pair(X(3), -Vector1::Ones())); + expected_delta.insert(make_pair(X(4), -Vector1::Ones())); + + EXPECT(assert_equal(expected_delta, delta)); + + // Create ordering. 
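// (Only the continuous keys X(1)..X(K) go into the ordering, so the
// eliminatePartialSequential call below leaves the purely discrete factors in
// remainingFactorGraph.)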
+ Ordering ordering; + for (size_t k = 1; k <= s.K; k++) ordering += X(k); + + HybridBayesNet::shared_ptr hybridBayesNet; + HybridGaussianFactorGraph::shared_ptr remainingFactorGraph; + std::tie(hybridBayesNet, remainingFactorGraph) = + s.linearizedFactorGraph.eliminatePartialSequential(ordering); + + GaussianBayesNet gbn = hybridBayesNet->choose(assignment); + VectorValues expected = gbn.optimize(); + + EXPECT(assert_equal(expected, delta)); +} + +/* ****************************************************************************/ +// Test for optimizing a HybridBayesTree. +TEST(HybridBayesTree, Optimize) { + Switching s(4); + + HybridGaussianISAM isam; + HybridGaussianFactorGraph graph1; + + // Add the 3 hybrid factors, x1-x2, x2-x3, x3-x4 + for (size_t i = 1; i < 4; i++) { + graph1.push_back(s.linearizedFactorGraph.at(i)); + } + + // Add the Gaussian factors, 1 prior on X(1), + // 3 measurements on X(2), X(3), X(4) + graph1.push_back(s.linearizedFactorGraph.at(0)); + for (size_t i = 4; i <= 6; i++) { + graph1.push_back(s.linearizedFactorGraph.at(i)); + } + + // Add the discrete factors + for (size_t i = 7; i <= 9; i++) { + graph1.push_back(s.linearizedFactorGraph.at(i)); + } + + isam.update(graph1); + + HybridValues delta = isam.optimize(); + + // Create ordering. + Ordering ordering; + for (size_t k = 1; k <= s.K; k++) ordering += X(k); + + HybridBayesNet::shared_ptr hybridBayesNet; + HybridGaussianFactorGraph::shared_ptr remainingFactorGraph; + std::tie(hybridBayesNet, remainingFactorGraph) = + s.linearizedFactorGraph.eliminatePartialSequential(ordering); + + DiscreteFactorGraph dfg; + for (auto&& f : *remainingFactorGraph) { + auto factor = dynamic_pointer_cast(f); + dfg.push_back( + boost::dynamic_pointer_cast(factor->inner())); + } + + DiscreteValues expectedMPE = dfg.optimize(); + VectorValues expectedValues = hybridBayesNet->optimize(expectedMPE); + + EXPECT(assert_equal(expectedMPE, delta.discrete())); + EXPECT(assert_equal(expectedValues, delta.continuous())); +} + +/* ****************************************************************************/ +// Test HybridBayesTree serialization. +TEST(HybridBayesTree, Serialization) { + Switching s(4); + Ordering ordering = s.linearizedFactorGraph.getHybridOrdering(); + HybridBayesTree hbt = + *(s.linearizedFactorGraph.eliminateMultifrontal(ordering)); + + using namespace gtsam::serializationTestHelpers; + EXPECT(equalsObj(hbt)); + EXPECT(equalsXML(hbt)); + EXPECT(equalsBinary(hbt)); +} + +/* ************************************************************************* */ +int main() { + TestResult tr; + return TestRegistry::runAllTests(tr); +} +/* ************************************************************************* */ diff --git a/gtsam/hybrid/tests/testHybridFactorGraph.cpp b/gtsam/hybrid/tests/testHybridFactorGraph.cpp new file mode 100644 index 000000000..f5b4ec0b1 --- /dev/null +++ b/gtsam/hybrid/tests/testHybridFactorGraph.cpp @@ -0,0 +1,45 @@ +/* ---------------------------------------------------------------------------- + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. 
(see THANKS for the full author list) + * See LICENSE for the license information + * -------------------------------------------------------------------------- */ + +/** + * @file testHybridFactorGraph.cpp + * @brief Unit tests for HybridFactorGraph + * @author Varun Agrawal + * @date May 2021 + */ + +#include +#include +#include +#include +#include +#include +#include + +using namespace std; +using namespace gtsam; +using noiseModel::Isotropic; +using symbol_shorthand::L; +using symbol_shorthand::M; +using symbol_shorthand::X; + +/* **************************************************************************** + * Test that any linearizedFactorGraph gaussian factors are appended to the + * existing gaussian factor graph in the hybrid factor graph. + */ +TEST(HybridFactorGraph, Constructor) { + // Initialize the hybrid factor graph + HybridFactorGraph fg; +} + +/* ************************************************************************* */ +int main() { + TestResult tr; + return TestRegistry::runAllTests(tr); +} +/* ************************************************************************* */ diff --git a/gtsam/hybrid/tests/testHybridIncremental.cpp b/gtsam/hybrid/tests/testHybridIncremental.cpp new file mode 100644 index 000000000..4449aba0b --- /dev/null +++ b/gtsam/hybrid/tests/testHybridIncremental.cpp @@ -0,0 +1,563 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file testHybridIncremental.cpp + * @brief Unit tests for incremental inference + * @author Fan Jiang, Varun Agrawal, Frank Dellaert + * @date Jan 2021 + */ + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include + +#include "Switching.h" + +// Include for test suite +#include + +using namespace std; +using namespace gtsam; +using noiseModel::Isotropic; +using symbol_shorthand::L; +using symbol_shorthand::M; +using symbol_shorthand::W; +using symbol_shorthand::X; +using symbol_shorthand::Y; +using symbol_shorthand::Z; + +/* ****************************************************************************/ +// Test if we can perform elimination incrementally. 
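+// The two update() calls below mimic factors arriving in separate batches.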
+TEST(HybridGaussianElimination, IncrementalElimination) { + Switching switching(3); + HybridGaussianISAM isam; + HybridGaussianFactorGraph graph1; + + // Create initial factor graph + // * * * + // | | | + // X1 -*- X2 -*- X3 + // \*-M1-*/ + graph1.push_back(switching.linearizedFactorGraph.at(0)); // P(X1) + graph1.push_back(switching.linearizedFactorGraph.at(1)); // P(X1, X2 | M1) + graph1.push_back(switching.linearizedFactorGraph.at(2)); // P(X2, X3 | M2) + graph1.push_back(switching.linearizedFactorGraph.at(5)); // P(M1) + + // Run update step + isam.update(graph1); + + // Check that after update we have 3 hybrid Bayes net nodes: + // P(X1 | X2, M1) and P(X2, X3 | M1, M2), P(M1, M2) + EXPECT_LONGS_EQUAL(3, isam.size()); + EXPECT(isam[X(1)]->conditional()->frontals() == KeyVector{X(1)}); + EXPECT(isam[X(1)]->conditional()->parents() == KeyVector({X(2), M(1)})); + EXPECT(isam[X(2)]->conditional()->frontals() == KeyVector({X(2), X(3)})); + EXPECT(isam[X(2)]->conditional()->parents() == KeyVector({M(1), M(2)})); + + /********************************************************/ + // New factor graph for incremental update. + HybridGaussianFactorGraph graph2; + + graph1.push_back(switching.linearizedFactorGraph.at(3)); // P(X2) + graph2.push_back(switching.linearizedFactorGraph.at(4)); // P(X3) + graph2.push_back(switching.linearizedFactorGraph.at(6)); // P(M1, M2) + + isam.update(graph2); + + // Check that after the second update we have + // 1 additional hybrid Bayes net node: + // P(X2, X3 | M1, M2) + EXPECT_LONGS_EQUAL(3, isam.size()); + EXPECT(isam[X(3)]->conditional()->frontals() == KeyVector({X(2), X(3)})); + EXPECT(isam[X(3)]->conditional()->parents() == KeyVector({M(1), M(2)})); +} + +/* ****************************************************************************/ +// Test if we can incrementally do the inference +TEST(HybridGaussianElimination, IncrementalInference) { + Switching switching(3); + HybridGaussianISAM isam; + HybridGaussianFactorGraph graph1; + + // Create initial factor graph + // * * * + // | | | + // X1 -*- X2 -*- X3 + // | | + // *-M1 - * - M2 + graph1.push_back(switching.linearizedFactorGraph.at(0)); // P(X1) + graph1.push_back(switching.linearizedFactorGraph.at(1)); // P(X1, X2 | M1) + graph1.push_back(switching.linearizedFactorGraph.at(3)); // P(X2) + graph1.push_back(switching.linearizedFactorGraph.at(5)); // P(M1) + + // Run update step + isam.update(graph1); + + auto discreteConditional_m1 = + isam[M(1)]->conditional()->asDiscreteConditional(); + EXPECT(discreteConditional_m1->keys() == KeyVector({M(1)})); + + /********************************************************/ + // New factor graph for incremental update. + HybridGaussianFactorGraph graph2; + + graph2.push_back(switching.linearizedFactorGraph.at(2)); // P(X2, X3 | M2) + graph2.push_back(switching.linearizedFactorGraph.at(4)); // P(X3) + graph2.push_back(switching.linearizedFactorGraph.at(6)); // P(M1, M2) + + isam.update(graph2); + + /********************************************************/ + // Run batch elimination so we can compare results. 
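+  // Partial multifrontal elimination over X(1), X(2), X(3) yields a
+  // HybridBayesTree on the continuous variables and leaves a remaining
+  // factor graph over the modes M(1), M(2).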
+ Ordering ordering; + ordering += X(1); + ordering += X(2); + ordering += X(3); + + // Now we calculate the actual factors using full elimination + HybridBayesTree::shared_ptr expectedHybridBayesTree; + HybridGaussianFactorGraph::shared_ptr expectedRemainingGraph; + std::tie(expectedHybridBayesTree, expectedRemainingGraph) = + switching.linearizedFactorGraph.eliminatePartialMultifrontal(ordering); + + // The densities on X(1) should be the same + auto x1_conditional = + dynamic_pointer_cast(isam[X(1)]->conditional()->inner()); + auto actual_x1_conditional = dynamic_pointer_cast( + (*expectedHybridBayesTree)[X(1)]->conditional()->inner()); + EXPECT(assert_equal(*x1_conditional, *actual_x1_conditional)); + + // The densities on X(2) should be the same + auto x2_conditional = + dynamic_pointer_cast(isam[X(2)]->conditional()->inner()); + auto actual_x2_conditional = dynamic_pointer_cast( + (*expectedHybridBayesTree)[X(2)]->conditional()->inner()); + EXPECT(assert_equal(*x2_conditional, *actual_x2_conditional)); + + // The densities on X(3) should be the same + auto x3_conditional = + dynamic_pointer_cast(isam[X(3)]->conditional()->inner()); + auto actual_x3_conditional = dynamic_pointer_cast( + (*expectedHybridBayesTree)[X(2)]->conditional()->inner()); + EXPECT(assert_equal(*x3_conditional, *actual_x3_conditional)); + + // We only perform manual continuous elimination for 0,0. + // The other discrete probabilities on M(2) are calculated the same way + Ordering discrete_ordering; + discrete_ordering += M(1); + discrete_ordering += M(2); + HybridBayesTree::shared_ptr discreteBayesTree = + expectedRemainingGraph->eliminateMultifrontal(discrete_ordering); + + DiscreteValues m00; + m00[M(1)] = 0, m00[M(2)] = 0; + DiscreteConditional decisionTree = + *(*discreteBayesTree)[M(2)]->conditional()->asDiscreteConditional(); + double m00_prob = decisionTree(m00); + + auto discreteConditional = isam[M(2)]->conditional()->asDiscreteConditional(); + + // Test if the probability values are as expected with regression tests. + DiscreteValues assignment; + EXPECT(assert_equal(m00_prob, 0.0619233, 1e-5)); + assignment[M(1)] = 0; + assignment[M(2)] = 0; + EXPECT(assert_equal(m00_prob, (*discreteConditional)(assignment), 1e-5)); + assignment[M(1)] = 1; + assignment[M(2)] = 0; + EXPECT(assert_equal(0.183743, (*discreteConditional)(assignment), 1e-5)); + assignment[M(1)] = 0; + assignment[M(2)] = 1; + EXPECT(assert_equal(0.204159, (*discreteConditional)(assignment), 1e-5)); + assignment[M(1)] = 1; + assignment[M(2)] = 1; + EXPECT(assert_equal(0.2, (*discreteConditional)(assignment), 1e-5)); + + // Check if the clique conditional generated from incremental elimination + // matches that of batch elimination. 
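+  // The batch version eliminates the leftover discrete graph in full; the
+  // conditionals on M(2) obtained by either route should then agree.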
+ auto expectedChordal = expectedRemainingGraph->eliminateMultifrontal(); + auto expectedConditional = dynamic_pointer_cast( + (*expectedChordal)[M(2)]->conditional()->inner()); + auto actualConditional = dynamic_pointer_cast( + isam[M(2)]->conditional()->inner()); + EXPECT(assert_equal(*actualConditional, *expectedConditional, 1e-6)); +} + +/* ****************************************************************************/ +// Test if we can approximately do the inference +TEST(HybridGaussianElimination, Approx_inference) { + Switching switching(4); + HybridGaussianISAM incrementalHybrid; + HybridGaussianFactorGraph graph1; + + // Add the 3 hybrid factors, x1-x2, x2-x3, x3-x4 + for (size_t i = 1; i < 4; i++) { + graph1.push_back(switching.linearizedFactorGraph.at(i)); + } + + // Add the Gaussian factors, 1 prior on X(1), + // 3 measurements on X(2), X(3), X(4) + graph1.push_back(switching.linearizedFactorGraph.at(0)); + for (size_t i = 4; i <= 7; i++) { + graph1.push_back(switching.linearizedFactorGraph.at(i)); + } + + // Create ordering. + Ordering ordering; + for (size_t j = 1; j <= 4; j++) { + ordering += X(j); + } + + // Now we calculate the actual factors using full elimination + HybridBayesTree::shared_ptr unprunedHybridBayesTree; + HybridGaussianFactorGraph::shared_ptr unprunedRemainingGraph; + std::tie(unprunedHybridBayesTree, unprunedRemainingGraph) = + switching.linearizedFactorGraph.eliminatePartialMultifrontal(ordering); + + size_t maxNrLeaves = 5; + incrementalHybrid.update(graph1); + + incrementalHybrid.prune(M(3), maxNrLeaves); + + /* + unpruned factor is: + Choice(m3) + 0 Choice(m2) + 0 0 Choice(m1) + 0 0 0 Leaf 0.11267528 + 0 0 1 Leaf 0.18576102 + 0 1 Choice(m1) + 0 1 0 Leaf 0.18754662 + 0 1 1 Leaf 0.30623871 + 1 Choice(m2) + 1 0 Choice(m1) + 1 0 0 Leaf 0.18576102 + 1 0 1 Leaf 0.30622428 + 1 1 Choice(m1) + 1 1 0 Leaf 0.30623871 + 1 1 1 Leaf 0.5 + + pruned factors is: + Choice(m3) + 0 Choice(m2) + 0 0 Leaf 0 + 0 1 Choice(m1) + 0 1 0 Leaf 0.18754662 + 0 1 1 Leaf 0.30623871 + 1 Choice(m2) + 1 0 Choice(m1) + 1 0 0 Leaf 0 + 1 0 1 Leaf 0.30622428 + 1 1 Choice(m1) + 1 1 0 Leaf 0.30623871 + 1 1 1 Leaf 0.5 + */ + + auto discreteConditional_m1 = *dynamic_pointer_cast( + incrementalHybrid[M(1)]->conditional()->inner()); + EXPECT(discreteConditional_m1.keys() == KeyVector({M(1), M(2), M(3)})); + + // Get the number of elements which are greater than 0. + auto count = [](const double &value, int count) { + return value > 0 ? count + 1 : count; + }; + // Check that the number of leaves after pruning is 5. + EXPECT_LONGS_EQUAL(5, discreteConditional_m1.fold(count, 0)); + + // Check that the hybrid nodes of the bayes net match those of the pre-pruning + // bayes net, at the same positions. 
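+  // Pruned leaves should come back empty, while the surviving leaves should
+  // be identical to their unpruned counterparts.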
+ auto &unprunedLastDensity = *dynamic_pointer_cast( + unprunedHybridBayesTree->clique(X(4))->conditional()->inner()); + auto &lastDensity = *dynamic_pointer_cast( + incrementalHybrid[X(4)]->conditional()->inner()); + + std::vector> assignments = + discreteConditional_m1.enumerate(); + // Loop over all assignments and check the pruned components + for (auto &&av : assignments) { + const DiscreteValues &assignment = av.first; + const double value = av.second; + + if (value == 0.0) { + EXPECT(lastDensity(assignment) == nullptr); + } else { + CHECK(lastDensity(assignment)); + EXPECT(assert_equal(*unprunedLastDensity(assignment), + *lastDensity(assignment))); + } + } +} + +/* ****************************************************************************/ +// Test approximate inference with an additional pruning step. +TEST(HybridGaussianElimination, Incremental_approximate) { + Switching switching(5); + HybridGaussianISAM incrementalHybrid; + HybridGaussianFactorGraph graph1; + + /***** Run Round 1 *****/ + // Add the 3 hybrid factors, x1-x2, x2-x3, x3-x4 + for (size_t i = 1; i < 4; i++) { + graph1.push_back(switching.linearizedFactorGraph.at(i)); + } + + // Add the Gaussian factors, 1 prior on X(1), + // 3 measurements on X(2), X(3), X(4) + graph1.push_back(switching.linearizedFactorGraph.at(0)); + for (size_t i = 5; i <= 7; i++) { + graph1.push_back(switching.linearizedFactorGraph.at(i)); + } + + // Run update with pruning + size_t maxComponents = 5; + incrementalHybrid.update(graph1); + incrementalHybrid.prune(M(3), maxComponents); + + // Check if we have a bayes tree with 4 hybrid nodes, + // each with 2, 4, 8, and 5 (pruned) leaves respetively. + EXPECT_LONGS_EQUAL(4, incrementalHybrid.size()); + EXPECT_LONGS_EQUAL( + 2, incrementalHybrid[X(1)]->conditional()->asMixture()->nrComponents()); + EXPECT_LONGS_EQUAL( + 4, incrementalHybrid[X(2)]->conditional()->asMixture()->nrComponents()); + EXPECT_LONGS_EQUAL( + 5, incrementalHybrid[X(3)]->conditional()->asMixture()->nrComponents()); + EXPECT_LONGS_EQUAL( + 5, incrementalHybrid[X(4)]->conditional()->asMixture()->nrComponents()); + + /***** Run Round 2 *****/ + HybridGaussianFactorGraph graph2; + graph2.push_back(switching.linearizedFactorGraph.at(4)); + graph2.push_back(switching.linearizedFactorGraph.at(8)); + + // Run update with pruning a second time. + incrementalHybrid.update(graph2); + incrementalHybrid.prune(M(4), maxComponents); + + // Check if we have a bayes tree with pruned hybrid nodes, + // with 5 (pruned) leaves. + CHECK_EQUAL(5, incrementalHybrid.size()); + EXPECT_LONGS_EQUAL( + 5, incrementalHybrid[X(4)]->conditional()->asMixture()->nrComponents()); + EXPECT_LONGS_EQUAL( + 5, incrementalHybrid[X(5)]->conditional()->asMixture()->nrComponents()); +} + +/* ************************************************************************/ +// A GTSAM-only test for running inference on a single-legged robot. +// The leg links are represented by the chain X-Y-Z-W, where X is the base and +// W is the foot. +// We use BetweenFactor as constraints between each of the poses. +TEST(HybridGaussianISAM, NonTrivial) { + /*************** Run Round 1 ***************/ + HybridNonlinearFactorGraph fg; + + // Add a prior on pose x1 at the origin. 
+ // A prior factor consists of a mean and + // a noise model (covariance matrix) + Pose2 prior(0.0, 0.0, 0.0); // prior mean is at origin + auto priorNoise = noiseModel::Diagonal::Sigmas( + Vector3(0.3, 0.3, 0.1)); // 30cm std on x,y, 0.1 rad on theta + fg.emplace_nonlinear>(X(0), prior, priorNoise); + + // create a noise model for the landmark measurements + auto poseNoise = noiseModel::Isotropic::Sigma(3, 0.1); + + // We model a robot's single leg as X - Y - Z - W + // where X is the base link and W is the foot link. + + // Add connecting poses similar to PoseFactors in GTD + fg.emplace_nonlinear>(X(0), Y(0), Pose2(0, 1.0, 0), + poseNoise); + fg.emplace_nonlinear>(Y(0), Z(0), Pose2(0, 1.0, 0), + poseNoise); + fg.emplace_nonlinear>(Z(0), W(0), Pose2(0, 1.0, 0), + poseNoise); + + // Create initial estimate + Values initial; + initial.insert(X(0), Pose2(0.0, 0.0, 0.0)); + initial.insert(Y(0), Pose2(0.0, 1.0, 0.0)); + initial.insert(Z(0), Pose2(0.0, 2.0, 0.0)); + initial.insert(W(0), Pose2(0.0, 3.0, 0.0)); + + HybridGaussianFactorGraph gfg = fg.linearize(initial); + fg = HybridNonlinearFactorGraph(); + + HybridGaussianISAM inc; + + // Update without pruning + // The result is a HybridBayesNet with no discrete variables + // (equivalent to a GaussianBayesNet). + // Factorization is: + // `P(X | measurements) = P(W0|Z0) P(Z0|Y0) P(Y0|X0) P(X0)` + inc.update(gfg); + + /*************** Run Round 2 ***************/ + using PlanarMotionModel = BetweenFactor; + + // Add odometry factor with discrete modes. + Pose2 odometry(1.0, 0.0, 0.0); + KeyVector contKeys = {W(0), W(1)}; + auto noise_model = noiseModel::Isotropic::Sigma(3, 1.0); + auto still = boost::make_shared(W(0), W(1), Pose2(0, 0, 0), + noise_model), + moving = boost::make_shared(W(0), W(1), odometry, + noise_model); + std::vector components = {moving, still}; + auto mixtureFactor = boost::make_shared( + contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components); + fg.push_back(mixtureFactor); + + // Add equivalent of ImuFactor + fg.emplace_nonlinear>(X(0), X(1), Pose2(1.0, 0.0, 0), + poseNoise); + // PoseFactors-like at k=1 + fg.emplace_nonlinear>(X(1), Y(1), Pose2(0, 1, 0), + poseNoise); + fg.emplace_nonlinear>(Y(1), Z(1), Pose2(0, 1, 0), + poseNoise); + fg.emplace_nonlinear>(Z(1), W(1), Pose2(-1, 1, 0), + poseNoise); + + initial.insert(X(1), Pose2(1.0, 0.0, 0.0)); + initial.insert(Y(1), Pose2(1.0, 1.0, 0.0)); + initial.insert(Z(1), Pose2(1.0, 2.0, 0.0)); + // The leg link did not move so we set the expected pose accordingly. + initial.insert(W(1), Pose2(0.0, 3.0, 0.0)); + + gfg = fg.linearize(initial); + fg = HybridNonlinearFactorGraph(); + + // Update without pruning + // The result is a HybridBayesNet with 1 discrete variable M(1). + // P(X | measurements) = P(W0|Z0, W1, M1) P(Z0|Y0, W1, M1) P(Y0|X0, W1, M1) + // P(X0 | X1, W1, M1) P(W1|Z1, X1, M1) P(Z1|Y1, X1, M1) + // P(Y1 | X1, M1)P(X1 | M1)P(M1) + // The MHS tree is a 1 level tree for time indices (1,) with 2 leaves. + inc.update(gfg); + + /*************** Run Round 3 ***************/ + // Add odometry factor with discrete modes. 
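+  // Same still/moving hypothesis as before, now on the segment W(1)-W(2)
+  // with its own mode M(2).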
+ contKeys = {W(1), W(2)}; + still = boost::make_shared(W(1), W(2), Pose2(0, 0, 0), + noise_model); + moving = + boost::make_shared(W(1), W(2), odometry, noise_model); + components = {moving, still}; + mixtureFactor = boost::make_shared( + contKeys, DiscreteKeys{gtsam::DiscreteKey(M(2), 2)}, components); + fg.push_back(mixtureFactor); + + // Add equivalent of ImuFactor + fg.emplace_nonlinear>(X(1), X(2), Pose2(1.0, 0.0, 0), + poseNoise); + // PoseFactors-like at k=1 + fg.emplace_nonlinear>(X(2), Y(2), Pose2(0, 1, 0), + poseNoise); + fg.emplace_nonlinear>(Y(2), Z(2), Pose2(0, 1, 0), + poseNoise); + fg.emplace_nonlinear>(Z(2), W(2), Pose2(-2, 1, 0), + poseNoise); + + initial.insert(X(2), Pose2(2.0, 0.0, 0.0)); + initial.insert(Y(2), Pose2(2.0, 1.0, 0.0)); + initial.insert(Z(2), Pose2(2.0, 2.0, 0.0)); + initial.insert(W(2), Pose2(0.0, 3.0, 0.0)); + + gfg = fg.linearize(initial); + fg = HybridNonlinearFactorGraph(); + + // Now we prune! + // P(X | measurements) = P(W0|Z0, W1, M1) P(Z0|Y0, W1, M1) P(Y0|X0, W1, M1) + // P(X0 | X1, W1, M1) P(W1|W2, Z1, X1, M1, M2) + // P(Z1| W2, Y1, X1, M1, M2) P(Y1 | W2, X1, M1, M2) + // P(X1 | W2, X2, M1, M2) P(W2|Z2, X2, M1, M2) + // P(Z2|Y2, X2, M1, M2) P(Y2 | X2, M1, M2) + // P(X2 | M1, M2) P(M1, M2) + // The MHS at this point should be a 2 level tree on (1, 2). + // 1 has 2 choices, and 2 has 4 choices. + inc.update(gfg); + inc.prune(M(2), 2); + + /*************** Run Round 4 ***************/ + // Add odometry factor with discrete modes. + contKeys = {W(2), W(3)}; + still = boost::make_shared(W(2), W(3), Pose2(0, 0, 0), + noise_model); + moving = + boost::make_shared(W(2), W(3), odometry, noise_model); + components = {moving, still}; + mixtureFactor = boost::make_shared( + contKeys, DiscreteKeys{gtsam::DiscreteKey(M(3), 2)}, components); + fg.push_back(mixtureFactor); + + // Add equivalent of ImuFactor + fg.emplace_nonlinear>(X(2), X(3), Pose2(1.0, 0.0, 0), + poseNoise); + // PoseFactors-like at k=3 + fg.emplace_nonlinear>(X(3), Y(3), Pose2(0, 1, 0), + poseNoise); + fg.emplace_nonlinear>(Y(3), Z(3), Pose2(0, 1, 0), + poseNoise); + fg.emplace_nonlinear>(Z(3), W(3), Pose2(-3, 1, 0), + poseNoise); + + initial.insert(X(3), Pose2(3.0, 0.0, 0.0)); + initial.insert(Y(3), Pose2(3.0, 1.0, 0.0)); + initial.insert(Z(3), Pose2(3.0, 2.0, 0.0)); + initial.insert(W(3), Pose2(0.0, 3.0, 0.0)); + + gfg = fg.linearize(initial); + fg = HybridNonlinearFactorGraph(); + + // Keep pruning! + inc.update(gfg); + inc.prune(M(3), 3); + + // The final discrete graph should not be empty since we have eliminated + // all continuous variables. + auto discreteTree = inc[M(3)]->conditional()->asDiscreteConditional(); + EXPECT_LONGS_EQUAL(3, discreteTree->size()); + + // Test if the optimal discrete mode assignment is (1, 1, 1). + DiscreteFactorGraph discreteGraph; + discreteGraph.push_back(discreteTree); + DiscreteValues optimal_assignment = discreteGraph.optimize(); + + DiscreteValues expected_assignment; + expected_assignment[M(1)] = 1; + expected_assignment[M(2)] = 1; + expected_assignment[M(3)] = 1; + + EXPECT(assert_equal(expected_assignment, optimal_assignment)); + + // Test if pruning worked correctly by checking that we only have 3 leaves in + // the last node. 
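+  // X(3) sits in the last clique checked here; its mixture should keep one
+  // Gaussian component per surviving discrete assignment (3 after pruning).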
+ auto lastConditional = inc[X(3)]->conditional()->asMixture(); + EXPECT_LONGS_EQUAL(3, lastConditional->nrComponents()); +} + +/* ************************************************************************* */ +int main() { + TestResult tr; + return TestRegistry::runAllTests(tr); +} diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp new file mode 100644 index 000000000..6b689b4e3 --- /dev/null +++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -0,0 +1,737 @@ +/* ---------------------------------------------------------------------------- + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + * See LICENSE for the license information + * -------------------------------------------------------------------------- */ + +/** + * @file testHybridNonlinearFactorGraph.cpp + * @brief Unit tests for HybridNonlinearFactorGraph + * @author Varun Agrawal + * @author Fan Jiang + * @author Frank Dellaert + * @date December 2021 + */ + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include + +#include "Switching.h" + +// Include for test suite +#include + +using namespace std; +using namespace gtsam; +using noiseModel::Isotropic; +using symbol_shorthand::L; +using symbol_shorthand::M; +using symbol_shorthand::X; + +/* **************************************************************************** + * Test that any linearizedFactorGraph gaussian factors are appended to the + * existing gaussian factor graph in the hybrid factor graph. + */ +TEST(HybridFactorGraph, GaussianFactorGraph) { + HybridNonlinearFactorGraph fg; + + // Add a simple prior factor to the nonlinear factor graph + fg.emplace_nonlinear>(X(0), 0, Isotropic::Sigma(1, 0.1)); + + // Linearization point + Values linearizationPoint; + linearizationPoint.insert(X(0), 0); + + HybridGaussianFactorGraph ghfg = fg.linearize(linearizationPoint); + + // Add a factor to the GaussianFactorGraph + ghfg.add(JacobianFactor(X(0), I_1x1, Vector1(5))); + + EXPECT_LONGS_EQUAL(2, ghfg.size()); +} + +/*************************************************************************** + * Test equality for Hybrid Nonlinear Factor Graph. + */ +TEST(HybridNonlinearFactorGraph, Equals) { + HybridNonlinearFactorGraph graph1; + HybridNonlinearFactorGraph graph2; + + // Test empty factor graphs + EXPECT(assert_equal(graph1, graph2)); + + auto f0 = boost::make_shared>( + 1, Pose2(), noiseModel::Isotropic::Sigma(3, 0.001)); + graph1.push_back(f0); + graph2.push_back(f0); + + auto f1 = boost::make_shared>( + 1, 2, Pose2(), noiseModel::Isotropic::Sigma(3, 0.1)); + graph1.push_back(f1); + graph2.push_back(f1); + + // Test non-empty graphs + EXPECT(assert_equal(graph1, graph2)); +} + +/*************************************************************************** + * Test that the resize method works correctly for a HybridNonlinearFactorGraph. 
+ */ +TEST(HybridNonlinearFactorGraph, Resize) { + HybridNonlinearFactorGraph fg; + auto nonlinearFactor = boost::make_shared>(); + fg.push_back(nonlinearFactor); + + auto discreteFactor = boost::make_shared(); + fg.push_back(discreteFactor); + + auto dcFactor = boost::make_shared(); + fg.push_back(dcFactor); + + EXPECT_LONGS_EQUAL(fg.size(), 3); + + fg.resize(0); + EXPECT_LONGS_EQUAL(fg.size(), 0); +} + +/*************************************************************************** + * Test that the resize method works correctly for a + * HybridGaussianFactorGraph. + */ +TEST(HybridGaussianFactorGraph, Resize) { + HybridNonlinearFactorGraph nhfg; + auto nonlinearFactor = boost::make_shared>( + X(0), X(1), 0.0, Isotropic::Sigma(1, 0.1)); + nhfg.push_back(nonlinearFactor); + auto discreteFactor = boost::make_shared(); + nhfg.push_back(discreteFactor); + + KeyVector contKeys = {X(0), X(1)}; + auto noise_model = noiseModel::Isotropic::Sigma(1, 1.0); + auto still = boost::make_shared(X(0), X(1), 0.0, noise_model), + moving = boost::make_shared(X(0), X(1), 1.0, noise_model); + + std::vector components = {still, moving}; + auto dcFactor = boost::make_shared( + contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components); + nhfg.push_back(dcFactor); + + Values linearizationPoint; + linearizationPoint.insert(X(0), 0); + linearizationPoint.insert(X(1), 1); + + // Generate `HybridGaussianFactorGraph` by linearizing + HybridGaussianFactorGraph gfg = nhfg.linearize(linearizationPoint); + + EXPECT_LONGS_EQUAL(gfg.size(), 3); + + gfg.resize(0); + EXPECT_LONGS_EQUAL(gfg.size(), 0); +} + +/*************************************************************************** + * Test that the MixtureFactor reports correctly if the number of continuous + * keys provided do not match the keys in the factors. + */ +TEST(HybridGaussianFactorGraph, MixtureFactor) { + auto nonlinearFactor = boost::make_shared>( + X(0), X(1), 0.0, Isotropic::Sigma(1, 0.1)); + auto discreteFactor = boost::make_shared(); + + auto noise_model = noiseModel::Isotropic::Sigma(1, 1.0); + auto still = boost::make_shared(X(0), X(1), 0.0, noise_model), + moving = boost::make_shared(X(0), X(1), 1.0, noise_model); + + std::vector components = {still, moving}; + + // Check for exception when number of continuous keys are under-specified. + KeyVector contKeys = {X(0)}; + THROWS_EXCEPTION(boost::make_shared( + contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components)); + + // Check for exception when number of continuous keys are too many. + contKeys = {X(0), X(1), X(2)}; + THROWS_EXCEPTION(boost::make_shared( + contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components)); +} + +/***************************************************************************** + * Test push_back on HFG makes the correct distinction. 
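+ * Nonlinear, discrete, and hybrid factors should all be accepted through the
+ * same interface, on both the nonlinear and the Gaussian hybrid graphs.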
+ */ +TEST(HybridFactorGraph, PushBack) { + HybridNonlinearFactorGraph fg; + + auto nonlinearFactor = boost::make_shared>(); + fg.push_back(nonlinearFactor); + + EXPECT_LONGS_EQUAL(fg.size(), 1); + + fg = HybridNonlinearFactorGraph(); + + auto discreteFactor = boost::make_shared(); + fg.push_back(discreteFactor); + + EXPECT_LONGS_EQUAL(fg.size(), 1); + + fg = HybridNonlinearFactorGraph(); + + auto dcFactor = boost::make_shared(); + fg.push_back(dcFactor); + + EXPECT_LONGS_EQUAL(fg.size(), 1); + + // Now do the same with HybridGaussianFactorGraph + HybridGaussianFactorGraph ghfg; + + auto gaussianFactor = boost::make_shared(); + ghfg.push_back(gaussianFactor); + + EXPECT_LONGS_EQUAL(ghfg.size(), 1); + + ghfg = HybridGaussianFactorGraph(); + ghfg.push_back(discreteFactor); + + EXPECT_LONGS_EQUAL(ghfg.size(), 1); + + ghfg = HybridGaussianFactorGraph(); + ghfg.push_back(dcFactor); + + HybridGaussianFactorGraph hgfg2; + hgfg2.push_back(ghfg.begin(), ghfg.end()); + + EXPECT_LONGS_EQUAL(ghfg.size(), 1); + + HybridNonlinearFactorGraph hnfg; + NonlinearFactorGraph factors; + auto noise = noiseModel::Isotropic::Sigma(3, 1.0); + factors.emplace_shared>(0, Pose2(0, 0, 0), noise); + factors.emplace_shared>(1, Pose2(1, 0, 0), noise); + factors.emplace_shared>(2, Pose2(2, 0, 0), noise); + // TODO(Varun) This does not currently work. It should work once HybridFactor + // becomes a base class of NonlinearFactor. + // hnfg.push_back(factors.begin(), factors.end()); + + // EXPECT_LONGS_EQUAL(3, hnfg.size()); +} + +/**************************************************************************** + * Test construction of switching-like hybrid factor graph. + */ +TEST(HybridFactorGraph, Switching) { + Switching self(3); + + EXPECT_LONGS_EQUAL(7, self.nonlinearFactorGraph.size()); + EXPECT_LONGS_EQUAL(7, self.linearizedFactorGraph.size()); +} + +/**************************************************************************** + * Test linearization on a switching-like hybrid factor graph. + */ +TEST(HybridFactorGraph, Linearization) { + Switching self(3); + + // Linearize here: + HybridGaussianFactorGraph actualLinearized = + self.nonlinearFactorGraph.linearize(self.linearizationPoint); + + EXPECT_LONGS_EQUAL(7, actualLinearized.size()); +} + +/**************************************************************************** + * Test elimination tree construction + */ +TEST(HybridFactorGraph, EliminationTree) { + Switching self(3); + + // Create ordering. + Ordering ordering; + for (size_t k = 1; k <= self.K; k++) ordering += X(k); + + // Create elimination tree. + HybridEliminationTree etree(self.linearizedFactorGraph, ordering); + EXPECT_LONGS_EQUAL(1, etree.roots().size()) +} + +/**************************************************************************** + *Test elimination function by eliminating x1 in *-x1-*-x2 graph. + */ +TEST(GaussianElimination, Eliminate_x1) { + Switching self(3); + + // Gather factors on x1, has a simple Gaussian and a mixture factor. 
+ HybridGaussianFactorGraph factors; + // Add gaussian prior + factors.push_back(self.linearizedFactorGraph[0]); + // Add first hybrid factor + factors.push_back(self.linearizedFactorGraph[1]); + + // Eliminate x1 + Ordering ordering; + ordering += X(1); + + auto result = EliminateHybrid(factors, ordering); + CHECK(result.first); + EXPECT_LONGS_EQUAL(1, result.first->nrFrontals()); + CHECK(result.second); + // Has two keys, x2 and m1 + EXPECT_LONGS_EQUAL(2, result.second->size()); +} + +/**************************************************************************** + * Test elimination function by eliminating x2 in x1-*-x2-*-x3 chain. + * m1/ \m2 + */ +TEST(HybridsGaussianElimination, Eliminate_x2) { + Switching self(3); + + // Gather factors on x2, will be two mixture factors (with x1 and x3, resp.). + HybridGaussianFactorGraph factors; + factors.push_back(self.linearizedFactorGraph[1]); // involves m1 + factors.push_back(self.linearizedFactorGraph[2]); // involves m2 + + // Eliminate x2 + Ordering ordering; + ordering += X(2); + + std::pair result = + EliminateHybrid(factors, ordering); + CHECK(result.first); + EXPECT_LONGS_EQUAL(1, result.first->nrFrontals()); + CHECK(result.second); + // Note: separator keys should include m1, m2. + EXPECT_LONGS_EQUAL(4, result.second->size()); +} + +/**************************************************************************** + * Helper method to generate gaussian factor graphs with a specific mode. + */ +GaussianFactorGraph::shared_ptr batchGFG(double between, + Values linearizationPoint) { + NonlinearFactorGraph graph; + graph.addPrior(X(1), 0, Isotropic::Sigma(1, 0.1)); + + auto between_x1_x2 = boost::make_shared( + X(1), X(2), between, Isotropic::Sigma(1, 1.0)); + + graph.push_back(between_x1_x2); + + return graph.linearize(linearizationPoint); +} + +/**************************************************************************** + * Test elimination function by eliminating x1 and x2 in graph. + */ +TEST(HybridGaussianElimination, EliminateHybrid_2_Variable) { + Switching self(2, 1.0, 0.1); + + auto factors = self.linearizedFactorGraph; + + // Eliminate x1 + Ordering ordering; + ordering += X(1); + ordering += X(2); + + HybridConditional::shared_ptr hybridConditionalMixture; + HybridFactor::shared_ptr factorOnModes; + + std::tie(hybridConditionalMixture, factorOnModes) = + EliminateHybrid(factors, ordering); + + auto gaussianConditionalMixture = + dynamic_pointer_cast(hybridConditionalMixture->inner()); + + CHECK(gaussianConditionalMixture); + // Frontals = [x1, x2] + EXPECT_LONGS_EQUAL(2, gaussianConditionalMixture->nrFrontals()); + // 1 parent, which is the mode + EXPECT_LONGS_EQUAL(1, gaussianConditionalMixture->nrParents()); + + // This is now a HybridDiscreteFactor + auto hybridDiscreteFactor = + dynamic_pointer_cast(factorOnModes); + // Access the type-erased inner object and convert to DecisionTreeFactor + auto discreteFactor = + dynamic_pointer_cast(hybridDiscreteFactor->inner()); + CHECK(discreteFactor); + EXPECT_LONGS_EQUAL(1, discreteFactor->discreteKeys().size()); + EXPECT(discreteFactor->root_->isLeaf() == false); + + // TODO(Varun) Test emplace_discrete +} + +/**************************************************************************** + * Test partial elimination + */ +TEST(HybridFactorGraph, Partial_Elimination) { + Switching self(3); + + auto linearizedFactorGraph = self.linearizedFactorGraph; + + // Create ordering. + Ordering ordering; + for (size_t k = 1; k <= self.K; k++) ordering += X(k); + + // Eliminate partially. 
+ HybridBayesNet::shared_ptr hybridBayesNet; + HybridGaussianFactorGraph::shared_ptr remainingFactorGraph; + std::tie(hybridBayesNet, remainingFactorGraph) = + linearizedFactorGraph.eliminatePartialSequential(ordering); + + CHECK(hybridBayesNet); + EXPECT_LONGS_EQUAL(3, hybridBayesNet->size()); + EXPECT(hybridBayesNet->at(0)->frontals() == KeyVector{X(1)}); + EXPECT(hybridBayesNet->at(0)->parents() == KeyVector({X(2), M(1)})); + EXPECT(hybridBayesNet->at(1)->frontals() == KeyVector{X(2)}); + EXPECT(hybridBayesNet->at(1)->parents() == KeyVector({X(3), M(1), M(2)})); + EXPECT(hybridBayesNet->at(2)->frontals() == KeyVector{X(3)}); + EXPECT(hybridBayesNet->at(2)->parents() == KeyVector({M(1), M(2)})); + + CHECK(remainingFactorGraph); + EXPECT_LONGS_EQUAL(3, remainingFactorGraph->size()); + EXPECT(remainingFactorGraph->at(0)->keys() == KeyVector({M(1)})); + EXPECT(remainingFactorGraph->at(1)->keys() == KeyVector({M(2), M(1)})); + EXPECT(remainingFactorGraph->at(2)->keys() == KeyVector({M(1), M(2)})); +} + +/**************************************************************************** + * Test full elimination + */ +TEST(HybridFactorGraph, Full_Elimination) { + Switching self(3); + + auto linearizedFactorGraph = self.linearizedFactorGraph; + + // We first do a partial elimination + HybridBayesNet::shared_ptr hybridBayesNet_partial; + HybridGaussianFactorGraph::shared_ptr remainingFactorGraph_partial; + DiscreteBayesNet discreteBayesNet; + + { + // Create ordering. + Ordering ordering; + for (size_t k = 1; k <= self.K; k++) ordering += X(k); + + // Eliminate partially. + std::tie(hybridBayesNet_partial, remainingFactorGraph_partial) = + linearizedFactorGraph.eliminatePartialSequential(ordering); + + DiscreteFactorGraph discrete_fg; + // TODO(Varun) Make this a function of HybridGaussianFactorGraph? + for (HybridFactor::shared_ptr& factor : (*remainingFactorGraph_partial)) { + auto df = dynamic_pointer_cast(factor); + discrete_fg.push_back(df->inner()); + } + ordering.clear(); + for (size_t k = 1; k < self.K; k++) ordering += M(k); + discreteBayesNet = + *discrete_fg.eliminateSequential(ordering, EliminateForMPE); + } + + // Create ordering. + Ordering ordering; + for (size_t k = 1; k <= self.K; k++) ordering += X(k); + for (size_t k = 1; k < self.K; k++) ordering += M(k); + + // Eliminate partially. 
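+  // With both X(k) and M(k) in the ordering this is a full sequential
+  // elimination; its discrete tail is compared against discreteBayesNet above.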
+ HybridBayesNet::shared_ptr hybridBayesNet = + linearizedFactorGraph.eliminateSequential(ordering); + + CHECK(hybridBayesNet); + EXPECT_LONGS_EQUAL(5, hybridBayesNet->size()); + // p(x1 | x2, m1) + EXPECT(hybridBayesNet->at(0)->frontals() == KeyVector{X(1)}); + EXPECT(hybridBayesNet->at(0)->parents() == KeyVector({X(2), M(1)})); + // p(x2 | x3, m1, m2) + EXPECT(hybridBayesNet->at(1)->frontals() == KeyVector{X(2)}); + EXPECT(hybridBayesNet->at(1)->parents() == KeyVector({X(3), M(1), M(2)})); + // p(x3 | m1, m2) + EXPECT(hybridBayesNet->at(2)->frontals() == KeyVector{X(3)}); + EXPECT(hybridBayesNet->at(2)->parents() == KeyVector({M(1), M(2)})); + // P(m1 | m2) + EXPECT(hybridBayesNet->at(3)->frontals() == KeyVector{M(1)}); + EXPECT(hybridBayesNet->at(3)->parents() == KeyVector({M(2)})); + EXPECT( + dynamic_pointer_cast(hybridBayesNet->at(3)->inner()) + ->equals(*discreteBayesNet.at(0))); + // P(m2) + EXPECT(hybridBayesNet->at(4)->frontals() == KeyVector{M(2)}); + EXPECT_LONGS_EQUAL(0, hybridBayesNet->at(4)->nrParents()); + EXPECT( + dynamic_pointer_cast(hybridBayesNet->at(4)->inner()) + ->equals(*discreteBayesNet.at(1))); +} + +/**************************************************************************** + * Test printing + */ +TEST(HybridFactorGraph, Printing) { + Switching self(3); + + auto linearizedFactorGraph = self.linearizedFactorGraph; + + // Create ordering. + Ordering ordering; + for (size_t k = 1; k <= self.K; k++) ordering += X(k); + + // Eliminate partially. + HybridBayesNet::shared_ptr hybridBayesNet; + HybridGaussianFactorGraph::shared_ptr remainingFactorGraph; + std::tie(hybridBayesNet, remainingFactorGraph) = + linearizedFactorGraph.eliminatePartialSequential(ordering); + + string expected_hybridFactorGraph = R"( +size: 7 +factor 0: Continuous [x1] + + A[x1] = [ + 10 +] + b = [ -10 ] + No noise model +factor 1: Hybrid [x1 x2; m1]{ + Choice(m1) + 0 Leaf : + A[x1] = [ + -1 +] + A[x2] = [ + 1 +] + b = [ -1 ] + No noise model + + 1 Leaf : + A[x1] = [ + -1 +] + A[x2] = [ + 1 +] + b = [ -0 ] + No noise model + +} +factor 2: Hybrid [x2 x3; m2]{ + Choice(m2) + 0 Leaf : + A[x2] = [ + -1 +] + A[x3] = [ + 1 +] + b = [ -1 ] + No noise model + + 1 Leaf : + A[x2] = [ + -1 +] + A[x3] = [ + 1 +] + b = [ -0 ] + No noise model + +} +factor 3: Continuous [x2] + + A[x2] = [ + 10 +] + b = [ -10 ] + No noise model +factor 4: Continuous [x3] + + A[x3] = [ + 10 +] + b = [ -10 ] + No noise model +factor 5: Discrete [m1] + P( m1 ): + Leaf 0.5 + +factor 6: Discrete [m2 m1] + P( m2 | m1 ): + Choice(m2) + 0 Choice(m1) + 0 0 Leaf 0.33333333 + 0 1 Leaf 0.6 + 1 Choice(m1) + 1 0 Leaf 0.66666667 + 1 1 Leaf 0.4 + +)"; + EXPECT(assert_print_equal(expected_hybridFactorGraph, linearizedFactorGraph)); + + // Expected output for hybridBayesNet. 
+ string expected_hybridBayesNet = R"( +size: 3 +factor 0: Hybrid P( x1 | x2 m1) + Discrete Keys = (m1, 2), + Choice(m1) + 0 Leaf p(x1 | x2) + R = [ 10.0499 ] + S[x2] = [ -0.0995037 ] + d = [ -9.85087 ] + No noise model + + 1 Leaf p(x1 | x2) + R = [ 10.0499 ] + S[x2] = [ -0.0995037 ] + d = [ -9.95037 ] + No noise model + +factor 1: Hybrid P( x2 | x3 m1 m2) + Discrete Keys = (m1, 2), (m2, 2), + Choice(m2) + 0 Choice(m1) + 0 0 Leaf p(x2 | x3) + R = [ 10.099 ] + S[x3] = [ -0.0990196 ] + d = [ -9.99901 ] + No noise model + + 0 1 Leaf p(x2 | x3) + R = [ 10.099 ] + S[x3] = [ -0.0990196 ] + d = [ -9.90098 ] + No noise model + + 1 Choice(m1) + 1 0 Leaf p(x2 | x3) + R = [ 10.099 ] + S[x3] = [ -0.0990196 ] + d = [ -10.098 ] + No noise model + + 1 1 Leaf p(x2 | x3) + R = [ 10.099 ] + S[x3] = [ -0.0990196 ] + d = [ -10 ] + No noise model + +factor 2: Hybrid P( x3 | m1 m2) + Discrete Keys = (m1, 2), (m2, 2), + Choice(m2) + 0 Choice(m1) + 0 0 Leaf p(x3) + R = [ 10.0494 ] + d = [ -10.1489 ] + No noise model + + 0 1 Leaf p(x3) + R = [ 10.0494 ] + d = [ -10.1479 ] + No noise model + + 1 Choice(m1) + 1 0 Leaf p(x3) + R = [ 10.0494 ] + d = [ -10.0504 ] + No noise model + + 1 1 Leaf p(x3) + R = [ 10.0494 ] + d = [ -10.0494 ] + No noise model + +)"; + EXPECT(assert_print_equal(expected_hybridBayesNet, *hybridBayesNet)); +} + +/**************************************************************************** + * Simple PlanarSLAM example test with 2 poses and 2 landmarks (each pose + * connects to 1 landmark) to expose issue with default decision tree creation + * in hybrid elimination. The hybrid factor is between the poses X0 and X1. + * The issue arises if we eliminate a landmark variable first since it is not + * connected to a HybridFactor. + */ +TEST(HybridFactorGraph, DefaultDecisionTree) { + HybridNonlinearFactorGraph fg; + + // Add a prior on pose x1 at the origin. + // A prior factor consists of a mean and a noise model (covariance matrix) + Pose2 prior(0.0, 0.0, 0.0); // prior mean is at origin + auto priorNoise = noiseModel::Diagonal::Sigmas( + Vector3(0.3, 0.3, 0.1)); // 30cm std on x,y, 0.1 rad on theta + fg.emplace_nonlinear>(X(0), prior, priorNoise); + + using PlanarMotionModel = BetweenFactor; + + // Add odometry factor + Pose2 odometry(2.0, 0.0, 0.0); + KeyVector contKeys = {X(0), X(1)}; + auto noise_model = noiseModel::Isotropic::Sigma(3, 1.0); + auto still = boost::make_shared(X(0), X(1), Pose2(0, 0, 0), + noise_model), + moving = boost::make_shared(X(0), X(1), odometry, + noise_model); + std::vector motion_models = {still, moving}; + fg.emplace_hybrid( + contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, motion_models); + + // Add Range-Bearing measurements to from X0 to L0 and X1 to L1. 
+ // create a noise model for the landmark measurements + auto measurementNoise = noiseModel::Diagonal::Sigmas( + Vector2(0.1, 0.2)); // 0.1 rad std on bearing, 20cm on range + // create the measurement values - indices are (pose id, landmark id) + Rot2 bearing11 = Rot2::fromDegrees(45), bearing22 = Rot2::fromDegrees(90); + double range11 = std::sqrt(4.0 + 4.0), range22 = 2.0; + + // Add Bearing-Range factors + fg.emplace_nonlinear>( + X(0), L(0), bearing11, range11, measurementNoise); + fg.emplace_nonlinear>( + X(1), L(1), bearing22, range22, measurementNoise); + + // Create (deliberately inaccurate) initial estimate + Values initialEstimate; + initialEstimate.insert(X(0), Pose2(0.5, 0.0, 0.2)); + initialEstimate.insert(X(1), Pose2(2.3, 0.1, -0.2)); + initialEstimate.insert(L(0), Point2(1.8, 2.1)); + initialEstimate.insert(L(1), Point2(4.1, 1.8)); + + // We want to eliminate variables not connected to DCFactors first. + Ordering ordering; + ordering += L(0); + ordering += L(1); + ordering += X(0); + ordering += X(1); + + HybridGaussianFactorGraph linearized = fg.linearize(initialEstimate); + gtsam::HybridBayesNet::shared_ptr hybridBayesNet; + gtsam::HybridGaussianFactorGraph::shared_ptr remainingFactorGraph; + + // This should NOT fail + std::tie(hybridBayesNet, remainingFactorGraph) = + linearized.eliminatePartialSequential(ordering); + EXPECT_LONGS_EQUAL(4, hybridBayesNet->size()); + EXPECT_LONGS_EQUAL(1, remainingFactorGraph->size()); +} + +/* ************************************************************************* */ +int main() { + TestResult tr; + return TestRegistry::runAllTests(tr); +} +/* ************************************************************************* */ diff --git a/gtsam/hybrid/tests/testHybridValues.cpp b/gtsam/hybrid/tests/testHybridValues.cpp new file mode 100644 index 000000000..6f510601d --- /dev/null +++ b/gtsam/hybrid/tests/testHybridValues.cpp @@ -0,0 +1,58 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. 
(see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file testHybridValues.cpp + * @date Jul 28, 2022 + * @author Shangjie Xue + */ + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +// Include for test suite +#include + +using namespace std; +using namespace gtsam; + +TEST(HybridValues, basics) { + HybridValues values; + values.insert(99, Vector2(2, 3)); + values.insert(100, 3); + EXPECT(assert_equal(values.at(99), Vector2(2, 3))); + EXPECT(assert_equal(values.atDiscrete(100), int(3))); + + values.print(); + + HybridValues values2; + values2.insert(100, 3); + values2.insert(99, Vector2(2, 3)); + EXPECT(assert_equal(values2, values)); + + values2.insert(98, Vector2(2, 3)); + EXPECT(!assert_equal(values2, values)); +} + +/* ************************************************************************* */ +int main() { + TestResult tr; + return TestRegistry::runAllTests(tr); +} +/* ************************************************************************* */ diff --git a/gtsam/inference/FactorGraph.h b/gtsam/inference/FactorGraph.h index 101134c83..89fd09037 100644 --- a/gtsam/inference/FactorGraph.h +++ b/gtsam/inference/FactorGraph.h @@ -116,7 +116,7 @@ class FactorGraph { using HasDerivedValueType = typename std::enable_if< std::is_base_of::value>::type; - /// Check if T has a value_type derived from FactorType. + /// Check if T has a pointer type derived from FactorType. template using HasDerivedElementType = typename std::enable_if::value>::type; diff --git a/gtsam/inference/JunctionTree-inst.h b/gtsam/inference/JunctionTree-inst.h index 04472f7e3..1c68aa0da 100644 --- a/gtsam/inference/JunctionTree-inst.h +++ b/gtsam/inference/JunctionTree-inst.h @@ -33,7 +33,7 @@ struct ConstructorTraversalData { typedef typename JunctionTree::sharedNode sharedNode; ConstructorTraversalData* const parentData; - sharedNode myJTNode; + sharedNode junctionTreeNode; FastVector childSymbolicConditionals; FastVector childSymbolicFactors; @@ -53,8 +53,9 @@ struct ConstructorTraversalData { // a traversal data structure with its own JT node, and create a child // pointer in its parent. 
ConstructorTraversalData myData = ConstructorTraversalData(&parentData); - myData.myJTNode = boost::make_shared(node->key, node->factors); - parentData.myJTNode->addChild(myData.myJTNode); + myData.junctionTreeNode = + boost::make_shared(node->key, node->factors); + parentData.junctionTreeNode->addChild(myData.junctionTreeNode); return myData; } @@ -91,7 +92,7 @@ struct ConstructorTraversalData { myData.parentData->childSymbolicConditionals.push_back(myConditional); myData.parentData->childSymbolicFactors.push_back(mySeparatorFactor); - sharedNode node = myData.myJTNode; + sharedNode node = myData.junctionTreeNode; const FastVector& childConditionals = myData.childSymbolicConditionals; node->problemSize_ = (int) (myConditional->size() * symbolicFactors.size()); @@ -138,14 +139,14 @@ JunctionTree::JunctionTree( typedef typename EliminationTree::Node ETreeNode; typedef ConstructorTraversalData Data; Data rootData(0); - rootData.myJTNode = boost::make_shared(); // Make a dummy node to gather - // the junction tree roots + // Make a dummy node to gather the junction tree roots + rootData.junctionTreeNode = boost::make_shared(); treeTraversal::DepthFirstForest(eliminationTree, rootData, Data::ConstructorTraversalVisitorPre, Data::ConstructorTraversalVisitorPostAlg2); // Assign roots from the dummy node - this->addChildrenAsRoots(rootData.myJTNode); + this->addChildrenAsRoots(rootData.junctionTreeNode); // Transfer remaining factors from elimination tree Base::remainingFactors_ = eliminationTree.remainingFactors(); diff --git a/gtsam/linear/GaussianISAM.h b/gtsam/linear/GaussianISAM.h index b77b26240..98bf5df88 100644 --- a/gtsam/linear/GaussianISAM.h +++ b/gtsam/linear/GaussianISAM.h @@ -10,7 +10,7 @@ * -------------------------------------------------------------------------- */ /** - * @file SymbolicISAM.h + * @file GaussianISAM.h * @date July 29, 2013 * @author Frank Dellaert * @author Richard Roberts diff --git a/gtsam/linear/tests/testSerializationLinear.cpp b/gtsam/linear/tests/testSerializationLinear.cpp index 881b2830e..ee21de364 100644 --- a/gtsam/linear/tests/testSerializationLinear.cpp +++ b/gtsam/linear/tests/testSerializationLinear.cpp @@ -198,6 +198,33 @@ TEST (Serialization, gaussian_factor_graph) { EXPECT(equalsBinary(graph)); } +/* ****************************************************************************/ +TEST(Serialization, gaussian_bayes_net) { + // Create an arbitrary Bayes Net + GaussianBayesNet gbn; + gbn += GaussianConditional::shared_ptr(new GaussianConditional( + 0, Vector2(1.0, 2.0), (Matrix2() << 3.0, 4.0, 0.0, 6.0).finished(), 3, + (Matrix2() << 7.0, 8.0, 9.0, 10.0).finished(), 4, + (Matrix2() << 11.0, 12.0, 13.0, 14.0).finished())); + gbn += GaussianConditional::shared_ptr(new GaussianConditional( + 1, Vector2(15.0, 16.0), (Matrix2() << 17.0, 18.0, 0.0, 20.0).finished(), + 2, (Matrix2() << 21.0, 22.0, 23.0, 24.0).finished(), 4, + (Matrix2() << 25.0, 26.0, 27.0, 28.0).finished())); + gbn += GaussianConditional::shared_ptr(new GaussianConditional( + 2, Vector2(29.0, 30.0), (Matrix2() << 31.0, 32.0, 0.0, 34.0).finished(), + 3, (Matrix2() << 35.0, 36.0, 37.0, 38.0).finished())); + gbn += GaussianConditional::shared_ptr(new GaussianConditional( + 3, Vector2(39.0, 40.0), (Matrix2() << 41.0, 42.0, 0.0, 44.0).finished(), + 4, (Matrix2() << 45.0, 46.0, 47.0, 48.0).finished())); + gbn += GaussianConditional::shared_ptr(new GaussianConditional( + 4, Vector2(49.0, 50.0), (Matrix2() << 51.0, 52.0, 0.0, 54.0).finished())); + + std::string serialized = serialize(gbn); + 
GaussianBayesNet actual; + deserialize(serialized, actual); + EXPECT(assert_equal(gbn, actual)); +} + /* ************************************************************************* */ TEST (Serialization, gaussian_bayes_tree) { const Key x1=1, x2=2, x3=3, x4=4; diff --git a/gtsam/navigation/CombinedImuFactor.cpp b/gtsam/navigation/CombinedImuFactor.cpp index 3fe2cf4d1..8d3a7dd31 100644 --- a/gtsam/navigation/CombinedImuFactor.cpp +++ b/gtsam/navigation/CombinedImuFactor.cpp @@ -93,9 +93,14 @@ void PreintegratedCombinedMeasurements::resetIntegration() { //------------------------------------------------------------------------------ void PreintegratedCombinedMeasurements::integrateMeasurement( const Vector3& measuredAcc, const Vector3& measuredOmega, double dt) { + if (dt <= 0) { + throw std::runtime_error( + "PreintegratedCombinedMeasurements::integrateMeasurement: dt <=0"); + } + // Update preintegrated measurements. - Matrix9 A; // overall Jacobian wrt preintegrated measurements (df/dx) - Matrix93 B, C; + Matrix9 A; // Jacobian wrt preintegrated measurements without bias (df/dx) + Matrix93 B, C; // Jacobian of state wrpt accel bias and omega bias respectively. PreintegrationType::update(measuredAcc, measuredOmega, dt, &A, &B, &C); // Update preintegrated measurements covariance: as in [2] we consider a first @@ -105,47 +110,78 @@ void PreintegratedCombinedMeasurements::integrateMeasurement( // and preintegrated measurements // Single Jacobians to propagate covariance - // TODO(frank): should we not also account for bias on position? - Matrix3 theta_H_biasOmega = -C.topRows<3>(); - Matrix3 vel_H_biasAcc = -B.bottomRows<3>(); + Matrix3 theta_H_biasOmega = C.topRows<3>(); + Matrix3 pos_H_biasAcc = B.middleRows<3>(3); + Matrix3 vel_H_biasAcc = B.bottomRows<3>(); + + Matrix3 theta_H_biasOmegaInit = -theta_H_biasOmega; + Matrix3 pos_H_biasAccInit = -pos_H_biasAcc; + Matrix3 vel_H_biasAccInit = -vel_H_biasAcc; // overall Jacobian wrt preintegrated measurements (df/dx) Eigen::Matrix F; F.setZero(); F.block<9, 9>(0, 0) = A; F.block<3, 3>(0, 12) = theta_H_biasOmega; + F.block<3, 3>(3, 9) = pos_H_biasAcc; F.block<3, 3>(6, 9) = vel_H_biasAcc; F.block<6, 6>(9, 9) = I_6x6; + // Update the uncertainty on the state (matrix F in [4]). + preintMeasCov_ = F * preintMeasCov_ * F.transpose(); + // propagate uncertainty // TODO(frank): use noiseModel routine so we can have arbitrary noise models. 
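+  // First-order propagation: preintMeasCov_ was already mapped through F
+  // above; the G * Sigma * G^T term assembled below adds the measurement,
+  // integration, bias-random-walk, and initial-bias covariances, each scaled
+  // by dt or 1/dt as appropriate.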
const Matrix3& aCov = p().accelerometerCovariance; const Matrix3& wCov = p().gyroscopeCovariance; const Matrix3& iCov = p().integrationCovariance; + const Matrix6& bInitCov = p().biasAccOmegaInt; // first order uncertainty propagation - // Optimized matrix multiplication (1/dt) * G * measurementCovariance * - // G.transpose() + // Optimized matrix mult: (1/dt) * G * measurementCovariance * G.transpose() Eigen::Matrix G_measCov_Gt; G_measCov_Gt.setZero(15, 15); + const Matrix3& bInitCov11 = bInitCov.block<3, 3>(0, 0) / dt; + const Matrix3& bInitCov12 = bInitCov.block<3, 3>(0, 3) / dt; + const Matrix3& bInitCov21 = bInitCov.block<3, 3>(3, 0) / dt; + const Matrix3& bInitCov22 = bInitCov.block<3, 3>(3, 3) / dt; + // BLOCK DIAGONAL TERMS - D_t_t(&G_measCov_Gt) = dt * iCov; - D_v_v(&G_measCov_Gt) = (1 / dt) * vel_H_biasAcc - * (aCov + p().biasAccOmegaInt.block<3, 3>(0, 0)) - * (vel_H_biasAcc.transpose()); - D_R_R(&G_measCov_Gt) = (1 / dt) * theta_H_biasOmega - * (wCov + p().biasAccOmegaInt.block<3, 3>(3, 3)) - * (theta_H_biasOmega.transpose()); + D_R_R(&G_measCov_Gt) = + (theta_H_biasOmega * (wCov / dt) * theta_H_biasOmega.transpose()) // + + + (theta_H_biasOmegaInit * bInitCov22 * theta_H_biasOmegaInit.transpose()); + + D_t_t(&G_measCov_Gt) = + (pos_H_biasAcc * (aCov / dt) * pos_H_biasAcc.transpose()) // + + (pos_H_biasAccInit * bInitCov11 * pos_H_biasAccInit.transpose()) // + + (dt * iCov); + + D_v_v(&G_measCov_Gt) = + (vel_H_biasAcc * (aCov / dt) * vel_H_biasAcc.transpose()) // + + (vel_H_biasAccInit * bInitCov11 * vel_H_biasAccInit.transpose()); + D_a_a(&G_measCov_Gt) = dt * p().biasAccCovariance; D_g_g(&G_measCov_Gt) = dt * p().biasOmegaCovariance; // OFF BLOCK DIAGONAL TERMS - Matrix3 temp = vel_H_biasAcc * p().biasAccOmegaInt.block<3, 3>(3, 0) - * theta_H_biasOmega.transpose(); - D_v_R(&G_measCov_Gt) = temp; - D_R_v(&G_measCov_Gt) = temp.transpose(); - preintMeasCov_ = F * preintMeasCov_ * F.transpose() + G_measCov_Gt; + D_R_t(&G_measCov_Gt) = + theta_H_biasOmegaInit * bInitCov21 * pos_H_biasAccInit.transpose(); + D_R_v(&G_measCov_Gt) = + theta_H_biasOmegaInit * bInitCov21 * vel_H_biasAccInit.transpose(); + D_t_R(&G_measCov_Gt) = + pos_H_biasAccInit * bInitCov12 * theta_H_biasOmegaInit.transpose(); + D_t_v(&G_measCov_Gt) = + (pos_H_biasAcc * (aCov / dt) * vel_H_biasAcc.transpose()) + + (pos_H_biasAccInit * bInitCov11 * vel_H_biasAccInit.transpose()); + D_v_R(&G_measCov_Gt) = + vel_H_biasAccInit * bInitCov12 * theta_H_biasOmegaInit.transpose(); + D_v_t(&G_measCov_Gt) = + (vel_H_biasAcc * (aCov / dt) * pos_H_biasAcc.transpose()) + + (vel_H_biasAccInit * bInitCov11 * pos_H_biasAccInit.transpose()); + + preintMeasCov_.noalias() += G_measCov_Gt; } //------------------------------------------------------------------------------ @@ -253,6 +289,5 @@ std::ostream& operator<<(std::ostream& os, const CombinedImuFactor& f) { os << " noise model sigmas: " << f.noiseModel_->sigmas().transpose(); return os; } -} - /// namespace gtsam +} // namespace gtsam diff --git a/gtsam/navigation/CombinedImuFactor.h b/gtsam/navigation/CombinedImuFactor.h index 54c5a7dbb..69d72ad9b 100644 --- a/gtsam/navigation/CombinedImuFactor.h +++ b/gtsam/navigation/CombinedImuFactor.h @@ -51,6 +51,7 @@ typedef ManifoldPreintegration PreintegrationType; * TRO, 28(1):61-76, 2012. * [3] L. Carlone, S. Williams, R. Roberts, "Preintegrated IMU factor: * Computation of the Jacobian Matrices", Tech. Report, 2013. + * Available in this repo as "PreintegratedIMUJacobians.pdf". * [4] C. Forster, L. Carlone, F. Dellaert, D. 
Scaramuzza, IMU Preintegration on * Manifold for Efficient Visual-Inertial Maximum-a-Posteriori Estimation, * Robotics: Science and Systems (RSS), 2015. @@ -61,7 +62,7 @@ typedef ManifoldPreintegration PreintegrationType; struct GTSAM_EXPORT PreintegrationCombinedParams : PreintegrationParams { Matrix3 biasAccCovariance; ///< continuous-time "Covariance" describing accelerometer bias random walk Matrix3 biasOmegaCovariance; ///< continuous-time "Covariance" describing gyroscope bias random walk - Matrix6 biasAccOmegaInt; ///< covariance of bias used for pre-integration + Matrix6 biasAccOmegaInt; ///< covariance of bias used as initial estimate. /// Default constructor makes uninitialized params struct. /// Used for serialization. @@ -92,11 +93,11 @@ struct GTSAM_EXPORT PreintegrationCombinedParams : PreintegrationParams { void setBiasAccCovariance(const Matrix3& cov) { biasAccCovariance=cov; } void setBiasOmegaCovariance(const Matrix3& cov) { biasOmegaCovariance=cov; } - void setBiasAccOmegaInt(const Matrix6& cov) { biasAccOmegaInt=cov; } + void setBiasAccOmegaInit(const Matrix6& cov) { biasAccOmegaInt=cov; } const Matrix3& getBiasAccCovariance() const { return biasAccCovariance; } const Matrix3& getBiasOmegaCovariance() const { return biasOmegaCovariance; } - const Matrix6& getBiasAccOmegaInt() const { return biasAccOmegaInt; } + const Matrix6& getBiasAccOmegaInit() const { return biasAccOmegaInt; } private: diff --git a/gtsam/navigation/ImuBias.h b/gtsam/navigation/ImuBias.h index 9346a4a77..936ae7782 100644 --- a/gtsam/navigation/ImuBias.h +++ b/gtsam/navigation/ImuBias.h @@ -105,7 +105,7 @@ public: /// @{ /** identity for group operation */ - static ConstantBias identity() { + static ConstantBias Identity() { return ConstantBias(); } diff --git a/gtsam/navigation/ImuFactor.cpp b/gtsam/navigation/ImuFactor.cpp index 28c0461b1..9b6affaaf 100644 --- a/gtsam/navigation/ImuFactor.cpp +++ b/gtsam/navigation/ImuFactor.cpp @@ -59,7 +59,7 @@ void PreintegratedImuMeasurements::integrateMeasurement( // Update preintegrated measurements (also get Jacobian) Matrix9 A; // overall Jacobian wrt preintegrated measurements (df/dx) - Matrix93 B, C; + Matrix93 B, C; // Jacobian of state wrpt accel bias and omega bias respectively. PreintegrationType::update(measuredAcc, measuredOmega, dt, &A, &B, &C); // first order covariance propagation: @@ -73,11 +73,13 @@ void PreintegratedImuMeasurements::integrateMeasurement( const Matrix3& iCov = p().integrationCovariance; // (1/dt) allows to pass from continuous time noise to discrete time noise + // Update the uncertainty on the state (matrix A in [4]). preintMeasCov_ = A * preintMeasCov_ * A.transpose(); + // These 2 updates account for uncertainty on the IMU measurement (matrix B in [4]). preintMeasCov_.noalias() += B * (aCov / dt) * B.transpose(); preintMeasCov_.noalias() += C * (wCov / dt) * C.transpose(); - // NOTE(frank): (Gi*dt)*(C/dt)*(Gi'*dt), with Gi << Z_3x3, I_3x3, Z_3x3 + // NOTE(frank): (Gi*dt)*(C/dt)*(Gi'*dt), with Gi << Z_3x3, I_3x3, Z_3x3 (9x3 matrix) preintMeasCov_.block<3, 3>(3, 3).noalias() += iCov * dt; } diff --git a/gtsam/navigation/ImuFactor.h b/gtsam/navigation/ImuFactor.h index c3b398e22..6765c8b42 100644 --- a/gtsam/navigation/ImuFactor.h +++ b/gtsam/navigation/ImuFactor.h @@ -53,6 +53,7 @@ typedef ManifoldPreintegration PreintegrationType; * TRO, 28(1):61-76, 2012. * [3] L. Carlone, S. Williams, R. Roberts, "Preintegrated IMU factor: * Computation of the Jacobian Matrices", Tech. Report, 2013. 
+ * Available in this repo as "PreintegratedIMUJacobians.pdf". * [4] C. Forster, L. Carlone, F. Dellaert, D. Scaramuzza, "IMU Preintegration on * Manifold for Efficient Visual-Inertial Maximum-a-Posteriori Estimation", * Robotics: Science and Systems (RSS), 2015. diff --git a/gtsam/navigation/PreintegrationBase.cpp b/gtsam/navigation/PreintegrationBase.cpp index 8840c34e9..f6e9fccb8 100644 --- a/gtsam/navigation/PreintegrationBase.cpp +++ b/gtsam/navigation/PreintegrationBase.cpp @@ -157,9 +157,9 @@ Vector9 PreintegrationBase::computeError(const NavState& state_i, state_j.localCoordinates(predictedState_j, H2 ? &D_error_state_j : 0, H1 || H3 ? &D_error_predict : 0); - if (H1) *H1 << D_error_predict* D_predict_state_i; + if (H1) *H1 << D_error_predict * D_predict_state_i; if (H2) *H2 << D_error_state_j; - if (H3) *H3 << D_error_predict* D_predict_bias_i; + if (H3) *H3 << D_error_predict * D_predict_bias_i; return error; } diff --git a/gtsam/navigation/ScenarioRunner.cpp b/gtsam/navigation/ScenarioRunner.cpp index 3938ce86c..9d3e258de 100644 --- a/gtsam/navigation/ScenarioRunner.cpp +++ b/gtsam/navigation/ScenarioRunner.cpp @@ -15,8 +15,8 @@ * @author Frank Dellaert */ -#include #include +#include #include #include @@ -105,4 +105,62 @@ Matrix6 ScenarioRunner::estimateNoiseCovariance(size_t N) const { return Q / (N - 1); } +PreintegratedCombinedMeasurements CombinedScenarioRunner::integrate( + double T, const Bias& estimatedBias, bool corrupted) const { + gttic_(integrate); + PreintegratedCombinedMeasurements pim(p_, estimatedBias); + + const double dt = imuSampleTime(); + const size_t nrSteps = T / dt; + double t = 0; + for (size_t k = 0; k < nrSteps; k++, t += dt) { + Vector3 measuredOmega = + corrupted ? measuredAngularVelocity(t) : actualAngularVelocity(t); + Vector3 measuredAcc = + corrupted ? measuredSpecificForce(t) : actualSpecificForce(t); + pim.integrateMeasurement(measuredAcc, measuredOmega, dt); + } + + return pim; +} + +NavState CombinedScenarioRunner::predict( + const PreintegratedCombinedMeasurements& pim, + const Bias& estimatedBias) const { + const NavState state_i(scenario().pose(0), scenario().velocity_n(0)); + return pim.predict(state_i, estimatedBias); +} + +Eigen::Matrix CombinedScenarioRunner::estimateCovariance( + double T, size_t N, const Bias& estimatedBias) const { + gttic_(estimateCovariance); + + // Get predict prediction from ground truth measurements + NavState prediction = predict(integrate(T)); + + // Sample ! 
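+  // Monte Carlo estimate: draw N noise-corrupted preintegrations, predict with
+  // each one, and stack the 15-dof tangent-space errors (pose/velocity via
+  // localCoordinates, plus the bias difference) into `samples`; the sample
+  // covariance is then formed from these columns.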
+ Matrix samples(15, N); + Vector15 sum = Vector15::Zero(); + for (size_t i = 0; i < N; i++) { + auto pim = integrate(T, estimatedBias, true); + NavState sampled = predict(pim); + Vector15 xi = Vector15::Zero(); + xi << sampled.localCoordinates(prediction), + (estimatedBias_.vector() - estimatedBias.vector()); + samples.col(i) = xi; + sum += xi; + } + + // Compute MC covariance + Vector15 sampleMean = sum / N; + Eigen::Matrix Q; + Q.setZero(); + for (size_t i = 0; i < N; i++) { + Vector15 xi = samples.col(i) - sampleMean; + Q += xi * xi.transpose(); + } + + return Q / (N - 1); +} + } // namespace gtsam diff --git a/gtsam/navigation/ScenarioRunner.h b/gtsam/navigation/ScenarioRunner.h index 1577e36fe..cee5a54ab 100644 --- a/gtsam/navigation/ScenarioRunner.h +++ b/gtsam/navigation/ScenarioRunner.h @@ -16,9 +16,10 @@ */ #pragma once +#include +#include #include #include -#include namespace gtsam { @@ -66,10 +67,10 @@ class GTSAM_EXPORT ScenarioRunner { // also, uses g=10 for easy debugging const Vector3& gravity_n() const { return p_->n_gravity; } + const Scenario& scenario() const { return scenario_; } + // A gyro simply measures angular velocity in body frame - Vector3 actualAngularVelocity(double t) const { - return scenario_.omega_b(t); - } + Vector3 actualAngularVelocity(double t) const { return scenario_.omega_b(t); } // An accelerometer measures acceleration in body, but not gravity Vector3 actualSpecificForce(double t) const { @@ -106,4 +107,39 @@ class GTSAM_EXPORT ScenarioRunner { Matrix6 estimateNoiseCovariance(size_t N = 1000) const; }; +/* + * Simple class to test navigation scenarios with CombinedImuMeasurements. + * Takes a trajectory scenario as input, and can generate IMU measurements + */ +class GTSAM_EXPORT CombinedScenarioRunner : public ScenarioRunner { + public: + typedef boost::shared_ptr SharedParams; + + private: + const SharedParams p_; + const Bias estimatedBias_; + + public: + CombinedScenarioRunner(const Scenario& scenario, const SharedParams& p, + double imuSampleTime = 1.0 / 100.0, + const Bias& bias = Bias()) + : ScenarioRunner(scenario, static_cast(p), + imuSampleTime, bias), + p_(p), + estimatedBias_(bias) {} + + /// Integrate measurements for T seconds into a PIM + PreintegratedCombinedMeasurements integrate( + double T, const Bias& estimatedBias = Bias(), + bool corrupted = false) const; + + /// Predict predict given a PIM + NavState predict(const PreintegratedCombinedMeasurements& pim, + const Bias& estimatedBias = Bias()) const; + + /// Compute a Monte Carlo estimate of the predict covariance using N samples + Eigen::Matrix estimateCovariance( + double T, size_t N = 1000, const Bias& estimatedBias = Bias()) const; +}; + } // namespace gtsam diff --git a/gtsam/navigation/navigation.i b/gtsam/navigation/navigation.i index 6ede1645f..731cf3807 100644 --- a/gtsam/navigation/navigation.i +++ b/gtsam/navigation/navigation.i @@ -17,7 +17,7 @@ class ConstantBias { bool equals(const gtsam::imuBias::ConstantBias& expected, double tol) const; // Group - static gtsam::imuBias::ConstantBias identity(); + static gtsam::imuBias::ConstantBias Identity(); // Operator Overloads gtsam::imuBias::ConstantBias operator-() const; @@ -165,11 +165,11 @@ virtual class PreintegrationCombinedParams : gtsam::PreintegrationParams { void setBiasAccCovariance(Matrix cov); void setBiasOmegaCovariance(Matrix cov); - void setBiasAccOmegaInt(Matrix cov); + void setBiasAccOmegaInit(Matrix cov); Matrix getBiasAccCovariance() const ; Matrix getBiasOmegaCovariance() const ; - Matrix 
getBiasAccOmegaInt() const; + Matrix getBiasAccOmegaInit() const; }; diff --git a/gtsam/navigation/tests/testCombinedImuFactor.cpp b/gtsam/navigation/tests/testCombinedImuFactor.cpp index 2bbc2cc7c..aacfff0f0 100644 --- a/gtsam/navigation/tests/testCombinedImuFactor.cpp +++ b/gtsam/navigation/tests/testCombinedImuFactor.cpp @@ -16,18 +16,19 @@ * @author Frank Dellaert * @author Richard Roberts * @author Stephen Williams + * @author Varun Agrawal */ -#include -#include -#include -#include -#include -#include +#include #include #include - -#include +#include +#include +#include +#include +#include +#include +#include #include @@ -40,12 +41,15 @@ static boost::shared_ptr Params() { p->gyroscopeCovariance = kGyroSigma * kGyroSigma * I_3x3; p->accelerometerCovariance = kAccelSigma * kAccelSigma * I_3x3; p->integrationCovariance = 0.0001 * I_3x3; + p->biasAccCovariance = Z_3x3; + p->biasOmegaCovariance = Z_3x3; + p->biasAccOmegaInt = Z_6x6; return p; } -} +} // namespace testing /* ************************************************************************* */ -TEST( CombinedImuFactor, PreintegratedMeasurements ) { +TEST(CombinedImuFactor, PreintegratedMeasurements ) { // Linearization point Bias bias(Vector3(0, 0, 0), Vector3(0, 0, 0)); ///< Current estimate of acceleration and angular rate biases @@ -71,8 +75,9 @@ TEST( CombinedImuFactor, PreintegratedMeasurements ) { DOUBLES_EQUAL(expected1.deltaTij(), actual1.deltaTij(), tol); } + /* ************************************************************************* */ -TEST( CombinedImuFactor, ErrorWithBiases ) { +TEST(CombinedImuFactor, ErrorWithBiases ) { Bias bias(Vector3(0.2, 0, 0), Vector3(0, 0, 0.3)); // Biases (acc, rot) Bias bias2(Vector3(0.2, 0.2, 0), Vector3(1, 0, 0.3)); // Biases (acc, rot) Pose3 x1(Rot3::Expmap(Vector3(0, 0, M_PI / 4.0)), Point3(5.0, 1.0, -50.0)); @@ -203,6 +208,114 @@ TEST(CombinedImuFactor, PredictRotation) { EXPECT(assert_equal(expectedPose, actual.pose(), tol)); } +/* ************************************************************************* */ +// Testing covariance to check if all the jacobians are accounted for. 
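+// The 15 dof are ordered [rotation, position, velocity, accel bias, gyro bias].
+// After a single integrateMeasurement the result should match the first-order
+// propagation implemented in CombinedImuFactor.cpp, roughly
+//   P <- F * P * F' + (1/dt) * G * measurementCovariance * G',
+// where the bias initialization covariance biasAccOmegaInt enters through the
+// *Init Jacobian blocks.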
+TEST(CombinedImuFactor, CheckCovariance) { + auto params = PreintegrationCombinedParams::MakeSharedU(9.81); + + params->setAccelerometerCovariance(pow(0.01, 2) * I_3x3); + params->setGyroscopeCovariance(pow(1.75e-4, 2) * I_3x3); + params->setIntegrationCovariance(pow(0.0, 2) * I_3x3); + params->setOmegaCoriolis(Vector3::Zero()); + + imuBias::ConstantBias currentBias; + + PreintegratedCombinedMeasurements actual(params, currentBias); + + // Measurements + Vector3 measuredAcc(0.1577, -0.8251, 9.6111); + Vector3 measuredOmega(-0.0210, 0.0311, 0.0145); + double deltaT = 0.01; + + actual.integrateMeasurement(measuredAcc, measuredOmega, deltaT); + + Eigen::Matrix expected; + expected << 0.01, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // + 0, 0.01, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // + 0, 0, 0.01, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // + 0, 0, 0, 2.50025e-07, 0, 0, 5.0005e-05, 0, 0, 0, 0, 0, 0, 0, 0, // + 0, 0, 0, 0, 2.50025e-07, 0, 0, 5.0005e-05, 0, 0, 0, 0, 0, 0, 0, // + 0, 0, 0, 0, 0, 2.50025e-07, 0, 0, 5.0005e-05, 0, 0, 0, 0, 0, 0, // + 0, 0, 0, 5.0005e-05, 0, 0, 0.010001, 0, 0, 0, 0, 0, 0, 0, 0, // + 0, 0, 0, 0, 5.0005e-05, 0, 0, 0.010001, 0, 0, 0, 0, 0, 0, 0, // + 0, 0, 0, 0, 0, 5.0005e-05, 0, 0, 0.010001, 0, 0, 0, 0, 0, 0, // + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.01, 0, 0, 0, 0, 0, // + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.01, 0, 0, 0, 0, // + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.01, 0, 0, 0, // + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.01, 0, 0, // + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.01, 0, // + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.01; + + // regression + EXPECT(assert_equal(expected, actual.preintMeasCov())); +} + +// Test that the covariance values for the ImuFactor and the CombinedImuFactor +// (top-left 9x9) are the same +TEST(CombinedImuFactor, SameCovariance) { + // IMU measurements and time delta + Vector3 accMeas(0.1577, -0.8251, 9.6111); + Vector3 omegaMeas(-0.0210, 0.0311, 0.0145); + double deltaT = 0.01; + + // Assume zero bias + imuBias::ConstantBias currentBias; + + // Define params for ImuFactor + auto params = PreintegrationParams::MakeSharedU(); + params->setAccelerometerCovariance(pow(0.01, 2) * I_3x3); + params->setGyroscopeCovariance(pow(1.75e-4, 2) * I_3x3); + params->setIntegrationCovariance(pow(0, 2) * I_3x3); + params->setOmegaCoriolis(Vector3::Zero()); + + // The IMU preintegration object for ImuFactor + PreintegratedImuMeasurements pim(params, currentBias); + pim.integrateMeasurement(accMeas, omegaMeas, deltaT); + + // Define params for CombinedImuFactor + auto combined_params = PreintegrationCombinedParams::MakeSharedU(); + combined_params->setAccelerometerCovariance(pow(0.01, 2) * I_3x3); + combined_params->setGyroscopeCovariance(pow(1.75e-4, 2) * I_3x3); + // Set bias integration covariance explicitly to zero + combined_params->setIntegrationCovariance(Z_3x3); + combined_params->setOmegaCoriolis(Z_3x1); + // Set bias initial covariance explicitly to zero + combined_params->setBiasAccOmegaInit(Z_6x6); + + // The IMU preintegration object for CombinedImuFactor + PreintegratedCombinedMeasurements cpim(combined_params, currentBias); + cpim.integrateMeasurement(accMeas, omegaMeas, deltaT); + + // Assert if the noise covariance + EXPECT(assert_equal(pim.preintMeasCov(), + cpim.preintMeasCov().block(0, 0, 9, 9))); +} + +/* ************************************************************************* */ +TEST(CombinedImuFactor, Accelerating) { + const double a = 0.2, v = 50; + + // Set up body pointing towards y axis, and start at 10,20,0 with velocity + // going in X 
The body itself has Z axis pointing down + const Rot3 nRb(Point3(0, 1, 0), Point3(1, 0, 0), Point3(0, 0, -1)); + const Point3 initial_position(10, 20, 0); + const Vector3 initial_velocity(v, 0, 0); + + const AcceleratingScenario scenario(nRb, initial_position, initial_velocity, + Vector3(a, 0, 0)); + + const double T = 3.0; // seconds + + CombinedScenarioRunner runner(scenario, testing::Params(), T / 10); + + PreintegratedCombinedMeasurements pim = runner.integrate(T); + EXPECT(assert_equal(scenario.pose(T), runner.predict(pim).pose(), 1e-9)); + + auto estimatedCov = runner.estimateCovariance(T, 100); + Eigen::Matrix expected = pim.preintMeasCov(); + EXPECT(assert_equal(estimatedCov, expected, 0.1)); +} + /* ************************************************************************* */ int main() { TestResult tr; diff --git a/gtsam/nonlinear/CustomFactor.h b/gtsam/nonlinear/CustomFactor.h index 615b5418e..8580a4949 100644 --- a/gtsam/nonlinear/CustomFactor.h +++ b/gtsam/nonlinear/CustomFactor.h @@ -19,8 +19,6 @@ #include -using namespace gtsam; - namespace gtsam { using JacobianVector = std::vector; diff --git a/gtsam/nonlinear/GncOptimizer.h b/gtsam/nonlinear/GncOptimizer.h index eb1c0233f..065278573 100644 --- a/gtsam/nonlinear/GncOptimizer.h +++ b/gtsam/nonlinear/GncOptimizer.h @@ -42,7 +42,7 @@ static double Chi2inv(const double alpha, const size_t dofs) { /* ************************************************************************* */ template -class GTSAM_EXPORT GncOptimizer { +class GncOptimizer { public: /// For each parameter, specify the corresponding optimizer: e.g., GaussNewtonParams -> GaussNewtonOptimizer. typedef typename GncParameters::OptimizerType BaseOptimizer; diff --git a/gtsam/nonlinear/GncParams.h b/gtsam/nonlinear/GncParams.h index 1f324ae38..10ac80663 100644 --- a/gtsam/nonlinear/GncParams.h +++ b/gtsam/nonlinear/GncParams.h @@ -39,7 +39,7 @@ enum GncLossType { }; template -class GTSAM_EXPORT GncParams { +class GncParams { public: /// For each parameter, specify the corresponding optimizer: e.g., GaussNewtonParams -> GaussNewtonOptimizer. typedef typename BaseOptimizerParameters::OptimizerType OptimizerType; @@ -72,8 +72,14 @@ class GTSAM_EXPORT GncParams { double relativeCostTol = 1e-5; ///< If relative cost change is below this threshold, stop iterating double weightsTol = 1e-4; ///< If the weights are within weightsTol from being binary, stop iterating (only for TLS) Verbosity verbosity = SILENT; ///< Verbosity level - std::vector knownInliers = std::vector(); ///< Slots in the factor graph corresponding to measurements that we know are inliers - std::vector knownOutliers = std::vector(); ///< Slots in the factor graph corresponding to measurements that we know are outliers + + //TODO(Varun) replace IndexVector with vector once pybind11/stl.h is globally enabled. + /// Use IndexVector for inliers and outliers since it is fast + wrapping + using IndexVector = FastVector; + ///< Slots in the factor graph corresponding to measurements that we know are inliers + IndexVector knownInliers = IndexVector(); + ///< Slots in the factor graph corresponding to measurements that we know are outliers + IndexVector knownOutliers = IndexVector(); /// Set the robust loss function to be used in GNC (chosen among the ones in GncLossType). 
void setLossType(const GncLossType type) { @@ -114,7 +120,7 @@ class GTSAM_EXPORT GncParams { * This functionality is commonly used in SLAM when one may assume the odometry is outlier free, and * only apply GNC to prune outliers from the loop closures. * */ - void setKnownInliers(const std::vector& knownIn) { + void setKnownInliers(const IndexVector& knownIn) { for (size_t i = 0; i < knownIn.size(); i++){ knownInliers.push_back(knownIn[i]); } @@ -125,7 +131,7 @@ class GTSAM_EXPORT GncParams { * corresponds to the slots in the factor graph. For instance, if you have a nonlinear factor graph nfg, * and you provide knownOut = {0, 2, 15}, GNC will not apply outlier rejection to nfg[0], nfg[2], and nfg[15]. * */ - void setKnownOutliers(const std::vector& knownOut) { + void setKnownOutliers(const IndexVector& knownOut) { for (size_t i = 0; i < knownOut.size(); i++){ knownOutliers.push_back(knownOut[i]); } @@ -163,7 +169,7 @@ class GTSAM_EXPORT GncParams { std::cout << "knownInliers: " << knownInliers[i] << "\n"; for (size_t i = 0; i < knownOutliers.size(); i++) std::cout << "knownOutliers: " << knownOutliers[i] << "\n"; - baseOptimizerParams.print(str); + baseOptimizerParams.print("Base optimizer params: "); } }; diff --git a/gtsam/nonlinear/NonlinearFactor.cpp b/gtsam/nonlinear/NonlinearFactor.cpp index 3d572e970..debff54ac 100644 --- a/gtsam/nonlinear/NonlinearFactor.cpp +++ b/gtsam/nonlinear/NonlinearFactor.cpp @@ -167,8 +167,9 @@ boost::shared_ptr NoiseModelFactor::linearize( return GaussianFactor::shared_ptr( new JacobianFactor(terms, b, boost::static_pointer_cast(noiseModel_)->unit())); - else + else { return GaussianFactor::shared_ptr(new JacobianFactor(terms, b)); + } } /* ************************************************************************* */ diff --git a/gtsam/nonlinear/NonlinearISAM.cpp b/gtsam/nonlinear/NonlinearISAM.cpp index 512069a8a..840712eb6 100644 --- a/gtsam/nonlinear/NonlinearISAM.cpp +++ b/gtsam/nonlinear/NonlinearISAM.cpp @@ -10,7 +10,7 @@ * -------------------------------------------------------------------------- */ /** - * @file NonlinearISAM-inl.h + * @file NonlinearISAM.cpp * @date Jan 19, 2010 * @author Viorela Ila and Richard Roberts */ diff --git a/gtsam/nonlinear/custom.i b/gtsam/nonlinear/custom.i new file mode 100644 index 000000000..0d195a91b --- /dev/null +++ b/gtsam/nonlinear/custom.i @@ -0,0 +1,38 @@ +//************************************************************************* +// Custom Factor wrapping +//************************************************************************* + +namespace gtsam { + +#include +virtual class CustomFactor : gtsam::NoiseModelFactor { + /* + * Note CustomFactor will not be wrapped for MATLAB, as there is no supporting + * machinery there. This is achieved by adding `gtsam::CustomFactor` to the + * ignore list in `matlab/CMakeLists.txt`. + */ + CustomFactor(); + /* + * Example: + * ``` + * def error_func(this: CustomFactor, v: gtsam.Values, H: List[np.ndarray]): + * + * if not H is None: + * + * H[0] = J1 # 2-d numpy array for a Jacobian block + * H[1] = J2 + * ... 
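+ * # note: each H[i] is expected to be a 2-d numpy array of shape
+ * #       (error dimension) x (dimension of the variable at keys[i])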
+ * return error # 1-d numpy array + * + * cf = CustomFactor(noise_model, keys, error_func) + * ``` + */ + CustomFactor(const gtsam::SharedNoiseModel& noiseModel, + const gtsam::KeyVector& keys, + const gtsam::CustomErrorFunction& errorFunction); + + void print(string s = "", + gtsam::KeyFormatter keyFormatter = gtsam::DefaultKeyFormatter); +}; + +} \ No newline at end of file diff --git a/gtsam/nonlinear/nonlinear.i b/gtsam/nonlinear/nonlinear.i index 033e5ced2..55966ee84 100644 --- a/gtsam/nonlinear/nonlinear.i +++ b/gtsam/nonlinear/nonlinear.i @@ -99,11 +99,11 @@ class NonlinearFactorGraph { string dot( const gtsam::Values& values, const gtsam::KeyFormatter& keyFormatter = gtsam::DefaultKeyFormatter, - const GraphvizFormatting& writer = GraphvizFormatting()); + const gtsam::GraphvizFormatting& writer = gtsam::GraphvizFormatting()); void saveGraph( const string& s, const gtsam::Values& values, const gtsam::KeyFormatter& keyFormatter = gtsam::DefaultKeyFormatter, - const GraphvizFormatting& writer = GraphvizFormatting()) const; + const gtsam::GraphvizFormatting& writer = gtsam::GraphvizFormatting()) const; // enabling serialization functionality void serialize() const; @@ -135,37 +135,6 @@ virtual class NoiseModelFactor : gtsam::NonlinearFactor { Vector whitenedError(const gtsam::Values& x) const; }; -#include -virtual class CustomFactor : gtsam::NoiseModelFactor { - /* - * Note CustomFactor will not be wrapped for MATLAB, as there is no supporting - * machinery there. This is achieved by adding `gtsam::CustomFactor` to the - * ignore list in `matlab/CMakeLists.txt`. - */ - CustomFactor(); - /* - * Example: - * ``` - * def error_func(this: CustomFactor, v: gtsam.Values, H: List[np.ndarray]): - * - * if not H is None: - * - * H[0] = J1 # 2-d numpy array for a Jacobian block - * H[1] = J2 - * ... 
- * return error # 1-d numpy array - * - * cf = CustomFactor(noise_model, keys, error_func) - * ``` - */ - CustomFactor(const gtsam::SharedNoiseModel& noiseModel, - const gtsam::KeyVector& keys, - const gtsam::CustomErrorFunction& errorFunction); - - void print(string s = "", - gtsam::KeyFormatter keyFormatter = gtsam::DefaultKeyFormatter); -}; - #include class Values { Values(); @@ -544,12 +513,34 @@ virtual class DoglegParams : gtsam::NonlinearOptimizerParams { }; #include +enum GncLossType { + GM /*Geman McClure*/, + TLS /*Truncated least squares*/ +}; + template virtual class GncParams { GncParams(const PARAMS& baseOptimizerParams); GncParams(); - void setVerbosityGNC(const This::Verbosity value); - void print(const string& str) const; + BaseOptimizerParameters baseOptimizerParams; + gtsam::GncLossType lossType; + size_t maxIterations; + double muStep; + double relativeCostTol; + double weightsTol; + Verbosity verbosity; + gtsam::KeyVector knownInliers; + gtsam::KeyVector knownOutliers; + + void setLossType(const gtsam::GncLossType type); + void setMaxIterations(const size_t maxIter); + void setMuStep(const double step); + void setRelativeCostTol(double value); + void setWeightsTol(double value); + void setVerbosityGNC(const gtsam::This::Verbosity value); + void setKnownInliers(const gtsam::KeyVector& knownIn); + void setKnownOutliers(const gtsam::KeyVector& knownOut); + void print(const string& str = "GncParams: ") const; enum Verbosity { SILENT, @@ -597,6 +588,11 @@ virtual class GncOptimizer { GncOptimizer(const gtsam::NonlinearFactorGraph& graph, const gtsam::Values& initialValues, const PARAMS& params); + void setInlierCostThresholds(const double inth); + const Vector& getInlierCostThresholds(); + void setInlierCostThresholdsAtProbability(const double alpha); + void setWeights(const Vector w); + const Vector& getWeights(); gtsam::Values optimize(); }; @@ -705,7 +701,7 @@ class ISAM2Result { /** Getters and Setters for all properties */ size_t getVariablesRelinearized() const; size_t getVariablesReeliminated() const; - FactorIndices getNewFactorsIndices() const; + gtsam::FactorIndices getNewFactorsIndices() const; size_t getCliques() const; double getErrorBefore() const; double getErrorAfter() const; @@ -873,7 +869,7 @@ template , gtsam::imuBias::ConstantBias}> virtual class NonlinearEquality2 : gtsam::NoiseModelFactor { - NonlinearEquality2(Key key1, Key key2, double mu = 1e4); + NonlinearEquality2(gtsam::Key key1, gtsam::Key key2, double mu = 1e4); gtsam::Vector evaluateError(const T& x1, const T& x2); }; diff --git a/gtsam/nonlinear/tests/testExpression.cpp b/gtsam/nonlinear/tests/testExpression.cpp index 92f4998a2..9987cca3f 100644 --- a/gtsam/nonlinear/tests/testExpression.cpp +++ b/gtsam/nonlinear/tests/testExpression.cpp @@ -122,7 +122,7 @@ class Class : public Point3 { enum {dimension = 3}; using Point3::Point3; const Vector3& vector() const { return *this; } - inline static Class identity() { return Class(0,0,0); } + inline static Class Identity() { return Class(0,0,0); } double norm(OptionalJacobian<1,3> H = boost::none) const { return norm3(*this, H); } @@ -285,7 +285,7 @@ TEST(Expression, compose2) { // Test compose with one arguments referring to constant rotation. 
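As an aside on the GncParams/GncOptimizer wrapper additions above: the minimal Python sketch below shows how the newly exposed setters fit together. It assumes the templated classes are instantiated under the usual wrapper names (GncLMParams, GncLMOptimizer); the tiny one-factor problem is purely illustrative, not part of this patch.

```python
import gtsam

# Tiny illustrative problem: a single Point3 variable with one prior factor.
graph = gtsam.NonlinearFactorGraph()
graph.add(gtsam.PriorFactorPoint3(1, gtsam.Point3(0, 0, 0),
                                  gtsam.noiseModel.Isotropic.Sigma(3, 1.0)))
initial = gtsam.Values()
initial.insert_point3(1, gtsam.Point3(1.0, 2.0, 3.0))

params = gtsam.GncLMParams()  # assumed instantiation of GncParams<LevenbergMarquardtParams>
params.setLossType(gtsam.GncLossType.TLS)     # enum exposed in the hunk above
params.setKnownInliers(gtsam.KeyVector([0]))  # trust factor slot 0 (the prior)

optimizer = gtsam.GncLMOptimizer(graph, initial, params)
result = optimizer.optimize()
weights = optimizer.getWeights()  # per-factor inlier weights after GNC
```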
TEST(Expression, compose3) { // Create expression - Rot3_ R1(Rot3::identity()), R2(3); + Rot3_ R1(Rot3::Identity()), R2(3); Rot3_ R3 = R1 * R2; // Check keys diff --git a/gtsam/slam/tests/testOrientedPlane3Factor.cpp b/gtsam/slam/tests/testOrientedPlane3Factor.cpp index 35c750408..502c62c11 100644 --- a/gtsam/slam/tests/testOrientedPlane3Factor.cpp +++ b/gtsam/slam/tests/testOrientedPlane3Factor.cpp @@ -217,7 +217,7 @@ TEST(OrientedPlane3Factor, Issue561Simplified) { // Setup prior factors // Note: If x0 is too far away from the origin (e.g. x=100) this test can fail. - Pose3 x0(Rot3::identity(), Vector3(10, -1, 1)); + Pose3 x0(Rot3::Identity(), Vector3(10, -1, 1)); auto x0_noise = noiseModel::Isotropic::Sigma(6, 0.01); graph.addPrior(X(0), x0, x0_noise); @@ -241,7 +241,7 @@ TEST(OrientedPlane3Factor, Issue561Simplified) { // Initial values // Just offset the initial pose by 1m. This is what we are trying to optimize. Values initialEstimate; - Pose3 x0_initial = x0.compose(Pose3(Rot3::identity(), Vector3(1,0,0))); + Pose3 x0_initial = x0.compose(Pose3(Rot3::Identity(), Vector3(1,0,0))); initialEstimate.insert(P(1), p1); initialEstimate.insert(P(2), p2); initialEstimate.insert(X(0), x0_initial); diff --git a/gtsam/slam/tests/testSmartProjectionRigFactor.cpp b/gtsam/slam/tests/testSmartProjectionRigFactor.cpp index b4876b27e..b142b2009 100644 --- a/gtsam/slam/tests/testSmartProjectionRigFactor.cpp +++ b/gtsam/slam/tests/testSmartProjectionRigFactor.cpp @@ -69,7 +69,7 @@ SmartProjectionParams params( TEST(SmartProjectionRigFactor, Constructor) { using namespace vanillaRig; boost::shared_ptr cameraRig(new Cameras()); - cameraRig->push_back(Camera(Pose3::identity(), sharedK)); + cameraRig->push_back(Camera(Pose3(), sharedK)); SmartRigFactor::shared_ptr factor1( new SmartRigFactor(model, cameraRig, params)); } @@ -89,7 +89,7 @@ TEST(SmartProjectionRigFactor, Constructor2) { TEST(SmartProjectionRigFactor, Constructor3) { using namespace vanillaRig; boost::shared_ptr cameraRig(new Cameras()); - cameraRig->push_back(Camera(Pose3::identity(), sharedK)); + cameraRig->push_back(Camera(Pose3(), sharedK)); SmartRigFactor::shared_ptr factor1( new SmartRigFactor(model, cameraRig, params)); factor1->add(measurement1, x1, cameraId1); @@ -99,7 +99,7 @@ TEST(SmartProjectionRigFactor, Constructor3) { TEST(SmartProjectionRigFactor, Constructor4) { using namespace vanillaRig; boost::shared_ptr cameraRig(new Cameras()); - cameraRig->push_back(Camera(Pose3::identity(), sharedK)); + cameraRig->push_back(Camera(Pose3(), sharedK)); SmartProjectionParams params2( gtsam::HESSIAN, gtsam::ZERO_ON_DEGENERACY); // only config that works with rig factors @@ -112,7 +112,7 @@ TEST(SmartProjectionRigFactor, Constructor4) { TEST(SmartProjectionRigFactor, Equals) { using namespace vanillaRig; boost::shared_ptr cameraRig(new Cameras()); // single camera in the rig - cameraRig->push_back(Camera(Pose3::identity(), sharedK)); + cameraRig->push_back(Camera(Pose3(), sharedK)); SmartRigFactor::shared_ptr factor1( new SmartRigFactor(model, cameraRig, params)); @@ -140,7 +140,7 @@ TEST(SmartProjectionRigFactor, noiseless) { Point2 level_uv_right = level_camera_right.project(landmark1); boost::shared_ptr cameraRig(new Cameras()); // single camera in the rig - cameraRig->push_back(Camera(Pose3::identity(), sharedK)); + cameraRig->push_back(Camera(Pose3(), sharedK)); SmartRigFactor factor(model, cameraRig, params); factor.add(level_uv, x1); // both taken from the same camera @@ -197,7 +197,7 @@ TEST(SmartProjectionRigFactor, noisy) { using 
namespace vanillaRig; boost::shared_ptr cameraRig(new Cameras()); // single camera in the rig - cameraRig->push_back(Camera(Pose3::identity(), sharedK)); + cameraRig->push_back(Camera(Pose3(), sharedK)); // Project two landmarks into two cameras Point2 pixelError(0.2, 0.2); @@ -323,7 +323,7 @@ TEST(SmartProjectionRigFactor, smartFactorWithMultipleCameras) { Pose3(Rot3::Ypr(-M_PI / 2, 0., -M_PI / 2), Point3(1, 1, 1)); Pose3 body_T_sensor2 = Pose3(Rot3::Ypr(-M_PI / 5, 0., -M_PI / 2), Point3(0, 0, 1)); - Pose3 body_T_sensor3 = Pose3::identity(); + Pose3 body_T_sensor3 = Pose3(); boost::shared_ptr cameraRig(new Cameras()); // single camera in the rig cameraRig->push_back(Camera(body_T_sensor1, sharedK)); @@ -407,7 +407,7 @@ TEST(SmartProjectionRigFactor, 3poses_smart_projection_factor) { Point2Vector measurements_cam1, measurements_cam2, measurements_cam3; boost::shared_ptr cameraRig(new Cameras()); // single camera in the rig - cameraRig->push_back(Camera(Pose3::identity(), sharedK2)); + cameraRig->push_back(Camera(Pose3(), sharedK2)); // Project three landmarks into three cameras projectToMultipleCameras(cam1, cam2, cam3, landmark1, measurements_cam1); @@ -495,7 +495,7 @@ TEST(SmartProjectionRigFactor, Factors) { FastVector cameraIds{0, 0}; boost::shared_ptr cameraRig(new Cameras()); // single camera in the rig - cameraRig->push_back(Camera(Pose3::identity(), sharedK)); + cameraRig->push_back(Camera(Pose3(), sharedK)); SmartRigFactor::shared_ptr smartFactor1 = boost::make_shared( model, cameraRig, params); @@ -578,7 +578,7 @@ TEST(SmartProjectionRigFactor, 3poses_iterative_smart_projection_factor) { // create smart factor boost::shared_ptr cameraRig(new Cameras()); // single camera in the rig - cameraRig->push_back(Camera(Pose3::identity(), sharedK)); + cameraRig->push_back(Camera(Pose3(), sharedK)); FastVector cameraIds{0, 0, 0}; SmartRigFactor::shared_ptr smartFactor1( new SmartRigFactor(model, cameraRig, params)); @@ -646,7 +646,7 @@ TEST(SmartProjectionRigFactor, landmarkDistance) { params.setEnableEPI(false); boost::shared_ptr cameraRig(new Cameras()); // single camera in the rig - cameraRig->push_back(Camera(Pose3::identity(), sharedK)); + cameraRig->push_back(Camera(Pose3(), sharedK)); FastVector cameraIds{0, 0, 0}; SmartRigFactor::shared_ptr smartFactor1( @@ -717,7 +717,7 @@ TEST(SmartProjectionRigFactor, dynamicOutlierRejection) { params.setDynamicOutlierRejectionThreshold(dynamicOutlierRejectionThreshold); boost::shared_ptr cameraRig(new Cameras()); // single camera in the rig - cameraRig->push_back(Camera(Pose3::identity(), sharedK)); + cameraRig->push_back(Camera(Pose3(), sharedK)); FastVector cameraIds{0, 0, 0}; SmartRigFactor::shared_ptr smartFactor1( @@ -783,7 +783,7 @@ TEST(SmartProjectionRigFactor, CheckHessian) { params.setDegeneracyMode(gtsam::ZERO_ON_DEGENERACY); boost::shared_ptr cameraRig(new Cameras()); // single camera in the rig - cameraRig->push_back(Camera(Pose3::identity(), sharedK)); + cameraRig->push_back(Camera(Pose3(), sharedK)); FastVector cameraIds{0, 0, 0}; SmartRigFactor::shared_ptr smartFactor1( @@ -859,7 +859,7 @@ TEST(SmartProjectionRigFactor, Hessian) { measurements_cam1.push_back(cam2_uv1); boost::shared_ptr cameraRig(new Cameras()); // single camera in the rig - cameraRig->push_back(Camera(Pose3::identity(), sharedK2)); + cameraRig->push_back(Camera(Pose3(), sharedK2)); FastVector cameraIds{0, 0}; SmartProjectionParams params( @@ -889,7 +889,7 @@ TEST(SmartProjectionRigFactor, Hessian) { TEST(SmartProjectionRigFactor, ConstructorWithCal3Bundler) { 
using namespace bundlerPose; boost::shared_ptr cameraRig(new Cameras()); // single camera in the rig - cameraRig->push_back(Camera(Pose3::identity(), sharedBundlerK)); + cameraRig->push_back(Camera(Pose3(), sharedBundlerK)); SmartProjectionParams params; params.setDegeneracyMode(gtsam::ZERO_ON_DEGENERACY); @@ -917,7 +917,7 @@ TEST(SmartProjectionRigFactor, Cal3Bundler) { KeyVector views{x1, x2, x3}; boost::shared_ptr cameraRig(new Cameras()); // single camera in the rig - cameraRig->push_back(Camera(Pose3::identity(), sharedBundlerK)); + cameraRig->push_back(Camera(Pose3(), sharedBundlerK)); FastVector cameraIds{0, 0, 0}; SmartRigFactor::shared_ptr smartFactor1( @@ -988,7 +988,7 @@ TEST(SmartProjectionRigFactor, KeyVector keys{x1, x2, x3, x1}; boost::shared_ptr cameraRig(new Cameras()); // single camera in the rig - cameraRig->push_back(Camera(Pose3::identity(), sharedK)); + cameraRig->push_back(Camera(Pose3(), sharedK)); FastVector cameraIds{0, 0, 0, 0}; SmartRigFactor::shared_ptr smartFactor1( @@ -1116,7 +1116,7 @@ TEST(SmartProjectionRigFactor, optimization_3poses_measurementsFromSamePose) { // create inputs KeyVector keys{x1, x2, x3}; boost::shared_ptr cameraRig(new Cameras()); // single camera in the rig - cameraRig->push_back(Camera(Pose3::identity(), sharedK)); + cameraRig->push_back(Camera(Pose3(), sharedK)); FastVector cameraIds{0, 0, 0}; FastVector cameraIdsRedundant{0, 0, 0, 0}; @@ -1195,11 +1195,11 @@ TEST(SmartProjectionRigFactor, timing) { // Default cameras for simple derivatives static Cal3_S2::shared_ptr sharedKSimple(new Cal3_S2(100, 100, 0, 0, 0)); - Rot3 R = Rot3::identity(); + Rot3 R = Rot3::Identity(); Pose3 pose1 = Pose3(R, Point3(0, 0, 0)); Pose3 pose2 = Pose3(R, Point3(1, 0, 0)); Camera cam1(pose1, sharedKSimple), cam2(pose2, sharedKSimple); - Pose3 body_P_sensorId = Pose3::identity(); + Pose3 body_P_sensorId = Pose3(); boost::shared_ptr cameraRig(new Cameras()); // single camera in the rig cameraRig->push_back(Camera(body_P_sensorId, sharedKSimple)); @@ -1267,7 +1267,7 @@ TEST(SmartProjectionFactorP, optimization_3poses_sphericalCamera) { keys.push_back(x3); boost::shared_ptr cameraRig(new Cameras()); - cameraRig->push_back(Camera(Pose3::identity(), emptyK)); + cameraRig->push_back(Camera(Pose3(), emptyK)); SmartProjectionParams params( gtsam::HESSIAN, @@ -1330,10 +1330,10 @@ TEST(SmartProjectionFactorP, optimization_3poses_sphericalCamera) { /* *************************************************************************/ TEST(SmartProjectionFactorP, timing_sphericalCamera) { // create common data - Rot3 R = Rot3::identity(); + Rot3 R = Rot3::Identity(); Pose3 pose1 = Pose3(R, Point3(0, 0, 0)); Pose3 pose2 = Pose3(R, Point3(1, 0, 0)); - Pose3 body_P_sensorId = Pose3::identity(); + Pose3 body_P_sensorId = Pose3(); Point3 landmark1(0, 0, 10); // create spherical data @@ -1423,7 +1423,7 @@ TEST(SmartProjectionFactorP, 2poses_rankTol) { boost::shared_ptr>> cameraRig( new CameraSet>()); // single camera in the rig - cameraRig->push_back(PinholePose(Pose3::identity(), sharedK)); + cameraRig->push_back(PinholePose(Pose3(), sharedK)); SmartRigFactor::shared_ptr smartFactor1( new SmartRigFactor(model, cameraRig, params)); @@ -1461,7 +1461,7 @@ TEST(SmartProjectionFactorP, 2poses_rankTol) { boost::shared_ptr>> cameraRig( new CameraSet>()); // single camera in the rig - cameraRig->push_back(PinholePose(Pose3::identity(), canonicalK)); + cameraRig->push_back(PinholePose(Pose3(), canonicalK)); SmartRigFactor::shared_ptr smartFactor1( new SmartRigFactor(model, cameraRig, params)); 
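A side note on the recurring change in these test hunks: the deprecated lowercase identity() statics are replaced either by the capitalized Identity() or, where only a default object is needed, by the default constructor. A minimal Python sketch of the equivalent calls, assuming the wrapper exposes the renamed statics the same way as the headers above:

```python
import gtsam

# Default constructors already produce identity elements.
T0 = gtsam.Pose3()

# The renamed statics (assumed wrapped like the C++ headers above) are equivalent.
T1 = gtsam.Pose3.Identity()
R1 = gtsam.Rot3.Identity()
b1 = gtsam.imuBias.ConstantBias.Identity()  # wrapped in navigation.i above

assert T0.equals(T1, 1e-9)
```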
@@ -1498,7 +1498,7 @@ TEST(SmartProjectionFactorP, 2poses_rankTol) { boost::shared_ptr>> cameraRig( new CameraSet>()); // single camera in the rig - cameraRig->push_back(PinholePose(Pose3::identity(), canonicalK)); + cameraRig->push_back(PinholePose(Pose3(), canonicalK)); SmartRigFactor::shared_ptr smartFactor1( new SmartRigFactor(model, cameraRig, params)); @@ -1537,7 +1537,7 @@ TEST(SmartProjectionFactorP, 2poses_sphericalCamera_rankTol) { boost::shared_ptr> cameraRig( new CameraSet()); // single camera in the rig - cameraRig->push_back(SphericalCamera(Pose3::identity(), emptyK)); + cameraRig->push_back(SphericalCamera(Pose3(), emptyK)); // TRIANGULATION TEST WITH DEFAULT RANK TOL { // rankTol = 1 or 0.1 gives a degenerate point, which is undesirable for a diff --git a/gtsam_unstable/dynamics/tests/testVelocityConstraint.cpp b/gtsam_unstable/dynamics/tests/testVelocityConstraint.cpp index 6d8206177..b60b2f3ae 100644 --- a/gtsam_unstable/dynamics/tests/testVelocityConstraint.cpp +++ b/gtsam_unstable/dynamics/tests/testVelocityConstraint.cpp @@ -15,9 +15,9 @@ const Key x1 = 1, x2 = 2; const double dt = 1.0; PoseRTV origin, - pose1(Point3(0.5, 0.0, 0.0), Rot3::identity(), Velocity3(1.0, 0.0, 0.0)), + pose1(Point3(0.5, 0.0, 0.0), Rot3(), Velocity3(1.0, 0.0, 0.0)), pose1a(Point3(0.5, 0.0, 0.0)), - pose2(Point3(1.5, 0.0, 0.0), Rot3::identity(), Velocity3(1.0, 0.0, 0.0)); + pose2(Point3(1.5, 0.0, 0.0), Rot3(), Velocity3(1.0, 0.0, 0.0)); /* ************************************************************************* */ TEST( testVelocityConstraint, trapezoidal ) { diff --git a/gtsam_unstable/examples/GncPoseAveragingExample.cpp b/gtsam_unstable/examples/GncPoseAveragingExample.cpp index ad96934c8..730d342f0 100644 --- a/gtsam_unstable/examples/GncPoseAveragingExample.cpp +++ b/gtsam_unstable/examples/GncPoseAveragingExample.cpp @@ -53,7 +53,7 @@ int main(int argc, char** argv){ normal_distribution normalInliers(0.0, 0.05); Values initial; - initial.insert(0, Pose3::identity()); // identity pose as initialization + initial.insert(0, Pose3()); // identity pose as initialization // create ground truth pose Vector6 poseGtVector; diff --git a/gtsam_unstable/examples/ISAM2_SmartFactorStereo_IMU.cpp b/gtsam_unstable/examples/ISAM2_SmartFactorStereo_IMU.cpp index c0a102d11..16eda23d4 100644 --- a/gtsam_unstable/examples/ISAM2_SmartFactorStereo_IMU.cpp +++ b/gtsam_unstable/examples/ISAM2_SmartFactorStereo_IMU.cpp @@ -129,8 +129,8 @@ int main(int argc, char* argv[]) { // Pose prior - at identity auto priorPoseNoise = noiseModel::Diagonal::Sigmas( (Vector(6) << Vector3::Constant(0.1), Vector3::Constant(0.1)).finished()); - graph.addPrior(X(1), Pose3::identity(), priorPoseNoise); - initialEstimate.insert(X(0), Pose3::identity()); + graph.addPrior(X(1), Pose3::Identity(), priorPoseNoise); + initialEstimate.insert(X(0), Pose3::Identity()); // Bias prior graph.addPrior(B(1), imu.priorImuBias, @@ -157,7 +157,7 @@ int main(int argc, char* argv[]) { if (frame != lastFrame || in.eof()) { cout << "Running iSAM for frame: " << lastFrame << "\n"; - initialEstimate.insert(X(lastFrame), Pose3::identity()); + initialEstimate.insert(X(lastFrame), Pose3::Identity()); initialEstimate.insert(V(lastFrame), Vector3(0, 0, 0)); initialEstimate.insert(B(lastFrame), imu.prevBias); diff --git a/gtsam_unstable/geometry/Pose3Upright.h b/gtsam_unstable/geometry/Pose3Upright.h index d833c9cf4..037a6cc9d 100644 --- a/gtsam_unstable/geometry/Pose3Upright.h +++ b/gtsam_unstable/geometry/Pose3Upright.h @@ -95,7 +95,7 @@ public: /// @{ /// 
identity for group operation - static Pose3Upright identity() { return Pose3Upright(); } + static Pose3Upright Identity() { return Pose3Upright(); } /// inverse transformation with derivatives Pose3Upright inverse(boost::optional H1=boost::none) const; diff --git a/gtsam_unstable/gtsam_unstable.i b/gtsam_unstable/gtsam_unstable.i index 08cd45e18..c6dbd4ab6 100644 --- a/gtsam_unstable/gtsam_unstable.i +++ b/gtsam_unstable/gtsam_unstable.i @@ -130,7 +130,7 @@ class Pose3Upright { gtsam::Pose3Upright retract(Vector v) const; Vector localCoordinates(const gtsam::Pose3Upright& p2) const; - static gtsam::Pose3Upright identity(); + static gtsam::Pose3Upright Identity(); gtsam::Pose3Upright inverse() const; gtsam::Pose3Upright compose(const gtsam::Pose3Upright& p2) const; gtsam::Pose3Upright between(const gtsam::Pose3Upright& p2) const; diff --git a/gtsam_unstable/partition/tests/CMakeLists.txt b/gtsam_unstable/partition/tests/CMakeLists.txt index 9e74d9996..d89a1fe98 100644 --- a/gtsam_unstable/partition/tests/CMakeLists.txt +++ b/gtsam_unstable/partition/tests/CMakeLists.txt @@ -1,2 +1,2 @@ set(ignore_test "testNestedDissection.cpp") -gtsamAddTestsGlob(partition "test*.cpp" "${ignore_test}" "gtsam_unstable;gtsam;metis-gtsam") +gtsamAddTestsGlob(partition "test*.cpp" "${ignore_test}" "gtsam_unstable;gtsam;metis-gtsam-if") diff --git a/gtsam_unstable/slam/tests/testLocalOrientedPlane3Factor.cpp b/gtsam_unstable/slam/tests/testLocalOrientedPlane3Factor.cpp index b97d56c7e..c2b526ecd 100644 --- a/gtsam_unstable/slam/tests/testLocalOrientedPlane3Factor.cpp +++ b/gtsam_unstable/slam/tests/testLocalOrientedPlane3Factor.cpp @@ -44,8 +44,8 @@ TEST(LocalOrientedPlane3Factor, lm_translation_error) { // Init pose and prior. Pose Prior is needed since a single plane measurement // does not fully constrain the pose - Pose3 init_pose = Pose3::identity(); - Pose3 anchor_pose = Pose3::identity(); + Pose3 init_pose = Pose3::Identity(); + Pose3 anchor_pose = Pose3::Identity(); graph.addPrior(X(0), init_pose, noiseModel::Isotropic::Sigma(6, 0.001)); graph.addPrior(X(1), anchor_pose, noiseModel::Isotropic::Sigma(6, 0.001)); @@ -89,7 +89,7 @@ TEST (LocalOrientedPlane3Factor, lm_rotation_error) { // Init pose and prior. Pose Prior is needed since a single plane measurement // does not fully constrain the pose - Pose3 init_pose = Pose3::identity(); + Pose3 init_pose = Pose3::Identity(); graph.addPrior(X(0), init_pose, noiseModel::Isotropic::Sigma(6, 0.001)); // Add two landmark measurements, differing in angle @@ -180,8 +180,8 @@ TEST(LocalOrientedPlane3Factor, Issue561Simplified) { NonlinearFactorGraph graph; // Setup prior factors - Pose3 x0(Rot3::identity(), Vector3(100, 30, 10)); // the "sensor pose" - Pose3 x1(Rot3::identity(), Vector3(90, 40, 5) ); // the "anchor pose" + Pose3 x0(Rot3::Identity(), Vector3(100, 30, 10)); // the "sensor pose" + Pose3 x1(Rot3::Identity(), Vector3(90, 40, 5) ); // the "anchor pose" auto x0_noise = noiseModel::Isotropic::Sigma(6, 0.01); auto x1_noise = noiseModel::Isotropic::Sigma(6, 0.01); @@ -213,7 +213,7 @@ TEST(LocalOrientedPlane3Factor, Issue561Simplified) { // Initial values // Just offset the initial pose by 1m. This is what we are trying to optimize. 
Values initialEstimate; - Pose3 x0_initial = x0.compose(Pose3(Rot3::identity(), Vector3(1, 0, 0))); + Pose3 x0_initial = x0.compose(Pose3(Rot3::Identity(), Vector3(1, 0, 0))); initialEstimate.insert(P(1), p1_in_x1); initialEstimate.insert(P(2), p2_in_x1); initialEstimate.insert(X(0), x0_initial); diff --git a/gtsam_unstable/slam/tests/testPartialPriorFactor.cpp b/gtsam_unstable/slam/tests/testPartialPriorFactor.cpp index 7ba8a0d04..615ed13aa 100644 --- a/gtsam_unstable/slam/tests/testPartialPriorFactor.cpp +++ b/gtsam_unstable/slam/tests/testPartialPriorFactor.cpp @@ -173,7 +173,7 @@ TEST(PartialPriorFactor, Constructors3) { /* ************************************************************************* */ TEST(PartialPriorFactor, JacobianAtIdentity3) { Key poseKey(1); - Pose3 measurement = Pose3::identity(); + Pose3 measurement = Pose3::Identity(); SharedNoiseModel model = NM::Isotropic::Sigma(1, 0.25); TestPartialPriorFactor3 factor(poseKey, kIndexTy, measurement.translation().y(), model); diff --git a/gtsam_unstable/slam/tests/testPoseToPointFactor.cpp b/gtsam_unstable/slam/tests/testPoseToPointFactor.cpp index 5aaaaec53..7345e2308 100644 --- a/gtsam_unstable/slam/tests/testPoseToPointFactor.cpp +++ b/gtsam_unstable/slam/tests/testPoseToPointFactor.cpp @@ -16,7 +16,7 @@ using namespace gtsam::noiseModel; /* ************************************************************************* */ // Verify zero error when there is no noise TEST(PoseToPointFactor, errorNoiseless_2D) { - Pose2 pose = Pose2::identity(); + Pose2 pose = Pose2::Identity(); Point2 point(1.0, 2.0); Point2 noise(0.0, 0.0); Point2 measured = point + noise; @@ -33,7 +33,7 @@ TEST(PoseToPointFactor, errorNoiseless_2D) { /* ************************************************************************* */ // Verify expected error in test scenario TEST(PoseToPointFactor, errorNoise_2D) { - Pose2 pose = Pose2::identity(); + Pose2 pose = Pose2::Identity(); Point2 point(1.0, 2.0); Point2 noise(-1.0, 0.5); Point2 measured = point + noise; @@ -86,7 +86,7 @@ TEST(PoseToPointFactor, jacobian_2D) { /* ************************************************************************* */ // Verify zero error when there is no noise TEST(PoseToPointFactor, errorNoiseless_3D) { - Pose3 pose = Pose3::identity(); + Pose3 pose = Pose3::Identity(); Point3 point(1.0, 2.0, 3.0); Point3 noise(0.0, 0.0, 0.0); Point3 measured = point + noise; @@ -103,7 +103,7 @@ TEST(PoseToPointFactor, errorNoiseless_3D) { /* ************************************************************************* */ // Verify expected error in test scenario TEST(PoseToPointFactor, errorNoise_3D) { - Pose3 pose = Pose3::identity(); + Pose3 pose = Pose3::Identity(); Point3 point(1.0, 2.0, 3.0); Point3 noise(-1.0, 0.5, 0.3); Point3 measured = point + noise; diff --git a/gtsam_unstable/slam/tests/testSmartProjectionPoseFactorRollingShutter.cpp b/gtsam_unstable/slam/tests/testSmartProjectionPoseFactorRollingShutter.cpp index b5962d777..b88a27c6c 100644 --- a/gtsam_unstable/slam/tests/testSmartProjectionPoseFactorRollingShutter.cpp +++ b/gtsam_unstable/slam/tests/testSmartProjectionPoseFactorRollingShutter.cpp @@ -88,7 +88,7 @@ typedef SmartProjectionPoseFactorRollingShutter> TEST(SmartProjectionPoseFactorRollingShutter, Constructor) { using namespace vanillaPoseRS; boost::shared_ptr cameraRig(new Cameras()); - cameraRig->push_back(Camera(Pose3::identity(), sharedK)); + cameraRig->push_back(Camera(Pose3::Identity(), sharedK)); SmartFactorRS::shared_ptr factor1( new SmartFactorRS(model, cameraRig, params)); 
} @@ -97,7 +97,7 @@ TEST(SmartProjectionPoseFactorRollingShutter, Constructor) { TEST(SmartProjectionPoseFactorRollingShutter, Constructor2) { using namespace vanillaPoseRS; boost::shared_ptr cameraRig(new Cameras()); - cameraRig->push_back(Camera(Pose3::identity(), sharedK)); + cameraRig->push_back(Camera(Pose3::Identity(), sharedK)); params.setRankTolerance(rankTol); SmartFactorRS factor1(model, cameraRig, params); } @@ -106,7 +106,7 @@ TEST(SmartProjectionPoseFactorRollingShutter, Constructor2) { TEST(SmartProjectionPoseFactorRollingShutter, add) { using namespace vanillaPoseRS; boost::shared_ptr cameraRig(new Cameras()); - cameraRig->push_back(Camera(Pose3::identity(), sharedK)); + cameraRig->push_back(Camera(Pose3::Identity(), sharedK)); SmartFactorRS::shared_ptr factor1( new SmartFactorRS(model, cameraRig, params)); factor1->add(measurement1, x1, x2, interp_factor); @@ -230,7 +230,7 @@ TEST(SmartProjectionPoseFactorRollingShutter, noiselessErrorAndJacobians) { // Project two landmarks into two cameras Point2 level_uv = cam1.project(landmark1); Point2 level_uv_right = cam2.project(landmark1); - Pose3 body_P_sensorId = Pose3::identity(); + Pose3 body_P_sensorId = Pose3::Identity(); boost::shared_ptr cameraRig(new Cameras()); cameraRig->push_back(Camera(body_P_sensorId, sharedK)); @@ -405,7 +405,7 @@ TEST(SmartProjectionPoseFactorRollingShutter, optimization_3poses) { interp_factors.push_back(interp_factor3); boost::shared_ptr cameraRig(new Cameras()); - cameraRig->push_back(Camera(Pose3::identity(), sharedK)); + cameraRig->push_back(Camera(Pose3::Identity(), sharedK)); SmartFactorRS::shared_ptr smartFactor1( new SmartFactorRS(model, cameraRig, params)); @@ -480,7 +480,7 @@ TEST(SmartProjectionPoseFactorRollingShutter, optimization_3poses_multiCam) { boost::shared_ptr cameraRig(new Cameras()); cameraRig->push_back(Camera(body_P_sensor, sharedK)); - cameraRig->push_back(Camera(Pose3::identity(), sharedK)); + cameraRig->push_back(Camera(Pose3::Identity(), sharedK)); SmartFactorRS::shared_ptr smartFactor1( new SmartFactorRS(model, cameraRig, params)); @@ -633,11 +633,11 @@ TEST(SmartProjectionPoseFactorRollingShutter, hessian_simple_2poses) { // Default cameras for simple derivatives static Cal3_S2::shared_ptr sharedKSimple(new Cal3_S2(100, 100, 0, 0, 0)); - Rot3 R = Rot3::identity(); + Rot3 R = Rot3::Identity(); Pose3 pose1 = Pose3(R, Point3(0, 0, 0)); Pose3 pose2 = Pose3(R, Point3(1, 0, 0)); Camera cam1(pose1, sharedKSimple), cam2(pose2, sharedKSimple); - Pose3 body_P_sensorId = Pose3::identity(); + Pose3 body_P_sensorId = Pose3::Identity(); // one landmarks 1m in front of camera Point3 landmark1(0, 0, 10); @@ -747,7 +747,7 @@ TEST(SmartProjectionPoseFactorRollingShutter, optimization_3poses_EPI) { params.setEnableEPI(true); boost::shared_ptr cameraRig(new Cameras()); - cameraRig->push_back(Camera(Pose3::identity(), sharedK)); + cameraRig->push_back(Camera(Pose3::Identity(), sharedK)); SmartFactorRS smartFactor1(model, cameraRig, params); smartFactor1.add(measurements_lmk1, key_pairs, interp_factors); @@ -816,7 +816,7 @@ TEST(SmartProjectionPoseFactorRollingShutter, params.setEnableEPI(false); boost::shared_ptr cameraRig(new Cameras()); - cameraRig->push_back(Camera(Pose3::identity(), sharedK)); + cameraRig->push_back(Camera(Pose3::Identity(), sharedK)); SmartFactorRS smartFactor1(model, cameraRig, params); smartFactor1.add(measurements_lmk1, key_pairs, interp_factors); @@ -894,7 +894,7 @@ TEST(SmartProjectionPoseFactorRollingShutter, params.setEnableEPI(false); boost::shared_ptr 
cameraRig(new Cameras()); - cameraRig->push_back(Camera(Pose3::identity(), sharedK)); + cameraRig->push_back(Camera(Pose3::Identity(), sharedK)); SmartFactorRS::shared_ptr smartFactor1( new SmartFactorRS(model, cameraRig, params)); @@ -961,7 +961,7 @@ TEST(SmartProjectionPoseFactorRollingShutter, interp_factors.push_back(interp_factor3); boost::shared_ptr cameraRig(new Cameras()); - cameraRig->push_back(Camera(Pose3::identity(), sharedK)); + cameraRig->push_back(Camera(Pose3::Identity(), sharedK)); SmartFactorRS::shared_ptr smartFactor1( new SmartFactorRS(model, cameraRig, params)); @@ -1102,7 +1102,7 @@ TEST(SmartProjectionPoseFactorRollingShutter, interp_factors.push_back(interp_factor1); boost::shared_ptr cameraRig(new Cameras()); - cameraRig->push_back(Camera(Pose3::identity(), sharedK)); + cameraRig->push_back(Camera(Pose3::Identity(), sharedK)); SmartFactorRS::shared_ptr smartFactor1( new SmartFactorRS(model, cameraRig, params)); @@ -1261,7 +1261,7 @@ TEST(SmartProjectionPoseFactorRollingShutter, interp_factors.at(0)); // we readd the first interp factor boost::shared_ptr cameraRig(new Cameras()); - cameraRig->push_back(Camera(Pose3::identity(), sharedK)); + cameraRig->push_back(Camera(Pose3::Identity(), sharedK)); SmartFactorRS::shared_ptr smartFactor1( new SmartFactorRS(model, cameraRig, params)); @@ -1331,11 +1331,11 @@ TEST(SmartProjectionPoseFactorRollingShutter, timing) { gtsam::HESSIAN, gtsam::ZERO_ON_DEGENERACY); // only config that works with RS factors - Rot3 R = Rot3::identity(); + Rot3 R = Rot3::Identity(); Pose3 pose1 = Pose3(R, Point3(0, 0, 0)); Pose3 pose2 = Pose3(R, Point3(1, 0, 0)); Camera cam1(pose1, sharedKSimple), cam2(pose2, sharedKSimple); - Pose3 body_P_sensorId = Pose3::identity(); + Pose3 body_P_sensorId = Pose3::Identity(); // one landmarks 1m in front of camera Point3 landmark1(0, 0, 10); @@ -1431,7 +1431,7 @@ TEST(SmartProjectionPoseFactorRollingShutter, params.setRankTolerance(0.1); boost::shared_ptr cameraRig(new Cameras()); - cameraRig->push_back(Camera(Pose3::identity(), emptyK)); + cameraRig->push_back(Camera(Pose3::Identity(), emptyK)); SmartFactorRS_spherical::shared_ptr smartFactor1( new SmartFactorRS_spherical(model, cameraRig, params)); diff --git a/gtsam_unstable/slam/tests/testSmartStereoFactor_iSAM2.cpp b/gtsam_unstable/slam/tests/testSmartStereoFactor_iSAM2.cpp index 107defb4c..98b18371f 100644 --- a/gtsam_unstable/slam/tests/testSmartStereoFactor_iSAM2.cpp +++ b/gtsam_unstable/slam/tests/testSmartStereoFactor_iSAM2.cpp @@ -198,10 +198,10 @@ TEST(testISAM2SmartFactor, Stereo_Batch) { // prior, for the first keyframe: if (kf_id == 0) { - batch_graph.addPrior(X(kf_id), Pose3::identity(), priorPoseNoise); + batch_graph.addPrior(X(kf_id), Pose3::Identity(), priorPoseNoise); } - batch_values.insert(X(kf_id), Pose3::identity()); + batch_values.insert(X(kf_id), Pose3::Identity()); } LevenbergMarquardtParams parameters; @@ -267,7 +267,7 @@ TEST(testISAM2SmartFactor, Stereo_iSAM2) { // Storage of smart factors: std::map smartFactors; - Pose3 lastKeyframePose = Pose3::identity(); + Pose3 lastKeyframePose = Pose3::Identity(); // Run one timestep at once: for (const auto &entries : dataset) { @@ -307,7 +307,7 @@ TEST(testISAM2SmartFactor, Stereo_iSAM2) { // prior, for the first keyframe: if (kf_id == 0) { - newFactors.addPrior(X(kf_id), Pose3::identity(), priorPoseNoise); + newFactors.addPrior(X(kf_id), Pose3::Identity(), priorPoseNoise); } // 2) Run iSAM2: diff --git a/gtsam_unstable/slam/tests/testSmartStereoProjectionFactorPP.cpp 
b/gtsam_unstable/slam/tests/testSmartStereoProjectionFactorPP.cpp index c71c19038..1dbd25666 100644 --- a/gtsam_unstable/slam/tests/testSmartStereoProjectionFactorPP.cpp +++ b/gtsam_unstable/slam/tests/testSmartStereoProjectionFactorPP.cpp @@ -181,8 +181,8 @@ TEST_UNSAFE( SmartStereoProjectionFactorPP, noiseless_error_identityExtrinsics ) Values values; values.insert(x1, w_Pose_cam1); values.insert(x2, w_Pose_cam2); - values.insert(body_P_cam1_key, Pose3::identity()); - values.insert(body_P_cam2_key, Pose3::identity()); + values.insert(body_P_cam1_key, Pose3::Identity()); + values.insert(body_P_cam2_key, Pose3::Identity()); SmartStereoProjectionFactorPP factor1(model); factor1.add(cam1_uv, x1, body_P_cam1_key, K2); @@ -426,7 +426,7 @@ TEST( SmartStereoProjectionFactorPP, 3poses_optimization_multipleExtrinsics ) { // Values Pose3 body_Pose_cam1 = Pose3(Rot3::Ypr(-M_PI, 1., 0.1),Point3(0, 1, 0)); Pose3 body_Pose_cam2 = Pose3(Rot3::Ypr(-M_PI / 4, 0.1, 1.0),Point3(1, 1, 1)); - Pose3 body_Pose_cam3 = Pose3::identity(); + Pose3 body_Pose_cam3 = Pose3::Identity(); Pose3 w_Pose_body1 = w_Pose_cam1.compose(body_Pose_cam1.inverse()); Pose3 w_Pose_body2 = w_Pose_cam2.compose(body_Pose_cam2.inverse()); Pose3 w_Pose_body3 = w_Pose_cam3.compose(body_Pose_cam3.inverse()); @@ -1147,7 +1147,7 @@ TEST( SmartStereoProjectionFactorPP, landmarkDistance ) { graph.push_back(smartFactor3); graph.addPrior(x1, pose1, noisePrior); graph.addPrior(x2, pose2, noisePrior); - graph.addPrior(body_P_cam_key, Pose3::identity(), noisePrior); + graph.addPrior(body_P_cam_key, Pose3::Identity(), noisePrior); // Pose3 noise_pose = Pose3(Rot3::Ypr(-M_PI/10, 0., -M_PI/10), Point3(0.5,0.1,0.3)); // noise from regular projection factor test below Pose3 noise_pose = Pose3(Rot3::Ypr(-M_PI / 100, 0., -M_PI / 100), @@ -1156,7 +1156,7 @@ TEST( SmartStereoProjectionFactorPP, landmarkDistance ) { values.insert(x1, pose1); values.insert(x2, pose2); values.insert(x3, pose3 * noise_pose); - values.insert(body_P_cam_key, Pose3::identity()); + values.insert(body_P_cam_key, Pose3::Identity()); // All smart factors are disabled and pose should remain where it is Values result; @@ -1245,7 +1245,7 @@ TEST( SmartStereoProjectionFactorPP, dynamicOutlierRejection ) { values.insert(x1, pose1); values.insert(x2, pose2); values.insert(x3, pose3); - values.insert(body_P_cam_key, Pose3::identity()); + values.insert(body_P_cam_key, Pose3::Identity()); EXPECT_DOUBLES_EQUAL(0, smartFactor1->error(values), 1e-9); EXPECT_DOUBLES_EQUAL(0, smartFactor2->error(values), 1e-9); @@ -1267,7 +1267,7 @@ TEST( SmartStereoProjectionFactorPP, dynamicOutlierRejection ) { Values result; LevenbergMarquardtOptimizer optimizer(graph, values, lm_params); result = optimizer.optimize(); - EXPECT(assert_equal(Pose3::identity(), result.at(body_P_cam_key))); + EXPECT(assert_equal(Pose3::Identity(), result.at(body_P_cam_key))); } /* ************************************************************************* */ diff --git a/matlab/CMakeLists.txt b/matlab/CMakeLists.txt index a657c6be7..a77db06a4 100644 --- a/matlab/CMakeLists.txt +++ b/matlab/CMakeLists.txt @@ -73,6 +73,7 @@ set(interface_files ${GTSAM_SOURCE_DIR}/gtsam/geometry/geometry.i ${GTSAM_SOURCE_DIR}/gtsam/linear/linear.i ${GTSAM_SOURCE_DIR}/gtsam/nonlinear/nonlinear.i + ${GTSAM_SOURCE_DIR}/gtsam/nonlinear/custom.i ${GTSAM_SOURCE_DIR}/gtsam/symbolic/symbolic.i ${GTSAM_SOURCE_DIR}/gtsam/sam/sam.i ${GTSAM_SOURCE_DIR}/gtsam/slam/slam.i @@ -98,7 +99,6 @@ endif(GTSAM_UNSTABLE_INSTALL_MATLAB_TOOLBOX) # Record the root dir for gtsam - 
needed during external builds, e.g., ROS set(GTSAM_SOURCE_ROOT_DIR ${GTSAM_SOURCE_DIR}) -message(STATUS "GTSAM_SOURCE_ROOT_DIR: [${GTSAM_SOURCE_ROOT_DIR}]") # Tests message(STATUS "Installing Matlab Toolbox") install_matlab_scripts("${GTSAM_SOURCE_ROOT_DIR}/matlab/" "*.m;*.fig") diff --git a/python/CMakeLists.txt b/python/CMakeLists.txt index 3761bcef1..ffb52ad11 100644 --- a/python/CMakeLists.txt +++ b/python/CMakeLists.txt @@ -63,6 +63,7 @@ set(interface_headers ${PROJECT_SOURCE_DIR}/gtsam/geometry/geometry.i ${PROJECT_SOURCE_DIR}/gtsam/linear/linear.i ${PROJECT_SOURCE_DIR}/gtsam/nonlinear/nonlinear.i + ${PROJECT_SOURCE_DIR}/gtsam/nonlinear/custom.i ${PROJECT_SOURCE_DIR}/gtsam/symbolic/symbolic.i ${PROJECT_SOURCE_DIR}/gtsam/sam/sam.i ${PROJECT_SOURCE_DIR}/gtsam/slam/slam.i diff --git a/python/gtsam/preamble/custom.h b/python/gtsam/preamble/custom.h new file mode 100644 index 000000000..d07a75f6f --- /dev/null +++ b/python/gtsam/preamble/custom.h @@ -0,0 +1,12 @@ +/* Please refer to: + * https://pybind11.readthedocs.io/en/stable/advanced/cast/stl.html + * These are required to save one copy operation on Python calls. + * + * NOTES + * ================= + * + * `PYBIND11_MAKE_OPAQUE` will mark the type as "opaque" for the pybind11 + * automatic STL binding, such that the raw objects can be accessed in Python. + * Without this they will be automatically converted to a Python object, and all + * mutations on Python side will not be reflected on C++. + */ diff --git a/python/gtsam/preamble/hybrid.h b/python/gtsam/preamble/hybrid.h index 5e5a71e48..bae636d6a 100644 --- a/python/gtsam/preamble/hybrid.h +++ b/python/gtsam/preamble/hybrid.h @@ -10,5 +10,12 @@ * Without this they will be automatically converted to a Python object, and all * mutations on Python side will not be reflected on C++. */ +#include + +#ifdef GTSAM_ALLOCATOR_TBB +PYBIND11_MAKE_OPAQUE(std::vector>); +#else +PYBIND11_MAKE_OPAQUE(std::vector); +#endif PYBIND11_MAKE_OPAQUE(std::vector); diff --git a/python/gtsam/preamble/nonlinear.h b/python/gtsam/preamble/nonlinear.h index a34e73058..d07a75f6f 100644 --- a/python/gtsam/preamble/nonlinear.h +++ b/python/gtsam/preamble/nonlinear.h @@ -9,4 +9,4 @@ * automatic STL binding, such that the raw objects can be accessed in Python. * Without this they will be automatically converted to a Python object, and all * mutations on Python side will not be reflected on C++. - */ \ No newline at end of file + */ diff --git a/python/gtsam/specializations/custom.h b/python/gtsam/specializations/custom.h new file mode 100644 index 000000000..d46ccdc66 --- /dev/null +++ b/python/gtsam/specializations/custom.h @@ -0,0 +1,12 @@ +/* Please refer to: + * https://pybind11.readthedocs.io/en/stable/advanced/cast/stl.html + * These are required to save one copy operation on Python calls. + * + * NOTES + * ================= + * + * `py::bind_vector` and similar machinery gives the std container a Python-like + * interface, but without the `` copying mechanism. Combined + * with `PYBIND11_MAKE_OPAQUE` this allows the types to be modified with Python, + * and saves one copy operation. + */ \ No newline at end of file diff --git a/python/gtsam/specializations/nonlinear.h b/python/gtsam/specializations/nonlinear.h index d46ccdc66..da8842eaf 100644 --- a/python/gtsam/specializations/nonlinear.h +++ b/python/gtsam/specializations/nonlinear.h @@ -9,4 +9,4 @@ * interface, but without the `` copying mechanism. 
  * with `PYBIND11_MAKE_OPAQUE` this allows the types to be modified with Python,
  * and saves one copy operation.
- */
\ No newline at end of file
+ */
diff --git a/python/gtsam/tests/test_HybridFactorGraph.py b/python/gtsam/tests/test_HybridFactorGraph.py
index 781cfd924..576efa82f 100644
--- a/python/gtsam/tests/test_HybridFactorGraph.py
+++ b/python/gtsam/tests/test_HybridFactorGraph.py
@@ -55,6 +55,34 @@ class TestHybridGaussianFactorGraph(GtsamTestCase):
         discrete_conditional = hbn.at(hbn.size() - 1).inner()
         self.assertIsInstance(discrete_conditional, gtsam.DiscreteConditional)
 
+    def test_optimize(self):
+        """Test construction of hybrid factor graph."""
+        noiseModel = gtsam.noiseModel.Unit.Create(3)
+        dk = gtsam.DiscreteKeys()
+        dk.push_back((C(0), 2))
+
+        jf1 = gtsam.JacobianFactor(X(0), np.eye(3), np.zeros((3, 1)),
+                                   noiseModel)
+        jf2 = gtsam.JacobianFactor(X(0), np.eye(3), np.ones((3, 1)),
+                                   noiseModel)
+
+        gmf = gtsam.GaussianMixtureFactor.FromFactors([X(0)], dk, [jf1, jf2])
+
+        hfg = gtsam.HybridGaussianFactorGraph()
+        hfg.add(jf1)
+        hfg.add(jf2)
+        hfg.push_back(gmf)
+
+        dtf = gtsam.DecisionTreeFactor([(C(0), 2)], "0 1")
+        hfg.add(dtf)
+
+        hbn = hfg.eliminateSequential(
+            gtsam.Ordering.ColamdConstrainedLastHybridGaussianFactorGraph(
+                hfg, [C(0)]))
+
+        # print("hbn = ", hbn)
+        hv = hbn.optimize()
+        self.assertEqual(hv.atDiscrete(C(0)), 1)
 
 if __name__ == "__main__":
     unittest.main()
diff --git a/python/gtsam/tests/test_HybridNonlinearFactorGraph.py b/python/gtsam/tests/test_HybridNonlinearFactorGraph.py
new file mode 100644
index 000000000..3ac0d5c6f
--- /dev/null
+++ b/python/gtsam/tests/test_HybridNonlinearFactorGraph.py
@@ -0,0 +1,55 @@
+"""
+GTSAM Copyright 2010-2019, Georgia Tech Research Corporation,
+Atlanta, Georgia 30332-0415
+All Rights Reserved
+
+See LICENSE for the license information
+
+Unit tests for Hybrid Nonlinear Factor Graphs.
+Author: Fan Jiang
+"""
+# pylint: disable=invalid-name, no-name-in-module, no-member
+
+from __future__ import print_function
+
+import unittest
+
+import gtsam
+import numpy as np
+from gtsam.symbol_shorthand import C, X
+from gtsam.utils.test_case import GtsamTestCase
+
+
+class TestHybridGaussianFactorGraph(GtsamTestCase):
+    """Unit tests for HybridGaussianFactorGraph."""
+
+    def test_nonlinear_hybrid(self):
+        nlfg = gtsam.HybridNonlinearFactorGraph()
+        dk = gtsam.DiscreteKeys()
+        dk.push_back((10, 2))
+        nlfg.add(gtsam.BetweenFactorPoint3(1, 2, gtsam.Point3(1, 2, 3), gtsam.noiseModel.Diagonal.Variances([1, 1, 1])))
+        nlfg.add(
+            gtsam.PriorFactorPoint3(2, gtsam.Point3(1, 2, 3), gtsam.noiseModel.Diagonal.Variances([0.5, 0.5, 0.5])))
+        nlfg.push_back(
+            gtsam.MixtureFactor([1], dk, [
+                gtsam.PriorFactorPoint3(1, gtsam.Point3(0, 0, 0),
+                                        gtsam.noiseModel.Unit.Create(3)),
+                gtsam.PriorFactorPoint3(1, gtsam.Point3(1, 2, 1),
+                                        gtsam.noiseModel.Unit.Create(3))
+            ]))
+        nlfg.add(gtsam.DecisionTreeFactor((10, 2), "1 3"))
+        values = gtsam.Values()
+        values.insert_point3(1, gtsam.Point3(0, 0, 0))
+        values.insert_point3(2, gtsam.Point3(2, 3, 1))
+        hfg = nlfg.linearize(values)
+        o = gtsam.Ordering()
+        o.push_back(1)
+        o.push_back(2)
+        o.push_back(10)
+        hbn = hfg.eliminateSequential(o)
+        hbv = hbn.optimize()
+        self.assertEqual(hbv.atDiscrete(10), 0)
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/python/gtsam/tests/test_HybridValues.py b/python/gtsam/tests/test_HybridValues.py
new file mode 100644
index 000000000..63e7c8e7d
--- /dev/null
+++ b/python/gtsam/tests/test_HybridValues.py
@@ -0,0 +1,41 @@
+"""
+GTSAM Copyright 2010-2019, Georgia Tech Research Corporation,
+Atlanta, Georgia 30332-0415
+All Rights Reserved
+
+See LICENSE for the license information
+
+Unit tests for Hybrid Values.
+Author: Shangjie Xue
+"""
+# pylint: disable=invalid-name, no-name-in-module, no-member
+
+from __future__ import print_function
+
+import unittest
+
+import gtsam
+import numpy as np
+from gtsam.symbol_shorthand import C, X
+from gtsam.utils.test_case import GtsamTestCase
+
+
+class TestHybridGaussianFactorGraph(GtsamTestCase):
+    """Unit tests for HybridValues."""
+
+    def test_basic(self):
+        """Test construction and basic methods of hybrid values."""
+
+        hv1 = gtsam.HybridValues()
+        hv1.insert(X(0), np.ones((3,1)))
+        hv1.insert(C(0), 2)
+
+        hv2 = gtsam.HybridValues()
+        hv2.insert(C(0), 2)
+        hv2.insert(X(0), np.ones((3,1)))
+
+        self.assertEqual(hv1.atDiscrete(C(0)), 2)
+        self.assertEqual(hv1.at(X(0))[0], np.ones((3,1))[0])
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/python/gtsam/tests/test_NonlinearOptimizer.py b/python/gtsam/tests/test_NonlinearOptimizer.py
index 37afd9e1c..a47c5ad62 100644
--- a/python/gtsam/tests/test_NonlinearOptimizer.py
+++ b/python/gtsam/tests/test_NonlinearOptimizer.py
@@ -15,10 +15,12 @@ from __future__ import print_function
 import unittest
 
 import gtsam
-from gtsam import (DoglegOptimizer, DoglegParams, DummyPreconditionerParameters,
-                   GaussNewtonOptimizer, GaussNewtonParams, GncLMParams, GncLMOptimizer,
-                   LevenbergMarquardtOptimizer, LevenbergMarquardtParams, NonlinearFactorGraph,
-                   Ordering, PCGSolverParameters, Point2, PriorFactorPoint2, Values)
+from gtsam import (
+    DoglegOptimizer, DoglegParams, DummyPreconditionerParameters, GaussNewtonOptimizer,
+    GaussNewtonParams, GncLMParams, GncLossType, GncLMOptimizer, LevenbergMarquardtOptimizer,
+    LevenbergMarquardtParams, NonlinearFactorGraph, Ordering, PCGSolverParameters, Point2,
+    PriorFactorPoint2, Values
+)
 from gtsam.utils.test_case import GtsamTestCase
 
 KEY1 = 1
@@ -27,7 +29,6 @@ KEY2 = 2
 
 class TestScenario(GtsamTestCase):
     """Do trivial test with three optimizer variants."""
-
     def setUp(self):
         """Set up the optimization problem and ordering"""
         # create graph
@@ -83,16 +84,83 @@ class TestScenario(GtsamTestCase):
         actual = GncLMOptimizer(self.fg, self.initial_values, gncParams).optimize()
         self.assertAlmostEqual(0, self.fg.error(actual))
 
+    def test_gnc_params(self):
+        base_params = LevenbergMarquardtParams()
+        # Test base params
+        for base_max_iters in (50, 100):
+            base_params.setMaxIterations(base_max_iters)
+            params = GncLMParams(base_params)
+            self.assertEqual(params.baseOptimizerParams.getMaxIterations(), base_max_iters)
+
+        # Test printing
+        params_str = str(params)
+        for s in (
+                "lossType",
+                "maxIterations",
+                "muStep",
+                "relativeCostTol",
+                "weightsTol",
+                "verbosity",
+        ):
+            self.assertTrue(s in params_str)
+
+        # Test each parameter
+        for loss_type in (GncLossType.TLS, GncLossType.GM):
+            params.setLossType(loss_type)  # Default is TLS
+            self.assertEqual(params.lossType, loss_type)
+        for max_iter in (1, 10, 100):
+            params.setMaxIterations(max_iter)
+            self.assertEqual(params.maxIterations, max_iter)
+        for mu_step in (1.1, 1.2, 1.5):
+            params.setMuStep(mu_step)
+            self.assertEqual(params.muStep, mu_step)
+        for rel_cost_tol in (1e-5, 1e-6, 1e-7):
+            params.setRelativeCostTol(rel_cost_tol)
+            self.assertEqual(params.relativeCostTol, rel_cost_tol)
+        for weights_tol in (1e-4, 1e-3, 1e-2):
+            params.setWeightsTol(weights_tol)
+            self.assertEqual(params.weightsTol, weights_tol)
+        for i in (0, 1, 2):
+            verb = GncLMParams.Verbosity(i)
+            params.setVerbosityGNC(verb)
+            self.assertEqual(params.verbosity, verb)
+        for inl in ([], [10], [0, 100]):
+            params.setKnownInliers(inl)
+            self.assertEqual(params.knownInliers, inl)
+            params.knownInliers = []
+        for out in ([], [1], [0, 10]):
+            params.setKnownInliers(out)
+            self.assertEqual(params.knownInliers, out)
+            params.knownInliers = []
+
+        # Test optimizer params
+        optimizer = GncLMOptimizer(self.fg, self.initial_values, params)
+        for ict_factor in (0.9, 1.1):
+            new_ict = ict_factor * optimizer.getInlierCostThresholds()
+            optimizer.setInlierCostThresholds(new_ict)
+            self.assertAlmostEqual(optimizer.getInlierCostThresholds(), new_ict)
+        for w_factor in (0.8, 0.9):
+            new_weights = w_factor * optimizer.getWeights()
+            optimizer.setWeights(new_weights)
+            self.assertAlmostEqual(optimizer.getWeights(), new_weights)
+        optimizer.setInlierCostThresholdsAtProbability(0.9)
+        w1 = optimizer.getInlierCostThresholds()
+        optimizer.setInlierCostThresholdsAtProbability(0.8)
+        w2 = optimizer.getInlierCostThresholds()
+        self.assertLess(w2, w1)
+
     def test_iteration_hook(self):
         # set up iteration hook to track some testable values
         iteration_count = 0
         final_error = 0
         final_values = None
+
         def iteration_hook(iter, error_before, error_after):
             nonlocal iteration_count, final_error, final_values
             iteration_count = iter
             final_error = error_after
             final_values = optimizer.values()
+
         # optimize
         params = LevenbergMarquardtParams.CeresDefaults()
         params.setOrdering(self.ordering)
@@ -104,5 +172,6 @@ class TestScenario(GtsamTestCase):
         self.assertEqual(self.fg.error(actual), final_error)
         self.assertEqual(optimizer.iterations(), iteration_count)
 
+
 if __name__ == "__main__":
     unittest.main()
diff --git a/tests/testExpressionFactor.cpp b/tests/testExpressionFactor.cpp
index 6d23144aa..5b27eea4d 100644
--- a/tests/testExpressionFactor.cpp
+++ b/tests/testExpressionFactor.cpp
@@ -382,7 +382,7 @@ TEST(ExpressionFactor, compose2) {
 TEST(ExpressionFactor, compose3) {
 
   // Create expression
-  Rot3_ R1(Rot3::identity()), R2(3);
+  Rot3_ R1(Rot3::Identity()), R2(3);
   Rot3_ R3 = R1 * R2;
 
   // Create factor
diff --git a/tests/testGncOptimizer.cpp b/tests/testGncOptimizer.cpp
index c3335ce20..15d660114 100644
--- a/tests/testGncOptimizer.cpp
+++ b/tests/testGncOptimizer.cpp
@@ -567,7 +567,7 @@ TEST(GncOptimizer, optimizeWithKnownInliers) {
   Values initial;
   initial.insert(X(1), p0);
 
-  std::vector<size_t> knownInliers;
+  GncParams::IndexVector knownInliers;
   knownInliers.push_back(0);
   knownInliers.push_back(1);
   knownInliers.push_back(2);
@@ -644,7 +644,7 @@ TEST(GncOptimizer, barcsq) {
   Values initial;
   initial.insert(X(1), p0);
 
-  std::vector<size_t> knownInliers;
+  GncParams::IndexVector knownInliers;
   knownInliers.push_back(0);
   knownInliers.push_back(1);
   knownInliers.push_back(2);
@@ -691,7 +691,7 @@ TEST(GncOptimizer, setInlierCostThresholds) {
   Values initial;
   initial.insert(X(1), p0);
 
-  std::vector<size_t> knownInliers;
+  GncParams::IndexVector knownInliers;
   knownInliers.push_back(0);
   knownInliers.push_back(1);
   knownInliers.push_back(2);
@@ -763,7 +763,7 @@ TEST(GncOptimizer, optimizeSmallPoseGraph) {
 
   // GNC
   // Note: in difficult instances, we set the odometry measurements to be
   // inliers, but this problem is simple enought to succeed even without that
-  // assumption std::vector<size_t> knownInliers;
+  // assumption GncParams::IndexVector knownInliers;
   GncParams gncParams;
   auto gnc = GncOptimizer>(*graph, *initial, gncParams);
@@ -784,12 +784,12 @@ TEST(GncOptimizer, knownInliersAndOutliers) {
   // nonconvexity with known inliers and known outliers (check early stopping
   // when all measurements are known to be inliers or outliers)
   {
-    std::vector<size_t> knownInliers;
+    GncParams::IndexVector knownInliers;
     knownInliers.push_back(0);
     knownInliers.push_back(1);
     knownInliers.push_back(2);
 
-    std::vector<size_t> knownOutliers;
+    GncParams::IndexVector knownOutliers;
     knownOutliers.push_back(3);
 
     GncParams gncParams;
@@ -813,11 +813,11 @@ TEST(GncOptimizer, knownInliersAndOutliers) {
 
   // nonconvexity with known inliers and known outliers
   {
-    std::vector<size_t> knownInliers;
+    GncParams::IndexVector knownInliers;
     knownInliers.push_back(2);
     knownInliers.push_back(0);
 
-    std::vector<size_t> knownOutliers;
+    GncParams::IndexVector knownOutliers;
     knownOutliers.push_back(3);
 
     GncParams gncParams;
@@ -841,7 +841,7 @@ TEST(GncOptimizer, knownInliersAndOutliers) {
 
   // only known outliers
   {
-    std::vector<size_t> knownOutliers;
+    GncParams::IndexVector knownOutliers;
     knownOutliers.push_back(3);
 
     GncParams gncParams;
@@ -916,11 +916,11 @@ TEST(GncOptimizer, setWeights) {
   // initialize weights and also set known inliers/outliers
   {
     GncParams gncParams;
-    std::vector<size_t> knownInliers;
+    GncParams::IndexVector knownInliers;
     knownInliers.push_back(2);
     knownInliers.push_back(0);
 
-    std::vector<size_t> knownOutliers;
+    GncParams::IndexVector knownOutliers;
     knownOutliers.push_back(3);
     gncParams.setKnownInliers(knownInliers);
     gncParams.setKnownOutliers(knownOutliers);
diff --git a/tests/testManifold.cpp b/tests/testManifold.cpp
index ac3a09e36..a05c4b621 100644
--- a/tests/testManifold.cpp
+++ b/tests/testManifold.cpp
@@ -139,7 +139,7 @@ TEST(Manifold, DefaultChart) {
 
   Vector v3(3);
   v3 << 1, 1, 1;
-  Rot3 I = Rot3::identity();
+  Rot3 I = Rot3::Identity();
   Rot3 R = I.retract(v3);
   //DefaultChart chart5;
   EXPECT(assert_equal(v3, traits<Rot3>::Local(I, R)));
diff --git a/timing/timeShonanFactor.cpp b/timing/timeShonanFactor.cpp
index 207d54a4d..60dd6f47a 100644
--- a/timing/timeShonanFactor.cpp
+++ b/timing/timeShonanFactor.cpp
@@ -62,9 +62,9 @@ int main(int argc, char* argv[]) {
 
   // Build graph
   NonlinearFactorGraph graph;
-  // graph.add(NonlinearEquality<SOn>(0, SOn::identity(4)));
+  // graph.add(NonlinearEquality<SOn>(0, SOn::Identity(4)));
   auto priorModel = noiseModel::Isotropic::Sigma(6, 10000);
-  graph.add(PriorFactor<SOn>(0, SOn::identity(4), priorModel));
+  graph.add(PriorFactor<SOn>(0, SOn::Identity(4), priorModel));
   auto G = boost::make_shared<Matrix>(SOn::VectorizedGenerators(4));
   for (const auto &m : measurements) {
     const auto &keys = m.keys();
@@ -92,7 +92,7 @@ int main(int argc, char* argv[]) {
   for (size_t i = 0; i < 100; i++) {
     gttic_(optimize);
     Values initial;
-    initial.insert(0, SOn::identity(4));
+    initial.insert(0, SOn::Identity(4));
     for (size_t j = 1; j < poses.size(); j++) {
       initial.insert(j, SOn::Random(rng, 4));
     }