diff --git a/.github/scripts/python.sh b/.github/scripts/python.sh new file mode 100644 index 000000000..5cc6ada24 --- /dev/null +++ b/.github/scripts/python.sh @@ -0,0 +1,94 @@ +#!/bin/bash + +########################################################## +# Build and test the GTSAM Python wrapper. +########################################################## + +set -x -e + +if [ -z ${PYTHON_VERSION+x} ]; then + echo "Please provide the Python version to build against!" + exit 127 +fi + +if [ -z ${WRAPPER+x} ]; then + echo "Please provide the wrapper to build!" + exit 126 +fi + +PYTHON="python${PYTHON_VERSION}" + +if [[ $(uname) == "Darwin" ]]; then + brew install wget +else + # Install a system package required by our library + sudo apt-get install -y wget libicu-dev python3-pip python3-setuptools +fi + +PATH=$PATH:$($PYTHON -c "import site; print(site.USER_BASE)")/bin + +case $WRAPPER in +"cython") + BUILD_CYTHON="ON" + BUILD_PYBIND="OFF" + TYPEDEF_POINTS_TO_VECTORS="OFF" + + sudo $PYTHON -m pip install -r $GITHUB_WORKSPACE/cython/requirements.txt + ;; +"pybind") + BUILD_CYTHON="OFF" + BUILD_PYBIND="ON" + TYPEDEF_POINTS_TO_VECTORS="ON" + + sudo $PYTHON -m pip install -r $GITHUB_WORKSPACE/python/requirements.txt + ;; +*) + exit 126 + ;; +esac + +git submodule update --init --recursive + +mkdir $GITHUB_WORKSPACE/build +cd $GITHUB_WORKSPACE/build + +cmake $GITHUB_WORKSPACE -DCMAKE_BUILD_TYPE=Release \ + -DGTSAM_BUILD_TESTS=OFF -DGTSAM_BUILD_UNSTABLE=ON \ + -DGTSAM_USE_QUATERNIONS=OFF \ + -DGTSAM_BUILD_EXAMPLES_ALWAYS=OFF \ + -DGTSAM_BUILD_WITH_MARCH_NATIVE=OFF \ + -DGTSAM_INSTALL_CYTHON_TOOLBOX=${BUILD_CYTHON} \ + -DGTSAM_BUILD_PYTHON=${BUILD_PYBIND} \ + -DGTSAM_TYPEDEF_POINTS_TO_VECTORS=${TYPEDEF_POINTS_TO_VECTORS} \ + -DGTSAM_PYTHON_VERSION=$PYTHON_VERSION \ + -DPYTHON_EXECUTABLE:FILEPATH=$(which $PYTHON) \ + -DGTSAM_ALLOW_DEPRECATED_SINCE_V41=OFF \ + -DCMAKE_INSTALL_PREFIX=$GITHUB_WORKSPACE/gtsam_install + +make -j$(nproc) install & + +while ps -p $! > /dev/null +do + sleep 60 + now=$(date +%s) + printf "%d seconds have elapsed\n" $(( (now - start) )) +done + +case $WRAPPER in +"cython") + cd $GITHUB_WORKSPACE/build/cython + $PYTHON setup.py install --user --prefix= + cd $GITHUB_WORKSPACE/build/cython/gtsam/tests + $PYTHON -m unittest discover + ;; +"pybind") + cd $GITHUB_WORKSPACE/python + $PYTHON setup.py install --user --prefix= + cd $GITHUB_WORKSPACE/wrap/python/gtsam_py/tests + $PYTHON -m unittest discover + ;; +*) + echo "THIS SHOULD NEVER HAPPEN!" + exit 125 + ;; +esac \ No newline at end of file diff --git a/.travis.sh b/.github/scripts/unix.sh old mode 100755 new mode 100644 similarity index 73% rename from .travis.sh rename to .github/scripts/unix.sh index 7777e9919..aa6e49650 --- a/.travis.sh +++ b/.github/scripts/unix.sh @@ -1,20 +1,25 @@ #!/bin/bash +########################################################## +# Build and test GTSAM for *nix based systems. +# Specifically Linux and macOS. 
+########################################################## + # install TBB with _debug.so files function install_tbb() { TBB_BASEURL=https://github.com/oneapi-src/oneTBB/releases/download - TBB_VERSION=4.4.2 - TBB_DIR=tbb44_20151115oss + TBB_VERSION=4.4.5 + TBB_DIR=tbb44_20160526oss TBB_SAVEPATH="/tmp/tbb.tgz" - if [ "$TRAVIS_OS_NAME" == "linux" ]; then + if [ "$(uname)" == "Linux" ]; then OS_SHORT="lin" TBB_LIB_DIR="intel64/gcc4.4" SUDO="sudo" - elif [ "$TRAVIS_OS_NAME" == "osx" ]; then - OS_SHORT="lin" + elif [ "$(uname)" == "Darwin" ]; then + OS_SHORT="osx" TBB_LIB_DIR="" SUDO="" @@ -25,7 +30,7 @@ function install_tbb() TBBROOT=/tmp/$TBB_DIR # Copy the needed files to the correct places. - # This works correctly for travis builds, instead of setting path variables. + # This works correctly for CI builds, instead of setting path variables. # This is what Homebrew does to install TBB on Macs $SUDO cp -R $TBBROOT/lib/$TBB_LIB_DIR/* /usr/local/lib/ $SUDO cp -R $TBBROOT/include/ /usr/local/include/ @@ -38,15 +43,15 @@ function configure() set -e # Make sure any error makes the script to return an error code set -x # echo - SOURCE_DIR=`pwd` - BUILD_DIR=build + SOURCE_DIR=$GITHUB_WORKSPACE + BUILD_DIR=$GITHUB_WORKSPACE/build #env - git clean -fd || true + git submodule update --init --recursive rm -fr $BUILD_DIR || true mkdir $BUILD_DIR && cd $BUILD_DIR - install_tbb + [ "${GTSAM_WITH_TBB:-OFF}" = "ON" ] && install_tbb if [ ! -z "$GCC_VERSION" ]; then export CC=gcc-$GCC_VERSION @@ -63,7 +68,10 @@ function configure() -DGTSAM_BUILD_EXAMPLES_ALWAYS=${GTSAM_BUILD_EXAMPLES_ALWAYS:-ON} \ -DGTSAM_ALLOW_DEPRECATED_SINCE_V4=${GTSAM_ALLOW_DEPRECATED_SINCE_V41:-OFF} \ -DGTSAM_BUILD_WITH_MARCH_NATIVE=OFF \ - -DCMAKE_VERBOSE_MAKEFILE=OFF + -DCMAKE_VERBOSE_MAKEFILE=ON \ + -DBOOST_ROOT=$BOOST_ROOT \ + -DBoost_NO_SYSTEM_PATHS=ON \ + -DBoost_ARCHITECTURE=-x64 } @@ -71,7 +79,7 @@ function configure() function finish () { # Print ccache stats - ccache -s + [ -x "$(command -v ccache)" ] && ccache -s cd $SOURCE_DIR } @@ -111,4 +119,4 @@ case $1 in -t) test ;; -esac +esac \ No newline at end of file diff --git a/.github/workflows/build-linux.yml b/.github/workflows/build-linux.yml new file mode 100644 index 000000000..911bec59c --- /dev/null +++ b/.github/workflows/build-linux.yml @@ -0,0 +1,78 @@ +name: Linux CI + +on: [push, pull_request] + +jobs: + build: + name: ${{ matrix.name }} ${{ matrix.build_type }} + runs-on: ${{ matrix.os }} + + env: + CTEST_OUTPUT_ON_FAILURE: ON + CTEST_PARALLEL_LEVEL: 2 + CMAKE_BUILD_TYPE: ${{ matrix.build_type }} + GTSAM_BUILD_UNSTABLE: ${{ matrix.build_unstable }} + + strategy: + fail-fast: false + matrix: + # Github Actions requires a single row to be added to the build matrix. + # See https://help.github.com/en/articles/workflow-syntax-for-github-actions. + name: [ + ubuntu-18.04-gcc-5, + ubuntu-18.04-gcc-9, + ubuntu-18.04-clang-9, + ] + + build_type: [Debug, Release] + build_unstable: [ON] + include: + - name: ubuntu-18.04-gcc-5 + os: ubuntu-18.04 + compiler: gcc + version: "5" + + - name: ubuntu-18.04-gcc-9 + os: ubuntu-18.04 + compiler: gcc + version: "9" + + - name: ubuntu-18.04-clang-9 + os: ubuntu-18.04 + compiler: clang + version: "9" + + steps: + - name: Checkout + uses: actions/checkout@master + - name: Install (Linux) + if: runner.os == 'Linux' + run: | + # LLVM 9 is not in Bionic's repositories so we add the official LLVM repository. 
+ if [ "${{ matrix.compiler }}" = "clang" ] && [ "${{ matrix.version }}" = "9" ]; then + sudo add-apt-repository "deb http://apt.llvm.org/bionic/ llvm-toolchain-bionic-9 main" + fi + sudo apt-get -y update + + sudo apt install cmake build-essential pkg-config libpython-dev python-numpy + + echo "::set-env name=BOOST_ROOT::$(echo $BOOST_ROOT_1_69_0)" + echo "::set-env name=LD_LIBRARY_PATH::$(echo $BOOST_ROOT_1_69_0/lib)" + + if [ "${{ matrix.compiler }}" = "gcc" ]; then + sudo apt-get install -y g++-${{ matrix.version }} g++-${{ matrix.version }}-multilib + echo "::set-env name=CC::gcc-${{ matrix.version }}" + echo "::set-env name=CXX::g++-${{ matrix.version }}" + else + sudo apt-get install -y clang-${{ matrix.version }} g++-multilib + echo "::set-env name=CC::clang-${{ matrix.version }}" + echo "::set-env name=CXX::clang++-${{ matrix.version }}" + fi + - name: Check Boost version + if: runner.os == 'Linux' + run: | + echo "BOOST_ROOT = $BOOST_ROOT" + - name: Build (Linux) + if: runner.os == 'Linux' + run: | + bash .github/scripts/unix.sh -t \ No newline at end of file diff --git a/.github/workflows/build-macos.yml b/.github/workflows/build-macos.yml new file mode 100644 index 000000000..55d9071ef --- /dev/null +++ b/.github/workflows/build-macos.yml @@ -0,0 +1,51 @@ +name: macOS CI + +on: [pull_request] + +jobs: + build: + name: ${{ matrix.name }} ${{ matrix.build_type }} + runs-on: ${{ matrix.os }} + + env: + CTEST_OUTPUT_ON_FAILURE: ON + CTEST_PARALLEL_LEVEL: 2 + CMAKE_BUILD_TYPE: ${{ matrix.build_type }} + GTSAM_BUILD_UNSTABLE: ${{ matrix.build_unstable }} + strategy: + fail-fast: false + matrix: + # Github Actions requires a single row to be added to the build matrix. + # See https://help.github.com/en/articles/workflow-syntax-for-github-actions. + name: [ + macOS-10.15-xcode-11.3.1, + ] + + build_type: [Debug, Release] + build_unstable: [ON] + include: + - name: macOS-10.15-xcode-11.3.1 + os: macOS-10.15 + compiler: xcode + version: "11.3.1" + + steps: + - name: Checkout + uses: actions/checkout@master + - name: Install (macOS) + if: runner.os == 'macOS' + run: | + brew install cmake ninja boost + if [ "${{ matrix.compiler }}" = "gcc" ]; then + brew install gcc@${{ matrix.version }} + echo "::set-env name=CC::gcc-${{ matrix.version }}" + echo "::set-env name=CXX::g++-${{ matrix.version }}" + else + sudo xcode-select -switch /Applications/Xcode_${{ matrix.version }}.app + echo "::set-env name=CC::clang" + echo "::set-env name=CXX::clang++" + fi + - name: Build (macOS) + if: runner.os == 'macOS' + run: | + bash .github/scripts/unix.sh -t \ No newline at end of file diff --git a/.github/workflows/build-python.yml b/.github/workflows/build-python.yml new file mode 100644 index 000000000..0b4a7f12f --- /dev/null +++ b/.github/workflows/build-python.yml @@ -0,0 +1,95 @@ +name: Python CI + +on: [pull_request] + +jobs: + build: + name: ${{ matrix.name }} ${{ matrix.build_type }} Python ${{ matrix.python_version }} + runs-on: ${{ matrix.os }} + + env: + CTEST_OUTPUT_ON_FAILURE: ON + CTEST_PARALLEL_LEVEL: 2 + CMAKE_BUILD_TYPE: ${{ matrix.build_type }} + PYTHON_VERSION: ${{ matrix.python_version }} + WRAPPER: ${{ matrix.wrapper }} + strategy: + fail-fast: false + matrix: + # Github Actions requires a single row to be added to the build matrix. + # See https://help.github.com/en/articles/workflow-syntax-for-github-actions. 
+ name: [ + ubuntu-18.04-gcc-5, + ubuntu-18.04-gcc-9, + ubuntu-18.04-clang-9, + macOS-10.15-xcode-11.3.1, + ] + + build_type: [Debug, Release] + python_version: [3] + wrapper: [cython] + include: + - name: ubuntu-18.04-gcc-5 + os: ubuntu-18.04 + compiler: gcc + version: "5" + + - name: ubuntu-18.04-gcc-9 + os: ubuntu-18.04 + compiler: gcc + version: "9" + + - name: ubuntu-18.04-clang-9 + os: ubuntu-18.04 + compiler: clang + version: "9" + + - name: macOS-10.15-xcode-11.3.1 + os: macOS-10.15 + compiler: xcode + version: "11.3.1" + + steps: + - name: Checkout + uses: actions/checkout@master + - name: Install (Linux) + if: runner.os == 'Linux' + run: | + # LLVM 9 is not in Bionic's repositories so we add the official LLVM repository. + if [ "${{ matrix.compiler }}" = "clang" ] && [ "${{ matrix.version }}" = "9" ]; then + sudo add-apt-repository "deb http://apt.llvm.org/bionic/ llvm-toolchain-bionic-9 main" + fi + sudo apt-get -y update + + sudo apt install cmake build-essential pkg-config libpython-dev python-numpy libboost-all-dev + + if [ "${{ matrix.compiler }}" = "gcc" ]; then + sudo apt-get install -y g++-${{ matrix.version }} g++-${{ matrix.version }}-multilib + echo "::set-env name=CC::gcc-${{ matrix.version }}" + echo "::set-env name=CXX::g++-${{ matrix.version }}" + else + sudo apt-get install -y clang-${{ matrix.version }} g++-multilib + echo "::set-env name=CC::clang-${{ matrix.version }}" + echo "::set-env name=CXX::clang++-${{ matrix.version }}" + fi + - name: Install (macOS) + if: runner.os == 'macOS' + run: | + brew install cmake ninja boost + if [ "${{ matrix.compiler }}" = "gcc" ]; then + brew install gcc@${{ matrix.version }} + echo "::set-env name=CC::gcc-${{ matrix.version }}" + echo "::set-env name=CXX::g++-${{ matrix.version }}" + else + sudo xcode-select -switch /Applications/Xcode_${{ matrix.version }}.app + echo "::set-env name=CC::clang" + echo "::set-env name=CXX::clang++" + fi + - name: Build (Linux) + if: runner.os == 'Linux' + run: | + bash .github/scripts/python.sh + - name: Build (macOS) + if: runner.os == 'macOS' + run: | + bash .github/scripts/python.sh \ No newline at end of file diff --git a/.github/workflows/build-windows.yml b/.github/workflows/build-windows.yml new file mode 100644 index 000000000..b3150a751 --- /dev/null +++ b/.github/workflows/build-windows.yml @@ -0,0 +1,75 @@ +name: Windows CI + +on: [pull_request] + +jobs: + build: + name: ${{ matrix.name }} ${{ matrix.build_type }} + runs-on: ${{ matrix.os }} + + env: + CTEST_OUTPUT_ON_FAILURE: ON + CTEST_PARALLEL_LEVEL: 2 + CMAKE_BUILD_TYPE: ${{ matrix.build_type }} + GTSAM_BUILD_UNSTABLE: ${{ matrix.build_unstable }} + strategy: + fail-fast: false + matrix: + # Github Actions requires a single row to be added to the build matrix. + # See https://help.github.com/en/articles/workflow-syntax-for-github-actions. + name: [ + windows-2016-cl, + windows-2019-cl, + ] + + build_type: [Debug, Release] + build_unstable: [ON] + include: + - name: windows-2016-cl + os: windows-2016 + compiler: cl + + - name: windows-2019-cl + os: windows-2019 + compiler: cl + + steps: + - name: Checkout + uses: actions/checkout@master + - name: Install (Windows) + if: runner.os == 'Windows' + shell: powershell + run: | + Invoke-Expression (New-Object System.Net.WebClient).DownloadString('https://get.scoop.sh') + scoop install ninja --global + if ("${{ matrix.compiler }}".StartsWith("clang")) { + scoop install llvm --global + } + if ("${{ matrix.compiler }}" -eq "gcc") { + # Chocolatey GCC is broken on the windows-2019 image. 
+ # See: https://github.com/DaanDeMeyer/doctest/runs/231595515 + # See: https://github.community/t5/GitHub-Actions/Something-is-wrong-with-the-chocolatey-installed-version-of-gcc/td-p/32413 + scoop install gcc --global + echo "::set-env name=CC::gcc" + echo "::set-env name=CXX::g++" + } elseif ("${{ matrix.compiler }}" -eq "clang") { + echo "::set-env name=CC::clang" + echo "::set-env name=CXX::clang++" + } else { + echo "::set-env name=CC::${{ matrix.compiler }}" + echo "::set-env name=CXX::${{ matrix.compiler }}" + } + # Scoop modifies the PATH so we make the modified PATH global. + echo "::set-env name=PATH::$env:PATH" + - name: Build (Windows) + if: runner.os == 'Windows' + run: | + cmake -E remove_directory build + echo "BOOST_ROOT_1_72_0: ${env:BOOST_ROOT_1_72_0}" + cmake -B build -S . -DGTSAM_BUILD_EXAMPLES_ALWAYS=OFF -DBOOST_ROOT="${env:BOOST_ROOT_1_72_0}" -DBOOST_INCLUDEDIR="${env:BOOST_ROOT_1_72_0}\boost\include" -DBOOST_LIBRARYDIR="${env:BOOST_ROOT_1_72_0}\lib" + cmake --build build --config ${{ matrix.build_type }} --target gtsam + cmake --build build --config ${{ matrix.build_type }} --target gtsam_unstable + cmake --build build --config ${{ matrix.build_type }} --target wrap + cmake --build build --config ${{ matrix.build_type }} --target check.base + cmake --build build --config ${{ matrix.build_type }} --target check.base_unstable + cmake --build build --config ${{ matrix.build_type }} --target check.linear \ No newline at end of file diff --git a/.github/workflows/trigger-python.yml b/.github/workflows/trigger-python.yml index 8fad9e7ca..94527e732 100644 --- a/.github/workflows/trigger-python.yml +++ b/.github/workflows/trigger-python.yml @@ -1,6 +1,9 @@ # This triggers Cython builds on `gtsam-manylinux-build` name: Trigger Python Builds -on: push +on: + push: + branches: + - develop jobs: triggerCython: runs-on: ubuntu-latest diff --git a/.travis.python.sh b/.travis.python.sh deleted file mode 100644 index 99506e749..000000000 --- a/.travis.python.sh +++ /dev/null @@ -1,43 +0,0 @@ -#!/bin/bash -set -x -e - -if [ -z ${PYTHON_VERSION+x} ]; then - echo "Please provide the Python version to build against!" 
- exit 127 -fi - -PYTHON="python${PYTHON_VERSION}" - -if [[ $(uname) == "Darwin" ]]; then - brew install wget -else - # Install a system package required by our library - sudo apt-get install wget libicu-dev python3-pip python3-setuptools -fi - -CURRDIR=$(pwd) - -sudo $PYTHON -m pip install -r ./cython/requirements.txt - -mkdir $CURRDIR/build -cd $CURRDIR/build - -cmake $CURRDIR -DCMAKE_BUILD_TYPE=Release \ - -DGTSAM_BUILD_TESTS=OFF -DGTSAM_BUILD_UNSTABLE=ON \ - -DGTSAM_USE_QUATERNIONS=OFF \ - -DGTSAM_BUILD_EXAMPLES_ALWAYS=OFF \ - -DGTSAM_BUILD_WITH_MARCH_NATIVE=OFF \ - -DGTSAM_INSTALL_CYTHON_TOOLBOX=ON \ - -DGTSAM_PYTHON_VERSION=$PYTHON_VERSION \ - -DGTSAM_ALLOW_DEPRECATED_SINCE_V41=OFF \ - -DCMAKE_INSTALL_PREFIX=$CURRDIR/../gtsam_install - -make -j$(nproc) install - -cd cython - -sudo $PYTHON setup.py install - -cd $CURRDIR/cython/gtsam/tests - -$PYTHON -m unittest discover \ No newline at end of file diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 986e86cc2..000000000 --- a/.travis.yml +++ /dev/null @@ -1,140 +0,0 @@ -language: cpp -cache: ccache -dist: xenial - -addons: - apt: - sources: - - ubuntu-toolchain-r-test - - sourceline: 'deb http://apt.llvm.org/xenial/ llvm-toolchain-xenial-9 main' - key_url: 'https://apt.llvm.org/llvm-snapshot.gpg.key' - packages: - - g++-9 - - clang-9 - - build-essential pkg-config - - cmake - - python3-dev libpython-dev - - python3-numpy - - libboost-all-dev - -# before_install: - # - if [ "$TRAVIS_OS_NAME" == "osx" ]; then brew update; fi - -install: - - if [ "$TRAVIS_OS_NAME" == "osx" ]; then HOMEBREW_NO_AUTO_UPDATE=1 brew install ccache ; fi - - if [ "$TRAVIS_OS_NAME" == "osx" ]; then export PATH="/usr/local/opt/ccache/libexec:$PATH" ; fi - -# We first do the compile stage specified below, then the matrix expansion specified after. -stages: - - compile - - test - - special - -env: - global: - - MAKEFLAGS="-j3" - - CCACHE_SLOPPINESS=pch_defines,time_macros - -# Compile stage without building examples/tests to populate the caches. 
-jobs: -# -------- STAGE 1: COMPILE ----------- - include: -# on Mac, GCC - - stage: compile - os: osx - compiler: gcc - env: CMAKE_BUILD_TYPE=Debug GTSAM_BUILD_UNSTABLE=OFF - script: bash .travis.sh -b - - stage: compile - os: osx - compiler: gcc - env: CMAKE_BUILD_TYPE=Release - script: bash .travis.sh -b -# on Mac, CLANG - - stage: compile - os: osx - compiler: clang - env: CMAKE_BUILD_TYPE=Debug GTSAM_BUILD_UNSTABLE=OFF - script: bash .travis.sh -b - - stage: compile - os: osx - compiler: clang - env: CMAKE_BUILD_TYPE=Release - script: bash .travis.sh -b -# on Linux, GCC - - stage: compile - os: linux - compiler: gcc - env: CMAKE_BUILD_TYPE=Debug GTSAM_BUILD_UNSTABLE=OFF - script: bash .travis.sh -b - - stage: compile - os: linux - compiler: gcc - env: CMAKE_BUILD_TYPE=Release - script: bash .travis.sh -b -# on Linux, CLANG - - stage: compile - os: linux - compiler: clang - env: CC=clang-9 CXX=clang++-9 CMAKE_BUILD_TYPE=Debug GTSAM_BUILD_UNSTABLE=OFF - script: bash .travis.sh -b - - stage: compile - os: linux - compiler: clang - env: CC=clang-9 CXX=clang++-9 CMAKE_BUILD_TYPE=Release - script: bash .travis.sh -b -# on Linux, with deprecated ON to make sure that path still compiles/tests - - stage: special - os: linux - compiler: clang - env: CC=clang-9 CXX=clang++-9 CMAKE_BUILD_TYPE=Debug GTSAM_BUILD_UNSTABLE=OFF GTSAM_ALLOW_DEPRECATED_SINCE_V41=ON - script: bash .travis.sh -b -# on Linux, with GTSAM_WITH_TBB on to make sure GTSAM still compiles/tests - # - stage: special - # os: linux - # compiler: gcc - # env: CMAKE_BUILD_TYPE=Debug GTSAM_BUILD_UNSTABLE=OFF GTSAM_WITH_TBB=ON - # script: bash .travis.sh -t -# -------- STAGE 2: TESTS ----------- -# on Mac, GCC - - stage: test - os: osx - compiler: clang - env: CMAKE_BUILD_TYPE=Release - script: bash .travis.sh -t - - stage: test - os: osx - compiler: clang - env: CMAKE_BUILD_TYPE=Debug GTSAM_BUILD_UNSTABLE=OFF - script: bash .travis.sh -t - - stage: test - os: linux - compiler: gcc - env: CMAKE_BUILD_TYPE=Release - script: bash .travis.sh -t - - stage: test - os: linux - compiler: gcc - env: CMAKE_BUILD_TYPE=Debug GTSAM_BUILD_UNSTABLE=OFF - script: bash .travis.sh -t - - stage: test - os: linux - compiler: clang - env: CC=clang-9 CXX=clang++-9 CMAKE_BUILD_TYPE=Release - script: bash .travis.sh -t -# on Linux, with quaternions ON to make sure that path still compiles/tests - - stage: special - os: linux - compiler: clang - env: CC=clang-9 CXX=clang++-9 CMAKE_BUILD_TYPE=Release GTSAM_BUILD_UNSTABLE=OFF GTSAM_USE_QUATERNIONS=ON - script: bash .travis.sh -t - - stage: special - os: linux - compiler: gcc - env: PYTHON_VERSION=3 - script: bash .travis.python.sh - - stage: special - os: osx - compiler: clang - env: PYTHON_VERSION=3 - script: bash .travis.python.sh diff --git a/CMakeLists.txt b/CMakeLists.txt index 61730d45a..d2b9bc75e 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -509,104 +509,104 @@ set(CPACK_DEBIAN_PACKAGE_DEPENDS "libboost-dev (>= 1.43)") #Example: "libc6 (>= # Print configuration variables message(STATUS "===============================================================") message(STATUS "================ Configuration Options ======================") -message(STATUS " CMAKE_CXX_COMPILER_ID type : ${CMAKE_CXX_COMPILER_ID}") -message(STATUS " CMAKE_CXX_COMPILER_VERSION : ${CMAKE_CXX_COMPILER_VERSION}") -message(STATUS " CMake version : ${CMAKE_VERSION}") -message(STATUS " CMake generator : ${CMAKE_GENERATOR}") -message(STATUS " CMake build tool : ${CMAKE_BUILD_TOOL}") +print_config("CMAKE_CXX_COMPILER_ID type" 
"${CMAKE_CXX_COMPILER_ID}") +print_config("CMAKE_CXX_COMPILER_VERSION" "${CMAKE_CXX_COMPILER_VERSION}") +print_config("CMake version" "${CMAKE_VERSION}") +print_config("CMake generator" "${CMAKE_GENERATOR}") +print_config("CMake build tool" "${CMAKE_BUILD_TOOL}") message(STATUS "Build flags ") -print_config_flag(${GTSAM_BUILD_TESTS} "Build Tests ") -print_config_flag(${GTSAM_BUILD_EXAMPLES_ALWAYS} "Build examples with 'make all' ") -print_config_flag(${GTSAM_BUILD_TIMING_ALWAYS} "Build timing scripts with 'make all'") +print_enabled_config(${GTSAM_BUILD_TESTS} "Build Tests") +print_enabled_config(${GTSAM_BUILD_EXAMPLES_ALWAYS} "Build examples with 'make all'") +print_enabled_config(${GTSAM_BUILD_TIMING_ALWAYS} "Build timing scripts with 'make all'") if (DOXYGEN_FOUND) - print_config_flag(${GTSAM_BUILD_DOCS} "Build Docs ") + print_enabled_config(${GTSAM_BUILD_DOCS} "Build Docs") endif() -print_config_flag(${BUILD_SHARED_LIBS} "Build shared GTSAM libraries ") -print_config_flag(${GTSAM_BUILD_TYPE_POSTFIXES} "Put build type in library name ") +print_enabled_config(${BUILD_SHARED_LIBS} "Build shared GTSAM libraries") +print_enabled_config(${GTSAM_BUILD_TYPE_POSTFIXES} "Put build type in library name") if(GTSAM_UNSTABLE_AVAILABLE) - print_config_flag(${GTSAM_BUILD_UNSTABLE} "Build libgtsam_unstable ") + print_enabled_config(${GTSAM_BUILD_UNSTABLE} "Build libgtsam_unstable") endif() if(NOT MSVC AND NOT XCODE_VERSION) - print_config_flag(${GTSAM_BUILD_WITH_MARCH_NATIVE} "Build for native architecture ") - message(STATUS " Build type : ${CMAKE_BUILD_TYPE}") - message(STATUS " C compilation flags : ${CMAKE_C_FLAGS} ${CMAKE_C_FLAGS_${CMAKE_BUILD_TYPE_UPPER}}") - message(STATUS " C++ compilation flags : ${CMAKE_CXX_FLAGS} ${CMAKE_CXX_FLAGS_${CMAKE_BUILD_TYPE_UPPER}}") + print_enabled_config(${GTSAM_BUILD_WITH_MARCH_NATIVE} "Build for native architecture ") + print_config("Build type" "${CMAKE_BUILD_TYPE}") + print_config("C compilation flags" "${CMAKE_C_FLAGS} ${CMAKE_C_FLAGS_${CMAKE_BUILD_TYPE_UPPER}}") + print_config("C++ compilation flags" "${CMAKE_CXX_FLAGS} ${CMAKE_CXX_FLAGS_${CMAKE_BUILD_TYPE_UPPER}}") endif() print_build_options_for_target(gtsam) -message(STATUS " Use System Eigen : ${GTSAM_USE_SYSTEM_EIGEN} (Using version: ${GTSAM_EIGEN_VERSION})") +print_config("Use System Eigen" "${GTSAM_USE_SYSTEM_EIGEN} (Using version: ${GTSAM_EIGEN_VERSION})") if(GTSAM_USE_TBB) - message(STATUS " Use Intel TBB : Yes") + print_config("Use Intel TBB" "Yes") elseif(TBB_FOUND) - message(STATUS " Use Intel TBB : TBB found but GTSAM_WITH_TBB is disabled") + print_config("Use Intel TBB" "TBB found but GTSAM_WITH_TBB is disabled") else() - message(STATUS " Use Intel TBB : TBB not found") + print_config("Use Intel TBB" "TBB not found") endif() if(GTSAM_USE_EIGEN_MKL) - message(STATUS " Eigen will use MKL : Yes") + print_config("Eigen will use MKL" "Yes") elseif(MKL_FOUND) - message(STATUS " Eigen will use MKL : MKL found but GTSAM_WITH_EIGEN_MKL is disabled") + print_config("Eigen will use MKL" "MKL found but GTSAM_WITH_EIGEN_MKL is disabled") else() - message(STATUS " Eigen will use MKL : MKL not found") + print_config("Eigen will use MKL" "MKL not found") endif() if(GTSAM_USE_EIGEN_MKL_OPENMP) - message(STATUS " Eigen will use MKL and OpenMP : Yes") + print_config("Eigen will use MKL and OpenMP" "Yes") elseif(OPENMP_FOUND AND NOT GTSAM_WITH_EIGEN_MKL) - message(STATUS " Eigen will use MKL and OpenMP : OpenMP found but GTSAM_WITH_EIGEN_MKL is disabled") + print_config("Eigen will use MKL and OpenMP" "OpenMP 
found but GTSAM_WITH_EIGEN_MKL is disabled") elseif(OPENMP_FOUND AND NOT MKL_FOUND) - message(STATUS " Eigen will use MKL and OpenMP : OpenMP found but MKL not found") + print_config("Eigen will use MKL and OpenMP" "OpenMP found but MKL not found") elseif(OPENMP_FOUND) - message(STATUS " Eigen will use MKL and OpenMP : OpenMP found but GTSAM_WITH_EIGEN_MKL_OPENMP is disabled") + print_config("Eigen will use MKL and OpenMP" "OpenMP found but GTSAM_WITH_EIGEN_MKL_OPENMP is disabled") else() - message(STATUS " Eigen will use MKL and OpenMP : OpenMP not found") + print_config("Eigen will use MKL and OpenMP" "OpenMP not found") endif() -message(STATUS " Default allocator : ${GTSAM_DEFAULT_ALLOCATOR}") +print_config("Default allocator" "${GTSAM_DEFAULT_ALLOCATOR}") if(GTSAM_THROW_CHEIRALITY_EXCEPTION) - message(STATUS " Cheirality exceptions enabled : YES") + print_config("Cheirality exceptions enabled" "YES") else() - message(STATUS " Cheirality exceptions enabled : NO") + print_config("Cheirality exceptions enabled" "NO") endif() if(NOT MSVC AND NOT XCODE_VERSION) if(CCACHE_FOUND AND GTSAM_BUILD_WITH_CCACHE) - message(STATUS " Build with ccache : Yes") + print_config("Build with ccache" "Yes") elseif(CCACHE_FOUND) - message(STATUS " Build with ccache : ccache found but GTSAM_BUILD_WITH_CCACHE is disabled") + print_config("Build with ccache" "ccache found but GTSAM_BUILD_WITH_CCACHE is disabled") else() - message(STATUS " Build with ccache : No") + print_config("Build with ccache" "No") endif() endif() -message(STATUS "Packaging flags ") -message(STATUS " CPack Source Generator : ${CPACK_SOURCE_GENERATOR}") -message(STATUS " CPack Generator : ${CPACK_GENERATOR}") +message(STATUS "Packaging flags") +print_config("CPack Source Generator" "${CPACK_SOURCE_GENERATOR}") +print_config("CPack Generator" "${CPACK_GENERATOR}") message(STATUS "GTSAM flags ") -print_config_flag(${GTSAM_USE_QUATERNIONS} "Quaternions as default Rot3 ") -print_config_flag(${GTSAM_ENABLE_CONSISTENCY_CHECKS} "Runtime consistency checking ") -print_config_flag(${GTSAM_ROT3_EXPMAP} "Rot3 retract is full ExpMap ") -print_config_flag(${GTSAM_POSE3_EXPMAP} "Pose3 retract is full ExpMap ") -print_config_flag(${GTSAM_ALLOW_DEPRECATED_SINCE_V41} "Allow features deprecated in GTSAM 4.1") -print_config_flag(${GTSAM_TYPEDEF_POINTS_TO_VECTORS} "Point3 is typedef to Vector3 ") -print_config_flag(${GTSAM_SUPPORT_NESTED_DISSECTION} "Metis-based Nested Dissection ") -print_config_flag(${GTSAM_TANGENT_PREINTEGRATION} "Use tangent-space preintegration") -print_config_flag(${GTSAM_BUILD_WRAP} "Build Wrap ") +print_enabled_config(${GTSAM_USE_QUATERNIONS} "Quaternions as default Rot3 ") +print_enabled_config(${GTSAM_ENABLE_CONSISTENCY_CHECKS} "Runtime consistency checking ") +print_enabled_config(${GTSAM_ROT3_EXPMAP} "Rot3 retract is full ExpMap ") +print_enabled_config(${GTSAM_POSE3_EXPMAP} "Pose3 retract is full ExpMap ") +print_enabled_config(${GTSAM_ALLOW_DEPRECATED_SINCE_V41} "Allow features deprecated in GTSAM 4.1") +print_enabled_config(${GTSAM_TYPEDEF_POINTS_TO_VECTORS} "Point3 is typedef to Vector3 ") +print_enabled_config(${GTSAM_SUPPORT_NESTED_DISSECTION} "Metis-based Nested Dissection ") +print_enabled_config(${GTSAM_TANGENT_PREINTEGRATION} "Use tangent-space preintegration") +print_enabled_config(${GTSAM_BUILD_WRAP} "Build Wrap ") -message(STATUS "MATLAB toolbox flags ") -print_config_flag(${GTSAM_INSTALL_MATLAB_TOOLBOX} "Install MATLAB toolbox ") +message(STATUS "MATLAB toolbox flags") 
+print_enabled_config(${GTSAM_INSTALL_MATLAB_TOOLBOX} "Install MATLAB toolbox ") if (${GTSAM_INSTALL_MATLAB_TOOLBOX}) - message(STATUS " MATLAB root : ${MATLAB_ROOT}") - message(STATUS " MEX binary : ${MEX_COMMAND}") + print_config("MATLAB root" "${MATLAB_ROOT}") + print_config("MEX binary" "${MEX_COMMAND}") endif() message(STATUS "Cython toolbox flags ") -print_config_flag(${GTSAM_INSTALL_CYTHON_TOOLBOX} "Install Cython toolbox ") +print_enabled_config(${GTSAM_INSTALL_CYTHON_TOOLBOX} "Install Cython toolbox ") if(GTSAM_INSTALL_CYTHON_TOOLBOX) - message(STATUS " Python version : ${GTSAM_PYTHON_VERSION}") + print_config("Python version" "${GTSAM_PYTHON_VERSION}") endif() message(STATUS "===============================================================") diff --git a/appveyor.yml b/appveyor.yml deleted file mode 100644 index 3747354cf..000000000 --- a/appveyor.yml +++ /dev/null @@ -1,33 +0,0 @@ -# version format -version: 4.0.3-{branch}-build{build} - -os: Visual Studio 2019 - -clone_folder: c:\projects\gtsam - -platform: x64 -configuration: Release - -environment: - CTEST_OUTPUT_ON_FAILURE: 1 - BOOST_ROOT: C:/Libraries/boost_1_71_0 - -build_script: - - cd c:\projects\gtsam\build - # As of Dec 2019, not all unit tests build cleanly for MSVC, so we'll just - # check that parts of GTSAM build correctly: - #- cmake --build . - - cmake --build . --config Release --target gtsam - - cmake --build . --config Release --target gtsam_unstable - - cmake --build . --config Release --target wrap - #- cmake --build . --target check - - cmake --build . --config Release --target check.base - - cmake --build . --config Release --target check.base_unstable - - cmake --build . --config Release --target check.linear - -before_build: - - cd c:\projects\gtsam - - mkdir build - - cd build - # Disable examples to avoid AppVeyor timeout - - cmake -G "Visual Studio 16 2019" .. 
-DGTSAM_BUILD_EXAMPLES_ALWAYS=OFF diff --git a/cmake/GtsamPrinting.cmake b/cmake/GtsamPrinting.cmake index e53f9c54f..5757f5ee1 100644 --- a/cmake/GtsamPrinting.cmake +++ b/cmake/GtsamPrinting.cmake @@ -1,14 +1,3 @@ -# print configuration variables -# Usage: -#print_config_flag(${GTSAM_BUILD_TESTS} "Build Tests ") -function(print_config_flag flag msg) - if (flag) - message(STATUS " ${msg}: Enabled") - else () - message(STATUS " ${msg}: Disabled") - endif () -endfunction() - # Based on https://github.com/jimbraun/XCDF/blob/master/cmake/CMakePadString.cmake function(string_pad RESULT_NAME DESIRED_LENGTH VALUE) string(LENGTH "${VALUE}" VALUE_LENGTH) @@ -26,6 +15,27 @@ endfunction() set(GTSAM_PRINT_SUMMARY_PADDING_LENGTH 50 CACHE STRING "Padding of cmake summary report lines after configuring.") mark_as_advanced(GTSAM_PRINT_SUMMARY_PADDING_LENGTH) +# print configuration variables with automatic padding +# Usage: +# print_config(${GTSAM_BUILD_TESTS} "Build Tests") +function(print_config config msg) + string_pad(padded_config ${GTSAM_PRINT_SUMMARY_PADDING_LENGTH} " ${config}") + message(STATUS "${padded_config}: ${msg}") +endfunction() + +# print configuration variable with enabled/disabled value +# Usage: +# print_enabled_config(${GTSAM_BUILD_TESTS} "Build Tests ") +function(print_enabled_config config msg) + string_pad(padded_msg ${GTSAM_PRINT_SUMMARY_PADDING_LENGTH} " ${msg}") + if (config) + message(STATUS "${padded_msg}: Enabled") + else () + message(STATUS "${padded_msg}: Disabled") + endif () +endfunction() + + # Print " var: ${var}" padding with spaces as needed function(print_padded variable_name) string_pad(padded_prop ${GTSAM_PRINT_SUMMARY_PADDING_LENGTH} " ${variable_name}") diff --git a/cython/gtsam/utils/plot.py b/cython/gtsam/utils/plot.py index f69987235..b55f5206a 100644 --- a/cython/gtsam/utils/plot.py +++ b/cython/gtsam/utils/plot.py @@ -155,7 +155,6 @@ def plot_pose2(fignum, pose, axis_length=0.1, covariance=None, axes.set_xlabel(axis_labels[0]) axes.set_ylabel(axis_labels[1]) - axes.set_zlabel(axis_labels[2]) return fig diff --git a/cython/gtsam/utils/visual_data_generator.py b/cython/gtsam/utils/visual_data_generator.py index f04588e70..5ce72fe68 100644 --- a/cython/gtsam/utils/visual_data_generator.py +++ b/cython/gtsam/utils/visual_data_generator.py @@ -30,8 +30,8 @@ class GroundTruth: def __init__(self, K=Cal3_S2(), nrCameras=3, nrPoints=4): self.K = K - self.cameras = [Pose3()] * nrCameras - self.points = [Point3()] * nrPoints + self.cameras = [gtsam.Pose3()] * nrCameras + self.points = [gtsam.Point3(0, 0, 0)] * nrPoints def print_(self, s=""): print(s) @@ -99,11 +99,11 @@ def generate_data(options): r = 40 for i in range(options.nrCameras): theta = i * 2 * np.pi / options.nrCameras - t = Point3(r * np.cos(theta), r * np.sin(theta), height) - truth.cameras[i] = PinholeCameraCal3_S2.Lookat(t, - Point3(), - Point3(0, 0, 1), - truth.K) + t = gtsam.Point3(r * np.cos(theta), r * np.sin(theta), height) + truth.cameras[i] = gtsam.SimpleCamera.Lookat(t, + gtsam.Point3(0, 0, 0), + gtsam.Point3(0, 0, 1), + truth.K) # Create measurements for j in range(nrPoints): # All landmarks seen in every frame diff --git a/examples/Data/example_with_vertices.g2o b/examples/Data/example_with_vertices.g2o index 6c2f1a530..ca7cd86df 100644 --- a/examples/Data/example_with_vertices.g2o +++ b/examples/Data/example_with_vertices.g2o @@ -1,16 +1,16 @@ -VERTEX_SE3:QUAT 8646911284551352320 40 -1.15443e-13 10 0.557345 0.557345 -0.435162 -0.435162 -VERTEX_SE3:QUAT 8646911284551352321 28.2843 28.2843 10 
0.301633 0.728207 -0.568567 -0.235508 -VERTEX_SE3:QUAT 8646911284551352322 -1.6986e-08 40 10 -3.89609e-10 0.788205 -0.615412 -2.07622e-10 -VERTEX_SE3:QUAT 8646911284551352323 -28.2843 28.2843 10 -0.301633 0.728207 -0.568567 0.235508 -VERTEX_SE3:QUAT 8646911284551352324 -40 -2.32554e-10 10 -0.557345 0.557345 -0.435162 0.435162 -VERTEX_SE3:QUAT 8646911284551352325 -28.2843 -28.2843 10 -0.728207 0.301633 -0.235508 0.568567 -VERTEX_SE3:QUAT 8646911284551352326 -2.53531e-09 -40 10 -0.788205 -1.25891e-11 -3.82742e-13 0.615412 -VERTEX_SE3:QUAT 8646911284551352327 28.2843 -28.2843 10 -0.728207 -0.301633 0.235508 0.568567 -VERTEX_TRACKXYZ 7782220156096217088 10 10 10 -VERTEX_TRACKXYZ 7782220156096217089 -10 10 10 -VERTEX_TRACKXYZ 7782220156096217090 -10 -10 10 -VERTEX_TRACKXYZ 7782220156096217091 10 -10 10 -VERTEX_TRACKXYZ 7782220156096217092 10 10 -10 -VERTEX_TRACKXYZ 7782220156096217093 -10 10 -10 -VERTEX_TRACKXYZ 7782220156096217094 -10 -10 -10 -VERTEX_TRACKXYZ 7782220156096217095 10 -10 -10 +VERTEX_SE3:QUAT 0 40 -1.15443e-13 10 0.557345 0.557345 -0.435162 -0.435162 +VERTEX_SE3:QUAT 1 28.2843 28.2843 10 0.301633 0.728207 -0.568567 -0.235508 +VERTEX_SE3:QUAT 2 -1.6986e-08 40 10 -3.89609e-10 0.788205 -0.615412 -2.07622e-10 +VERTEX_SE3:QUAT 3 -28.2843 28.2843 10 -0.301633 0.728207 -0.568567 0.235508 +VERTEX_SE3:QUAT 4 -40 -2.32554e-10 10 -0.557345 0.557345 -0.435162 0.435162 +VERTEX_SE3:QUAT 5 -28.2843 -28.2843 10 -0.728207 0.301633 -0.235508 0.568567 +VERTEX_SE3:QUAT 6 -2.53531e-09 -40 10 -0.788205 -1.25891e-11 -3.82742e-13 0.615412 +VERTEX_SE3:QUAT 7 28.2843 -28.2843 10 -0.728207 -0.301633 0.235508 0.568567 +VERTEX_TRACKXYZ 0 10 10 10 +VERTEX_TRACKXYZ 1 -10 10 10 +VERTEX_TRACKXYZ 2 -10 -10 10 +VERTEX_TRACKXYZ 3 10 -10 10 +VERTEX_TRACKXYZ 4 10 10 -10 +VERTEX_TRACKXYZ 5 -10 10 -10 +VERTEX_TRACKXYZ 6 -10 -10 -10 +VERTEX_TRACKXYZ 7 10 -10 -10 diff --git a/gtsam/base/GenericValue.h b/gtsam/base/GenericValue.h index dc205b47f..0e928765f 100644 --- a/gtsam/base/GenericValue.h +++ b/gtsam/base/GenericValue.h @@ -30,6 +30,14 @@ #include #include // operator typeid +#ifdef _WIN32 +#define GENERICVALUE_VISIBILITY +#else +// This will trigger a LNKxxxx on MSVC, so disable for MSVC build +// Please refer to https://github.com/borglab/gtsam/blob/develop/Using-GTSAM-EXPORT.md +#define GENERICVALUE_VISIBILITY GTSAM_EXPORT +#endif + namespace gtsam { /** diff --git a/gtsam/nonlinear/Values.h b/gtsam/nonlinear/Values.h index aadbcf394..b49188b68 100644 --- a/gtsam/nonlinear/Values.h +++ b/gtsam/nonlinear/Values.h @@ -397,7 +397,7 @@ namespace gtsam { template size_t count() const { size_t i = 0; - for (const auto& key_value : *this) { + for (const auto key_value : *this) { if (dynamic_cast*>(&key_value.value)) ++i; } diff --git a/gtsam/sam/BearingRangeFactor.h b/gtsam/sam/BearingRangeFactor.h index af7b47446..482dbb974 100644 --- a/gtsam/sam/BearingRangeFactor.h +++ b/gtsam/sam/BearingRangeFactor.h @@ -42,14 +42,21 @@ class BearingRangeFactor public: typedef boost::shared_ptr shared_ptr; - /// default constructor + /// Default constructor BearingRangeFactor() {} - /// primary constructor - BearingRangeFactor(Key key1, Key key2, const B& measuredBearing, - const R& measuredRange, const SharedNoiseModel& model) - : Base({key1, key2}, model, T(measuredBearing, measuredRange)) { - this->initialize(expression({key1, key2})); + /// Construct from BearingRange instance + BearingRangeFactor(Key key1, Key key2, const T &bearingRange, + const SharedNoiseModel &model) + : Base({{key1, key2}}, model, T(bearingRange)) { + 
this->initialize(expression({{key1, key2}})); + } + + /// Construct from separate bearing and range + BearingRangeFactor(Key key1, Key key2, const B &measuredBearing, + const R &measuredRange, const SharedNoiseModel &model) + : Base({{key1, key2}}, model, T(measuredBearing, measuredRange)) { + this->initialize(expression({{key1, key2}})); } virtual ~BearingRangeFactor() {} diff --git a/gtsam/sfm/BinaryMeasurement.h b/gtsam/sfm/BinaryMeasurement.h index c4b4a9804..c525c1b9e 100644 --- a/gtsam/sfm/BinaryMeasurement.h +++ b/gtsam/sfm/BinaryMeasurement.h @@ -15,78 +15,69 @@ * @file BinaryMeasurement.h * @author Akshay Krishnan * @date July 2020 - * @brief Binary measurement represents a measurement between two keys in a graph. - * A binary measurement is similar to a BetweenFactor, except that it does not contain - * an error function. It is a measurement (along with a noise model) from one key to - * another. Note that the direction is important. A measurement from key1 to key2 is not - * the same as the same measurement from key2 to key1. + * @brief Binary measurement represents a measurement between two keys in a + * graph. A binary measurement is similar to a BetweenFactor, except that it + * does not contain an error function. It is a measurement (along with a noise + * model) from one key to another. Note that the direction is important. A + * measurement from key1 to key2 is not the same as the same measurement from + * key2 to key1. */ -#include - #include -#include -#include - +#include #include #include +#include +#include + namespace gtsam { -template -class BinaryMeasurement { - // Check that VALUE type is testable - BOOST_CONCEPT_ASSERT((IsTestable)); - - public: - typedef VALUE T; +template class BinaryMeasurement : public Factor { + // Check that T type is testable + BOOST_CONCEPT_ASSERT((IsTestable)); +public: // shorthand for a smart pointer to a measurement - typedef typename boost::shared_ptr shared_ptr; + using shared_ptr = typename boost::shared_ptr; - private: - Key key1_, key2_; /** Keys */ +private: + T measured_; ///< The measurement + SharedNoiseModel noiseModel_; ///< Noise model - VALUE measured_; /** The measurement */ +public: + BinaryMeasurement(Key key1, Key key2, const T &measured, + const SharedNoiseModel &model = nullptr) + : Factor(std::vector({key1, key2})), measured_(measured), + noiseModel_(model) {} - SharedNoiseModel noiseModel_; /** Noise model */ - - public: - /** Constructor */ - BinaryMeasurement(Key key1, Key key2, const VALUE &measured, - const SharedNoiseModel &model = nullptr) : - key1_(key1), key2_(key2), measured_(measured), noiseModel_(model) { - } - - Key key1() const { return key1_; } - - Key key2() const { return key2_; } + /// @name Standard Interface + /// @{ + Key key1() const { return keys_[0]; } + Key key2() const { return keys_[1]; } + const T &measured() const { return measured_; } const SharedNoiseModel &noiseModel() const { return noiseModel_; } - /** implement functions needed for Testable */ + /// @} + /// @name Testable + /// @{ - /** print */ - void print(const std::string &s, const KeyFormatter &keyFormatter = DefaultKeyFormatter) const { - std::cout << s << "BinaryMeasurement(" - << keyFormatter(this->key1()) << "," + void print(const std::string &s, + const KeyFormatter &keyFormatter = DefaultKeyFormatter) const { + std::cout << s << "BinaryMeasurement(" << keyFormatter(this->key1()) << "," << keyFormatter(this->key2()) << ")\n"; traits::Print(measured_, " measured: "); this->noiseModel_->print(" noise model: "); } - /** 
equals */ bool equals(const BinaryMeasurement &expected, double tol = 1e-9) const { - const BinaryMeasurement *e = dynamic_cast *> (&expected); - return e != nullptr && key1_ == e->key1_ && - key2_ == e->key2_ - && traits::Equals(this->measured_, e->measured_, tol) && - noiseModel_->equals(*expected.noiseModel()); + const BinaryMeasurement *e = + dynamic_cast *>(&expected); + return e != nullptr && Factor::equals(*e) && + traits::Equals(this->measured_, e->measured_, tol) && + noiseModel_->equals(*expected.noiseModel()); } - - /** return the measured value */ - VALUE measured() const { - return measured_; - } -}; // \class BetweenMeasurement -} \ No newline at end of file + /// @} +}; +} // namespace gtsam \ No newline at end of file diff --git a/gtsam/slam/BetweenFactor.h b/gtsam/slam/BetweenFactor.h index 9afc2f72b..a594e95d5 100644 --- a/gtsam/slam/BetweenFactor.h +++ b/gtsam/slam/BetweenFactor.h @@ -21,6 +21,14 @@ #include #include +#ifdef _WIN32 +#define BETWEENFACTOR_VISIBILITY +#else +// This will trigger a LNKxxxx on MSVC, so disable for MSVC build +// Please refer to https://github.com/borglab/gtsam/blob/develop/Using-GTSAM-EXPORT.md +#define BETWEENFACTOR_VISIBILITY GTSAM_EXPORT +#endif + namespace gtsam { /** diff --git a/gtsam/slam/dataset.cpp b/gtsam/slam/dataset.cpp index 3a2d04127..d7b067d70 100644 --- a/gtsam/slam/dataset.cpp +++ b/gtsam/slam/dataset.cpp @@ -21,21 +21,25 @@ #include #include #include + #include #include #include + +#include +#include + +#include + #include #include -#include -#include -#include -#include + #include #include #include -#include #include #include +#include #include #include @@ -49,14 +53,16 @@ using namespace std; namespace fs = boost::filesystem; -using namespace gtsam::symbol_shorthand; +using gtsam::symbol_shorthand::L; +using gtsam::symbol_shorthand::P; +using gtsam::symbol_shorthand::X; #define LINESIZE 81920 namespace gtsam { /* ************************************************************************* */ -string findExampleDataFile(const string& name) { +string findExampleDataFile(const string &name) { // Search source tree and installed location vector rootsToSearch; @@ -73,88 +79,174 @@ string findExampleDataFile(const string& name) { namesToSearch.push_back(name + ".xml"); // Find first name that exists - for(const fs::path root: rootsToSearch) { - for(const fs::path name: namesToSearch) { + for (const fs::path root : rootsToSearch) { + for (const fs::path name : namesToSearch) { if (fs::is_regular_file(root / name)) return (root / name).string(); } } // If we did not return already, then we did not find the file - throw invalid_argument( - "gtsam::findExampleDataFile could not find a matching file in\n" - GTSAM_SOURCE_TREE_DATASET_DIR " or\n" - GTSAM_INSTALLED_DATASET_DIR " named\n" + name + ", " + name - + ".graph, or " + name + ".txt"); + throw invalid_argument("gtsam::findExampleDataFile could not find a matching " + "file in\n" GTSAM_SOURCE_TREE_DATASET_DIR + " or\n" GTSAM_INSTALLED_DATASET_DIR " named\n" + + name + ", " + name + ".graph, or " + name + ".txt"); } /* ************************************************************************* */ -string createRewrittenFileName(const string& name) { +string createRewrittenFileName(const string &name) { // Search source tree and installed location if (!exists(fs::path(name))) { throw invalid_argument( - "gtsam::createRewrittenFileName could not find a matching file in\n" - + name); + "gtsam::createRewrittenFileName could not find a matching file in\n" + + name); } fs::path p(name); - 
fs::path newpath = fs::path(p.parent_path().string()) - / fs::path(p.stem().string() + "-rewritten.txt"); + fs::path newpath = fs::path(p.parent_path().string()) / + fs::path(p.stem().string() + "-rewritten.txt"); return newpath.string(); } /* ************************************************************************* */ -GraphAndValues load2D(pair dataset, int maxID, - bool addNoise, bool smart, NoiseFormat noiseFormat, - KernelFunctionType kernelFunctionType) { - return load2D(dataset.first, dataset.second, maxID, addNoise, smart, - noiseFormat, kernelFunctionType); +// Type for parser functions used in parseLines below. +template +using Parser = + std::function(istream &is, const string &tag)>; + +// Parse a file by calling the parse(is, tag) function for every line. +// The result of calling the function is ignored, so typically parse function +// works via a side effect, like adding a factor into a graph etc. +template +static void parseLines(const string &filename, Parser parse) { + ifstream is(filename.c_str()); + if (!is) + throw invalid_argument("parse: can not find file " + filename); + string tag; + while (is >> tag) { + parse(is, tag); // ignore return value + is.ignore(LINESIZE, '\n'); + } } /* ************************************************************************* */ -// Read noise parameters and interpret them according to flags -static SharedNoiseModel readNoiseModel(ifstream& is, bool smart, - NoiseFormat noiseFormat, KernelFunctionType kernelFunctionType) { - double v1, v2, v3, v4, v5, v6; - is >> v1 >> v2 >> v3 >> v4 >> v5 >> v6; +// Parse types T into a size_t-indexed map +template +map parseToMap(const string &filename, Parser> parse, + size_t maxIndex) { + map result; + Parser> emplace = [&](istream &is, const string &tag) { + if (auto t = parse(is, tag)) { + if (!maxIndex || t->first <= maxIndex) + result.emplace(*t); + } + return boost::none; + }; + parseLines(filename, emplace); + return result; +} +/* ************************************************************************* */ +// Parse a file and push results on a vector +template +static vector parseToVector(const string &filename, Parser parse) { + vector result; + Parser add = [&result, parse](istream &is, const string &tag) { + if (auto t = parse(is, tag)) + result.push_back(*t); + return boost::none; + }; + parseLines(filename, add); + return result; +} + +/* ************************************************************************* */ +boost::optional parseVertexPose(istream &is, const string &tag) { + if ((tag == "VERTEX2") || (tag == "VERTEX_SE2") || (tag == "VERTEX")) { + size_t id; + double x, y, yaw; + if (!(is >> id >> x >> y >> yaw)) { + throw std::runtime_error("parseVertexPose encountered malformed line"); + } + return IndexedPose(id, Pose2(x, y, yaw)); + } else { + return boost::none; + } +} + +template <> +std::map parseVariables(const std::string &filename, + size_t maxIndex) { + return parseToMap(filename, parseVertexPose, maxIndex); +} + +/* ************************************************************************* */ +boost::optional parseVertexLandmark(istream &is, + const string &tag) { + if (tag == "VERTEX_XY") { + size_t id; + double x, y; + if (!(is >> id >> x >> y)) { + throw std::runtime_error( + "parseVertexLandmark encountered malformed line"); + } + return IndexedLandmark(id, Point2(x, y)); + } else { + return boost::none; + } +} + +template <> +std::map parseVariables(const std::string &filename, + size_t maxIndex) { + return parseToMap(filename, parseVertexLandmark, maxIndex); +} + 
+/* ************************************************************************* */ +// Interpret noise parameters according to flags +static SharedNoiseModel +createNoiseModel(const Vector6 v, bool smart, NoiseFormat noiseFormat, + KernelFunctionType kernelFunctionType) { if (noiseFormat == NoiseFormatAUTO) { // Try to guess covariance matrix layout - if (v1 != 0.0 && v2 == 0.0 && v3 != 0.0 && v4 != 0.0 && v5 == 0.0 - && v6 == 0.0) { - // NoiseFormatGRAPH + if (v(0) != 0.0 && v(1) == 0.0 && v(2) != 0.0 && // + v(3) != 0.0 && v(4) == 0.0 && v(5) == 0.0) { noiseFormat = NoiseFormatGRAPH; - } else if (v1 != 0.0 && v2 == 0.0 && v3 == 0.0 && v4 != 0.0 && v5 == 0.0 - && v6 != 0.0) { - // NoiseFormatCOV + } else if (v(0) != 0.0 && v(1) == 0.0 && v(2) == 0.0 && // + v(3) != 0.0 && v(4) == 0.0 && v(5) != 0.0) { noiseFormat = NoiseFormatCOV; } else { throw std::invalid_argument( - "load2D: unrecognized covariance matrix format in dataset file. Please specify the noise format."); + "load2D: unrecognized covariance matrix format in dataset file. " + "Please specify the noise format."); } } // Read matrix and check that diagonal entries are non-zero - Matrix M(3, 3); + Matrix3 M; switch (noiseFormat) { case NoiseFormatG2O: case NoiseFormatCOV: - // i.e., [ v1 v2 v3; v2' v4 v5; v3' v5' v6 ] - if (v1 == 0.0 || v4 == 0.0 || v6 == 0.0) + // i.e., [v(0) v(1) v(2); + // v(1)' v(3) v(4); + // v(2)' v(4)' v(5) ] + if (v(0) == 0.0 || v(3) == 0.0 || v(5) == 0.0) throw runtime_error( "load2D::readNoiseModel looks like this is not G2O matrix order"); - M << v1, v2, v3, v2, v4, v5, v3, v5, v6; + M << v(0), v(1), v(2), v(1), v(3), v(4), v(2), v(4), v(5); break; case NoiseFormatTORO: case NoiseFormatGRAPH: // http://www.openslam.org/toro.html // inf_ff inf_fs inf_ss inf_rr inf_fr inf_sr - // i.e., [ v1 v2 v5; v2' v3 v6; v5' v6' v4 ] - if (v1 == 0.0 || v3 == 0.0 || v4 == 0.0) + // i.e., [v(0) v(1) v(4); + // v(1)' v(2) v(5); + // v(4)' v(5)' v(3) ] + if (v(0) == 0.0 || v(2) == 0.0 || v(3) == 0.0) throw invalid_argument( "load2D::readNoiseModel looks like this is not TORO matrix order"); - M << v1, v2, v5, v2, v3, v6, v5, v6, v4; + M << v(0), v(1), v(4), v(1), v(2), v(5), v(4), v(5), v(3); break; default: throw runtime_error("load2D: invalid noise format"); @@ -195,363 +287,445 @@ static SharedNoiseModel readNoiseModel(ifstream& is, bool smart, } } -#ifdef GTSAM_ALLOW_DEPRECATED_SINCE_V41 /* ************************************************************************* */ -boost::optional parseVertex(istream& is, const string& tag) { - return parseVertexPose(is, tag); -} -#endif +boost::optional parseEdge(istream &is, const string &tag) { + if ((tag == "EDGE2") || (tag == "EDGE") || (tag == "EDGE_SE2") || + (tag == "ODOMETRY")) { -/* ************************************************************************* */ -boost::optional parseVertexPose(istream& is, const string& tag) { - if ((tag == "VERTEX2") || (tag == "VERTEX_SE2") || (tag == "VERTEX")) { - Key id; + size_t id1, id2; double x, y, yaw; - is >> id >> x >> y >> yaw; - return IndexedPose(id, Pose2(x, y, yaw)); + if (!(is >> id1 >> id2 >> x >> y >> yaw)) { + throw std::runtime_error("parseEdge encountered malformed line"); + } + return IndexedEdge({id1, id2}, Pose2(x, y, yaw)); } else { return boost::none; } } /* ************************************************************************* */ -boost::optional parseVertexLandmark(istream& is, const string& tag) { - if (tag == "VERTEX_XY") { - Key id; - double x, y; - is >> id >> x >> y; - return IndexedLandmark(id, Point2(x, 
y)); - } else { - return boost::none; +// Measurement parsers are implemented as a functor, as they have several +// options to filter and create the measurement model. +template struct ParseMeasurement; + +/* ************************************************************************* */ +// Converting from Measurement to BetweenFactor is generic +template struct ParseFactor : ParseMeasurement { + ParseFactor(const ParseMeasurement &parent) + : ParseMeasurement(parent) {} + + // We parse a measurement then convert + typename boost::optional::shared_ptr> + operator()(istream &is, const string &tag) { + if (auto m = ParseMeasurement::operator()(is, tag)) + return boost::make_shared>( + m->key1(), m->key2(), m->measured(), m->noiseModel()); + else + return boost::none; } +}; + +/* ************************************************************************* */ +// Pose2 measurement parser +template <> struct ParseMeasurement { + // The arguments + boost::shared_ptr sampler; + size_t maxIndex; + + // For noise model creation + bool smart; + NoiseFormat noiseFormat; + KernelFunctionType kernelFunctionType; + + // If this is not null, will use instead of parsed model: + SharedNoiseModel model; + + // The actual parser + boost::optional> operator()(istream &is, + const string &tag) { + auto edge = parseEdge(is, tag); + if (!edge) + return boost::none; + + // parse noise model + Vector6 v; + is >> v(0) >> v(1) >> v(2) >> v(3) >> v(4) >> v(5); + + // optional filter + size_t id1, id2; + tie(id1, id2) = edge->first; + if (maxIndex && (id1 > maxIndex || id2 > maxIndex)) + return boost::none; + + // Get pose and optionally add noise + Pose2 &pose = edge->second; + if (sampler) + pose = pose.retract(sampler->sample()); + + // emplace measurement + auto modelFromFile = + createNoiseModel(v, smart, noiseFormat, kernelFunctionType); + return BinaryMeasurement(id1, id2, pose, + model ? model : modelFromFile); + } +}; + +/* ************************************************************************* */ +// Create a sampler to corrupt a measurement +boost::shared_ptr createSampler(const SharedNoiseModel &model) { + auto noise = boost::dynamic_pointer_cast(model); + if (!noise) + throw invalid_argument("gtsam::load: invalid noise model for adding noise" + "(current version assumes diagonal noise model)!"); + return boost::shared_ptr(new Sampler(noise)); } /* ************************************************************************* */ -boost::optional parseEdge(istream& is, const string& tag) { - if ((tag == "EDGE2") || (tag == "EDGE") || (tag == "EDGE_SE2") - || (tag == "ODOMETRY")) { - - Key id1, id2; - double x, y, yaw; - is >> id1 >> id2 >> x >> y >> yaw; - return IndexedEdge(pair(id1, id2), Pose2(x, y, yaw)); - } else { - return boost::none; - } +// Implementation of parseMeasurements for Pose2 +template <> +std::vector> +parseMeasurements(const std::string &filename, + const noiseModel::Diagonal::shared_ptr &model, + size_t maxIndex) { + ParseMeasurement parse{model ? 
createSampler(model) : nullptr, + maxIndex, true, NoiseFormatAUTO, + KernelFunctionTypeNONE}; + return parseToVector>(filename, parse); } /* ************************************************************************* */ -GraphAndValues load2D(const string& filename, SharedNoiseModel model, Key maxID, - bool addNoise, bool smart, NoiseFormat noiseFormat, - KernelFunctionType kernelFunctionType) { +// Implementation of parseMeasurements for Rot2, which converts from Pose2 - ifstream is(filename.c_str()); - if (!is) - throw invalid_argument("load2D: can not find file " + filename); +// Extract Rot2 measurement from Pose2 measurement +static BinaryMeasurement convert(const BinaryMeasurement &p) { + auto gaussian = + boost::dynamic_pointer_cast(p.noiseModel()); + if (!gaussian) + throw std::invalid_argument( + "parseMeasurements can only convert Pose2 measurements " + "with Gaussian noise models."); + const Matrix3 M = gaussian->covariance(); + return BinaryMeasurement(p.key1(), p.key2(), p.measured().rotation(), + noiseModel::Isotropic::Variance(1, M(2, 2))); +} - Values::shared_ptr initial(new Values); - NonlinearFactorGraph::shared_ptr graph(new NonlinearFactorGraph); +template <> +std::vector> +parseMeasurements(const std::string &filename, + const noiseModel::Diagonal::shared_ptr &model, + size_t maxIndex) { + auto poses = parseMeasurements(filename, model, maxIndex); + std::vector> result; + result.reserve(poses.size()); + for (const auto &p : poses) + result.push_back(convert(p)); + return result; +} - string tag; +/* ************************************************************************* */ +// Implementation of parseFactors for Pose2 +template <> +std::vector::shared_ptr> +parseFactors(const std::string &filename, + const noiseModel::Diagonal::shared_ptr &model, + size_t maxIndex) { + ParseFactor parse({model ? 
createSampler(model) : nullptr, maxIndex, + true, NoiseFormatAUTO, KernelFunctionTypeNONE, + nullptr}); + return parseToVector::shared_ptr>(filename, parse); +} - // load the poses and landmarks - while (!is.eof()) { - if (!(is >> tag)) - break; +/* ************************************************************************* */ +using BearingRange2D = BearingRange; - const auto indexed_pose = parseVertexPose(is, tag); - if (indexed_pose) { - Key id = indexed_pose->first; +template <> struct ParseMeasurement { + // arguments + size_t maxIndex; - // optional filter - if (maxID && id >= maxID) - continue; + // The actual parser + boost::optional> + operator()(istream &is, const string &tag) { + if (tag != "BR" && tag != "LANDMARK") + return boost::none; - initial->insert(id, indexed_pose->second); - } - const auto indexed_landmark = parseVertexLandmark(is, tag); - if (indexed_landmark) { - Key id = indexed_landmark->first; - - // optional filter - if (maxID && id >= maxID) - continue; - - initial->insert(id, indexed_landmark->second); - } - is.ignore(LINESIZE, '\n'); - } - is.clear(); /* clears the end-of-file and error flags */ - is.seekg(0, ios::beg); - - // If asked, create a sampler with random number generator - std::unique_ptr sampler; - if (addNoise) { - noiseModel::Diagonal::shared_ptr noise; - if (model) - noise = boost::dynamic_pointer_cast(model); - if (!noise) - throw invalid_argument( - "gtsam::load2D: invalid noise model for adding noise" - "(current version assumes diagonal noise model)!"); - sampler.reset(new Sampler(noise)); - } - - // Parse the pose constraints - Key id1, id2; - bool haveLandmark = false; - const bool useModelInFile = !model; - while (!is.eof()) { - if (!(is >> tag)) - break; - - auto between_pose = parseEdge(is, tag); - if (between_pose) { - std::tie(id1, id2) = between_pose->first; - Pose2& l1Xl2 = between_pose->second; - - // read noise model - SharedNoiseModel modelInFile = readNoiseModel(is, smart, noiseFormat, - kernelFunctionType); - - // optional filter - if (maxID && (id1 >= maxID || id2 >= maxID)) - continue; - - if (useModelInFile) - model = modelInFile; - - if (addNoise) - l1Xl2 = l1Xl2.retract(sampler->sample()); - - // Insert vertices if pure odometry file - if (!initial->exists(id1)) - initial->insert(id1, Pose2()); - if (!initial->exists(id2)) - initial->insert(id2, initial->at(id1) * l1Xl2); - - NonlinearFactor::shared_ptr factor( - new BetweenFactor(id1, id2, l1Xl2, model)); - graph->push_back(factor); - } - // Parse measurements + size_t id1, id2; + is >> id1 >> id2; double bearing, range, bearing_std, range_std; - // A bearing-range measurement if (tag == "BR") { - is >> id1 >> id2 >> bearing >> range >> bearing_std >> range_std; + is >> bearing >> range >> bearing_std >> range_std; } - // A landmark measurement, TODO Frank says: don't know why is converted to bearing-range + // A landmark measurement, converted to bearing-range if (tag == "LANDMARK") { double lmx, lmy; double v1, v2, v3; - is >> id1 >> id2 >> lmx >> lmy >> v1 >> v2 >> v3; + is >> lmx >> lmy >> v1 >> v2 >> v3; // Convert x,y to bearing,range bearing = atan2(lmy, lmx); range = sqrt(lmx * lmx + lmy * lmy); - // In our experience, the x-y covariance on landmark sightings is not very good, so assume - // it describes the uncertainty at a range of 10m, and convert that to bearing/range uncertainty. 
+ // In our experience, the x-y covariance on landmark sightings is not + // very good, so assume it describes the uncertainty at a range of 10m, + // and convert that to bearing/range uncertainty. if (std::abs(v1 - v3) < 1e-4) { bearing_std = sqrt(v1 / 10.0); range_std = sqrt(v1); } else { + // TODO(frank): we are ignoring the non-uniform covariance bearing_std = 1; range_std = 1; - if (!haveLandmark) { - cout - << "Warning: load2D is a very simple dataset loader and is ignoring the\n" - "non-uniform covariance on LANDMARK measurements in this file." - << endl; - haveLandmark = true; - } } } - // Do some common stuff for bearing-range measurements - if (tag == "LANDMARK" || tag == "BR") { + // optional filter + if (maxIndex && id1 > maxIndex) + return boost::none; - // optional filter - if (maxID && id1 >= maxID) - continue; + // Create noise model + auto measurementNoise = noiseModel::Diagonal::Sigmas( + (Vector(2) << bearing_std, range_std).finished()); - // Create noise model - noiseModel::Diagonal::shared_ptr measurementNoise = - noiseModel::Diagonal::Sigmas((Vector(2) << bearing_std, range_std).finished()); + return BinaryMeasurement( + id1, L(id2), BearingRange2D(bearing, range), measurementNoise); + } +}; - // Add to graph - *graph += BearingRangeFactor(id1, L(id2), bearing, range, - measurementNoise); +/* ************************************************************************* */ +GraphAndValues load2D(const string &filename, SharedNoiseModel model, + size_t maxIndex, bool addNoise, bool smart, + NoiseFormat noiseFormat, + KernelFunctionType kernelFunctionType) { + + // Single pass for poses and landmarks. + auto initial = boost::make_shared(); + Parser insert = [maxIndex, &initial](istream &is, const string &tag) { + if (auto indexedPose = parseVertexPose(is, tag)) { + if (!maxIndex || indexedPose->first <= maxIndex) + initial->insert(indexedPose->first, indexedPose->second); + } else if (auto indexedLandmark = parseVertexLandmark(is, tag)) { + if (!maxIndex || indexedLandmark->first <= maxIndex) + initial->insert(L(indexedLandmark->first), indexedLandmark->second); + } + return 0; + }; + parseLines(filename, insert); + + // Single pass for Pose2 and bearing-range factors. + auto graph = boost::make_shared(); + + // Instantiate factor parser + ParseFactor parseBetweenFactor( + {addNoise ? createSampler(model) : nullptr, maxIndex, smart, noiseFormat, + kernelFunctionType, model}); + + // Instantiate bearing-range parser + ParseMeasurement parseBearingRange{maxIndex}; + + // Combine in a single parser that adds factors to `graph`, but also inserts + // new variables into `initial` when needed. 
+ Parser parse = [&](istream &is, const string &tag) { + if (auto f = parseBetweenFactor(is, tag)) { + graph->push_back(*f); + + // Insert vertices if pure odometry file + Key key1 = (*f)->key1(), key2 = (*f)->key2(); + if (!initial->exists(key1)) + initial->insert(key1, Pose2()); + if (!initial->exists(key2)) + initial->insert(key2, initial->at(key1) * (*f)->measured()); + } else if (auto m = parseBearingRange(is, tag)) { + Key key1 = m->key1(), key2 = m->key2(); + BearingRange2D br = m->measured(); + graph->emplace_shared>(key1, key2, br, + m->noiseModel()); // Insert poses or points if they do not exist yet - if (!initial->exists(id1)) - initial->insert(id1, Pose2()); - if (!initial->exists(L(id2))) { - Pose2 pose = initial->at(id1); - Point2 local(cos(bearing) * range, sin(bearing) * range); + if (!initial->exists(key1)) + initial->insert(key1, Pose2()); + if (!initial->exists(key2)) { + Pose2 pose = initial->at(key1); + Point2 local = br.bearing() * Point2(br.range(), 0); Point2 global = pose.transformFrom(local); - initial->insert(L(id2), global); + initial->insert(key2, global); } } - is.ignore(LINESIZE, '\n'); - } + return 0; + }; + + parseLines(filename, parse); return make_pair(graph, initial); } /* ************************************************************************* */ -GraphAndValues load2D_robust(const string& filename, - noiseModel::Base::shared_ptr& model, int maxID) { - return load2D(filename, model, maxID); +GraphAndValues load2D(pair dataset, size_t maxIndex, + bool addNoise, bool smart, NoiseFormat noiseFormat, + KernelFunctionType kernelFunctionType) { + return load2D(dataset.first, dataset.second, maxIndex, addNoise, smart, + noiseFormat, kernelFunctionType); } /* ************************************************************************* */ -void save2D(const NonlinearFactorGraph& graph, const Values& config, - const noiseModel::Diagonal::shared_ptr model, const string& filename) { +GraphAndValues load2D_robust(const string &filename, + noiseModel::Base::shared_ptr &model, + size_t maxIndex) { + return load2D(filename, model, maxIndex); +} + +/* ************************************************************************* */ +void save2D(const NonlinearFactorGraph &graph, const Values &config, + const noiseModel::Diagonal::shared_ptr model, + const string &filename) { fstream stream(filename.c_str(), fstream::out); // save poses - - for(const Values::ConstKeyValuePair& key_value: config) { - const Pose2& pose = key_value.value.cast(); + for (const Values::ConstKeyValuePair key_value : config) { + const Pose2 &pose = key_value.value.cast(); stream << "VERTEX2 " << key_value.key << " " << pose.x() << " " << pose.y() - << " " << pose.theta() << endl; + << " " << pose.theta() << endl; } // save edges - Matrix R = model->R(); - Matrix RR = trans(R) * R; //prod(trans(R),R); - for(boost::shared_ptr factor_: graph) { - boost::shared_ptr > factor = - boost::dynamic_pointer_cast >(factor_); + // TODO(frank): why don't we use model in factor? 
+ Matrix3 R = model->R(); + Matrix3 RR = R.transpose() * R; + for (auto f : graph) { + auto factor = boost::dynamic_pointer_cast>(f); if (!factor) continue; - Pose2 pose = factor->measured().inverse(); + const Pose2 pose = factor->measured().inverse(); stream << "EDGE2 " << factor->key2() << " " << factor->key1() << " " - << pose.x() << " " << pose.y() << " " << pose.theta() << " " << RR(0, 0) - << " " << RR(0, 1) << " " << RR(1, 1) << " " << RR(2, 2) << " " - << RR(0, 2) << " " << RR(1, 2) << endl; + << pose.x() << " " << pose.y() << " " << pose.theta() << " " + << RR(0, 0) << " " << RR(0, 1) << " " << RR(1, 1) << " " << RR(2, 2) + << " " << RR(0, 2) << " " << RR(1, 2) << endl; } stream.close(); } /* ************************************************************************* */ -GraphAndValues readG2o(const string& g2oFile, const bool is3D, +GraphAndValues readG2o(const string &g2oFile, const bool is3D, KernelFunctionType kernelFunctionType) { if (is3D) { return load3D(g2oFile); } else { // just call load2D - int maxID = 0; + size_t maxIndex = 0; bool addNoise = false; bool smart = true; - return load2D(g2oFile, SharedNoiseModel(), maxID, addNoise, smart, + return load2D(g2oFile, SharedNoiseModel(), maxIndex, addNoise, smart, NoiseFormatG2O, kernelFunctionType); } } /* ************************************************************************* */ -void writeG2o(const NonlinearFactorGraph& graph, const Values& estimate, - const string& filename) { +void writeG2o(const NonlinearFactorGraph &graph, const Values &estimate, + const string &filename) { fstream stream(filename.c_str(), fstream::out); + // Use a lambda here to more easily modify behavior in future. + auto index = [](gtsam::Key key) { return Symbol(key).index(); }; + // save 2D poses - for (const auto& key_value : estimate) { - auto p = dynamic_cast*>(&key_value.value); - if (!p) continue; - const Pose2& pose = p->value(); - stream << "VERTEX_SE2 " << key_value.key << " " << pose.x() << " " - << pose.y() << " " << pose.theta() << endl; + for (const auto key_value : estimate) { + auto p = dynamic_cast *>(&key_value.value); + if (!p) + continue; + const Pose2 &pose = p->value(); + stream << "VERTEX_SE2 " << index(key_value.key) << " " << pose.x() << " " + << pose.y() << " " << pose.theta() << endl; } // save 3D poses - for(const auto& key_value: estimate) { - auto p = dynamic_cast*>(&key_value.value); - if (!p) continue; - const Pose3& pose = p->value(); + for (const auto key_value : estimate) { + auto p = dynamic_cast *>(&key_value.value); + if (!p) + continue; + const Pose3 &pose = p->value(); const Point3 t = pose.translation(); const auto q = pose.rotation().toQuaternion(); - stream << "VERTEX_SE3:QUAT " << key_value.key << " " << t.x() << " " - << t.y() << " " << t.z() << " " << q.x() << " " << q.y() << " " - << q.z() << " " << q.w() << endl; + stream << "VERTEX_SE3:QUAT " << index(key_value.key) << " " << t.x() << " " + << t.y() << " " << t.z() << " " << q.x() << " " << q.y() << " " + << q.z() << " " << q.w() << endl; } // save 2D landmarks - for(const auto& key_value: estimate) { - auto p = dynamic_cast*>(&key_value.value); - if (!p) continue; - const Point2& point = p->value(); - stream << "VERTEX_XY " << key_value.key << " " << point.x() << " " - << point.y() << endl; + for (const auto key_value : estimate) { + auto p = dynamic_cast *>(&key_value.value); + if (!p) + continue; + const Point2 &point = p->value(); + stream << "VERTEX_XY " << index(key_value.key) << " " << point.x() << " " + << point.y() << endl; } // save 3D 
landmarks - for(const auto& key_value: estimate) { - auto p = dynamic_cast*>(&key_value.value); - if (!p) continue; - const Point3& point = p->value(); - stream << "VERTEX_TRACKXYZ " << key_value.key << " " << point.x() << " " - << point.y() << " " << point.z() << endl; + for (const auto key_value : estimate) { + auto p = dynamic_cast *>(&key_value.value); + if (!p) + continue; + const Point3 &point = p->value(); + stream << "VERTEX_TRACKXYZ " << index(key_value.key) << " " << point.x() + << " " << point.y() << " " << point.z() << endl; } // save edges (2D or 3D) - for(const auto& factor_: graph) { - boost::shared_ptr > factor = - boost::dynamic_pointer_cast >(factor_); - if (factor){ + for (const auto &factor_ : graph) { + auto factor = boost::dynamic_pointer_cast>(factor_); + if (factor) { SharedNoiseModel model = factor->noiseModel(); - boost::shared_ptr gaussianModel = + auto gaussianModel = boost::dynamic_pointer_cast(model); - if (!gaussianModel){ + if (!gaussianModel) { model->print("model\n"); throw invalid_argument("writeG2o: invalid noise model!"); } - Matrix Info = gaussianModel->R().transpose() * gaussianModel->R(); + Matrix3 Info = gaussianModel->R().transpose() * gaussianModel->R(); Pose2 pose = factor->measured(); //.inverse(); - stream << "EDGE_SE2 " << factor->key1() << " " << factor->key2() << " " - << pose.x() << " " << pose.y() << " " << pose.theta(); - for (int i = 0; i < 3; i++){ - for (int j = i; j < 3; j++){ + stream << "EDGE_SE2 " << index(factor->key1()) << " " + << index(factor->key2()) << " " << pose.x() << " " << pose.y() + << " " << pose.theta(); + for (size_t i = 0; i < 3; i++) { + for (size_t j = i; j < 3; j++) { stream << " " << Info(i, j); } } stream << endl; } - boost::shared_ptr< BetweenFactor > factor3D = - boost::dynamic_pointer_cast< BetweenFactor >(factor_); + auto factor3D = boost::dynamic_pointer_cast>(factor_); - if (factor3D){ + if (factor3D) { SharedNoiseModel model = factor3D->noiseModel(); boost::shared_ptr gaussianModel = boost::dynamic_pointer_cast(model); - if (!gaussianModel){ + if (!gaussianModel) { model->print("model\n"); throw invalid_argument("writeG2o: invalid noise model!"); } - Matrix Info = gaussianModel->R().transpose() * gaussianModel->R(); + Matrix6 Info = gaussianModel->R().transpose() * gaussianModel->R(); const Pose3 pose3D = factor3D->measured(); const Point3 p = pose3D.translation(); const auto q = pose3D.rotation().toQuaternion(); - stream << "EDGE_SE3:QUAT " << factor3D->key1() << " " << factor3D->key2() - << " " << p.x() << " " << p.y() << " " << p.z() << " " << q.x() - << " " << q.y() << " " << q.z() << " " << q.w(); + stream << "EDGE_SE3:QUAT " << index(factor3D->key1()) << " " + << index(factor3D->key2()) << " " << p.x() << " " << p.y() << " " + << p.z() << " " << q.x() << " " << q.y() << " " << q.z() << " " + << q.w(); - Matrix InfoG2o = I_6x6; - InfoG2o.block(0,0,3,3) = Info.block(3,3,3,3); // cov translation - InfoG2o.block(3,3,3,3) = Info.block(0,0,3,3); // cov rotation - InfoG2o.block(0,3,3,3) = Info.block(0,3,3,3); // off diagonal - InfoG2o.block(3,0,3,3) = Info.block(3,0,3,3); // off diagonal + Matrix6 InfoG2o = I_6x6; + InfoG2o.block<3, 3>(0, 0) = Info.block<3, 3>(3, 3); // cov translation + InfoG2o.block<3, 3>(3, 3) = Info.block<3, 3>(0, 0); // cov rotation + InfoG2o.block<3, 3>(0, 3) = Info.block<3, 3>(0, 3); // off diagonal + InfoG2o.block<3, 3>(3, 0) = Info.block<3, 3>(3, 0); // off diagonal - for (int i = 0; i < 6; i++){ - for (int j = i; j < 6; j++){ + for (size_t i = 0; i < 6; i++) { + for (size_t j = 
i; j < 6; j++) { stream << " " << InfoG2o(i, j); } } @@ -562,153 +736,215 @@ void writeG2o(const NonlinearFactorGraph& graph, const Values& estimate, } /* ************************************************************************* */ -static Rot3 NormalizedRot3(double w, double x, double y, double z) { +// parse quaternion in x,y,z,w order, and normalize to unit length +istream &operator>>(istream &is, Quaternion &q) { + double x, y, z, w; + is >> x >> y >> z >> w; const double norm = sqrt(w * w + x * x + y * y + z * z), f = 1.0 / norm; - return Rot3::Quaternion(f * w, f * x, f * y, f * z); + q = Quaternion(f * w, f * x, f * y, f * z); + return is; } /* ************************************************************************* */ -std::map parse3DPoses(const string& filename) { - ifstream is(filename.c_str()); - if (!is) - throw invalid_argument("parse3DPoses: can not find file " + filename); - - std::map poses; - while (!is.eof()) { - char buf[LINESIZE]; - is.getline(buf, LINESIZE); - istringstream ls(buf); - string tag; - ls >> tag; - - if (tag == "VERTEX3") { - Key id; - double x, y, z, roll, pitch, yaw; - ls >> id >> x >> y >> z >> roll >> pitch >> yaw; - poses.emplace(id, Pose3(Rot3::Ypr(yaw, pitch, roll), {x, y, z})); - } - if (tag == "VERTEX_SE3:QUAT") { - Key id; - double x, y, z, qx, qy, qz, qw; - ls >> id >> x >> y >> z >> qx >> qy >> qz >> qw; - poses.emplace(id, Pose3(NormalizedRot3(qw, qx, qy, qz), {x, y, z})); - } - } - return poses; +// parse Rot3 from roll, pitch, yaw +istream &operator>>(istream &is, Rot3 &R) { + double yaw, pitch, roll; + is >> roll >> pitch >> yaw; // notice order ! + R = Rot3::Ypr(yaw, pitch, roll); + return is; } /* ************************************************************************* */ -std::map parse3DLandmarks(const string& filename) { - ifstream is(filename.c_str()); - if (!is) - throw invalid_argument("parse3DLandmarks: can not find file " + filename); +boost::optional> parseVertexPose3(istream &is, + const string &tag) { + if (tag == "VERTEX3") { + size_t id; + double x, y, z; + Rot3 R; + is >> id >> x >> y >> z >> R; + return make_pair(id, Pose3(R, {x, y, z})); + } else if (tag == "VERTEX_SE3:QUAT") { + size_t id; + double x, y, z; + Quaternion q; + is >> id >> x >> y >> z >> q; + return make_pair(id, Pose3(q, {x, y, z})); + } else + return boost::none; +} - std::map landmarks; - while (!is.eof()) { - char buf[LINESIZE]; - is.getline(buf, LINESIZE); - istringstream ls(buf); - string tag; - ls >> tag; - - if (tag == "VERTEX_TRACKXYZ") { - Key id; - double x, y, z; - ls >> id >> x >> y >> z; - landmarks.emplace(id, Point3(x, y, z)); - } - } - return landmarks; +template <> +std::map parseVariables(const std::string &filename, + size_t maxIndex) { + return parseToMap(filename, parseVertexPose3, maxIndex); } /* ************************************************************************* */ -BetweenFactorPose3s parse3DFactors( - const string& filename, - const noiseModel::Diagonal::shared_ptr& corruptingNoise) { - ifstream is(filename.c_str()); - if (!is) throw invalid_argument("parse3DFactors: can not find file " + filename); +boost::optional> parseVertexPoint3(istream &is, + const string &tag) { + if (tag == "VERTEX_TRACKXYZ") { + size_t id; + double x, y, z; + is >> id >> x >> y >> z; + return make_pair(id, Point3(x, y, z)); + } else + return boost::none; +} - boost::optional sampler; - if (corruptingNoise) { - sampler = Sampler(corruptingNoise); - } +template <> +std::map parseVariables(const std::string &filename, + size_t maxIndex) { + return 
parseToMap(filename, parseVertexPoint3, maxIndex); +} - std::vector::shared_ptr> factors; - while (!is.eof()) { - char buf[LINESIZE]; - is.getline(buf, LINESIZE); - istringstream ls(buf); - string tag; - ls >> tag; +/* ************************************************************************* */ +// Parse a symmetric covariance matrix (onlyupper-triangular part is stored) +istream &operator>>(istream &is, Matrix6 &m) { + for (size_t i = 0; i < 6; i++) + for (size_t j = i; j < 6; j++) { + is >> m(i, j); + m(j, i) = m(i, j); + } + return is; +} +/* ************************************************************************* */ +// Pose3 measurement parser +template <> struct ParseMeasurement { + // The arguments + boost::shared_ptr sampler; + size_t maxIndex; + + // The actual parser + boost::optional> operator()(istream &is, + const string &tag) { + if (tag != "EDGE3" && tag != "EDGE_SE3:QUAT") + return boost::none; + + // parse ids and optionally filter + size_t id1, id2; + is >> id1 >> id2; + if (maxIndex && (id1 > maxIndex || id2 > maxIndex)) + return boost::none; + + Matrix6 m; if (tag == "EDGE3") { - Key id1, id2; - double x, y, z, roll, pitch, yaw; - ls >> id1 >> id2 >> x >> y >> z >> roll >> pitch >> yaw; - Matrix m(6, 6); - for (size_t i = 0; i < 6; i++) - for (size_t j = i; j < 6; j++) ls >> m(i, j); - SharedNoiseModel model = noiseModel::Gaussian::Information(m); - factors.emplace_back(new BetweenFactor( - id1, id2, Pose3(Rot3::Ypr(yaw, pitch, roll), {x, y, z}), model)); - } - if (tag == "EDGE_SE3:QUAT") { - Key id1, id2; - double x, y, z, qx, qy, qz, qw; - ls >> id1 >> id2 >> x >> y >> z >> qx >> qy >> qz >> qw; - Matrix m(6, 6); - for (size_t i = 0; i < 6; i++) { - for (size_t j = i; j < 6; j++) { - double mij; - ls >> mij; - m(i, j) = mij; - m(j, i) = mij; - } - } - Matrix mgtsam(6, 6); + double x, y, z; + Rot3 R; + is >> x >> y >> z >> R >> m; + Pose3 T12(R, {x, y, z}); + // optionally add noise + if (sampler) + T12 = T12.retract(sampler->sample()); - mgtsam.block<3, 3>(0, 0) = m.block<3, 3>(3, 3); // cov rotation - mgtsam.block<3, 3>(3, 3) = m.block<3, 3>(0, 0); // cov translation - mgtsam.block<3, 3>(0, 3) = m.block<3, 3>(0, 3); // off diagonal - mgtsam.block<3, 3>(3, 0) = m.block<3, 3>(3, 0); // off diagonal + return BinaryMeasurement(id1, id2, T12, + noiseModel::Gaussian::Information(m)); + } else if (tag == "EDGE_SE3:QUAT") { + double x, y, z; + Quaternion q; + is >> x >> y >> z >> q >> m; + Pose3 T12(q, {x, y, z}); + // optionally add noise + if (sampler) + T12 = T12.retract(sampler->sample()); + + // EDGE_SE3:QUAT stores covariance in t,R order, unlike GTSAM: + Matrix6 mgtsam; + mgtsam.block<3, 3>(0, 0) = m.block<3, 3>(3, 3); // cov rotation + mgtsam.block<3, 3>(3, 3) = m.block<3, 3>(0, 0); // cov translation + mgtsam.block<3, 3>(0, 3) = m.block<3, 3>(0, 3); // off diagonal + mgtsam.block<3, 3>(3, 0) = m.block<3, 3>(3, 0); // off diagonal SharedNoiseModel model = noiseModel::Gaussian::Information(mgtsam); - auto R12 = NormalizedRot3(qw, qx, qy, qz); - if (sampler) { - R12 = R12.retract(sampler->sample()); - } - factors.emplace_back(new BetweenFactor( - id1, id2, Pose3(R12, {x, y, z}), model)); - } + return BinaryMeasurement( + id1, id2, T12, noiseModel::Gaussian::Information(mgtsam)); + } else + return boost::none; } - return factors; +}; + +/* ************************************************************************* */ +// Implementation of parseMeasurements for Pose3 +template <> +std::vector> +parseMeasurements(const std::string &filename, + const 
noiseModel::Diagonal::shared_ptr &model, + size_t maxIndex) { + ParseMeasurement parse{model ? createSampler(model) : nullptr, + maxIndex}; + return parseToVector>(filename, parse); } /* ************************************************************************* */ -GraphAndValues load3D(const string& filename) { - const auto factors = parse3DFactors(filename); - NonlinearFactorGraph::shared_ptr graph(new NonlinearFactorGraph); - for (const auto& factor : factors) { - graph->push_back(factor); - } +// Implementation of parseMeasurements for Rot3, which converts from Pose3 - Values::shared_ptr initial(new Values); +// Extract Rot3 measurement from Pose3 measurement +static BinaryMeasurement convert(const BinaryMeasurement &p) { + auto gaussian = + boost::dynamic_pointer_cast(p.noiseModel()); + if (!gaussian) + throw std::invalid_argument( + "parseMeasurements can only convert Pose3 measurements " + "with Gaussian noise models."); + const Matrix6 M = gaussian->covariance(); + return BinaryMeasurement( + p.key1(), p.key2(), p.measured().rotation(), + noiseModel::Gaussian::Covariance(M.block<3, 3>(0, 0), true)); +} - const auto poses = parse3DPoses(filename); - for (const auto& key_pose : poses) { - initial->insert(key_pose.first, key_pose.second); - } - const auto landmarks = parse3DLandmarks(filename); - for (const auto& key_landmark : landmarks) { - initial->insert(key_landmark.first, key_landmark.second); - } +template <> +std::vector> +parseMeasurements(const std::string &filename, + const noiseModel::Diagonal::shared_ptr &model, + size_t maxIndex) { + auto poses = parseMeasurements(filename, model, maxIndex); + std::vector> result; + result.reserve(poses.size()); + for (const auto &p : poses) + result.push_back(convert(p)); + return result; +} + +/* ************************************************************************* */ +// Implementation of parseFactors for Pose3 +template <> +std::vector::shared_ptr> +parseFactors(const std::string &filename, + const noiseModel::Diagonal::shared_ptr &model, + size_t maxIndex) { + ParseFactor parse({model ? createSampler(model) : nullptr, maxIndex}); + return parseToVector::shared_ptr>(filename, parse); +} + +/* ************************************************************************* */ +GraphAndValues load3D(const string &filename) { + auto graph = boost::make_shared(); + auto initial = boost::make_shared(); + + // Instantiate factor parser. maxIndex is always zero for load3D. + ParseFactor parseFactor({nullptr, 0}); + + // Single pass for variables and factors. Unlike 2D version, does *not* insert + // variables into `initial` if referenced but not present. 
+ Parser parse = [&](istream &is, const string &tag) { + if (auto indexedPose = parseVertexPose3(is, tag)) { + initial->insert(indexedPose->first, indexedPose->second); + } else if (auto indexedLandmark = parseVertexPoint3(is, tag)) { + initial->insert(L(indexedLandmark->first), indexedLandmark->second); + } else if (auto factor = parseFactor(is, tag)) { + graph->push_back(*factor); + } + return 0; + }; + parseLines(filename, parse); return make_pair(graph, initial); } /* ************************************************************************* */ -Rot3 openGLFixedRotation() { // this is due to different convention for cameras in gtsam and openGL +Rot3 openGLFixedRotation() { // this is due to different convention for + // cameras in gtsam and openGL /* R = [ 1 0 0 * 0 -1 0 * 0 0 -1] @@ -721,7 +957,7 @@ Rot3 openGLFixedRotation() { // this is due to different convention for cameras } /* ************************************************************************* */ -Pose3 openGL2gtsam(const Rot3& R, double tx, double ty, double tz) { +Pose3 openGL2gtsam(const Rot3 &R, double tx, double ty, double tz) { Rot3 R90 = openGLFixedRotation(); Rot3 wRc = (R.inverse()).compose(R90); @@ -730,7 +966,7 @@ Pose3 openGL2gtsam(const Rot3& R, double tx, double ty, double tz) { } /* ************************************************************************* */ -Pose3 gtsam2openGL(const Rot3& R, double tx, double ty, double tz) { +Pose3 gtsam2openGL(const Rot3 &R, double tx, double ty, double tz) { Rot3 R90 = openGLFixedRotation(); Rot3 cRw_openGL = R90.compose(R.inverse()); Point3 t_openGL = cRw_openGL.rotate(Point3(-tx, -ty, -tz)); @@ -738,13 +974,13 @@ Pose3 gtsam2openGL(const Rot3& R, double tx, double ty, double tz) { } /* ************************************************************************* */ -Pose3 gtsam2openGL(const Pose3& PoseGTSAM) { +Pose3 gtsam2openGL(const Pose3 &PoseGTSAM) { return gtsam2openGL(PoseGTSAM.rotation(), PoseGTSAM.x(), PoseGTSAM.y(), - PoseGTSAM.z()); + PoseGTSAM.z()); } /* ************************************************************************* */ -bool readBundler(const string& filename, SfmData &data) { +bool readBundler(const string &filename, SfmData &data) { // Load the data file ifstream is(filename.c_str(), ifstream::in); if (!is) { @@ -779,7 +1015,7 @@ bool readBundler(const string& filename, SfmData &data) { // Check for all-zero R, in which case quit if (r11 == 0 && r12 == 0 && r13 == 0) { cout << "Error in readBundler: zero rotation matrix for pose " << i - << endl; + << endl; return false; } @@ -831,7 +1067,7 @@ bool readBundler(const string& filename, SfmData &data) { } /* ************************************************************************* */ -bool readBAL(const string& filename, SfmData &data) { +bool readBAL(const string &filename, SfmData &data) { // Load the data file ifstream is(filename.c_str(), ifstream::in); if (!is) { @@ -879,7 +1115,7 @@ bool readBAL(const string& filename, SfmData &data) { // Get the 3D position float x, y, z; is >> x >> y >> z; - SfmTrack& track = data.tracks[j]; + SfmTrack &track = data.tracks[j]; track.p = Point3(x, y, z); track.r = 0.4f; track.g = 0.4f; @@ -891,7 +1127,7 @@ bool readBAL(const string& filename, SfmData &data) { } /* ************************************************************************* */ -bool writeBAL(const string& filename, SfmData &data) { +bool writeBAL(const string &filename, SfmData &data) { // Open the output file ofstream os; os.open(filename.c_str()); @@ -909,29 +1145,32 @@ bool writeBAL(const string& 
filename, SfmData &data) { // Write observations os << data.number_cameras() << " " << data.number_tracks() << " " - << nrObservations << endl; + << nrObservations << endl; os << endl; for (size_t j = 0; j < data.number_tracks(); j++) { // for each 3D point j - const SfmTrack& track = data.tracks[j]; + const SfmTrack &track = data.tracks[j]; - for (size_t k = 0; k < track.number_measurements(); k++) { // for each observation of the 3D point j + for (size_t k = 0; k < track.number_measurements(); + k++) { // for each observation of the 3D point j size_t i = track.measurements[k].first; // camera id double u0 = data.cameras[i].calibration().u0(); double v0 = data.cameras[i].calibration().v0(); if (u0 != 0 || v0 != 0) { - cout - << "writeBAL has not been tested for calibration with nonzero (u0,v0)" - << endl; + cout << "writeBAL has not been tested for calibration with nonzero " + "(u0,v0)" + << endl; } - double pixelBALx = track.measurements[k].second.x() - u0; // center of image is the origin - double pixelBALy = -(track.measurements[k].second.y() - v0); // center of image is the origin + double pixelBALx = track.measurements[k].second.x() - + u0; // center of image is the origin + double pixelBALy = -(track.measurements[k].second.y() - + v0); // center of image is the origin Point2 pixelMeasurement(pixelBALx, pixelBALy); - os << i /*camera id*/<< " " << j /*point id*/<< " " - << pixelMeasurement.x() /*u of the pixel*/<< " " - << pixelMeasurement.y() /*v of the pixel*/<< endl; + os << i /*camera id*/ << " " << j /*point id*/ << " " + << pixelMeasurement.x() /*u of the pixel*/ << " " + << pixelMeasurement.y() /*v of the pixel*/ << endl; } } os << endl; @@ -964,46 +1203,47 @@ bool writeBAL(const string& filename, SfmData &data) { return true; } -bool writeBALfromValues(const string& filename, const SfmData &data, - Values& values) { +bool writeBALfromValues(const string &filename, const SfmData &data, + Values &values) { using Camera = PinholeCamera; SfmData dataValues = data; // Store poses or cameras in SfmData size_t nrPoses = values.count(); - if (nrPoses == dataValues.number_cameras()) { // we only estimated camera poses - for (size_t i = 0; i < dataValues.number_cameras(); i++) { // for each camera - Key poseKey = symbol('x', i); - Pose3 pose = values.at(poseKey); + if (nrPoses == + dataValues.number_cameras()) { // we only estimated camera poses + for (size_t i = 0; i < dataValues.number_cameras(); + i++) { // for each camera + Pose3 pose = values.at(X(i)); Cal3Bundler K = dataValues.cameras[i].calibration(); Camera camera(pose, K); dataValues.cameras[i] = camera; } } else { size_t nrCameras = values.count(); - if (nrCameras == dataValues.number_cameras()) { // we only estimated camera poses and calibration - for (size_t i = 0; i < nrCameras; i++) { // for each camera - Key cameraKey = i; // symbol('c',i); + if (nrCameras == dataValues.number_cameras()) { // we only estimated camera + // poses and calibration + for (size_t i = 0; i < nrCameras; i++) { // for each camera + Key cameraKey = i; // symbol('c',i); Camera camera = values.at(cameraKey); dataValues.cameras[i] = camera; } } else { - cout - << "writeBALfromValues: different number of cameras in SfM_dataValues (#cameras " - << dataValues.number_cameras() << ") and values (#cameras " - << nrPoses << ", #poses " << nrCameras << ")!!" 
- << endl; + cout << "writeBALfromValues: different number of cameras in " + "SfM_dataValues (#cameras " + << dataValues.number_cameras() << ") and values (#cameras " + << nrPoses << ", #poses " << nrCameras << ")!!" << endl; return false; } } // Store 3D points in SfmData - size_t nrPoints = values.count(), nrTracks = dataValues.number_tracks(); + size_t nrPoints = values.count(), + nrTracks = dataValues.number_tracks(); if (nrPoints != nrTracks) { - cout - << "writeBALfromValues: different number of points in SfM_dataValues (#points= " - << nrTracks << ") and values (#points " - << nrPoints << ")!!" << endl; + cout << "writeBALfromValues: different number of points in " + "SfM_dataValues (#points= " + << nrTracks << ") and values (#points " << nrPoints << ")!!" << endl; } for (size_t j = 0; j < nrTracks; j++) { // for each point @@ -1015,7 +1255,7 @@ bool writeBALfromValues(const string& filename, const SfmData &data, dataValues.tracks[j].r = 1.0; dataValues.tracks[j].g = 0.0; dataValues.tracks[j].b = 0.0; - dataValues.tracks[j].p = Point3(0,0,0); + dataValues.tracks[j].p = Point3(0, 0, 0); } } @@ -1023,22 +1263,40 @@ bool writeBALfromValues(const string& filename, const SfmData &data, return writeBAL(filename, dataValues); } -Values initialCamerasEstimate(const SfmData& db) { +Values initialCamerasEstimate(const SfmData &db) { Values initial; size_t i = 0; // NO POINTS: j = 0; - for(const SfmCamera& camera: db.cameras) + for (const SfmCamera &camera : db.cameras) initial.insert(i++, camera); return initial; } -Values initialCamerasAndPointsEstimate(const SfmData& db) { +Values initialCamerasAndPointsEstimate(const SfmData &db) { Values initial; size_t i = 0, j = 0; - for(const SfmCamera& camera: db.cameras) + for (const SfmCamera &camera : db.cameras) initial.insert((i++), camera); - for(const SfmTrack& track: db.tracks) + for (const SfmTrack &track : db.tracks) initial.insert(P(j++), track.p); return initial; } -} // \namespace gtsam +// To be deprecated when pybind wrapper is merged: +BetweenFactorPose3s +parse3DFactors(const std::string &filename, + const noiseModel::Diagonal::shared_ptr &model, size_t maxIndex) { + return parseFactors(filename, model, maxIndex); +} + +#ifdef GTSAM_ALLOW_DEPRECATED_SINCE_V41 +std::map parse3DPoses(const std::string &filename, + size_t maxIndex) { + return parseVariables(filename, maxIndex); +} + +std::map parse3DLandmarks(const std::string &filename, + size_t maxIndex) { + return parseVariables(filename, maxIndex); +} +#endif +} // namespace gtsam diff --git a/gtsam/slam/dataset.h b/gtsam/slam/dataset.h index 30fa913ba..53abe55ba 100644 --- a/gtsam/slam/dataset.h +++ b/gtsam/slam/dataset.h @@ -20,13 +20,12 @@ #pragma once +#include #include #include #include -#include -#include +#include #include -#include #include #include #include @@ -74,20 +73,42 @@ enum KernelFunctionType { KernelFunctionTypeNONE, KernelFunctionTypeHUBER, KernelFunctionTypeTUKEY }; -/// Return type for auxiliary functions -typedef std::pair IndexedPose; -typedef std::pair IndexedLandmark; -typedef std::pair, Pose2> IndexedEdge; - -#ifdef GTSAM_ALLOW_DEPRECATED_SINCE_V41 /** - * Parse TORO/G2O vertex "id x y yaw" - * @param is input stream - * @param tag string parsed from input stream, will only parse if vertex type + * Parse variables in a line-based text format (like g2o) into a map. + * Instantiated in .cpp Pose2, Point2, Pose3, and Point3. + * Note the map keys are integer indices, *not* gtsam::Keys. This is is + * different below where landmarks will use L(index) symbols. 
 */
-GTSAM_EXPORT boost::optional<IndexedPose> parseVertex(std::istream& is,
-    const std::string& tag);
-#endif
+template <typename T>
+GTSAM_EXPORT std::map<size_t, T> parseVariables(const std::string &filename,
+                                                size_t maxIndex = 0);
+
+/**
+ * Parse binary measurements in a line-based text format (like g2o) into a
+ * vector. Instantiated in .cpp for Pose2, Rot2, Pose3, and Rot3. The rotation
+ * versions parse poses and extract only the rotation part, using the marginal
+ * covariance as noise model.
+ */
+template <typename T>
+GTSAM_EXPORT std::vector<BinaryMeasurement<T>>
+parseMeasurements(const std::string &filename,
+                  const noiseModel::Diagonal::shared_ptr &model = nullptr,
+                  size_t maxIndex = 0);
+
+/**
+ * Parse BetweenFactors in a line-based text format (like g2o) into a vector of
+ * shared pointers. Instantiated in .cpp for T equal to Pose2 and Pose3.
+ */
+template <typename T>
+GTSAM_EXPORT std::vector<typename BetweenFactor<T>::shared_ptr>
+parseFactors(const std::string &filename,
+             const noiseModel::Diagonal::shared_ptr &model = nullptr,
+             size_t maxIndex = 0);
+
+/// Return type for auxiliary functions
+typedef std::pair<size_t, Pose2> IndexedPose;
+typedef std::pair<size_t, Point2> IndexedLandmark;
+typedef std::pair<std::pair<size_t, size_t>, Pose2> IndexedEdge;
 /**
  * Parse TORO/G2O vertex "id x y yaw"
@@ -102,7 +123,6 @@ GTSAM_EXPORT boost::optional<IndexedPose> parseVertexPose(std::istream& is,
  * @param is input stream
  * @param tag string parsed from input stream, will only parse if vertex type
  */
-
 GTSAM_EXPORT boost::optional<IndexedLandmark> parseVertexLandmark(std::istream& is,
     const std::string& tag);
@@ -114,18 +134,21 @@ GTSAM_EXPORT boost::optional<IndexedLandmark> parseVertexLandmark(std::istream&
 GTSAM_EXPORT boost::optional<IndexedEdge> parseEdge(std::istream& is,
     const std::string& tag);
-/// Return type for load functions
-typedef std::pair<NonlinearFactorGraph::shared_ptr, Values::shared_ptr> GraphAndValues;
+/// Return type for load functions, which return a graph and initial values. For
+/// landmarks, the gtsam::Symbol L(index) is used to insert into the Values.
+/// Bearing-range measurements also refer to landmarks with L(index).
+using GraphAndValues =
+    std::pair<NonlinearFactorGraph::shared_ptr, Values::shared_ptr>;
 /**
  * Load TORO 2D Graph
  * @param dataset/model pair as constructed by [dataset]
- * @param maxID if non-zero cut out vertices >= maxID
+ * @param maxIndex if non-zero cut out vertices >= maxIndex
  * @param addNoise add noise to the edges
  * @param smart try to reduce complexity of covariance to cheapest model
  */
 GTSAM_EXPORT GraphAndValues load2D(
-    std::pair<std::string, SharedNoiseModel> dataset, int maxID = 0,
+    std::pair<std::string, SharedNoiseModel> dataset, size_t maxIndex = 0,
     bool addNoise = false, bool smart = true, //
     NoiseFormat noiseFormat = NoiseFormatAUTO,
@@ -135,7 +158,7 @@ GTSAM_EXPORT GraphAndValues load2D(
  * Load TORO/G2O style graph files
  * @param filename
  * @param model optional noise model to use instead of one specified by file
- * @param maxID if non-zero cut out vertices >= maxID
+ * @param maxIndex if non-zero cut out vertices >= maxIndex
  * @param addNoise add noise to the edges
  * @param smart try to reduce complexity of covariance to cheapest model
  * @param noiseFormat how noise parameters are stored
@@ -143,13 +166,13 @@ GTSAM_EXPORT GraphAndValues load2D(
  * @return graph and initial values
  */
 GTSAM_EXPORT GraphAndValues load2D(const std::string& filename,
-    SharedNoiseModel model = SharedNoiseModel(), Key maxID = 0, bool addNoise =
+    SharedNoiseModel model = SharedNoiseModel(), size_t maxIndex = 0, bool addNoise =
         false, bool smart = true, NoiseFormat noiseFormat = NoiseFormatAUTO, //
     KernelFunctionType kernelFunctionType = KernelFunctionTypeNONE);
 /// @deprecated load2D now allows for arbitrary models and wrapping a robust kernel
 GTSAM_EXPORT GraphAndValues load2D_robust(const std::string& filename,
-    noiseModel::Base::shared_ptr& model, int maxID = 0);
+    noiseModel::Base::shared_ptr& model, size_t maxIndex = 0);
 /** save 2d graph */
 GTSAM_EXPORT void save2D(const NonlinearFactorGraph& graph,
@@ -173,21 +196,15 @@ GTSAM_EXPORT GraphAndValues readG2o(const std::string& g2oFile, const bool is3D
  * @param filename The name of the g2o file to write
  * @param graph NonlinearFactor graph storing the measurements
  * @param estimate Values
+ *
+ * Note: behavior change in PR #471: to be consistent with load2D and load3D, we
+ * write the *indices* to file and not the full Keys. This change really only
+ * affects landmarks, which get read as indices but stored in values with the
+ * symbol L(index).
  */
 GTSAM_EXPORT void writeG2o(const NonlinearFactorGraph& graph,
     const Values& estimate, const std::string& filename);
-/// Parse edges in 3D TORO graph file into a set of BetweenFactors.
-using BetweenFactorPose3s = std::vector<BetweenFactor<Pose3>::shared_ptr>;
-GTSAM_EXPORT BetweenFactorPose3s parse3DFactors(const std::string& filename,
-    const noiseModel::Diagonal::shared_ptr& corruptingNoise=nullptr);
-
-/// Parse vertices in 3D TORO/g2o graph file into a map of Pose3s.
-GTSAM_EXPORT std::map<Key, Pose3> parse3DPoses(const std::string& filename);
-
-/// Parse landmarks in 3D g2o graph file into a map of Point3s.
-GTSAM_EXPORT std::map parse3DLandmarks(const string& filename); - /// Load TORO 3D Graph GTSAM_EXPORT GraphAndValues load3D(const std::string& filename); @@ -324,4 +341,24 @@ GTSAM_EXPORT Values initialCamerasEstimate(const SfmData& db); */ GTSAM_EXPORT Values initialCamerasAndPointsEstimate(const SfmData& db); +// To be deprecated when pybind wrapper is merged: +using BetweenFactorPose3s = std::vector::shared_ptr>; +GTSAM_EXPORT BetweenFactorPose3s +parse3DFactors(const std::string &filename, + const noiseModel::Diagonal::shared_ptr &model = nullptr, + size_t maxIndex = 0); + +#ifdef GTSAM_ALLOW_DEPRECATED_SINCE_V41 +inline boost::optional parseVertex(std::istream &is, + const std::string &tag) { + return parseVertexPose(is, tag); +} + +GTSAM_EXPORT std::map parse3DPoses(const std::string &filename, + size_t maxIndex = 0); + +GTSAM_EXPORT std::map +parse3DLandmarks(const std::string &filename, size_t maxIndex = 0); + +#endif } // namespace gtsam diff --git a/gtsam/slam/tests/testDataset.cpp b/gtsam/slam/tests/testDataset.cpp index b6b1776d2..aad9124c5 100644 --- a/gtsam/slam/tests/testDataset.cpp +++ b/gtsam/slam/tests/testDataset.cpp @@ -82,38 +82,73 @@ TEST( dataSet, parseEdge) const auto actual = parseEdge(is, tag); EXPECT(actual); if (actual) { - pair expected(0, 1); + pair expected(0, 1); EXPECT(expected == actual->first); EXPECT(assert_equal(Pose2(2, 3, 4), actual->second)); } } /* ************************************************************************* */ -TEST( dataSet, load2D) -{ +TEST(dataSet, load2D) { ///< The structure where we will save the SfM data const string filename = findExampleDataFile("w100.graph"); NonlinearFactorGraph::shared_ptr graph; Values::shared_ptr initial; boost::tie(graph, initial) = load2D(filename); - EXPECT_LONGS_EQUAL(300,graph->size()); - EXPECT_LONGS_EQUAL(100,initial->size()); - noiseModel::Unit::shared_ptr model = noiseModel::Unit::Create(3); - BetweenFactor expected(1, 0, Pose2(-0.99879,0.0417574,-0.00818381), model); - BetweenFactor::shared_ptr actual = boost::dynamic_pointer_cast< - BetweenFactor >(graph->at(0)); + EXPECT_LONGS_EQUAL(300, graph->size()); + EXPECT_LONGS_EQUAL(100, initial->size()); + auto model = noiseModel::Unit::Create(3); + BetweenFactor expected(1, 0, Pose2(-0.99879, 0.0417574, -0.00818381), + model); + BetweenFactor::shared_ptr actual = + boost::dynamic_pointer_cast>(graph->at(0)); EXPECT(assert_equal(expected, *actual)); + + // Check binary measurements, Pose2 + size_t maxIndex = 5; + auto measurements = parseMeasurements(filename, nullptr, maxIndex); + EXPECT_LONGS_EQUAL(5, measurements.size()); + + // Check binary measurements, Rot2 + auto measurements2 = parseMeasurements(filename); + EXPECT_LONGS_EQUAL(300, measurements2.size()); + + // // Check factor parsing + const auto actualFactors = parseFactors(filename); + for (size_t i : {0, 1, 2, 3, 4, 5}) { + EXPECT(assert_equal( + *boost::dynamic_pointer_cast>(graph->at(i)), + *actualFactors[i], 1e-5)); + } + + // Check pose parsing + const auto actualPoses = parseVariables(filename); + for (size_t j : {0, 1, 2, 3, 4}) { + EXPECT(assert_equal(initial->at(j), actualPoses.at(j), 1e-5)); + } + + // Check landmark parsing + const auto actualLandmarks = parseVariables(filename); + EXPECT_LONGS_EQUAL(0, actualLandmarks.size()); } /* ************************************************************************* */ -TEST( dataSet, load2DVictoriaPark) -{ +TEST(dataSet, load2DVictoriaPark) { const string filename = findExampleDataFile("victoria_park.txt"); 
NonlinearFactorGraph::shared_ptr graph; Values::shared_ptr initial; + + // Load all boost::tie(graph, initial) = load2D(filename); - EXPECT_LONGS_EQUAL(10608,graph->size()); - EXPECT_LONGS_EQUAL(7120,initial->size()); + EXPECT_LONGS_EQUAL(10608, graph->size()); + EXPECT_LONGS_EQUAL(7120, initial->size()); + + // Restrict keys + size_t maxIndex = 5; + boost::tie(graph, initial) = load2D(filename, nullptr, maxIndex); + EXPECT_LONGS_EQUAL(5, graph->size()); + EXPECT_LONGS_EQUAL(6, initial->size()); // file has 0 as well + EXPECT_LONGS_EQUAL(L(5), graph->at(4)->keys()[1]); } /* ************************************************************************* */ @@ -184,7 +219,7 @@ TEST(dataSet, readG2o3D) { } // Check factor parsing - const auto actualFactors = parse3DFactors(g2oFile); + const auto actualFactors = parseFactors(g2oFile); for (size_t i : {0, 1, 2, 3, 4, 5}) { EXPECT(assert_equal( *boost::dynamic_pointer_cast>(expectedGraph[i]), @@ -192,13 +227,13 @@ TEST(dataSet, readG2o3D) { } // Check pose parsing - const auto actualPoses = parse3DPoses(g2oFile); + const auto actualPoses = parseVariables(g2oFile); for (size_t j : {0, 1, 2, 3, 4}) { EXPECT(assert_equal(poses[j], actualPoses.at(j), 1e-5)); } // Check landmark parsing - const auto actualLandmarks = parse3DLandmarks(g2oFile); + const auto actualLandmarks = parseVariables(g2oFile); for (size_t j : {0, 1, 2, 3, 4}) { EXPECT(assert_equal(poses[j], actualPoses.at(j), 1e-5)); } @@ -259,7 +294,7 @@ TEST(dataSet, readG2oCheckDeterminants) { const string g2oFile = findExampleDataFile("toyExample.g2o"); // Check determinants in factors - auto factors = parse3DFactors(g2oFile); + auto factors = parseFactors(g2oFile); EXPECT_LONGS_EQUAL(6, factors.size()); for (const auto& factor : factors) { const Rot3 R = factor->measured().rotation(); @@ -267,13 +302,13 @@ TEST(dataSet, readG2oCheckDeterminants) { } // Check determinants in initial values - const map poses = parse3DPoses(g2oFile); + const map poses = parseVariables(g2oFile); EXPECT_LONGS_EQUAL(5, poses.size()); for (const auto& key_value : poses) { const Rot3 R = key_value.second.rotation(); EXPECT_DOUBLES_EQUAL(1.0, R.matrix().determinant(), 1e-9); } - const map landmarks = parse3DLandmarks(g2oFile); + const map landmarks = parseVariables(g2oFile); EXPECT_LONGS_EQUAL(0, landmarks.size()); } @@ -282,10 +317,13 @@ TEST(dataSet, readG2oLandmarks) { const string g2oFile = findExampleDataFile("example_with_vertices.g2o"); // Check number of poses and landmarks. Should be 8 each. 
- const map poses = parse3DPoses(g2oFile); + const map poses = parseVariables(g2oFile); EXPECT_LONGS_EQUAL(8, poses.size()); - const map landmarks = parse3DLandmarks(g2oFile); + const map landmarks = parseVariables(g2oFile); EXPECT_LONGS_EQUAL(8, landmarks.size()); + + auto graphAndValues = load3D(g2oFile); + EXPECT(graphAndValues.second->exists(L(0))); } /* ************************************************************************* */ @@ -538,14 +576,12 @@ TEST( dataSet, writeBALfromValues_Dubrovnik){ Values value; for(size_t i=0; i < readData.number_cameras(); i++){ // for each camera - Key poseKey = symbol('x',i); Pose3 pose = poseChange.compose(readData.cameras[i].pose()); - value.insert(poseKey, pose); + value.insert(X(i), pose); } for(size_t j=0; j < readData.number_tracks(); j++){ // for each point - Key pointKey = P(j); Point3 point = poseChange.transformFrom( readData.tracks[j].p ); - value.insert(pointKey, point); + value.insert(P(j), point); } // Write values and readData to a file @@ -570,13 +606,11 @@ TEST( dataSet, writeBALfromValues_Dubrovnik){ EXPECT(assert_equal(expected,actual,12)); Pose3 expectedPose = camera0.pose(); - Key poseKey = symbol('x',0); - Pose3 actualPose = value.at(poseKey); + Pose3 actualPose = value.at(X(0)); EXPECT(assert_equal(expectedPose,actualPose, 1e-7)); Point3 expectedPoint = track0.p; - Key pointKey = P(0); - Point3 actualPoint = value.at(pointKey); + Point3 actualPoint = value.at(P(0)); EXPECT(assert_equal(expectedPoint,actualPoint, 1e-6)); } diff --git a/tests/testNonlinearISAM.cpp b/tests/testNonlinearISAM.cpp index 974806612..4249f5bc4 100644 --- a/tests/testNonlinearISAM.cpp +++ b/tests/testNonlinearISAM.cpp @@ -308,7 +308,7 @@ TEST(testNonlinearISAM, loop_closures ) { // check if edge const auto betweenPose = parseEdge(is, tag); if (betweenPose) { - Key id1, id2; + size_t id1, id2; tie(id1, id2) = betweenPose->first; graph.emplace_shared >(Symbol('x', id2), Symbol('x', id1), betweenPose->second, model); diff --git a/timing/timeFrobeniusFactor.cpp b/timing/timeFrobeniusFactor.cpp index 8bdda968f..924213a33 100644 --- a/timing/timeFrobeniusFactor.cpp +++ b/timing/timeFrobeniusFactor.cpp @@ -57,8 +57,8 @@ int main(int argc, char* argv[]) { } // Read G2O file - const auto factors = parse3DFactors(g2oFile); - const auto poses = parse3DPoses(g2oFile); + const auto measurements = parseMeasurements(g2oFile); + const auto poses = parseVariables(g2oFile); // Build graph NonlinearFactorGraph graph; @@ -66,12 +66,12 @@ int main(int argc, char* argv[]) { auto priorModel = noiseModel::Isotropic::Sigma(6, 10000); graph.add(PriorFactor(0, SOn::identity(4), priorModel)); auto G = boost::make_shared(SOn::VectorizedGenerators(4)); - for (const auto& factor : factors) { - const auto& keys = factor->keys(); - const auto& Tij = factor->measured(); - const auto& model = factor->noiseModel(); + for (const auto &m : measurements) { + const auto &keys = m.keys(); + const Rot3 &Rij = m.measured(); + const auto &model = m.noiseModel(); graph.emplace_shared( - keys[0], keys[1], Rot3(Tij.rotation().matrix()), 4, model, G); + keys[0], keys[1], Rij, 4, model, G); } std::mt19937 rng(42);
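
As a quick reference for the API exercised above, here is a minimal usage sketch of the templated parsers introduced by this patch. It is illustrative only and not part of the diff; it assumes a GTSAM build that includes these changes and uses the toyExample.g2o file referenced in the tests.

#include <gtsam/geometry/Pose3.h>
#include <gtsam/slam/dataset.h>

#include <iostream>

int main() {
  using namespace gtsam;
  // Example g2o file shipped with GTSAM (also used in testDataset above).
  const std::string g2oFile = findExampleDataFile("toyExample.g2o");

  // Variables are keyed by integer index, *not* by gtsam::Key.
  const std::map<size_t, Pose3> poses = parseVariables<Pose3>(g2oFile);

  // Relative-pose measurements; parseMeasurements<Rot3> would instead return
  // only the rotation part with its marginal covariance as noise model.
  const auto measurements = parseMeasurements<Pose3>(g2oFile);

  // Ready-made BetweenFactor<Pose3> shared pointers, e.g. for pose-graph SLAM.
  const auto factors = parseFactors<Pose3>(g2oFile);

  std::cout << poses.size() << " poses, " << measurements.size()
            << " measurements, " << factors.size() << " factors\n";
  return 0;
}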