update python tests

release/4.3a0
Varun Agrawal 2022-01-31 14:08:13 -05:00
parent b93145cd89
commit 042c236123
12 changed files with 63 additions and 79 deletions


@ -41,10 +41,22 @@ class DSFMap {
std::map<KEY, This::Set> sets();
};
class IndexPairVector {
IndexPairVector();
IndexPairVector(const gtsam::IndexPairVector& other);
// common STL methods
size_t size() const;
bool empty() const;
void clear();
// structure specific methods
gtsam::IndexPair at(size_t i) const;
void push_back(gtsam::IndexPair key) const;
};
gtsam::IndexPairVector IndexPairSetAsArray(gtsam::IndexPairSet& set);
#include <gtsam/base/Matrix.h>
bool linear_independent(Matrix A, Matrix B, double tol);
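
With IndexPairVector exposed and IndexPairSetAsArray declared above, the connected components of a DSFMapIndexPair become easy to inspect from Python. A minimal sketch, assuming the merge and sets methods of DSFMapIndexPair (wrapped elsewhere in this file) and the i()/j() accessors of IndexPair:

    import gtsam

    dsf = gtsam.DSFMapIndexPair()
    dsf.merge(gtsam.IndexPair(0, 1), gtsam.IndexPair(1, 2))

    sets = dsf.sets()  # maps a representative key to an IndexPairSet
    for rep in sets:
        # IndexPairSetAsArray converts each set into an iterable IndexPairVector
        members = gtsam.IndexPairSetAsArray(sets[rep])
        print([(p.i(), p.j()) for p in members])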


@ -17,9 +17,8 @@ Date: September 2020
from collections import defaultdict
from typing import List, Tuple
import numpy as np
import gtsam
import numpy as np
from gtsam.examples import SFMdata
# Hyperparameters for 1dsfm, values used from Kyle Wilson's code.
@ -59,7 +58,7 @@ def get_data() -> Tuple[gtsam.Values, List[gtsam.BinaryMeasurementUnit3]]:
return wRc_values, i_iZj_list
def filter_outliers(w_iZj_list: gtsam.BinaryMeasurementsUnit3) -> gtsam.BinaryMeasurementsUnit3:
def filter_outliers(w_iZj_list: List[gtsam.BinaryMeasurementUnit3]) -> List[gtsam.BinaryMeasurementUnit3]:
"""Removes outliers from a list of Unit3 measurements that are the
translation directions from camera i to camera j in the world frame."""
@ -89,14 +88,14 @@ def filter_outliers(w_iZj_list: gtsam.BinaryMeasurementsUnit3) -> gtsam.BinaryMe
avg_outlier_weights[keypair] += weight / len(outlier_weights)
# Remove w_iZj that have weight greater than threshold, these are outliers.
w_iZj_inliers = gtsam.BinaryMeasurementsUnit3()
w_iZj_inliers = []
[w_iZj_inliers.append(w_iZj) for w_iZj in w_iZj_list if avg_outlier_weights[(
w_iZj.key1(), w_iZj.key2())] < OUTLIER_WEIGHT_THRESHOLD]
return w_iZj_inliers
def estimate_poses(i_iZj_list: gtsam.BinaryMeasurementsUnit3,
def estimate_poses(i_iZj_list: List[gtsam.BinaryMeasurementUnit3],
wRc_values: gtsam.Values) -> gtsam.Values:
"""Estimate poses given rotations and normalized translation directions between cameras.
@ -112,7 +111,7 @@ def estimate_poses(i_iZj_list: gtsam.BinaryMeasurementsUnit3,
"""
# Convert the translation direction measurements to world frame using the rotations.
w_iZj_list = gtsam.BinaryMeasurementsUnit3()
w_iZj_list = []
for i_iZj in i_iZj_list:
w_iZj = gtsam.Unit3(wRc_values.atRot3(i_iZj.key1())
.rotate(i_iZj.measured().point3()))
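
The signature changes above reflect that the wrapper now converts plain Python lists to the underlying C++ vectors, so gtsam.BinaryMeasurementsUnit3 is no longer needed on the Python side. A hedged sketch of building such a list; the keys, directions, and noise model are illustrative only:

    import numpy as np
    import gtsam

    noise = gtsam.noiseModel.Isotropic.Sigma(3, 0.01)
    w_iZj_list = [
        gtsam.BinaryMeasurementUnit3(0, 1, gtsam.Unit3(np.array([1.0, 0.0, 0.0])), noise),
        gtsam.BinaryMeasurementUnit3(1, 2, gtsam.Unit3(np.array([0.0, 1.0, 0.0])), noise),
    ]
    # Any call that previously required gtsam.BinaryMeasurementsUnit3 accepts this list.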


@ -1,4 +1,4 @@
# This trick is to allow direct import of sub-modules
# without this, we can only do `from gtsam.gtsam.symbol_shorthand import X`
# with this trick, we can do `from gtsam.symbol_shorthand import X`
from .gtsam.symbol_shorthand import *
from .gtsam.symbol_shorthand import *


@ -11,11 +11,10 @@ Refactored: Roderick Koehle
"""
import unittest
import numpy as np
import gtsam
from gtsam.utils.test_case import GtsamTestCase
import numpy as np
from gtsam.symbol_shorthand import K, L, P
from gtsam.utils.test_case import GtsamTestCase
def ulp(ftype=np.float64):
@ -51,9 +50,9 @@ class TestCal3Fisheye(GtsamTestCase):
camera1 = gtsam.PinholeCameraCal3Fisheye(pose1)
camera2 = gtsam.PinholeCameraCal3Fisheye(pose2)
cls.origin = np.array([0.0, 0.0, 0.0])
cls.poses = gtsam.Pose3Vector([pose1, pose2])
cls.cameras = gtsam.CameraSetCal3Fisheye([camera1, camera2])
cls.measurements = gtsam.Point2Vector([k.project(cls.origin) for k in cls.cameras])
cls.poses = [pose1, pose2]
cls.cameras = [camera1, camera2]
cls.measurements = [k.project(cls.origin) for k in cls.cameras]
def test_Cal3Fisheye(self):
K = gtsam.Cal3Fisheye()
@ -187,7 +186,7 @@ class TestCal3Fisheye(GtsamTestCase):
def test_triangulation_rectify(self):
"""Estimate spatial point from image measurements using rectification"""
rectified = gtsam.Point2Vector([k.calibration().calibrate(pt) for k, pt in zip(self.cameras, self.measurements)])
rectified = [k.calibration().calibrate(pt) for k, pt in zip(self.cameras, self.measurements)]
shared_cal = gtsam.Cal3_S2()
triangulated = gtsam.triangulatePoint3(self.poses, shared_cal, rectified, rank_tol=1e-9, optimize=False)
self.gtsamAssertEquals(triangulated, self.origin)
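
In this test, Pose3Vector, CameraSetCal3Fisheye, and Point2Vector all give way to plain lists that the updated wrapper converts on the fly. A self-contained sketch of the rectify-then-triangulate pattern, assuming default fisheye intrinsics and a made-up two-camera geometry:

    import numpy as np
    import gtsam

    # Two fisheye cameras with default intrinsics, 1 m apart, looking at the world origin.
    pose1 = gtsam.Pose3(gtsam.Rot3(), np.array([0.0, 0.0, -2.0]))
    pose2 = gtsam.Pose3(gtsam.Rot3(), np.array([1.0, 0.0, -2.0]))
    cameras = [gtsam.PinholeCameraCal3Fisheye(pose1), gtsam.PinholeCameraCal3Fisheye(pose2)]
    origin = np.array([0.0, 0.0, 0.0])
    measurements = [k.project(origin) for k in cameras]

    # Undistort to normalized coordinates, then triangulate with an identity Cal3_S2.
    rectified = [k.calibration().calibrate(pt) for k, pt in zip(cameras, measurements)]
    point = gtsam.triangulatePoint3([pose1, pose2], gtsam.Cal3_S2(), rectified,
                                    rank_tol=1e-9, optimize=False)  # ~ (0, 0, 0)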


@ -10,11 +10,10 @@ Author: Frank Dellaert & Duy Nguyen Ta (Python)
"""
import unittest
import numpy as np
import gtsam
from gtsam.utils.test_case import GtsamTestCase
import numpy as np
from gtsam.symbol_shorthand import K, L, P
from gtsam.utils.test_case import GtsamTestCase
class TestCal3Unified(GtsamTestCase):
@ -48,9 +47,9 @@ class TestCal3Unified(GtsamTestCase):
camera1 = gtsam.PinholeCameraCal3Unified(pose1, cls.stereographic)
camera2 = gtsam.PinholeCameraCal3Unified(pose2, cls.stereographic)
cls.origin = np.array([0.0, 0.0, 0.0])
cls.poses = gtsam.Pose3Vector([pose1, pose2])
cls.cameras = gtsam.CameraSetCal3Unified([camera1, camera2])
cls.measurements = gtsam.Point2Vector([k.project(cls.origin) for k in cls.cameras])
cls.poses = [pose1, pose2]
cls.cameras = [camera1, camera2]
cls.measurements = [k.project(cls.origin) for k in cls.cameras]
def test_Cal3Unified(self):
K = gtsam.Cal3Unified()
@ -147,7 +146,7 @@ class TestCal3Unified(GtsamTestCase):
def test_triangulation_rectify(self):
"""Estimate spatial point from image measurements using rectification"""
rectified = gtsam.Point2Vector([k.calibration().calibrate(pt) for k, pt in zip(self.cameras, self.measurements)])
rectified = [k.calibration().calibrate(pt) for k, pt in zip(self.cameras, self.measurements)]
shared_cal = gtsam.Cal3_S2()
triangulated = gtsam.triangulatePoint3(self.poses, shared_cal, rectified, rank_tol=1e-9, optimize=False)
self.gtsamAssertEquals(triangulated, self.origin)


@ -13,7 +13,8 @@ Author: Frank Dellaert
import unittest
from gtsam import DecisionTreeFactor, DiscreteValues, DiscreteDistribution, Ordering
from gtsam import (DecisionTreeFactor, DiscreteDistribution, DiscreteValues,
Ordering)
from gtsam.utils.test_case import GtsamTestCase


@ -31,7 +31,7 @@ class TestKarcherMean(GtsamTestCase):
def test_find(self):
# Check that optimizing for Karcher mean (which minimizes Between distance)
# gets correct result.
rotations = gtsam.Rot3Vector([R, R.inverse()])
rotations = [R, R.inverse()]
expected = Rot3()
actual = gtsam.FindKarcherMean(rotations)
self.gtsamAssertEquals(expected, actual)
@ -42,7 +42,7 @@ class TestKarcherMean(GtsamTestCase):
a2Rb2 = Rot3()
a3Rb3 = Rot3()
aRb_list = gtsam.Rot3Vector([a1Rb1, a2Rb2, a3Rb3])
aRb_list = [a1Rb1, a2Rb2, a3Rb3]
aRb_expected = Rot3()
aRb = gtsam.FindKarcherMean(aRb_list)
@ -58,9 +58,7 @@ class TestKarcherMean(GtsamTestCase):
graph = gtsam.NonlinearFactorGraph()
R12 = R.compose(R.compose(R))
graph.add(gtsam.BetweenFactorRot3(1, 2, R12, MODEL))
keys = gtsam.KeyVector()
keys.append(1)
keys.append(2)
keys = [1, 2]
graph.add(gtsam.KarcherMeanFactorRot3(keys))
initial = gtsam.Values()
@ -69,8 +67,7 @@ class TestKarcherMean(GtsamTestCase):
expected = Rot3()
result = gtsam.GaussNewtonOptimizer(graph, initial).optimize()
actual = gtsam.FindKarcherMean(
gtsam.Rot3Vector([result.atRot3(1), result.atRot3(2)]))
actual = gtsam.FindKarcherMean([result.atRot3(1), result.atRot3(2)])
self.gtsamAssertEquals(expected, actual)
self.gtsamAssertEquals(
R12, result.atRot3(1).between(result.atRot3(2)))
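
FindKarcherMean and KarcherMeanFactorRot3 now take plain lists as well, of Rot3 values and of integer keys respectively. A short sketch under those assumptions:

    import gtsam
    from gtsam import Rot3

    R = Rot3.Rodrigues(0.1, 0.0, 0.0)
    mean = gtsam.FindKarcherMean([R, R.inverse()])  # should be (close to) the identity rotation

    graph = gtsam.NonlinearFactorGraph()
    graph.add(gtsam.KarcherMeanFactorRot3([1, 2]))  # keys passed as a plain Python list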


@ -10,10 +10,9 @@ Author: Frank Dellaert & Duy Nguyen Ta & John Lambert
"""
import unittest
import numpy as np
import gtsam
from gtsam import Point2, Point2Pairs, Pose2
import numpy as np
from gtsam import Point2, Pose2
from gtsam.utils.test_case import GtsamTestCase
@ -55,7 +54,7 @@ class TestPose2(GtsamTestCase):
]
# fmt: on
ab_pairs = Point2Pairs(list(zip(pts_a, pts_b)))
ab_pairs = list(zip(pts_a, pts_b))
bTa = gtsam.align(ab_pairs)
aTb = bTa.inverse()
assert aTb is not None
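
gtsam.align likewise accepts a plain list of (Point2, Point2) tuples in place of Point2Pairs. A hedged sketch with made-up points; the direction convention of the returned Pose2 follows the test above:

    import numpy as np
    import gtsam

    pts_a = [np.array([1.0, 0.0]), np.array([0.0, 1.0]), np.array([-1.0, 0.0])]
    pts_b = [p + np.array([2.0, 3.0]) for p in pts_a]  # the same points, translated

    ab_pairs = list(zip(pts_a, pts_b))
    bTa = gtsam.align(ab_pairs)  # Pose2 relating the two point sets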


@ -14,16 +14,9 @@ import unittest
import gtsam
import numpy as np
from gtsam import (
BetweenFactorPose2,
LevenbergMarquardtParams,
Rot2,
Pose2,
ShonanAveraging2,
ShonanAveragingParameters2,
ShonanAveraging3,
ShonanAveragingParameters3,
)
from gtsam import (BetweenFactorPose2, LevenbergMarquardtParams, Pose2, Rot2,
ShonanAveraging2, ShonanAveraging3,
ShonanAveragingParameters2, ShonanAveragingParameters3)
from gtsam.utils.test_case import GtsamTestCase
DEFAULT_PARAMS = ShonanAveragingParameters3(
@ -183,7 +176,7 @@ class TestShonanAveraging(GtsamTestCase):
shonan_params.setCertifyOptimality(True)
noise_model = gtsam.noiseModel.Unit.Create(3)
between_factors = gtsam.BetweenFactorPose2s()
between_factors = []
for (i1, i2), i2Ri1 in i2Ri1_dict.items():
i2Ti1 = Pose2(i2Ri1, np.zeros(2))
between_factors.append(
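
The loop above appends BetweenFactorPose2 objects to a plain Python list rather than a gtsam.BetweenFactorPose2s container. A hedged sketch of the full pattern; the relative rotations are made up, and the ShonanAveraging2 constructor taking a factor list is assumed from the remainder of this test:

    import numpy as np
    import gtsam
    from gtsam import (BetweenFactorPose2, Pose2, Rot2, ShonanAveraging2,
                       ShonanAveragingParameters2)

    noise_model = gtsam.noiseModel.Unit.Create(3)
    i2Ri1_dict = {(0, 1): Rot2(0.3), (1, 2): Rot2(0.1), (0, 2): Rot2(0.4)}

    between_factors = []
    for (i1, i2), i2Ri1 in i2Ri1_dict.items():
        i2Ti1 = Pose2(i2Ri1, np.zeros(2))
        between_factors.append(BetweenFactorPose2(i2, i1, i2Ti1, noise_model))

    params = ShonanAveragingParameters2(gtsam.LevenbergMarquardtParams.CeresDefaults())
    shonan = ShonanAveraging2(between_factors, params)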


@ -12,10 +12,9 @@ Author: John Lambert
import unittest
from typing import List, Optional
import numpy as np
import gtsam
from gtsam import Point3, Pose3, Pose3Pairs, Rot3, Similarity3
import numpy as np
from gtsam import Point3, Pose3, Rot3, Similarity3
from gtsam.utils.test_case import GtsamTestCase
@ -49,7 +48,7 @@ class TestSim3(GtsamTestCase):
wToi_list = [wTo0, wTo1, wTo2]
we_pairs = Pose3Pairs(list(zip(wToi_list, eToi_list)))
we_pairs = list(zip(wToi_list, eToi_list))
# Recover the transformation wSe (i.e. world_S_egovehicle)
wSe = Similarity3.Align(we_pairs)
@ -84,7 +83,7 @@ class TestSim3(GtsamTestCase):
wToi_list = [wTo0, wTo1, wTo2]
we_pairs = Pose3Pairs(list(zip(wToi_list, eToi_list)))
we_pairs = list(zip(wToi_list, eToi_list))
# Recover the transformation wSe (i.e. world_S_egovehicle)
wSe = Similarity3.Align(we_pairs)
@ -122,7 +121,7 @@ class TestSim3(GtsamTestCase):
bTi_list = [bTi0, bTi1, bTi2, bTi3]
ab_pairs = Pose3Pairs(list(zip(aTi_list, bTi_list)))
ab_pairs = list(zip(aTi_list, bTi_list))
# Recover the transformation aSb (i.e. a_S_b)
aSb = Similarity3.Align(ab_pairs)
@ -689,7 +688,7 @@ def align_poses_sim3(aTi_list: List[Pose3], bTi_list: List[Pose3]) -> Similarity
assert len(aTi_list) == len(bTi_list)
assert n_to_align >= 2, "SIM(3) alignment uses at least 2 frames"
ab_pairs = Pose3Pairs(list(zip(aTi_list, bTi_list)))
ab_pairs = list(zip(aTi_list, bTi_list))
aSb = Similarity3.Align(ab_pairs)
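
Pose3Pairs disappears the same way: Similarity3.Align now takes a plain list of (Pose3, Pose3) tuples. A small sketch where frame a is frame b shifted by one meter, so the recovered similarity should be that translation with unit scale:

    import numpy as np
    import gtsam
    from gtsam import Pose3, Rot3, Similarity3

    # Poses of three objects expressed in frame b, and the same objects in frame a.
    bTi_list = [Pose3(Rot3(), np.array([float(i), 0.0, 0.0])) for i in range(3)]
    aTi_list = [Pose3(Rot3(), np.array([float(i) + 1.0, 0.0, 0.0])) for i in range(3)]

    ab_pairs = list(zip(aTi_list, bTi_list))
    aSb = Similarity3.Align(ab_pairs)  # expected: translation (1, 0, 0), unit scale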


@ -1,9 +1,9 @@
from __future__ import print_function
import numpy as np
import unittest
import gtsam
import numpy as np
""" Returns example pose values of 3 points A, B and C in the world frame """
def ExampleValues():
@ -19,7 +19,7 @@ def ExampleValues():
""" Returns binary measurements for the points in the given edges."""
def SimulateMeasurements(gt_poses, graph_edges):
measurements = gtsam.BinaryMeasurementsUnit3()
measurements = []
for edge in graph_edges:
Ta = gt_poses.atPose3(edge[0]).translation()
Tb = gt_poses.atPose3(edge[1]).translation()
@ -34,7 +34,7 @@ class TestTranslationRecovery(unittest.TestCase):
def test_constructor(self):
"""Construct from binary measurements."""
algorithm = gtsam.TranslationRecovery(gtsam.BinaryMeasurementsUnit3())
algorithm = gtsam.TranslationRecovery([])
self.assertIsInstance(algorithm, gtsam.TranslationRecovery)
def test_run(self):


@ -11,23 +11,11 @@ Authors: Frank Dellaert & Fan Jiang (Python) & Sushmita Warrier & John Lambert
import unittest
from typing import Iterable, List, Optional, Tuple, Union
import numpy as np
import gtsam
from gtsam import (
Cal3_S2,
Cal3Bundler,
CameraSetCal3_S2,
CameraSetCal3Bundler,
PinholeCameraCal3_S2,
PinholeCameraCal3Bundler,
Point2,
Point2Vector,
Point3,
Pose3,
Pose3Vector,
Rot3,
)
import numpy as np
from gtsam import (Cal3_S2, Cal3Bundler, CameraSetCal3_S2,
CameraSetCal3Bundler, PinholeCameraCal3_S2,
PinholeCameraCal3Bundler, Point2, Point3, Pose3, Rot3)
from gtsam.utils.test_case import GtsamTestCase
UPRIGHT = Rot3.Ypr(-np.pi / 2, 0.0, -np.pi / 2)
@ -44,9 +32,7 @@ class TestTriangulationExample(GtsamTestCase):
# create second camera 1 meter to the right of first camera
pose2 = pose1.compose(Pose3(Rot3(), Point3(1, 0, 0)))
# twoPoses
self.poses = Pose3Vector()
self.poses.append(pose1)
self.poses.append(pose2)
self.poses = [pose1, pose2]
# landmark ~5 meters in front of camera
self.landmark = Point3(5, 0.5, 1.2)
@ -58,7 +44,7 @@ class TestTriangulationExample(GtsamTestCase):
cal_params: Iterable[Iterable[Union[int, float]]],
camera_set: Optional[Union[CameraSetCal3Bundler,
CameraSetCal3_S2]] = None,
) -> Tuple[Point2Vector, Union[CameraSetCal3Bundler, CameraSetCal3_S2,
) -> Tuple[List[Point2], Union[CameraSetCal3Bundler, CameraSetCal3_S2,
List[Cal3Bundler], List[Cal3_S2]]]:
"""
Generate vector of measurements for given calibration and camera model.
@ -76,7 +62,7 @@ class TestTriangulationExample(GtsamTestCase):
cameras = camera_set()
else:
cameras = []
measurements = Point2Vector()
measurements = []
for k, pose in zip(cal_params, self.poses):
K = calibration(*k)
@ -105,7 +91,7 @@ class TestTriangulationExample(GtsamTestCase):
self.gtsamAssertEquals(self.landmark, triangulated_landmark, 1e-9)
# Add some noise and try again: result should be ~ (4.995, 0.499167, 1.19814)
measurements_noisy = Point2Vector()
measurements_noisy = []
measurements_noisy.append(measurements[0] - np.array([0.1, 0.5]))
measurements_noisy.append(measurements[1] - np.array([-0.2, 0.3]))
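
Throughout this file only the pose and measurement containers become plain lists; camera sets are still built through CameraSetCal3_S2 (or CameraSetCal3Bundler) when the per-camera overload of triangulatePoint3 is exercised. A hedged sketch of that overload with made-up geometry and calibration values; the list constructor of CameraSetCal3_S2 mirrors the CameraSetCal3Fisheye usage earlier in this commit:

    import numpy as np
    import gtsam

    cal = gtsam.Cal3_S2(1500, 1200, 0, 640, 480)
    pose1 = gtsam.Pose3(gtsam.Rot3.Ypr(-np.pi / 2, 0.0, -np.pi / 2), np.array([0.0, 0.0, 1.0]))
    pose2 = pose1.compose(gtsam.Pose3(gtsam.Rot3(), np.array([1.0, 0.0, 0.0])))

    cameras = gtsam.CameraSetCal3_S2([gtsam.PinholeCameraCal3_S2(pose1, cal),
                                      gtsam.PinholeCameraCal3_S2(pose2, cal)])
    landmark = np.array([5.0, 0.5, 1.2])
    measurements = [c.project(landmark) for c in cameras]  # a plain list now

    point = gtsam.triangulatePoint3(cameras, measurements, rank_tol=1e-9, optimize=False)
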
@ -172,8 +158,8 @@ class TestTriangulationExample(GtsamTestCase):
z2: Point2 = camera2.project(landmark)
z3: Point2 = camera3.project(landmark)
poses = gtsam.Pose3Vector([pose1, pose2, pose3])
measurements = Point2Vector([z1, z2, z3])
poses = [pose1, pose2, pose3]
measurements = [z1, z2, z3]
# noise free, so should give exactly the landmark
actual = gtsam.triangulatePoint3(poses,