  SharedGaussian model_inlier_;
  SharedGaussian model_outlier_;

  double prior_inlier_;
  double prior_outlier_;

  bool flag_bump_up_near_zero_probs_;

  /** concept check by type */
  GTSAM_CONCEPT_LIE_TYPE(T)
  GTSAM_CONCEPT_TESTABLE_TYPE(T)
public:

  // shorthand for a smart pointer to a factor
  typedef typename boost::shared_ptr<BetweenFactorEM> shared_ptr;

  /** Constructor */
  BetweenFactorEM(Key key1, Key key2, const VALUE& measured,
      const SharedGaussian& model_inlier, const SharedGaussian& model_outlier,
      const double prior_inlier, const double prior_outlier,
      const bool flag_bump_up_near_zero_probs = false) :
      Base(cref_list_of<2>(key1)(key2)), key1_(key1), key2_(key2),
      measured_(measured), model_inlier_(model_inlier), model_outlier_(model_outlier),
      prior_inlier_(prior_inlier), prior_outlier_(prior_outlier),
      flag_bump_up_near_zero_probs_(flag_bump_up_near_zero_probs) {
  }
  /** print */
  virtual void print(const std::string& s,
      const KeyFormatter& keyFormatter = DefaultKeyFormatter) const {
    std::cout << s << "BetweenFactorEM(" << keyFormatter(key1_) << ","
        << keyFormatter(key2_) << ")\n";
    measured_.print(" measured: ");
    model_inlier_->print(" noise model inlier: ");
    model_outlier_->print(" noise model outlier: ");
    std::cout << "(prior_inlier, prior_outlier_) = (" << prior_inlier_ << ","
        << prior_outlier_ << ")\n";
  }
  /** equals */
  virtual bool equals(const NonlinearFactor& f, double tol = 1e-9) const {
    const This *t = dynamic_cast<const This*>(&f);
    if (!t || !Base::equals(f))
      return false;
    return key1_ == t->key1_ && key2_ == t->key2_
        && prior_outlier_ == t->prior_outlier_
        && prior_inlier_ == t->prior_inlier_ && measured_.equals(t->measured_);
  }
  virtual double error(const Values& x) const {
    return whitenedError(x).squaredNorm();
  }
  virtual boost::shared_ptr<GaussianFactor> linearize(const Values& x) const {
    if (!this->active(x))  // only linearize active factors
      return boost::shared_ptr<JacobianFactor>();
    std::vector<Matrix> A(this->size());
    Vector b = -whitenedError(x, A);
    return GaussianFactor::shared_ptr(new JacobianFactor(
        key1_, A[0], key2_, A[1], b, noiseModel::Unit::Create(b.size())));
  }
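  // Added note (not from the original header): whitenedError() already folds
  // the noise models and the sqrt(p) hypothesis weights into A and b, so the
  // JacobianFactor built above carries a unit noise model.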
  Vector whitenedError(const Values& x,
      boost::optional<std::vector<Matrix>&> H = boost::none) const {

    const T& p1 = x.at<T>(key1_);
    const T& p2 = x.at<T>(key2_);

    Matrix H1, H2;
    T hx = p1.between(p2, H1, H2); // h(x)
    // manifold equivalent of h(x)-z -> log(z,h(x))
    Vector err = measured_.localCoordinates(hx);

    // Calculate indicator probabilities (inlier and outlier)
    Vector p_inlier_outlier = calcIndicatorProb(x);
    double p_inlier = p_inlier_outlier[0];
    double p_outlier = p_inlier_outlier[1];

    Vector err_wh_inlier = model_inlier_->whiten(err);
    Vector err_wh_outlier = model_outlier_->whiten(err);

    Matrix invCov_inlier = model_inlier_->R().transpose() * model_inlier_->R();
    Matrix invCov_outlier = model_outlier_->R().transpose() * model_outlier_->R();

    // Stack the probability-weighted whitened errors of the two hypotheses
    Vector err_wh_eq;
    err_wh_eq.resize(err_wh_inlier.rows() * 2);
    err_wh_eq << sqrt(p_inlier) * err_wh_inlier.array(),
                 sqrt(p_outlier) * err_wh_outlier.array();

    if (H) {
      // Stack the Jacobians of the two hypotheses for each key
      Matrix H1_inlier = sqrt(p_inlier) * model_inlier_->Whiten(H1);
      Matrix H1_outlier = sqrt(p_outlier) * model_outlier_->Whiten(H1);
      Matrix H1_aug = stack(2, &H1_inlier, &H1_outlier);

      Matrix H2_inlier = sqrt(p_inlier) * model_inlier_->Whiten(H2);
      Matrix H2_outlier = sqrt(p_outlier) * model_outlier_->Whiten(H2);
      Matrix H2_aug = stack(2, &H2_inlier, &H2_outlier);

      (*H)[0].resize(H1_aug.rows(), H1_aug.cols());
      (*H)[1].resize(H2_aug.rows(), H2_aug.cols());
      (*H)[0] = H1_aug;
      (*H)[1] = H2_aug;
    }

    return err_wh_eq;
  }
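  // Added note: since error() returns the squared norm of this stacked vector,
  // the factor's effective cost is
  //   E(x) = p_in * ||Sigma_in^{-1/2} e(x)||^2 + p_out * ||Sigma_out^{-1/2} e(x)||^2,
  // with e(x) = measured_.localCoordinates(p1.between(p2)) and (p_in, p_out)
  // taken from calcIndicatorProb(x).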
  /* Compute the posterior probabilities of the inlier and outlier hypotheses. */
  Vector calcIndicatorProb(const Values& x) const {

    bool debug = false;

    Vector err = unwhitenedError(x);

    // Calculate indicator probabilities (inlier and outlier)
    Vector err_wh_inlier = model_inlier_->whiten(err);
    Vector err_wh_outlier = model_outlier_->whiten(err);

    Matrix invCov_inlier = model_inlier_->R().transpose() * model_inlier_->R();
    Matrix invCov_outlier = model_outlier_->R().transpose() * model_outlier_->R();

    double p_inlier = prior_inlier_ * std::sqrt(invCov_inlier.determinant())
        * exp(-0.5 * err_wh_inlier.dot(err_wh_inlier));
    double p_outlier = prior_outlier_ * std::sqrt(invCov_outlier.determinant())
        * exp(-0.5 * err_wh_outlier.dot(err_wh_outlier));

    if (debug) {
      std::cout << "in calcIndicatorProb. err_unwh: " << err[0] << ", "
          << err[1] << ", " << err[2] << std::endl;
      std::cout << "in calcIndicatorProb. err_wh_inlier: " << err_wh_inlier[0]
          << ", " << err_wh_inlier[1] << ", " << err_wh_inlier[2] << std::endl;
      std::cout << "in calcIndicatorProb. err_wh_inlier.dot(err_wh_inlier): "
          << err_wh_inlier.dot(err_wh_inlier) << std::endl;
      std::cout << "in calcIndicatorProb. err_wh_outlier.dot(err_wh_outlier): "
          << err_wh_outlier.dot(err_wh_outlier) << std::endl;
      std::cout << "in calcIndicatorProb. p_inlier, p_outlier before normalization: "
          << p_inlier << ", " << p_outlier << std::endl;
    }

    // Normalize
    double sumP = p_inlier + p_outlier;
    p_inlier /= sumP;
    p_outlier /= sumP;

    if (flag_bump_up_near_zero_probs_) {
      // Bump up near-zero probabilities to avoid degenerate weights
      double minP = 0.05; // 0.1 / 2 indicator variables
      if (p_inlier < minP || p_outlier < minP) {
        if (p_inlier < minP)
          p_inlier = minP;
        if (p_outlier < minP)
          p_outlier = minP;
        sumP = p_inlier + p_outlier;
        p_inlier /= sumP;
        p_outlier /= sumP;
      }
    }

    return (Vector(2) << p_inlier, p_outlier).finished();
  }
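  // Added note: with Lambda = R^T R the information matrix of each model, the
  // unnormalized weights computed above are
  //   p_in  ~ prior_inlier_  * sqrt(det(Lambda_in))  * exp(-0.5 * e_in^T  e_in)
  //   p_out ~ prior_outlier_ * sqrt(det(Lambda_out)) * exp(-0.5 * e_out^T e_out)
  // where e_in/e_out are the whitened errors; the returned pair is normalized
  // to sum to one (and optionally bumped away from zero).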
  Vector unwhitenedError(const Values& x) const {
    const T& p1 = x.at<T>(key1_);
    const T& p2 = x.at<T>(key2_);

    Matrix H1, H2;
    T hx = p1.between(p2, H1, H2); // h(x)

    return measured_.localCoordinates(hx);
  }
  void set_flag_bump_up_near_zero_probs(bool flag) {
    flag_bump_up_near_zero_probs_ = flag;
  }

  bool get_flag_bump_up_near_zero_probs() const {
    return flag_bump_up_near_zero_probs_;
  }
  SharedGaussian get_model_inlier() const {
    return model_inlier_;
  }

  SharedGaussian get_model_outlier() const {
    return model_outlier_;
  }

  Matrix get_model_inlier_cov() const {
    return (model_inlier_->R().transpose() * model_inlier_->R()).inverse();
  }

  Matrix get_model_outlier_cov() const {
    return (model_outlier_->R().transpose() * model_outlier_->R()).inverse();
  }
  /* Update the noise models using the joint marginal covariance of the two states. */
  void updateNoiseModels(const Values& values, const NonlinearFactorGraph& graph) {
    // Get the joint covariance of the involved states
    KeyVector Keys;
    Keys.push_back(key1_);
    Keys.push_back(key2_);
    Marginals marginals(graph, values, Marginals::QR);
    JointMarginal joint_marginal12 = marginals.jointMarginalCovariance(Keys);

    Matrix cov1 = joint_marginal12(key1_, key1_);
    Matrix cov2 = joint_marginal12(key2_, key2_);
    Matrix cov12 = joint_marginal12(key1_, key2_);

    updateNoiseModels_givenCovs(values, cov1, cov2, cov12);
  }
  /* Update the noise models given the marginal and cross covariances of the two states. */
  void updateNoiseModels_givenCovs(const Values& values, const Matrix& cov1,
      const Matrix& cov2, const Matrix& cov12) {

    const T& p1 = values.at<T>(key1_);
    const T& p2 = values.at<T>(key2_);

    Matrix H1, H2;
    p1.between(p2, H1, H2); // h(x)

    Matrix H;
    H.resize(H1.rows(), H1.cols() + H2.cols());
    H << H1, H2; // H = [H1 H2]

    Matrix joint_cov;
    joint_cov.resize(cov1.rows() + cov2.rows(), cov1.cols() + cov2.cols());
    joint_cov << cov1, cov12, cov12.transpose(), cov2;

    Matrix cov_state = H * joint_cov * H.transpose();

    // Inflate both hypothesis models by the propagated state uncertainty
    Matrix covRinlier =
        (model_inlier_->R().transpose() * model_inlier_->R()).inverse();
    model_inlier_ = noiseModel::Gaussian::Covariance(covRinlier + cov_state);

    Matrix covRoutlier =
        (model_outlier_->R().transpose() * model_outlier_->R()).inverse();
    model_outlier_ = noiseModel::Gaussian::Covariance(covRoutlier + cov_state);
  }
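  // Added note: with H = [H1 H2] and Sigma_12 the joint covariance assembled
  // above, this amounts to inflating both hypothesis covariances by the
  // propagated state uncertainty:
  //   Sigma_in  <- Sigma_in  + H * Sigma_12 * H^T
  //   Sigma_out <- Sigma_out + H * Sigma_12 * H^T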
  /** return the measured value */
  const VALUE& measured() const {
    return measured_;
  }

  std::size_t size() const {
    return 2;
  }

  virtual size_t dim() const {
    return model_inlier_->R().rows() + model_inlier_->R().cols();
  }
private:

  /** Serialization function */
  friend class boost::serialization::access;
  template<class ARCHIVE>
  void serialize(ARCHIVE& ar, const unsigned int /*version*/) {
    ar & boost::serialization::make_nvp("NonlinearFactor",
        boost::serialization::base_object<Base>(*this));
    ar & BOOST_SERIALIZATION_NVP(measured_);
  }
}; // \class BetweenFactorEM
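A rough usage sketch (not part of the header): it assumes VALUE = gtsam::Pose2, illustrative keys, sigmas, priors and iteration count, an assumed include path for this factor, and a graph and initial estimate populated elsewhere with enough other factors (e.g. a prior) for the Marginals call inside updateNoiseModels to succeed.

#include <iostream>
#include <boost/make_shared.hpp>
#include <gtsam/geometry/Pose2.h>
#include <gtsam/nonlinear/NonlinearFactorGraph.h>
#include <gtsam/nonlinear/LevenbergMarquardtOptimizer.h>
#include <gtsam_unstable/slam/BetweenFactorEM.h>  // assumed location of this header

using namespace gtsam;

Values refineWithEM(NonlinearFactorGraph& graph, const Values& initial) {
  // A possibly-spurious loop closure between keys 1 and 5 (illustrative numbers).
  boost::shared_ptr<BetweenFactorEM<Pose2> > factor =
      boost::make_shared<BetweenFactorEM<Pose2> >(
          1, 5, Pose2(1.0, 0.0, 0.1),
          noiseModel::Isotropic::Sigma(3, 0.1),   // inlier model (tight)
          noiseModel::Isotropic::Sigma(3, 10.0),  // outlier model (loose)
          0.5, 0.5);                              // equal hypothesis priors
  graph.push_back(factor);

  Values estimate = initial;
  for (int iter = 0; iter < 5; ++iter) {
    // Re-optimize; the hypothesis weights are recomputed inside
    // whitenedError()/linearize() at every relinearization.
    estimate = LevenbergMarquardtOptimizer(graph, estimate).optimize();

    // Optionally fold the current state uncertainty into both noise models.
    factor->updateNoiseModels(estimate, graph);

    Vector p = factor->calcIndicatorProb(estimate);
    std::cout << "p(inlier) = " << p[0] << ", p(outlier) = " << p[1] << std::endl;
  }
  return estimate;
}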