v1. Object ID/data association works. In this version, PCL-based unsupervised clustering is done separately.

master
Praveen Palanisamy 2015-12-07 17:20:12 -05:00
commit 5d4e3c8d08
8 changed files with 933 additions and 0 deletions
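The tracker in src/main.cpp below subscribes to cluster centers on the "ccs" topic as a flat Float32MultiArray of [x, y, z] triples, while the PCL-based unsupervised clustering mentioned in the commit message runs as a separate node. A minimal sketch of such a clustering node is given here for context only; the input topic name and the Euclidean-clustering parameters are assumptions, not code from this commit.

// Sketch of the separate PCL clustering node that feeds "ccs" (not part of this commit).
// The "input_cloud" topic name and the clustering thresholds are placeholder assumptions.
#include <ros/ros.h>
#include <std_msgs/Float32MultiArray.h>
#include <pcl/point_types.h>
#include <pcl/search/kdtree.h>
#include <pcl/segmentation/extract_clusters.h>
#include <pcl/common/centroid.h>
#include "pcl_ros/point_cloud.h"

ros::Publisher ccs_pub;

void cloudCallback(const pcl::PointCloud<pcl::PointXYZ>::ConstPtr& cloud)
{
  // Unsupervised Euclidean clustering of the incoming cloud
  pcl::search::KdTree<pcl::PointXYZ>::Ptr tree(new pcl::search::KdTree<pcl::PointXYZ>);
  tree->setInputCloud(cloud);
  std::vector<pcl::PointIndices> clusterIndices;
  pcl::EuclideanClusterExtraction<pcl::PointXYZ> ec;
  ec.setClusterTolerance(0.3); // metres, placeholder
  ec.setMinClusterSize(10);
  ec.setMaxClusterSize(600);
  ec.setSearchMethod(tree);
  ec.setInputCloud(cloud);
  ec.extract(clusterIndices);

  // Pack the first six centroids as [x0,y0,z0, x1,y1,z1, ...], the layout KFT() reads in triples
  std_msgs::Float32MultiArray ccs;
  for (size_t i = 0; i < clusterIndices.size() && i < 6; ++i)
  {
    Eigen::Vector4f centroid;
    pcl::compute3DCentroid(*cloud, clusterIndices[i], centroid);
    ccs.data.push_back(centroid[0]);
    ccs.data.push_back(centroid[1]);
    ccs.data.push_back(centroid[2]);
  }
  if (ccs.data.size() == 6 * 3) // the tracker expects exactly six clusters
    ccs_pub.publish(ccs);
}

int main(int argc, char** argv)
{
  ros::init(argc, argv, "pcl_clusterer");
  ros::NodeHandle nh;
  ccs_pub = nh.advertise<std_msgs::Float32MultiArray>("ccs", 1);
  ros::Subscriber sub = nh.subscribe("input_cloud", 1, cloudCallback);
  ros::spin();
}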

3
.gitignore vendored Normal file

@@ -0,0 +1,3 @@
*.pro
*~
*.pro.user

170
CMakeLists.txt Normal file

@@ -0,0 +1,170 @@
cmake_minimum_required(VERSION 2.8.3)
project(kf_tracker)
set(CMAKE_CXX_FLAGS "-std=c++0x ${CMAKE_CXX_FLAGS}")
## Find catkin macros and libraries
## if COMPONENTS list like find_package(catkin REQUIRED COMPONENTS xyz)
## is used, also find other catkin packages
find_package(catkin REQUIRED COMPONENTS
pcl_ros
roscpp
sensor_msgs
)
find_package( OpenCV REQUIRED )
## System dependencies are found with CMake's conventions
# find_package(Boost REQUIRED COMPONENTS system)
## Uncomment this if the package has a setup.py. This macro ensures
## modules and global scripts declared therein get installed
## See http://ros.org/doc/api/catkin/html/user_guide/setup_dot_py.html
# catkin_python_setup()
################################################
## Declare ROS messages, services and actions ##
################################################
## To declare and build messages, services or actions from within this
## package, follow these steps:
## * Let MSG_DEP_SET be the set of packages whose message types you use in
## your messages/services/actions (e.g. std_msgs, actionlib_msgs, ...).
## * In the file package.xml:
## * add a build_depend and a run_depend tag for each package in MSG_DEP_SET
## * If MSG_DEP_SET isn't empty the following dependencies might have been
## pulled in transitively but can be declared for certainty nonetheless:
## * add a build_depend tag for "message_generation"
## * add a run_depend tag for "message_runtime"
## * In this file (CMakeLists.txt):
## * add "message_generation" and every package in MSG_DEP_SET to
## find_package(catkin REQUIRED COMPONENTS ...)
## * add "message_runtime" and every package in MSG_DEP_SET to
## catkin_package(CATKIN_DEPENDS ...)
## * uncomment the add_*_files sections below as needed
## and list every .msg/.srv/.action file to be processed
## * uncomment the generate_messages entry below
## * add every package in MSG_DEP_SET to generate_messages(DEPENDENCIES ...)
## Generate messages in the 'msg' folder
# add_message_files(
# FILES
# Message1.msg
# Message2.msg
# )
## Generate services in the 'srv' folder
# add_service_files(
# FILES
# Service1.srv
# Service2.srv
# )
## Generate actions in the 'action' folder
# add_action_files(
# FILES
# Action1.action
# Action2.action
# )
## Generate added messages and services with any dependencies listed here
# generate_messages(
# DEPENDENCIES
# sensor_msgs
# )
###################################
## catkin specific configuration ##
###################################
## The catkin_package macro generates cmake config files for your package
## Declare things to be passed to dependent projects
## INCLUDE_DIRS: uncomment this if your package contains header files
## LIBRARIES: libraries you create in this project that dependent projects also need
## CATKIN_DEPENDS: catkin_packages dependent projects also need
## DEPENDS: system dependencies of this project that dependent projects also need
catkin_package(
# INCLUDE_DIRS include
# LIBRARIES kf_tracker
# CATKIN_DEPENDS pcl_ros roscpp sensor_msgs
# DEPENDS system_lib
)
###########
## Build ##
###########
## Specify additional locations of header files
## Your package locations should be listed before other locations
# include_directories(include)
include_directories(
${catkin_INCLUDE_DIRS}
${OpenCV_INCLUDE_DIRS}
include
)
## Declare a cpp library
# add_library(kf_tracker
# src/${PROJECT_NAME}/kf_tracker.cpp
# )
## Declare a cpp executable
# add_executable(kf_tracker_node src/kf_tracker_node.cpp)
add_executable( tracker src/main.cpp )
target_link_libraries ( tracker ${OpenCV_LIBRARIES} ${catkin_LIBRARIES})
## Add cmake target dependencies of the executable/library
## as an example, message headers may need to be generated before nodes
# add_dependencies(kf_tracker_node kf_tracker_generate_messages_cpp)
## Specify libraries to link a library or executable target against
# target_link_libraries(kf_tracker_node
# ${catkin_LIBRARIES}
# )
#############
## Install ##
#############
# all install targets should use catkin DESTINATION variables
# See http://ros.org/doc/api/catkin/html/adv_user_guide/variables.html
## Mark executable scripts (Python etc.) for installation
## in contrast to setup.py, you can choose the destination
# install(PROGRAMS
# scripts/my_python_script
# DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
# )
## Mark executables and/or libraries for installation
# install(TARGETS kf_tracker kf_tracker_node
# ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
# LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
# RUNTIME DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
# )
## Mark cpp header files for installation
# install(DIRECTORY include/${PROJECT_NAME}/
# DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION}
# FILES_MATCHING PATTERN "*.h"
# PATTERN ".svn" EXCLUDE
# )
## Mark other files for installation (e.g. launch and bag files, etc.)
# install(FILES
# # myfile1
# # myfile2
# DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}
# )
#############
## Testing ##
#############
## Add gtest based cpp test target and link libraries
# catkin_add_gtest(${PROJECT_NAME}-test test/test_kf_tracker.cpp)
# if(TARGET ${PROJECT_NAME}-test)
# target_link_libraries(${PROJECT_NAME}-test ${PROJECT_NAME})
# endif()
## Add folders to be run by python nosetests
# catkin_add_nosetests(test)

23
include/kf_tracker/CKalmanFilter.h Normal file

@@ -0,0 +1,23 @@
#include <iostream>
#include <vector>
#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/video/tracking.hpp>
using namespace cv;
using namespace std;
class CKalmanFilter
{
public:
CKalmanFilter(vector<Vec2f>);
~CKalmanFilter();
vector<Vec2f> predict();
vector<Vec2f> update(vector<Vec2f>);
KalmanFilter* kalman;
vector<Vec2f> prevResult;
};
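Intended per-frame usage of this class (mirroring the #ifdef testT driver in src/featureDetection.cpp): construct it from the previous frame's pair of (r, theta) lane lines, call predict(), then correct with the current frame's Hough measurement. A minimal sketch, assuming both vectors hold at least two lines:

#include <vector>
#include <opencv2/core/core.hpp>
#include "kf_tracker/CKalmanFilter.h"

// prevLines / currLines: two (r, theta) lane lines from the previous and the current frame
std::vector<cv::Vec2f> smoothLanes(const std::vector<cv::Vec2f>& prevLines,
                                   const std::vector<cv::Vec2f>& currLines)
{
  CKalmanFilter kf(prevLines);   // seeds the 4-D state (r1, theta1, r2, theta2)
  kf.predict();                  // advances statePre to the current frame
  return kf.update(currLines);   // corrected pair, ordered so the line with the smaller r comes first
}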

36
include/kf_tracker/featureDetection.h Normal file

@@ -0,0 +1,36 @@
#include <iostream>
#include <vector>
#include <fstream>
#include <string>
#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/video/tracking.hpp>
using namespace cv;
using namespace std;
class featureDetection{
public:
featureDetection();
~featureDetection();
void filteringPipeLine(Mat);
vector<Vec2f> houghTransform();
Mat lineItr(Mat,vector<Vec2f>, string);
int _width, _height;
protected:
bool findIntersection(vector<Point>, Point&);
vector<Point2f> ransac(vector<Point2f>);
void visualize(Mat);
vector<Vec2f> _lines;
ofstream myfile;
Mat _detectedEdges;
int _LMWidth;
int _thres;
float _rho, _theta,_ransacThres, _houghThres;
};
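The per-frame call order for this class (the same sequence the #ifdef testT driver in src/featureDetection.cpp follows) is filteringPipeLine, then houghTransform, then lineItr. A minimal sketch, assuming a single-channel input frame:

#include <opencv2/imgproc/imgproc.hpp>
#include "kf_tracker/featureDetection.h"

cv::Mat processFrame(featureDetection& detect, const cv::Mat& gray, const std::string& name)
{
  cv::Mat frame;
  cv::resize(gray, frame, cv::Size(detect._width, detect._height)); // working resolution used by the class
  detect.filteringPipeLine(frame);                        // lane-marker filter + threshold
  std::vector<cv::Vec2f> lines = detect.houghTransform(); // two clustered (r, theta) lines
  if (lines.size() < 2)
    return frame;                                         // lineItr() expects exactly two lines
  return detect.lineItr(frame, lines, name);              // draw, log and visualize the result
}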

58
package.xml Normal file

@@ -0,0 +1,58 @@
<?xml version="1.0"?>
<package>
<name>kf_tracker</name>
<version>0.0.0</version>
<description>The kf_tracker package</description>
<!-- One maintainer tag required, multiple allowed, one person per tag -->
<!-- Example: -->
<!-- <maintainer email="jane.doe@example.com">Jane Doe</maintainer> -->
<maintainer email="praveenp@cmu.edu">praveen</maintainer>
<!-- One license tag required, multiple allowed, one license per tag -->
<!-- Commonly used license strings: -->
<!-- BSD, MIT, Boost Software License, GPLv2, GPLv3, LGPLv2.1, LGPLv3 -->
<license>TODO</license>
<!-- Url tags are optional, but multiple are allowed, one per tag -->
<!-- Optional attribute type can be: website, bugtracker, or repository -->
<!-- Example: -->
<!-- <url type="website">http://wiki.ros.org/kf_tracker</url> -->
<!-- Author tags are optional, multiple are allowed, one per tag -->
<!-- Authors do not have to be maintainers, but could be -->
<!-- Example: -->
<!-- <author email="jane.doe@example.com">Jane Doe</author> -->
<!-- The *_depend tags are used to specify dependencies -->
<!-- Dependencies can be catkin packages or system dependencies -->
<!-- Examples: -->
<!-- Use build_depend for packages you need at compile time: -->
<!-- <build_depend>message_generation</build_depend> -->
<!-- Use buildtool_depend for build tool packages: -->
<!-- <buildtool_depend>catkin</buildtool_depend> -->
<!-- Use run_depend for packages you need at runtime: -->
<!-- <run_depend>message_runtime</run_depend> -->
<!-- Use test_depend for packages you need only for testing: -->
<!-- <test_depend>gtest</test_depend> -->
<buildtool_depend>catkin</buildtool_depend>
<build_depend>pcl_ros</build_depend>
<build_depend>roscpp</build_depend>
<build_depend>sensor_msgs</build_depend>
<build_depend>libpcl-all-dev</build_depend>
<run_depend>libpcl-all</run_depend>
<run_depend>pcl_ros</run_depend>
<run_depend>roscpp</run_depend>
<run_depend>sensor_msgs</run_depend>
<!-- The export tag contains other, unspecified, tags -->
<export>
<!-- Other tools can request additional information be placed here -->
</export>
</package>

71
src/CKalmanFilter.cpp Normal file

@@ -0,0 +1,71 @@
#include <iostream>
#include "CKalmanFilter.h"
using namespace cv;
using namespace std;
// Constructor
CKalmanFilter::CKalmanFilter(vector<Vec2f> p){
kalman = new KalmanFilter( 4, 4, 0 ); // 4 measurement and state parameters
kalman->transitionMatrix = (Mat_<float>(4, 4) << 1,0,0,0, 0,1,0,0, 0,0,1,0, 0,0,0,1);
// Initialization
prevResult = p;
kalman->statePre.at<float>(0) = p[0][0]; // r1
kalman->statePre.at<float>(1) = p[0][1]; // theta1
kalman->statePre.at<float>(2) = p[1][0]; // r2
kalman->statePre.at<float>(3) = p[1][1]; // theta2
kalman->statePost.at<float>(0)=p[0][0];
kalman->statePost.at<float>(1)=p[0][1];
kalman->statePost.at<float>(2)=p[1][0];
kalman->statePost.at<float>(3)=p[1][1];
setIdentity(kalman->measurementMatrix);
setIdentity(kalman->processNoiseCov, Scalar::all(1e-4));
setIdentity(kalman->measurementNoiseCov, Scalar::all(1e-1));
setIdentity(kalman->errorCovPost, Scalar::all(5));
}
// Destructor
CKalmanFilter::~CKalmanFilter(){
delete kalman;
}
// Prediction
vector<Vec2f> CKalmanFilter::predict(){
Mat prediction = kalman->predict(); // predict the state of the next frame
prevResult[0][0] = prediction.at<float>(0);prevResult[0][1] = prediction.at<float>(1);
prevResult[1][0] = prediction.at<float>(2);prevResult[1][1] = prediction.at<float>(3);
return prevResult;
}
// Correct the prediction based on the measurement
vector<Vec2f> CKalmanFilter::update(vector<Vec2f> measure){
Mat_<float> measurement(4,1);
measurement.setTo(Scalar(0));
measurement.at<float>(0) = measure[0][0];measurement.at<float>(1) = measure[0][1];
measurement.at<float>(2) = measure[1][0];measurement.at<float>(3) = measure[1][1];
Mat estimated = kalman->correct(measurement); // correct the predicted state with the current measurement
// Write the corrected estimate back into measure, keeping the line with the smaller r first
if(estimated.at<float>(0) < estimated.at<float>(2)){
measure[0][0] = estimated.at<float>(0);measure[0][1] = estimated.at<float>(1);
measure[1][0] = estimated.at<float>(2);measure[1][1] = estimated.at<float>(3);
}
else{
measure[0][0] = estimated.at<float>(2);measure[0][1] = estimated.at<float>(3);
measure[1][0] = estimated.at<float>(0);measure[1][1] = estimated.at<float>(1);
}
waitKey(1);
return measure; // return the measurement
}

287
src/featureDetection.cpp Normal file

@@ -0,0 +1,287 @@
#include <iostream>
#include <string.h>
#include "kf_tracker/featureDetection.h"
#include "kf_tracker/CKalmanFilter.h"
using namespace cv;
featureDetection::featureDetection(){
_detectedEdges = Mat();
_width = 800;
_height = 600;
_LMWidth = 10;
_thres = 40;
_rho = 1;
_theta = CV_PI/180.0;
_houghThres =100;
_ransacThres = 0.01;
}
featureDetection::~featureDetection(){
}
//////////
// This is just one approach. Other approaches can be Edge detection, Connected Components, etc.
void featureDetection::filteringPipeLine(Mat src){
Mat img;
///// Generating the mask to mask the top half of the image
Mat mask = Mat(src.size(), CV_8UC1, Scalar(1));
for(int i = 0;i < mask.rows/2; i++){
for(int j = 0;j < mask.cols;j++){
mask.at<uchar>(Point(j,i)) = 0;
}
}
src.copyTo(img,mask);
/////
_detectedEdges = Mat(img.size(),CV_8UC1); // detectedEdges
_detectedEdges.setTo(0);
int val = 0;
// iterating through each row
for (int j = img.rows/2;j<img.rows;j++){
unsigned char *imgptr = img.ptr<uchar>(j);
unsigned char *detEdgesptr = _detectedEdges.ptr<uchar>(j);
// iterate over each column, comparing the pixel against the columns _LMWidth to its left and right
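// The response per pixel is val = 2*I[i] - I[i-w] - I[i+w] - |I[i-w] - I[i+w]|, with w = _LMWidth,
// clamped to [0, 255]. Worked examples:
//   bright marker on dark road: I[i]=200, I[i-w]=I[i+w]=60      -> 400-60-60-0    = 280 -> 255
//   flat region:                I[i]=I[i-w]=I[i+w]=100          -> 200-100-100-0  = 0
//   one-sided step edge:        I[i]=200, I[i-w]=60, I[i+w]=200 -> 400-60-200-140 = 0
// so only narrow bright stripes roughly w pixels wide produce a strong response.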
for (int i = _LMWidth;i < img.cols - _LMWidth; ++i){
if(imgptr[i]!= 0){
val = 2*imgptr[i];
val += -imgptr[i - _LMWidth];
val += -imgptr[i + _LMWidth];
val += -abs((int)(imgptr[i - _LMWidth] - imgptr[i + _LMWidth]));
val = (val<0)?0:val;
val = (val>255)?255:val;
detEdgesptr[i] = (unsigned char) val;
}
}
}
// Thresholding
threshold(_detectedEdges,_detectedEdges,_thres,255,0);
}
//////////
// Performing Hough Transform
vector<Vec2f> featureDetection::houghTransform(){
Mat _detectedEdgesRGB;
cvtColor(_detectedEdges,_detectedEdgesRGB, CV_GRAY2BGR); // converting to RGB
HoughLines(_detectedEdges,_lines,_rho,_theta,_houghThres); // Finding the hough lines
vector<Vec2f> retVar;
if (_lines.size() > 1){
Mat labels,centers;
Mat samples = Mat(_lines.size(),2,CV_32F);
for (int i = 0;i < _lines.size();i++){
samples.at<float>(i,0) = _lines[i][0];
samples.at<float>(i,1) = _lines[i][1];
}
// K means clustering to get two lines
kmeans(samples, 2, labels, TermCriteria(CV_TERMCRIT_ITER|CV_TERMCRIT_EPS, 1000, 0.001), 5, KMEANS_PP_CENTERS, centers );
////////////////// Using RANSAC to get rid of outliers
_lines.clear();
vector<Point2f> left;
vector<Point2f> right;
for(int i = 0;i < labels.rows; i++){
if (labels.at<int>(i) == 0) left.push_back(Point2f(samples.at<float>(i,0), samples.at<float>(i,1)));
else right.push_back(Point2f(samples.at<float>(i,0), samples.at<float>(i,1)));
}
// Performing Ransac
vector<Point2f> leftR = ransac(left);
vector<Point2f> rightR = ransac(right);
//////////////////
if (leftR.size() < 1 || rightR.size() < 1 ||
(float)(cos((leftR[0].y + leftR[1].y)/2) * cos((rightR[0].y + rightR[1].y)/2)) >= 0) return retVar;
// pushing the end points of the line to _lines
_lines.push_back(Vec2f((leftR[0].x + leftR[1].x)/2, (leftR[0].y + leftR[1].y)/2));
_lines.push_back(Vec2f((rightR[0].x + rightR[1].x)/2, (rightR[0].y + rightR[1].y)/2));
}
return _lines;
}
// RANSAC-style outlier rejection: every pair of points is tried as a line hypothesis
// and the pair with the most inliers (within _ransacThres) is kept
// TODO: better implementation
vector<Point2f> featureDetection::ransac(vector<Point2f> data){
vector<Point2f> res;
int maxInliers = 0;
// Picking up the first sample
for(int i = 0;i < data.size();i++){
Point2f p1 = data[i];
// Picking up the second sample
for(int j = i + 1;j < data.size();j++){
Point2f p2 = data[j];
int n = 0;
// Finding the total number of inliers
for (int k = 0;k < data.size();k++){
Point2f p3 = data[k];
float normalLength = norm(p2 - p1);
float distance = abs((float)((p3.x - p1.x) * (p2.y - p1.y) - (p3.y - p1.y) * (p2.x - p1.x)) / normalLength);
if (distance < _ransacThres) n++;
}
// if the current selection has more inliers, update the result and maxInliers
if (n > maxInliers) {
res.clear();
maxInliers = n;
res.push_back(p1);
res.push_back(p2);
}
}
}
return res;
}
Mat featureDetection::lineItr(Mat img, vector<Vec2f> lines, string name){
Mat imgRGB;
cvtColor(img,imgRGB,CV_GRAY2RGB); // converting the image to RGB for display
vector<Point> endPoints;
// Here, I convert the polar coordinates to Cartesian coordinates.
// Then, I extend the line to meet the boundary of the image.
for (int i = 0;i < lines.size();i++){
float r = lines[i][0];
float t = lines[i][1];
float x = r*cos(t);
float y = r*sin(t);
Point p1(cvRound(x - 1.0*sin(t)*1000), cvRound(y + cos(t)*1000));
Point p2(cvRound(x + 1.0*sin(t)*1000), cvRound(y - cos(t)*1000));
clipLine(img.size(),p1,p2);
if (p1.y > p2.y){
endPoints.push_back(p1);
endPoints.push_back(p2);
}
else{
endPoints.push_back(p2);
endPoints.push_back(p1);
}
}
Point pint;
bool check = findIntersection(endPoints,pint);
if (check){
line(imgRGB,endPoints[0],pint,Scalar(0,0,255),5);
line(imgRGB,endPoints[2],pint,Scalar(0,0,255),5);
}
/////
float xIntercept = min(endPoints[0].x,endPoints[2].x);
if (check) myfile << name << "," << xIntercept * 2 << "," << pint.x * 2 << endl; // pint is only valid when an intersection was found
visualize(imgRGB); // Visualize the final result
return imgRGB;
}
// Finding the Vanishing Point
bool featureDetection::findIntersection(vector<Point> endP, Point& pi){
Point x = endP[2] - endP[0];
Point d1 = endP[1] - endP[0];
Point d2 = endP[3] - endP[2];
float cross = d1.x*d2.y - d1.y*d2.x;
if (abs(cross) < 1e-8) // No intersection
return false;
double t1 = (x.x * d2.y - x.y * d2.x)/cross;
pi = endP[0] + d1 * t1;
return true;
}
// Visualize
void featureDetection::visualize(Mat imgRGB){
namedWindow("detectedFeatures");
imshow("detectedFeatures",imgRGB);
waitKey(100);
}
#ifdef testT
int main()
{
featureDetection detect; // object of featureDetection class
// Image list and I/O paths for this standalone test driver; the list filename is a placeholder
ifstream imageNames("imageList.txt");
string ippath = "./images/";
string oppath = "./output/";
string imname;
getline(imageNames,imname); // first image name
ippath += imname;
Mat img1 = imread(ippath,0); // Read the image
resize(img1,img1,Size(detect._width,detect._height));
detect.filteringPipeLine(img1);
vector<Vec2f> lines = detect.houghTransform(); // Hough Transform
Mat imgFinal = detect.lineItr(img1, lines, imname);
oppath += imname;
imwrite(oppath,imgFinal);
while ( getline (imageNames,imname) ){
ippath = "./images/";
oppath = "./output/";
ippath += imname;
Mat img2 = imread(ippath,0); // Read the image
resize(img2,img2,Size(detect._width,detect._height));
detect.filteringPipeLine(img2);
vector<Vec2f> lines2 = detect.houghTransform(); // Hough Transform
if (lines2.size() < 2) {
imgFinal = detect.lineItr(img2,lines, imname);
oppath += imname;
imwrite(oppath,imgFinal);
continue;
}
///// Kalman Filter to predict the next state
CKalmanFilter KF2(lines);
vector<Vec2f> pp = KF2.predict();
vector<Vec2f> lines2Final = KF2.update(lines2);
lines = lines2Final;
imgFinal = detect.lineItr(img2,lines2, imname);
/////
oppath += imname;
imwrite(oppath,imgFinal);
}
}
#endif

285
src/main.cpp Normal file

@@ -0,0 +1,285 @@
#include <iostream>
#include <string.h>
#include <fstream>
#include <algorithm>
#include <iterator>
#include "kf_tracker/featureDetection.h"
#include "kf_tracker/CKalmanFilter.h"
#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/video/video.hpp>
#include "opencv2/video/tracking.hpp"
#include <ros/ros.h>
#include <pcl/io/pcd_io.h>
#include <pcl/point_types.h>
#include "pcl_ros/point_cloud.h"
#include <geometry_msgs/Point.h>
#include <std_msgs/Float32MultiArray.h>
#include <std_msgs/Int32MultiArray.h>
using namespace std;
using namespace cv;
ros::Publisher objID_pub;
// KF init
int stateDim=4;// state: [x, y, v_x, v_y]
int measDim=2;// measurement: [z_x, z_y]
int ctrlDim=0;
cv::KalmanFilter KF0(stateDim,measDim,ctrlDim,CV_32F);
cv::KalmanFilter KF1(stateDim,measDim,ctrlDim,CV_32F);
cv::KalmanFilter KF2(stateDim,measDim,ctrlDim,CV_32F);
cv::KalmanFilter KF3(stateDim,measDim,ctrlDim,CV_32F);
cv::KalmanFilter KF4(stateDim,measDim,ctrlDim,CV_32F);
cv::KalmanFilter KF5(stateDim,measDim,ctrlDim,CV_32F);
cv::Mat state(stateDim,1,CV_32F);
cv::Mat_<float> measurement(2,1);
// measurement.setTo(Scalar(0));
// calculate euclidean distance of two points
double euclidean_distance(geometry_msgs::Point& p1, geometry_msgs::Point& p2)
{
return sqrt((p1.x - p2.x) * (p1.x - p2.x) + (p1.y - p2.y) * (p1.y - p2.y) + (p1.z - p2.z) * (p1.z - p2.z));
}
/*
//Count unique object IDs. just to make sure same ID has not been assigned to two KF_Trackers.
int countIDs(vector<int> v)
{
transform(v.begin(), v.end(), v.begin(), abs); // O(n) where n = distance(v.end(), v.begin())
sort(v.begin(), v.end()); // Average case O(n log n), worst case O(n^2) (usually implemented as quicksort.
// To guarantee worst case O(n log n) replace with make_heap, then sort_heap.
// Unique will take a sorted range, and move things around to get duplicated
// items to the back and returns an iterator to the end of the unique section of the range
auto unique_end = unique(v.begin(), v.end()); // Again n comparisons
return distance(unique_end, v.begin()); // Constant time for random access iterators (like vector's)
}
*/
/*
objID: vector containing the IDs of the clusters that should be associated with each KF_Tracker
objID[0] corresponds to KFT0, objID[1] corresponds to KFT1 etc.
*/
void KFT(const std_msgs::Float32MultiArray::ConstPtr& ccs)
{
// First predict, to update the internal statePre variable
/* Mat prediction0 = KF0.predict();
Mat prediction1 = KF1.predict();
Mat prediction2 = KF2.predict();
Mat prediction3 = KF3.predict();
Mat prediction4 = KF4.predict();
Mat prediction5 = KF5.predict();
*/
std::vector<cv::Mat> pred{KF0.predict(),KF1.predict(),KF2.predict(),KF3.predict(),KF4.predict(),KF5.predict()};
//cout<<"Pred successfull\n";
//cv::Point predictPt(prediction.at<float>(0),prediction.at<float>(1));
// cout<<"Prediction 1 ="<<prediction.at<float>(0)<<","<<prediction.at<float>(1)<<"\n";
// Get measurements
// Extract the cluster positions from the multiArray. As a sanity check on the incoming
// data, every third (.z) coordinate should be 0.0
std::vector<geometry_msgs::Point> clusterCenters;//clusterCenters
int i=0;
for (std::vector<float>::const_iterator it=ccs->data.begin();it!=ccs->data.end();it+=3)
{
geometry_msgs::Point pt;
pt.x=*it;
pt.y=*(it+1);
pt.z=*(it+2);
clusterCenters.push_back(pt);
}
// cout<<"CLusterCenters Obtained"<<"\n";
std::vector<geometry_msgs::Point> KFpredictions;
i=0;
for (auto it=pred.begin();it!=pred.end();it++)
{
geometry_msgs::Point pt;
pt.x=(*it).at<float>(0);
pt.y=(*it).at<float>(1);
pt.z=0.0;// the state vector is [x,y,v_x,v_y]; index 2 is v_x, so there is no z component to copy
KFpredictions.push_back(pt);
}
// cout<<"Got predictions"<<"\n";
std::vector<int> objID;
// For each KF, find the cluster whose center is closest to that filter's prediction.
for(int filterN=0;filterN<6;filterN++)
{
std::vector<float> distVec;
for(int n=0;n<6;n++)
distVec.push_back(euclidean_distance(KFpredictions[filterN],clusterCenters[n]));
// cout<<"distVec[="<<distVec[0]<<","<<distVec[1]<<","<<distVec[2]<<","<<distVec[3]<<","<<distVec[4]<<","<<distVec[5]<<"\n";
objID.push_back(std::distance(distVec.begin(),min_element(distVec.begin(),distVec.end())));
// cout<<"MinD for filter"<<filterN<<"="<<*min_element(distVec.begin(),distVec.end())<<"\n";
}
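// At this point objID[i] is the index of the cluster nearest to KFi's prediction;
// e.g. objID = {2,0,5,1,3,4} would mean KF0 follows cluster 2, KF1 follows cluster 0, and so on.
// Note: this greedy nearest-neighbour step assumes exactly six clusters per message and does not
// stop two filters from claiming the same cluster (hence the commented-out countIDs() check above).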
// cout<<"Got object IDs"<<"\n";
//countIDs(objID);// for verif/corner cases
//display objIDs
/* DEBUG
cout<<"objID= ";
for(auto it=objID.begin();it!=objID.end();it++)
cout<<*it<<" ,";
cout<<"\n";
*/
std_msgs::Int32MultiArray obj_id;
for(auto it=objID.begin();it!=objID.end();it++)
obj_id.data.push_back(*it);
objID_pub.publish(obj_id);
// convert clusterCenters from geometry_msgs::Point to floats
std::vector<std::vector<float> > cc;
for (int i=0;i<clusterCenters.size();i++)
{
vector<float> pt;
pt.push_back(clusterCenters[i].x);
pt.push_back(clusterCenters[i].y);
pt.push_back(clusterCenters[i].z);
cc.push_back(pt);
}
//cout<<"cc[5][0]="<<cc[5].at(0)<<"cc[5][1]="<<cc[5].at(1)<<"cc[5][2]="<<cc[5].at(2)<<"\n";
float meas0[2]={cc[0].at(0),cc[0].at(1)};
float meas1[2]={cc[1].at(0),cc[1].at(1)};
float meas2[2]={cc[2].at(0),cc[2].at(1)};
float meas3[2]={cc[3].at(0),cc[3].at(1)};
float meas4[2]={cc[4].at(0),cc[4].at(1)};
float meas5[2]={cc[5].at(0),cc[5].at(1)};
// The update phase
cv::Mat meas0Mat=cv::Mat(2,1,CV_32F,meas0);
cv::Mat meas1Mat=cv::Mat(2,1,CV_32F,meas1);
cv::Mat meas2Mat=cv::Mat(2,1,CV_32F,meas2);
cv::Mat meas3Mat=cv::Mat(2,1,CV_32F,meas3);
cv::Mat meas4Mat=cv::Mat(2,1,CV_32F,meas4);
cv::Mat meas5Mat=cv::Mat(2,1,CV_32F,meas5);
//cout<<"meas0Mat"<<meas0Mat<<"\n";
Mat estimated0 = KF0.correct(meas0Mat);
Mat estimated1 = KF1.correct(meas1Mat);
Mat estimated2 = KF2.correct(meas2Mat);
Mat estimated3 = KF3.correct(meas3Mat);
Mat estimated4 = KF4.correct(meas4Mat);
Mat estimated5 = KF5.correct(meas5Mat);
// cout<<"estimate="<<estimated.at<float>(0)<<","<<estimated.at<float>(1)<<"\n";
// Point statePt(estimated.at<float>(0),estimated.at<float>(1));
//cout<<"DONE KF_TRACKER\n";
}
int main(int argc, char** argv)
{
// ROS init
ros::init (argc,argv,"KFTracker");
ros::NodeHandle nh;
// Publishers to publish the state of the objects (pos and vel)
//objState1=nh.advertise<geometry_msgs::Twist> ("obj_1",1);
KF0.transitionMatrix = (Mat_<float>(4, 4) << 1,0,1,0, 0,1,0,1, 0,0,1,0, 0,0,0,1);
KF1.transitionMatrix = (Mat_<float>(4, 4) << 1,0,1,0, 0,1,0,1, 0,0,1,0, 0,0,0,1);
KF2.transitionMatrix = (Mat_<float>(4, 4) << 1,0,1,0, 0,1,0,1, 0,0,1,0, 0,0,0,1);
KF3.transitionMatrix = (Mat_<float>(4, 4) << 1,0,1,0, 0,1,0,1, 0,0,1,0, 0,0,0,1);
KF4.transitionMatrix = (Mat_<float>(4, 4) << 1,0,1,0, 0,1,0,1, 0,0,1,0, 0,0,0,1);
KF5.transitionMatrix = (Mat_<float>(4, 4) << 1,0,1,0, 0,1,0,1, 0,0,1,0, 0,0,0,1);
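// Each transition matrix encodes a constant-velocity model with an implicit unit time step:
//   x_{k+1} = x_k + v_x,  y_{k+1} = y_k + v_y,  with v_x and v_y carried over unchanged.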
cv::setIdentity(KF0.measurementMatrix);
cv::setIdentity(KF1.measurementMatrix);
cv::setIdentity(KF2.measurementMatrix);
cv::setIdentity(KF3.measurementMatrix);
cv::setIdentity(KF4.measurementMatrix);
cv::setIdentity(KF5.measurementMatrix);
// Process Noise Covariance Matrix Q (diagonal, for the 4-state model [x, y, v_x, v_y]):
// [ Ex  0   0     0    ]
// [ 0   Ey  0     0    ]
// [ 0   0   Ev_x  0    ]
// [ 0   0   0     Ev_y ]
setIdentity(KF0.processNoiseCov, Scalar::all(1e-4));
setIdentity(KF1.processNoiseCov, Scalar::all(1e-4));
setIdentity(KF2.processNoiseCov, Scalar::all(1e-4));
setIdentity(KF3.processNoiseCov, Scalar::all(1e-4));
setIdentity(KF4.processNoiseCov, Scalar::all(1e-4));
setIdentity(KF5.processNoiseCov, Scalar::all(1e-4));
// Meas noise cov matrix R
cv::setIdentity(KF0.measurementNoiseCov, cv::Scalar(1e-1));
cv::setIdentity(KF1.measurementNoiseCov, cv::Scalar(1e-1));
cv::setIdentity(KF2.measurementNoiseCov, cv::Scalar(1e-1));
cv::setIdentity(KF3.measurementNoiseCov, cv::Scalar(1e-1));
cv::setIdentity(KF4.measurementNoiseCov, cv::Scalar(1e-1));
cv::setIdentity(KF5.measurementNoiseCov, cv::Scalar(1e-1));
// Set initial state
KF0.statePre.at<float>(0)=0.0;//initial x pos of the cluster
KF0.statePre.at<float>(1)=0.0;//initial y pos of the cluster
KF0.statePre.at<float>(2)=0;// initial v_x
KF0.statePre.at<float>(3)=0;//initial v_y
// Set initial state
KF1.statePre.at<float>(0)=0.0;//initial x pos of the cluster
KF1.statePre.at<float>(1)=0.0;//initial y pos of the cluster
KF1.statePre.at<float>(2)=0;// initial v_x
KF1.statePre.at<float>(3)=0;//initial v_y
// Set initial state
KF2.statePre.at<float>(0)=0.0;//initial x pos of the cluster
KF2.statePre.at<float>(1)=0.0;//initial y pos of the cluster
KF2.statePre.at<float>(2)=0;// initial v_x
KF2.statePre.at<float>(3)=0;//initial v_y
// Set initial state
KF3.statePre.at<float>(0)=0.0;//initial x pos of the cluster
KF3.statePre.at<float>(1)=0.0;//initial y pos of the cluster
KF3.statePre.at<float>(2)=0;// initial v_x
KF3.statePre.at<float>(3)=0;//initial v_y
// Set initial state
KF4.statePre.at<float>(0)=0.0;//initial x pos of the cluster
KF4.statePre.at<float>(1)=0.0;//initial y pos of the cluster
KF4.statePre.at<float>(2)=0;// initial v_x
KF4.statePre.at<float>(3)=0;//initial v_y
// Set initial state
KF5.statePre.at<float>(0)=0.0;//initial x pos of the cluster
KF5.statePre.at<float>(1)=0.0;//initial y pos of the cluster
KF5.statePre.at<float>(2)=0;// initial v_x
KF5.statePre.at<float>(3)=0;//initial v_y
cout<<"About to setup callback";
// Subscribe to the clustered pointclouds
ros::Subscriber c1=nh.subscribe("ccs",100,KFT);
objID_pub = nh.advertise<std_msgs::Int32MultiArray>("obj_id", 1);
ros::spin();
}