Mirror of https://github.com/YutaItoh/3D-Eye-Tracker.git, synced 2025-09-26 23:09:19 +08:00
Updated main with drift correction.
The gaze endpoint (c_end.centre) and the drift-corrected eye center (filteredEye.centre) can be accessed with:
cout << c_end.centre.x() << "," << c_end.centre.y() << "," << c_end.centre.z() << "," << filteredEye.centre[0] << "," << filteredEye.centre[1] << "," << filteredEye.centre[2] << std::endl;
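The diff below keeps a rolling buffer of recent eye-sphere estimates (`eyes`) and replaces the current model with a filtered one via `eye_model_updaters[cam]->eyeModelFilter(...)` before unprojecting the pupil. The actual filter lives in eye_model_updater.h; the self-contained sketch below only illustrates one plausible scheme (a per-axis median over a rolling window), so `Sphere` and `medianFilterSphere` here are hypothetical stand-ins, not the project's real types or implementation.

// Illustrative only: per-axis median over a rolling buffer of eye-sphere
// estimates, approximating the kind of drift correction this commit adds.
// `Sphere` and `medianFilterSphere` are assumed names, not the types used
// in eye_model_updater.h.
#include <algorithm>
#include <array>
#include <cstddef>
#include <deque>
#include <iostream>
#include <vector>

struct Sphere {
    std::array<double, 3> centre{};   // eyeball center in camera coordinates (mm)
    double radius = 12.0;             // rough human eyeball radius (mm)
};

// Push the newest estimate, drop the oldest once the buffer is full,
// then take the median of each coordinate (and the radius) independently.
Sphere medianFilterSphere(const Sphere& latest, std::deque<Sphere>& history,
                          std::size_t maxSize = 500) {
    history.push_back(latest);
    if (history.size() > maxSize) history.pop_front();

    auto medianOf = [&](auto getter) {
        std::vector<double> v;
        v.reserve(history.size());
        for (const auto& s : history) v.push_back(getter(s));
        std::nth_element(v.begin(), v.begin() + v.size() / 2, v.end());
        return v[v.size() / 2];
    };

    Sphere filtered;
    filtered.centre[0] = medianOf([](const Sphere& s) { return s.centre[0]; });
    filtered.centre[1] = medianOf([](const Sphere& s) { return s.centre[1]; });
    filtered.centre[2] = medianOf([](const Sphere& s) { return s.centre[2]; });
    filtered.radius    = medianOf([](const Sphere& s) { return s.radius; });
    return filtered;
}

int main() {
    std::deque<Sphere> eyes;          // rolling buffer, analogous to `eyes` in main.cpp
    Sphere current;
    current.centre = {1.2, -0.4, 35.0};
    current.radius = 11.8;
    Sphere filtered = medianFilterSphere(current, eyes);
    std::cout << filtered.centre[0] << "," << filtered.centre[1] << ","
              << filtered.centre[2] << std::endl;
    return 0;
}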
This commit is contained in:
parent d01fdfb4d5
commit 51b0b7b3a8
main/main.cpp (166 changes)
@@ -6,6 +6,7 @@
 
 
 #include <iostream>
+#include <fstream>
 #include <iomanip>
 #include <vector>
 #include <string>
@@ -19,7 +20,7 @@
 #include <boost/filesystem/fstream.hpp>
 #include <boost/thread.hpp>
 
-
+#include "opencv2/opencv.hpp"
 #include <opencv2/core/core.hpp>
 #include <opencv2/imgproc/imgproc.hpp>
 #include <opencv2/highgui/highgui.hpp>
@@ -28,7 +29,6 @@
 
 
 #include "pupilFitter.h" // 2D pupil detector
-
 #include "timer.h"
 
 #include "eye_model_updater.h" // 3D model builder
@@ -45,11 +45,13 @@ enum InputMode { CAMERA, CAMERA_MONO, VIDEO, IMAGE };
 
 int main(int argc, char *argv[]){
 
 
 // Variables for FPS
 eye_tracker::FrameRateCounter frame_rate_counter;
 
 bool kVisualization = false;
 kVisualization = true;
+singleeyefitter::EyeModelFitter::Circle curr_circle;
+
 InputMode input_mode =
 //InputMode::VIDEO; // Set a video as a video source
@@ -169,8 +171,8 @@ int main(int argc, char *argv[]){
 eyecams[1] = std::make_unique<eye_tracker::EyeCamera>(camera_indices[1], false);
 #else
 // DirectShow frame grabber
-eyecams[0] = std::make_unique<eye_tracker::EyeCameraDS>("Pupil Cam1 ID0");
-eyecams[1] = std::make_unique<eye_tracker::EyeCameraDS>("Pupil Cam1 ID2");
+eyecams[0] = std::make_unique<eye_tracker::EyeCameraDS>("Pupil Cam1 ID1");
+eyecams[1] = std::make_unique<eye_tracker::EyeCameraDS>("Pupil Cam2 ID2");
 #endif
 eye_model_updaters[0] = std::make_unique<eye_tracker::EyeModelUpdater>(focal_length, 5, 0.5);
 eye_model_updaters[1] = std::make_unique<eye_tracker::EyeModelUpdater>(focal_length, 5, 0.5);
@@ -180,11 +182,11 @@ int main(int argc, char *argv[]){
 file_stems = { "cam0", "cam1" };
 break;
 case InputMode::CAMERA_MONO:
-eyecams[0] = std::make_unique<eye_tracker::EyeCameraDS>("Pupil Cam1 ID0"); //
+eyecams[0] = std::make_unique<eye_tracker::EyeCameraDS>("Pupil Cam1 ID1"); //
 eye_model_updaters[0] = std::make_unique<eye_tracker::EyeModelUpdater>(focal_length, 5, 0.5);
 camera_undistorters[0] = std::make_unique<eye_tracker::CameraUndistorter>(K, distCoeffs);
-window_names = { "Cam0" };
-file_stems = { "cam0" };
+window_names = { "Cam1" };
+file_stems = { "cam1" };
 break;
 default:
 break;
@@ -202,12 +204,45 @@ int main(int argc, char *argv[]){
 PupilFitter pupilFitter;
 pupilFitter.setDebug(false);
 /////////////////////////
 
+//std::getchar();
+//For running a video
+//VideoCapture inputVideo1("C:\\Documents\\Osaka\\Research\\Eye Tracking\\Benchmark Videos\\eyetracking4.avi"); // Open input
+VideoWriter outputVideo1;
+outputVideo1.open("C:\\Documents\\Osaka\\Research\\Eye Tracking\\Benchmark Videos\\outSaccade.avi",
+CV_FOURCC('W', 'M', 'V', '2'),
+20,
+cv::Size(640,480),
+true);
+Mat frame1;
+
 // Main loop
 const char kTerminate = 27;//Escape 0x1b
 bool is_run = true;
+bool isSaccade = false;
+bool isBlink = false;
+int blinkCount = 0; //holds the number of blinks for this video
+int saccadeCount = 0; //holds the number of saccades for this video
+bool prevSaccade = false; //added if a saccade value was detected in the previous frame
+vector<float> timeData; //vector holding timestamps in ms corresponding to gaze data for N frames
+vector<float> xData; //corresponding x eye rotations for N frames
+vector<float> yData; //corresponding y eye rotations for N frames
+vector<float> intensityData; //holds average intensity of last N frames
+vector<singleeyefitter::EyeModelFitter::Sphere> eyes; //holds a vector of spheres for the eye model filter
+
+
 while (is_run) {
 
+
+//inputVideo1 >> frame1;//for video
+//if (frame1.empty()) {//for video
+// break;
+//}
+
+//imshow("test", frame1);//for video
+//waitKey(0);
+
+
 // Fetch key input
 char kKEY = 0;
 if (kVisualization) {
@@ -219,21 +254,31 @@ int main(int argc, char *argv[]){
 break;
 }
 
 
+
 // Fetch images
 for (size_t cam = 0; cam < kCameraNums; cam++) {
-eyecams[cam]->fetchFrame(images[cam]);
+eyecams[cam] -> fetchFrame(images[cam]);
+
 }
 // Process each camera images
 for (size_t cam = 0; cam < kCameraNums; cam++) {
+
 cv::Mat &img = images[cam];
+//img = frame1; //for video
+//imshow("test", img);
+//waitKey(1);
+
 if (img.empty()) {
 //is_run = false;
 break;
 }
 
 // Undistort a captured image
-camera_undistorters[cam]->undistort(img, img);
+//camera_undistorters[cam]->undistort(img, img);
 
+//cv::Mat img_rgb_debug = frame1.clone(); \\for video
 cv::Mat img_rgb_debug = img.clone();
 cv::Mat img_grey;
+
@@ -244,34 +289,61 @@ int main(int argc, char *argv[]){
 case 'p':
 eye_model_updaters[cam]->add_fitter_max_count(10);
 break;
+case 'q':
+is_run = false;
+break;
+case 'z':
+eye_model_updaters[cam]->rm_oldest_observation();
+break;
 default:
 break;
 }
 
 
+
+const clock_t begin_time = clock();
+
 // 2D ellipse detection
 std::vector<cv::Point2f> inlier_pts;
 cv::cvtColor(img, img_grey, CV_RGB2GRAY);
 cv::RotatedRect rr_pf;
-bool is_pupil_found = pupilFitter.pupilAreaFitRR(img_grey, rr_pf, inlier_pts);
+
+//imshow("test", img_grey);
+
+bool is_pupil_found = pupilFitter.pupilAreaFitRR(img_grey, rr_pf, inlier_pts, 15, 0, 0, 20, 30, 250, 6);
+
+//cout << "pupil fitter time: " << float(clock() - begin_time) / CLOCKS_PER_SEC << endl;
+
+const clock_t begin_time2 = clock();
+
 singleeyefitter::Ellipse2D<double> el = singleeyefitter::toEllipse<double>(eye_tracker::toImgCoordInv(rr_pf, img, 1.0));
 
+//cout << "singleeyefitter time: " << float(clock() - begin_time2) / CLOCKS_PER_SEC << endl;
+
 
 // 3D eye pose estimation
 bool is_reliable = false;
 bool is_added = false;
 const bool force_add = false;
-const double kReliabilityThreshold = 0.8;// 0.96;
-double ellipse_realiability = 0.0; /// Reliability of a detected 2D ellipse based on 3D eye model
+const double kReliabilityThreshold = 0.0;//0.96;
+double ellipse_reliability = 0.0; /// Reliability of a detected 2D ellipse based on 3D eye model
 if (is_pupil_found) {
 if (eye_model_updaters[cam]->is_model_built()) {
-ellipse_realiability = eye_model_updaters[cam]->compute_reliability(img, el, inlier_pts);
-is_reliable = (ellipse_realiability > kReliabilityThreshold);
+ellipse_reliability = eye_model_updaters[cam]->compute_reliability(img, el, inlier_pts);
+is_reliable = (ellipse_reliability > kReliabilityThreshold);
 // is_reliable = true;
+
+
+eye_model_updaters[cam]->rm_oldest_observation();
+eye_model_updaters[cam]->add_observation(img_grey, el, inlier_pts, false);
+eye_model_updaters[cam]->force_rebuild_model();
 }
 else {
+cout << "oops" << endl;
 is_added = eye_model_updaters[cam]->add_observation(img_grey, el, inlier_pts, force_add);
 }
+//TODO test in Unity to see how well this works
 }
 
 // Visualize results
@@ -284,9 +356,58 @@ int main(int argc, char *argv[]){
 
 // 3D eye ball
 if (eye_model_updaters[cam]->is_model_built()) {
-cv::putText(img, "Reliability: " + std::to_string(ellipse_realiability), cv::Point(30, 440), cv::FONT_HERSHEY_SIMPLEX, 1.0, cv::Scalar(0, 128, 255), 1);
+cv::putText(img, "Reliability: " + std::to_string(ellipse_reliability), cv::Point(30, 440), cv::FONT_HERSHEY_SIMPLEX, 1.0, cv::Scalar(0, 128, 255), 1);
 if (is_reliable) {
+
+singleeyefitter::Sphere<double> medianCircle;
+//bool useDriftCorrection = false;
+//if (eyes.size() > 0) {
+// medianCircle = eye_model_updaters[cam]->eyeModelFilter(curr_circle, eyes);
+// useDriftCorrection = true;
+//}
+
+//std::cout << "after filter: " << curr_circle.radius << std::endl;
+
 eye_model_updaters[cam]->render(img_rgb_debug, el, inlier_pts);
+eye_model_updaters[cam]->set_fitter_max_count(130); //manually sets max count
+
+//3D filtered eye model
+medianCircle = eye_model_updaters[cam]->eyeModelFilter(eye_model_updaters[cam]->fitter().eye, eyes, 500);
+if (medianCircle.radius < 10) {
+medianCircle.radius = 10;
+}
+eye_model_updaters[cam]->setEye(medianCircle);
+curr_circle = eye_model_updaters[cam]->unproject(img, el, inlier_pts);
+// 3D pupil (relative to filtered eye model)
+singleeyefitter::Ellipse2D<double> pupil_el(singleeyefitter::project(curr_circle, focal_length));
+cv::RotatedRect rr_pupil = eye_tracker::toImgCoord(singleeyefitter::toRotatedRect(pupil_el), img, 1.0f);
+singleeyefitter::EyeModelFitter::Sphere filteredEye(medianCircle.centre, medianCircle.radius);
+
+cout << "radius was " << medianCircle.radius << endl;
+cv::RotatedRect rr_eye = eye_tracker::toImgCoord(singleeyefitter::toRotatedRect(
+singleeyefitter::project(filteredEye, focal_length)), img, 1.0f);
+cv::ellipse(img_rgb_debug, rr_eye, cv::Vec3b(255, 222, 222), 2, CV_AA);
+cv::circle(img_rgb_debug, rr_eye.center, 3, cv::Vec3b(255, 32, 32), 2); // Eyeball center projection
+singleeyefitter::EyeModelFitter::Circle c_end = curr_circle;
+c_end.centre = curr_circle.centre + (10.0)*curr_circle.normal;
+
+cv::line(img_rgb_debug, rr_eye.center, rr_pupil.center, cv::Vec3b(25, 22, 222), 3, CV_AA);
+
+//update time, xdata, and ydata vectors for input into saccade detector
+dataAdd(curr_circle.centre(0), 5, xData);
+dataAdd(curr_circle.centre(1), 5, yData);
+dataAdd(clock(), 5, timeData);
+float intensity = 0;
+
+//to-Unity write
+//std::ofstream myfile("C:\\Users\\O\\Documents\\Visual Studio 2013\\Projects\\EyeTrackerRealTime\\coordinates.txt");
+//std::ofstream myfile;
+//myfile.open("C:\\Documents\\Osaka\\Research\\Presence 2017\\testcoordinates.txt", std::ios_base::app);
+//myfile << "" << c_end.centre.x() << "," << c_end.centre.y() << "," << c_end.centre.z()
+// << "," << filteredEye.centre[0] << "," << filteredEye.centre[1] << "," << filteredEye.centre[2] <<
+// std::endl;
+//myfile.close();
+
 }
 }else{
 eye_model_updaters[cam]->render_status(img_rgb_debug);
@@ -294,7 +415,10 @@ int main(int argc, char *argv[]){
 cv::Point(30, 440), cv::FONT_HERSHEY_SIMPLEX, 1.0, cv::Scalar(0, 128, 255), 2);
 }
 
+float confidence = 0;
+//outputVideo1 << img_rgb_debug;
 cv::imshow(window_names[cam], img_rgb_debug);
 
+
 } // Visualization
 
@@ -309,8 +433,16 @@ int main(int argc, char *argv[]){
 ss = 0;
 }
 
+singleeyefitter::EyeModelFitter::Circle curr_circle;
+singleeyefitter::EyeModelFitter::Circle c_end = curr_circle;
+c_end.centre = curr_circle.centre + (10.0)*curr_circle.normal; // Unit: mm
+
+
+
+
 
 }// Main capture loop
+outputVideo1.release();
 return 0;
 
 }