main.cpp
/* Facial keypoints tracker main function, implemented by Lixuan. */
/* email: [email protected] [email protected] */
#include "tracker.h"
#include <fstream>
#include <opencv2/opencv.hpp>
#include <ctime>
#include <cstdio>  // snprintf
#include <cmath>   // M_PI (on MSVC this may additionally need _USE_MATH_DEFINES)
using namespace std;
using namespace cv;
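
// Usage (derived from the argument handling below; the binary name depends
// on how the project is built):
//   <binary> 0              -> capture from the default webcam
//   <binary> path/to/video  -> read frames from a video file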
int main(int argc, char **argv)
{
    if (argc < 2)
    {
        fprintf(stderr, "usage: %s <0 for webcam | path to a video file>\n", argv[0]);
        return EXIT_FAILURE;
    }

    char detectionmodel[] = "./models/haarcascade_frontalface_default.xml";
    char trackingmodel[]  = "./models/trackingmodel.bin";
    tracker::trackerClass mytracker(3, 0.3);
    bool isFailed = mytracker.load_model(detectionmodel, trackingmodel);  // returns true on failure
    if (isFailed) return EXIT_FAILURE;

    bool control = true;
    cv::VideoCapture cap;
    if (*argv[1] == '0')
    {
        cap.open(0);        // default webcam
    }
    else
    {
        cap.open(argv[1]);  // video file
    }
    if (!cap.isOpened())
    {
        fprintf(stderr, "failed to open capture source: %s\n", argv[1]);
        return EXIT_FAILURE;
    }
    cv::Mat frame;
    clock_t startTime, endTime;
    std::vector<float> face_pts(mytracker.GetPtsNum() * 2);  // two values (x, y) per keypoint
    char text[100];
    clock_t iters_times;
    float fps = 0;
    double pose[6];

    while (control)
    {
        cap >> frame;
        if (frame.empty())
            break;

        // Run the tracker on the frame and time a single iteration.
        startTime = clock();
        bool isTracking = mytracker.Track2D(frame, face_pts);  // keypoint regression
        mytracker.GetPose(pose);
        endTime = clock();
        iters_times = max(int(endTime - startTime), 1);        // avoid division by zero
        fps = float(CLOCKS_PER_SEC) / float(iters_times);      // note: clock() measures CPU time, not wall time

        // Overlay the frame rate and the head pose (converted from radians to degrees).
        snprintf(text, sizeof(text), "fps: %.2f", fps);
        cv::putText(frame, text, cv::Point(10, 20), cv::FONT_HERSHEY_PLAIN, 1, cv::Scalar(0, 255, 0), 2);
        snprintf(text, sizeof(text), "pitch: %.1f , yaw: %.1f , roll: %.1f",
                 pose[0] * 180.0 / M_PI, pose[1] * 180.0 / M_PI, pose[2] * 180.0 / M_PI);
        cv::putText(frame, text, cv::Point(10, 50), cv::FONT_HERSHEY_PLAIN, 1, cv::Scalar(0, 255, 0), 2);

        if (isTracking)
            tracker::show_image("test", frame, face_pts);  // draw the tracked keypoints on the frame
        else
            imshow("test", frame);

        if (waitKey(1) == 27)  // ESC exits
            control = false;
    }
    return EXIT_SUCCESS;
}