#include <iostream>
#include <fstream>

#include "opencv2/video/tracking.hpp"
#include "opencv2/highgui/highgui.hpp"

using namespace cv;
using namespace std;

inline bool isFlowCorrect(Point2f u)
{
    return !cvIsNaN(u.x) && !cvIsNaN(u.y) && fabs(u.x) < 1e9 && fabs(u.y) < 1e9;
}

// map a flow vector (normalized to roughly [-1, 1]) to a BGR color
// using the Middlebury color wheel
static Vec3b computeColor(float fx, float fy)
{
    static bool first = true;

    // relative lengths of color transitions:
    // these are chosen based on perceptual similarity
    // (e.g. one can distinguish more shades between red and yellow
    //  than between yellow and green)
    const int RY = 15;
    const int YG = 6;
    const int GC = 4;
    const int CB = 11;
    const int BM = 13;
    const int MR = 6;
    const int NCOLS = RY + YG + GC + CB + BM + MR;
    static Vec3i colorWheel[NCOLS];

    if (first)
    {
        int k = 0;

        for (int i = 0; i < RY; ++i, ++k)
            colorWheel[k] = Vec3i(255, 255 * i / RY, 0);

        for (int i = 0; i < YG; ++i, ++k)
            colorWheel[k] = Vec3i(255 - 255 * i / YG, 255, 0);

        for (int i = 0; i < GC; ++i, ++k)
            colorWheel[k] = Vec3i(0, 255, 255 * i / GC);

        for (int i = 0; i < CB; ++i, ++k)
            colorWheel[k] = Vec3i(0, 255 - 255 * i / CB, 255);

        for (int i = 0; i < BM; ++i, ++k)
            colorWheel[k] = Vec3i(255 * i / BM, 0, 255);

        for (int i = 0; i < MR; ++i, ++k)
            colorWheel[k] = Vec3i(255, 0, 255 - 255 * i / MR);

        first = false;
    }

    const float rad = sqrt(fx * fx + fy * fy);
    const float a = atan2(-fy, -fx) / (float)CV_PI;

    const float fk = (a + 1.0f) / 2.0f * (NCOLS - 1);
    const int k0 = static_cast<int>(fk);
    const int k1 = (k0 + 1) % NCOLS;
    const float f = fk - k0;

    Vec3b pix;

    for (int b = 0; b < 3; b++)
    {
        const float col0 = colorWheel[k0][b] / 255.0f;
        const float col1 = colorWheel[k1][b] / 255.0f;

        float col = (1 - f) * col0 + f * col1;

        if (rad <= 1)
            col = 1 - rad * (1 - col); // increase saturation with radius
        else
            col *= .75; // out of range

        pix[2 - b] = static_cast<uchar>(255.0 * col);
    }

    return pix;
}

// visualize a dense flow field as a color image; vectors are normalized
// by maxmotion, or by the largest observed magnitude when maxmotion <= 0
static void drawOpticalFlow(const Mat_<Point2f>& flow, Mat& dst, float maxmotion = -1)
{
    dst.create(flow.size(), CV_8UC3);
    dst.setTo(Scalar::all(0));

    // determine motion range:
    float maxrad = maxmotion;

    if (maxmotion <= 0)
    {
        maxrad = 1;
        for (int y = 0; y < flow.rows; ++y)
        {
            for (int x = 0; x < flow.cols; ++x)
            {
                Point2f u = flow(y, x);

                if (!isFlowCorrect(u))
                    continue;

                maxrad = max(maxrad, sqrt(u.x * u.x + u.y * u.y));
            }
        }
    }

    for (int y = 0; y < flow.rows; ++y)
    {
        for (int x = 0; x < flow.cols; ++x)
        {
            Point2f u = flow(y, x);

            if (isFlowCorrect(u))
                dst.at<Vec3b>(y, x) = computeColor(u.x / maxrad, u.y / maxrad);
        }
    }
}

// binary file format for flow data specified here:
// http://vision.middlebury.edu/flow/data/
static void writeOpticalFlowToFile(const Mat_<Point2f>& flow, const string& fileName)
{
    static const char FLO_TAG_STRING[] = "PIEH";

    ofstream file(fileName.c_str(), ios_base::binary);

    file << FLO_TAG_STRING;

    file.write((const char*) &flow.cols, sizeof(int));
    file.write((const char*) &flow.rows, sizeof(int));

    for (int i = 0; i < flow.rows; ++i)
    {
        for (int j = 0; j < flow.cols; ++j)
        {
            const Point2f u = flow(i, j);

            file.write((const char*) &u.x, sizeof(float));
            file.write((const char*) &u.y, sizeof(float));
        }
    }
}

int main(int argc, const char* argv[])
{
    if (argc < 3)
    {
        cerr << "Usage : " << argv[0] << " <frame0> <frame1> [<output flow>]" << endl;
        return -1;
    }

    Mat frame0 = imread(argv[1], IMREAD_GRAYSCALE);
    Mat frame1 = imread(argv[2], IMREAD_GRAYSCALE);

    if (frame0.empty())
    {
        cerr << "Can't open image [" << argv[1] << "]" << endl;
        return -1;
    }
    if (frame1.empty())
    {
        cerr << "Can't open image [" << argv[2] << "]" << endl;
        return -1;
    }

    if (frame1.size() != frame0.size())
    {
        cerr << "Images should be of equal sizes" << endl;
        return -1;
    }

    Mat_<Point2f> flow;
    OpticalFlowDual_TVL1 tvl1;

    const double start = getTickCount();
    tvl1(frame0, frame1, flow);
    const double timeSec = (getTickCount() - start) / getTickFrequency();
    cout << "calcOpticalFlowDual_TVL1 : " << timeSec << " sec" << endl;

    Mat out;
    drawOpticalFlow(flow, out);

    if (argc == 4)
        writeOpticalFlowToFile(flow, argv[3]);

    imshow("Flow", out);
    waitKey();

    return 0;
}
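
// For completeness, a matching reader for the .flo files produced by
// writeOpticalFlowToFile above. This is a minimal sketch, not part of the
// original sample; the name readOpticalFlowFromFile is chosen here for
// illustration only. It assumes the Middlebury layout written above:
// the 4-byte tag "PIEH", an int32 width, an int32 height, then row-major
// (u, v) float pairs, all in native byte order.
static bool readOpticalFlowFromFile(const string& fileName, Mat_<Point2f>& flow)
{
    ifstream file(fileName.c_str(), ios_base::binary);
    if (!file)
        return false;

    char tag[4];
    file.read(tag, 4);
    if (!file || tag[0] != 'P' || tag[1] != 'I' || tag[2] != 'E' || tag[3] != 'H')
        return false; // wrong magic, probably not a .flo file

    int cols = 0, rows = 0;
    file.read((char*) &cols, sizeof(int));
    file.read((char*) &rows, sizeof(int));
    if (!file || cols <= 0 || rows <= 0)
        return false;

    flow.create(rows, cols);

    for (int i = 0; i < rows; ++i)
    {
        for (int j = 0; j < cols; ++j)
        {
            Point2f u;
            file.read((char*) &u.x, sizeof(float));
            file.read((char*) &u.y, sizeof(float));
            flow(i, j) = u;
        }
    }

    return !file.fail();
}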