Image Analysis: Classic Optical Flow Methods

This article derives several classic optical flow algorithms, taken from [Lucas and Kanade, 1981], [Farneback, 2003], [Horn and Schunck, 1981], and [Brox et al., 2004]. To keep the notation uniform, the symbols of the original papers are not used, and to simplify the derivations, the derivation steps also differ from the originals. In addition, this article gives an implementation of the LK optical flow algorithm.
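
For context, here is a brief sketch of the standard Lucas-Kanade least-squares formulation that the implementation below solves window by window; it is only a summary, not the full derivations of the cited papers. Linearizing brightness constancy at a pixel gives

$$ I_x u + I_y v + I_t = 0 . $$

Stacking this constraint over the N pixels of a window yields an over-determined system

$$ A \begin{pmatrix} u \\ v \end{pmatrix} = b, \qquad
A = \begin{pmatrix} I_x(\mathbf{p}_1) & I_y(\mathbf{p}_1) \\ \vdots & \vdots \\ I_x(\mathbf{p}_N) & I_y(\mathbf{p}_N) \end{pmatrix}, \qquad
b = \begin{pmatrix} -I_t(\mathbf{p}_1) \\ \vdots \\ -I_t(\mathbf{p}_N) \end{pmatrix}, $$

whose least-squares solution via the normal equations is

$$ \begin{pmatrix} u \\ v \end{pmatrix} = (A^{\mathsf T} A)^{-1} A^{\mathsf T} b . $$

In the code below, A is filled from the Sobel derivatives dx and dy, b from diff = prev - curr (approximately -I_t), and cvSolve performs the final step.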

Update History

This article is updated continuously. If you find an error, or have questions or suggestions, feel free to leave a comment or email quarrying#qq.com.

2016-01-20: Published.

2016-01-23: Minor revisions.

2016-01-24: Added content on Farneback optical flow estimation.

References

[Lucas and Kanade, 1981] B. D. Lucas and T. Kanade, An Iterative Image Registration Technique with an Application to Stereo Vision.

[Farneback, 2003] G. Farnebäck, Two-Frame Motion Estimation Based on Polynomial Expansion.

[Horn and Schunck, 1981] B. K. P. Horn and B. G. Schunck, Determining Optical Flow.

[Brox et al., 2004] T. Brox, A. Bruhn, N. Papenberg, and J. Weickert, High Accuracy Optical Flow Estimation Based on a Theory for Warping.

http://www.cnblogs.com/dzyBK/p/5096860.html

https://en.wikipedia.org/wiki/Lucas%E2%80%93Kanade_method

https://en.wikipedia.org/wiki/Horn%E2%80%93Schunck_method

https://en.wikipedia.org/wiki/Optical_Flow

Related Code

LK optical flow implementation; it only demonstrates the principle and is not tuned for efficiency. A rough cross-check against OpenCV's built-in dense-flow functions is sketched after the listing.

/**
* @brief	LK optical flow
* @author	quarryman
* @date		2016-01-20
*/
#include <stdio.h>   /* sprintf, fprintf */
#include <stdlib.h>  /* malloc, free */
#include <math.h>    /* atan2, cos, sin, fabs */
#include <cv.h>
#include <highgui.h>

/* Typed accessors for IplImage rows and elements. */
#define KCV_IMAGE_ROW(img,type,row)\
	((type*)((img)->imageData + (img)->widthStep*(row)))
#define KCV_IMAGE_ELEM_PTR(img,type,row,col)\
	(((type*)((img)->imageData + (img)->widthStep*(row))) + (col))
#define KCV_IMAGE_ELEM(img,type,row,col)\
	(((type*)((img)->imageData + (img)->widthStep*(row)))[(col)])

/* Workspace reused across frames by the LK optical flow estimator. */
typedef struct tagKCvOpticalFlowProcessData
{
	int frameNo;             /* number of frames processed so far */
	int windowSize;          /* side length of the (odd) LK window */
	int skipStep;            /* stride between estimated pixels */
	IplImage* currFrameGray; /* current frame as grayscale */
	IplImage* prevFrame;     /* previous grayscale frame */
	IplImage* frameSmooth;   /* smoothed current frame */
	IplImage* dx;            /* horizontal Sobel derivative (16S) */
	IplImage* dy;            /* vertical Sobel derivative (16S) */
	IplImage* diff;          /* temporal difference prev - curr (16S) */
	CvMat* A;                /* stacked [Ix Iy] constraints of one window */
	CvMat* At;               /* A transposed */
	CvMat* AtA;              /* normal-equation matrix A^T A */
	CvMat* b;                /* stacked -It values of one window */
	CvMat* Atb;              /* A^T b */
	CvMat* x;                /* solution (u, v) */
} KCvOpticalFlowProcessData;

typedef void* KCvHandle;

/* Allocates the estimator and its reusable buffers; windowSize is forced to be odd. */
KCvHandle kcvOpticalFlowCreate(int w, int h, int windowSize, int skipStep)
{
	KCvOpticalFlowProcessData* processData = NULL;
	processData = (KCvOpticalFlowProcessData*)malloc(sizeof(*processData));

	processData->frameNo = 0;
	windowSize = windowSize | 1;
	processData->windowSize = windowSize;
	processData->skipStep = skipStep;
	CvSize size = { w, h };
	processData->currFrameGray = cvCreateImage(size, IPL_DEPTH_8U, 1);
	processData->prevFrame = cvCreateImage(size, IPL_DEPTH_8U, 1);
	processData->frameSmooth = cvCreateImage(size, IPL_DEPTH_8U, 1);
	processData->dx = cvCreateImage(size, IPL_DEPTH_16S, 1);
	processData->dy = cvCreateImage(size, IPL_DEPTH_16S, 1);
	processData->diff = cvCreateImage(size, IPL_DEPTH_16S, 1);
	processData->A = cvCreateMat(windowSize*windowSize, 2, CV_32F);
	processData->At = cvCreateMat(2, windowSize*windowSize, CV_32F);
	processData->AtA = cvCreateMat(2, 2, CV_32F);
	processData->b = cvCreateMat(windowSize*windowSize, 1, CV_32F);
	processData->Atb = cvCreateMat(2, 1, CV_32F);
	processData->x = cvCreateMat(2, 1, CV_32F);

	return (KCvHandle)processData;
}

/* Releases all buffers and the estimator itself; *handle is set to NULL. */
int kcvOpticalFlowRelease(KCvHandle* handle)
{
	if ((handle != NULL) && (*handle != NULL))
	{
		KCvOpticalFlowProcessData* processData = (KCvOpticalFlowProcessData*)*handle;
		cvReleaseImage(&processData->currFrameGray);
		cvReleaseImage(&processData->prevFrame);
		cvReleaseImage(&processData->frameSmooth);
		cvReleaseImage(&processData->dx);
		cvReleaseImage(&processData->dy);
		cvReleaseImage(&processData->diff);
		cvReleaseMat(&processData->A);
		cvReleaseMat(&processData->At);
		cvReleaseMat(&processData->AtA);
		cvReleaseMat(&processData->b);
		cvReleaseMat(&processData->Atb);
		cvReleaseMat(&processData->x);

		free(processData);
		*handle = NULL;
	}
	return 0;
}

/* Computes dense LK flow from the stored previous frame to currFrame.
 * velx and vely must be 32F single-channel images of the frame size;
 * on the first call they are simply set to zero. */
int kcvOpticalFlowProcess(KCvHandle handle, const IplImage* currFrame,
	IplImage* velx, IplImage* vely)
{
	KCvOpticalFlowProcessData* processData = (KCvOpticalFlowProcessData*)handle;
	int frameNo = processData->frameNo;
	int windowSize = processData->windowSize;
	int windowRadius = windowSize >> 1;
	int skipStep = processData->skipStep;
	IplImage* currFrameGray = processData->currFrameGray;
	IplImage* prevFrame = processData->prevFrame;
	IplImage* frameSmooth = processData->frameSmooth;
	IplImage* dx = processData->dx;
	IplImage* dy = processData->dy;
	IplImage* diff = processData->diff;
	CvMat* A = processData->A;
	CvMat* At = processData->At;
	CvMat* AtA = processData->AtA;
	CvMat* b = processData->b;
	CvMat* Atb = processData->Atb;
	CvMat* x = processData->x;

	if (currFrame->nChannels == 1)
	{
		cvCopy(currFrame, currFrameGray);
	}
	else
	{
		cvCvtColor(currFrame, currFrameGray, CV_BGR2GRAY);
	}

	if (frameNo < 1)
	{
		/* No previous frame yet: report zero flow. */
		cvSetZero(velx);
		cvSetZero(vely);
	}
	else
	{
		/* Spatial derivatives of the smoothed current frame, and the temporal
		   difference diff = prev - curr (i.e. -It, stored as 16S). */
		cvSmooth(currFrameGray, frameSmooth);
		cvSobel(frameSmooth, dx, 1, 0);
		cvSobel(frameSmooth, dy, 0, 1);
		cvSub(prevFrame, currFrameGray, diff);

		int w = dx->width;
		int h = dx->height;
		int sstep = dx->widthStep;
		int dstep = velx->widthStep;
		const short* ptrDx = (short*)dx->imageData;
		const short* ptrDy = (short*)dy->imageData;
		const short* ptrDiff = (short*)diff->imageData;
		float* ptrVelx = KCV_IMAGE_ELEM_PTR(velx, float, windowRadius, windowRadius);
		float* ptrVely = KCV_IMAGE_ELEM_PTR(vely, float, windowRadius, windowRadius);

		/* Convert byte strides to element strides. */
		sstep /= sizeof(ptrDx[0]);
		dstep /= sizeof(ptrVelx[0]);
		for (int i = windowRadius; i < h - windowRadius; i += skipStep)
		{
			for (int j = windowRadius; j < w - windowRadius; j += skipStep)
			{
				int index = 0;
				const short* ptrDx2 = ptrDx + j;
				const short* ptrDy2 = ptrDy + j;
				const short* ptrDiff2 = ptrDiff + j;
				/* Stack the constraints Ix*u + Iy*v = -It over the window. */
				for (int ii = -windowRadius; ii <= windowRadius; ++ii)
				{
					for (int jj = -windowRadius; jj <= windowRadius; ++jj)
					{
						CV_MAT_ELEM(*A, float, index, 0) = ptrDx2[jj] / 4.0f;   /* scaled Ix */
						CV_MAT_ELEM(*A, float, index, 1) = ptrDy2[jj] / 4.0f;   /* scaled Iy */
						CV_MAT_ELEM(*b, float, index, 0) = (float)ptrDiff2[jj]; /* -It */
						++index;
					}
					ptrDx2 += sstep;
					ptrDy2 += sstep;
					ptrDiff2 += sstep;
				}
				/* Least-squares solve of A*[u v]^T = b via the normal equations. */
				cvTranspose(A, At);
				cvMatMul(At, A, AtA);
				cvMatMul(At, b, Atb);
				cvSolve(AtA, Atb, x);
				ptrVelx[j] = CV_MAT_ELEM(*x, float, 0, 0);
				ptrVely[j] = CV_MAT_ELEM(*x, float, 1, 0);
			}
			ptrDx += sstep * skipStep;
			ptrDy += sstep * skipStep;
			ptrDiff += sstep * skipStep;
			ptrVelx += dstep * skipStep;
			ptrVely += dstep * skipStep;
		}
	}

	cvCopy(currFrameGray, prevFrame);
	++processData->frameNo;

	return 0;
}

void kcvDrawFrameNo(IplImage* img, int frameNo, CvScalar clr)
{
	char text[64] = { 0 };
	CvFont font = cvFont(1, 1);
	sprintf(text, "%d", frameNo);
	cvPutText(img, text, cvPoint(10, 20), &font, clr);
}

void kcvDrawArrow(CvArr* img, CvPoint pt1, CvPoint pt2, int length, int theta,
	CvScalar color, int thickness = 1, int line_type = 8, int shift = 0)
{
	// Orientation of the arrow's main line
	double angle = atan2((double)pt1.y - pt2.y, (double)pt1.x - pt2.x);

	// Draw the main line of the arrow
	cvLine(img, pt1, pt2, color, thickness, CV_AA, 0);
	// Draw one barb of the arrowhead
	CvPoint arrow;
	arrow.x = (int)(pt2.x + length * cos(angle + CV_PI*theta / 180));
	arrow.y = (int)(pt2.y + length * sin(angle + CV_PI*theta / 180));
	cvLine(img, arrow, pt2, color, thickness, CV_AA, 0);
	// Draw the other barb of the arrowhead
	arrow.x = (int)(pt2.x + length * cos(angle - CV_PI*theta / 180));
	arrow.y = (int)(pt2.y + length * sin(angle - CV_PI*theta / 180));
	cvLine(img, arrow, pt2, color, thickness, CV_AA, 0);
}

void kcvDrawArrow(CvArr* img, CvPoint pt, double lineLength, double lineTheta,
	int length, int theta, CvScalar color, int thickness = 1, int line_type = 8, int shift = 0)
{
	CvPoint pt2;
	pt2.x = static_cast<int>(pt.x + lineLength*cos(CV_PI*lineTheta / 180));
	pt2.y = static_cast<int>(pt.y + lineLength*sin(CV_PI*lineTheta / 180));

	kcvDrawArrow(img, pt, pt2, length, theta, color, thickness, line_type, shift);
}

int main(int argc, char** argv)
{
	/* Replace the path below with your own test video. */
	CvCapture* capture = cvCaptureFromFile("E:\\百度云同步盘\\2视觉之Datasets\\测试视频之常用\\PEA-120412.avi");
	if (capture == NULL)
	{
		fprintf(stderr, "Can not open video file\n");
		return -1;
	}
	int w = (int)cvGetCaptureProperty(capture, CV_CAP_PROP_FRAME_WIDTH);
	int h = (int)cvGetCaptureProperty(capture, CV_CAP_PROP_FRAME_HEIGHT);
	int windowSize = 15;
	int skipStep = windowSize;

	KCvHandle handle = kcvOpticalFlowCreate(w, h, windowSize, skipStep);
	IplImage* velx = cvCreateImage(cvSize(w, h), IPL_DEPTH_32F, 1);
	IplImage* vely = cvCreateImage(cvSize(w, h), IPL_DEPTH_32F, 1);

	int key = 0, isPaused = 1;
	int frameNo = 0, skipFrames = 0;
	IplImage* frame = NULL;
	while ((frame = cvQueryFrame(capture)) != NULL)
	{
		if (frameNo < skipFrames){
			frameNo++;
			continue;
		}

		kcvOpticalFlowProcess(handle, frame, velx, vely);
		kcvDrawFrameNo(frame, frameNo++, CV_RGB(255, 255, 255));

		int w = velx->width;
		int h = velx->height;
		int step = velx->widthStep;
		int windowRadius = (windowSize | 1) >> 1;
		const float* ptrVelx = KCV_IMAGE_ELEM_PTR(velx, float, windowRadius, windowRadius);
		const float* ptrVely = KCV_IMAGE_ELEM_PTR(vely, float, windowRadius, windowRadius);
		step /= sizeof(ptrVelx[0]);
		for (int i = windowRadius; i < h - windowRadius; i += skipStep)
		{
			for (int j = windowRadius; j < w - windowRadius; j += skipStep)
			{
				/* L1 magnitude and orientation (in degrees) of the flow vector. */
				double len = fabs((double)ptrVelx[j]) + fabs((double)ptrVely[j]);
				double angle = atan2((double)ptrVely[j], (double)ptrVelx[j]) * 180 / CV_PI;
				if (len > 0.8)
				{
					kcvDrawArrow(frame, cvPoint(j, i), 10 * MIN(len, 2), angle, 3, 45, CV_RGB(255, 0, 0));
				}
			}
			ptrVelx += step * skipStep;
			ptrVely += step * skipStep;
		}
		cvShowImage("frame", frame);

		/* Space toggles pause/resume; Esc exits. */
		if (key == ' '){
			if (isPaused == 0){
				key = cvWaitKey(0);
				isPaused = 1;
			}
			else if (isPaused == 1){
				key = cvWaitKey(10);
				isPaused = 0;
			}
		}
		else if (key == '\x1b'){
			cvDestroyAllWindows();
			break;
		}
		else{
			if (isPaused == 0){
				key = cvWaitKey(10);
			}
			else if (isPaused == 1){
				key = cvWaitKey(0);
			}
		}
	}
	cvWaitKey(0);
	kcvOpticalFlowRelease(&handle);
	cvReleaseCapture(&capture);

	return 0;
}
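
As a sanity check, the output of the listing can be compared with the dense optical flow functions that ship with OpenCV's old C API. The fragment below is a minimal sketch of such a cross-check, assuming a build in which cvCalcOpticalFlowLK and cvCalcOpticalFlowHS are reachable through the included headers (they live in cv.h in OpenCV 1.x and were later moved to the legacy module); prev and curr are 8-bit single-channel frames of equal size, and the HS parameters are illustrative values only. The helper name kcvCompareWithOpenCV is made up for this sketch and is not part of the original code.

/* Cross-check sketch: dense LK and Horn-Schunck flow via OpenCV's legacy C API. */
void kcvCompareWithOpenCV(const IplImage* prev, const IplImage* curr, int windowSize)
{
	CvSize size = cvGetSize(prev);
	IplImage* velxLK = cvCreateImage(size, IPL_DEPTH_32F, 1);
	IplImage* velyLK = cvCreateImage(size, IPL_DEPTH_32F, 1);
	IplImage* velxHS = cvCreateImage(size, IPL_DEPTH_32F, 1);
	IplImage* velyHS = cvCreateImage(size, IPL_DEPTH_32F, 1);

	/* Dense Lucas-Kanade flow over a windowSize x windowSize neighborhood. */
	cvCalcOpticalFlowLK(prev, curr, cvSize(windowSize, windowSize), velxLK, velyLK);

	/* Dense Horn-Schunck flow; lambda weights the smoothness term. */
	cvCalcOpticalFlowHS(prev, curr, 0, velxHS, velyHS, 1.0,
		cvTermCriteria(CV_TERMCRIT_ITER | CV_TERMCRIT_EPS, 64, 0.01));

	/* Compare velxLK/velyLK and velxHS/velyHS against the output of
	 * kcvOpticalFlowProcess here, e.g. by drawing both with kcvDrawArrow. */

	cvReleaseImage(&velxLK);
	cvReleaseImage(&velyLK);
	cvReleaseImage(&velxHS);
	cvReleaseImage(&velyHS);
}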
