#include "opencv2/video/tracking.hpp"
#include "opencv2/highgui/highgui.hpp"
#include "opencv2/imgproc/imgproc_c.h"
#include <time.h>
#include <stdio.h>
#include <ctype.h>
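
/*
 * One possible build line -- an assumption, not part of the original sample: it presumes
 * an OpenCV 2.x installation that ships the 'opencv' pkg-config file, and uses g++ because
 * the included .hpp headers are C++:
 *
 *   g++ motempl.c -o motempl `pkg-config --cflags --libs opencv`
 */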
static void help(void)
{
    printf(
        "\nThis program demonstrates the use of motion templates -- basically using the gradients\n"
        "of thresholded layers of decaying frame differencing. New movements are stamped on top with the\n"
        "floating-point system time, and motions that are too old are thresholded away. This is the\n"
        "'motion history image'. The program reads from the camera of your choice or from a file.\n"
        "Gradients of the motion history are used to detect the direction of motion, etc.\n"
        "Usage :\n"
        "./motempl [camera number 0-n or file name, default is camera 0]\n"
        );
}
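
/*
 * Overview of the pipeline implemented in update_mhi() below:
 *  1. Keep the last N grayscale frames in a ring buffer and take the absolute difference
 *     between the newest frame and the one grabbed N-1 iterations earlier.
 *  2. Threshold that difference into a binary silhouette of "what just moved".
 *  3. cvUpdateMotionHistory folds the silhouette into the motion history image (MHI).
 *  4. cvCalcMotionGradient / cvCalcGlobalOrientation turn the MHI's spatial gradients into
 *     per-region motion directions, and cvSegmentMotion splits the MHI into independently
 *     moving components, which are drawn as circles with direction arrows.
 */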
// various tracking parameters (in seconds)
const double MHI_DURATION = 1;
const double MAX_TIME_DELTA = 0.5;
const double MIN_TIME_DELTA = 0.05;
// length of the cyclic frame buffer used for motion detection
// (should probably depend on the capture FPS)
const int N = 4;
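// The motion silhouette is the difference between the current frame and the frame grabbed
// N-1 iterations earlier, so at higher frame rates a larger N may be needed to keep enough
// inter-frame motion visible.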
// ring image buffer
IplImage **buf = 0;
int last = 0;
// temporary images
IplImage *mhi = 0; // MHI
IplImage *orient = 0; // orientation
IplImage *mask = 0; // valid orientation mask
IplImage *segmask = 0; // motion segmentation map
CvMemStorage* storage = 0; // temporary storage
// parameters:
// img - input video frame
// dst - resultant motion picture
// diff_threshold - threshold applied to the frame difference when extracting the silhouette
static void update_mhi( IplImage* img, IplImage* dst, int diff_threshold )
{
    double timestamp = (double)clock()/CLOCKS_PER_SEC; // timestamp in seconds (note: clock() measures CPU time, not wall-clock time)
    CvSize size = cvSize(img->width,img->height); // get current frame size
    int i, idx1 = last, idx2;
    IplImage* silh;
    CvSeq* seq;
    CvRect comp_rect;
    double count;
    double angle;
    CvPoint center;
    double magnitude;
    CvScalar color;
    // allocate images at the beginning or
    // reallocate them if the frame size is changed
    if( !mhi || mhi->width != size.width || mhi->height != size.height ) {
        if( buf == 0 ) {
            buf = (IplImage**)malloc(N*sizeof(buf[0]));
            memset( buf, 0, N*sizeof(buf[0]));
        }
        for( i = 0; i < N; i++ ) {
            cvReleaseImage( &buf[i] );
            buf[i] = cvCreateImage( size, IPL_DEPTH_8U, 1 );
            cvZero( buf[i] );
        }
        cvReleaseImage( &mhi );
        cvReleaseImage( &orient );
        cvReleaseImage( &segmask );
        cvReleaseImage( &mask );
        mhi = cvCreateImage( size, IPL_DEPTH_32F, 1 );
        cvZero( mhi ); // clear MHI at the beginning
        orient = cvCreateImage( size, IPL_DEPTH_32F, 1 );
        segmask = cvCreateImage( size, IPL_DEPTH_32F, 1 );
        mask = cvCreateImage( size, IPL_DEPTH_8U, 1 );
    }
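    // buf[] holds the last N frames as single-channel 8-bit images; mhi is 32-bit float
    // because each pixel stores the floating-point timestamp of the most recent motion
    // at that location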
    cvCvtColor( img, buf[last], CV_BGR2GRAY ); // convert frame to grayscale
    idx2 = (last + 1) % N; // index of (last - (N-1))th frame
    last = idx2;

    silh = buf[idx2];
    cvAbsDiff( buf[idx1], buf[idx2], silh ); // get difference between frames
    cvThreshold( silh, silh, diff_threshold, 1, CV_THRESH_BINARY ); // and threshold it
    cvUpdateMotionHistory( silh, mhi, timestamp, MHI_DURATION ); // update MHI
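    // at this point mhi(x,y) == timestamp wherever silh is non-zero, and pixels whose
    // last recorded motion is older than MHI_DURATION seconds have been cleared to 0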
    // convert MHI to blue 8u image
    cvCvtScale( mhi, mask, 255./MHI_DURATION,
                (MHI_DURATION - timestamp)*255./MHI_DURATION );
    cvZero( dst );
    cvMerge( mask, 0, 0, 0, dst );
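    // the cvCvtScale above maps MHI values linearly so that motion happening right now
    // becomes 255 and motion MHI_DURATION seconds old becomes 0; cvMerge puts the result
    // into channel 0 (blue in BGR), which is why fresh motion shows up bright blue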
    // calculate motion gradient orientation and valid orientation mask
    cvCalcMotionGradient( mhi, mask, orient, MAX_TIME_DELTA, MIN_TIME_DELTA, 3 );

    if( !storage )
        storage = cvCreateMemStorage(0);
    else
        cvClearMemStorage(storage);

    // segment motion: get sequence of motion components
    // segmask is the map of marked motion components; it is not used further here
    seq = cvSegmentMotion( mhi, segmask, storage, timestamp, MAX_TIME_DELTA );
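    // seq now contains one CvConnectedComp per independently moving region of the MHI;
    // its rect member provides the bounding box used in the loop below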
    // iterate through the motion components;
    // one more iteration (i == -1) corresponds to the whole image (global motion)
    for( i = -1; i < seq->total; i++ ) {
        if( i < 0 ) { // case of the whole image
            comp_rect = cvRect( 0, 0, size.width, size.height );
            color = CV_RGB(255,255,255);
            magnitude = 100;
        }
        else { // i-th motion component
            comp_rect = ((CvConnectedComp*)cvGetSeqElem( seq, i ))->rect;
            if( comp_rect.width + comp_rect.height < 100 ) // reject very small components
                continue;
            color = CV_RGB(255,0,0);
            magnitude = 30;
        }
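        // restrict every image to the chosen rectangle, estimate the dominant motion
        // direction there with cvCalcGlobalOrientation (degrees in [0,360), weighted
        // toward the most recent motion) and count moving pixels via the L1 norm of
        // the binary silhouette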
        // select component ROI
        cvSetImageROI( silh, comp_rect );
        cvSetImageROI( mhi, comp_rect );
        cvSetImageROI( orient, comp_rect );
        cvSetImageROI( mask, comp_rect );

        // calculate orientation
        angle = cvCalcGlobalOrientation( orient, mask, mhi, timestamp, MHI_DURATION);
        angle = 360.0 - angle; // adjust for images with top-left origin

        count = cvNorm( silh, 0, CV_L1, 0 ); // calculate number of points within silhouette ROI

        cvResetImageROI( mhi );
        cvResetImageROI( orient );
        cvResetImageROI( mask );
        cvResetImageROI( silh );

        // check for the case of little motion
        if( count < comp_rect.width*comp_rect.height * 0.05 )
            continue;

        // draw a clock with arrow indicating the direction
        center = cvPoint( (comp_rect.x + comp_rect.width/2),
                          (comp_rect.y + comp_rect.height/2) );

        cvCircle( dst, center, cvRound(magnitude*1.2), color, 3, CV_AA, 0 );
        cvLine( dst, center, cvPoint( cvRound( center.x + magnitude*cos(angle*CV_PI/180)),
                cvRound( center.y - magnitude*sin(angle*CV_PI/180))), color, 3, CV_AA, 0 );
    }
}
int main(int argc, char** argv)
{
    IplImage* motion = 0;
    CvCapture* capture = 0;

    help();
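    // a single-digit argument selects that camera index, any other argument is opened
    // as a video file, and no argument defaults to camera 0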
    if( argc == 1 || (argc == 2 && strlen(argv[1]) == 1 && isdigit(argv[1][0])))
        capture = cvCaptureFromCAM( argc == 2 ? argv[1][0] - '0' : 0 );
    else if( argc == 2 )
        capture = cvCaptureFromFile( argv[1] );

    if( capture )
    {
        cvNamedWindow( "Motion", 1 );

        for(;;)
        {
            IplImage* image = cvQueryFrame( capture );
            if( !image )
                break;

            if( !motion )
            {
                motion = cvCreateImage( cvSize(image->width,image->height), 8, 3 );
                cvZero( motion );
                motion->origin = image->origin;
            }
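            // motion shares the camera frame's origin flag (top-left vs. bottom-left),
            // so the overlay is displayed the same way up as the input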
            update_mhi( image, motion, 30 );
            cvShowImage( "Motion", motion );

            if( cvWaitKey(10) >= 0 )
                break;
        }
        cvReleaseCapture( &capture );
        cvDestroyWindow( "Motion" );
    }

    return 0;
}
#ifdef _EiC
main(1,"motempl.c");
#endif