Merge branch 'master' of git://github.com/Kees-V/openalpr into keesmotion

Matt Hill
2015-05-14 07:23:15 -04:00
3 changed files with 93 additions and 8 deletions


@@ -31,6 +31,7 @@
 #include "support/timing.h"
 #include "support/platform.h"
 #include "video/videobuffer.h"
+#include "video/motiondetector.h"
 #include "alpr.h"
 
 using namespace alpr;
@@ -39,6 +40,8 @@ const std::string MAIN_WINDOW_NAME = "ALPR main window";
 const bool SAVE_LAST_VIDEO_STILL = false;
 const std::string LAST_VIDEO_STILL_LOCATION = "/tmp/laststill.jpg";
+MotionDetector motiondetector;
+bool do_motiondetection = true;
 /** Function Headers */
 bool detectandshow(Alpr* alpr, cv::Mat frame, std::string region, bool writeJson);
@@ -75,6 +78,7 @@ int main( int argc, const char** argv )
 TCLAP::SwitchArg jsonSwitch("j","json","Output recognition results in JSON format. Default=off", cmd, false);
 TCLAP::SwitchArg detectRegionSwitch("d","detect_region","Attempt to detect the region of the plate image. [Experimental] Default=off", cmd, false);
 TCLAP::SwitchArg clockSwitch("","clock","Measure/print the total time to process image and all plates. Default=off", cmd, false);
+TCLAP::SwitchArg motiondetect("", "motion", "Use motion detection on video file or stream. Default=off", cmd, false);
 try
 {
@@ -102,6 +106,7 @@ int main( int argc, const char** argv )
 templatePattern = templatePatternArg.getValue();
 topn = topNArg.getValue();
 measureProcessingTime = clockSwitch.getValue();
+do_motiondetection = motiondetect.getValue();
 }
 catch (TCLAP::ArgException &e) // catch any exceptions
 {
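
With the switch wired into TCLAP above, motion detection becomes opt-in from the command line. Assuming the usual alpr command-line binary and an illustrative video path (neither appears in this diff), the new flag would be passed as:

    alpr --motion /path/to/video.mp4

Without the flag, do_motiondetection is set to false here and detectandshow() falls back to a full-frame region of interest (see the final hunk below).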
@@ -156,7 +161,8 @@ int main( int argc, const char** argv )
 while (cap.read(frame))
 {
-  detectandshow(&alpr, frame, "", outputJson);
+  if (framenum == 0) motiondetector.ResetMotionDetection(&frame);
+  detectandshow(&alpr, frame, "", outputJson);
   sleep_ms(10);
   framenum++;
 }
@@ -178,12 +184,14 @@ int main( int argc, const char** argv )
 if (response != -1)
 {
-  detectandshow( &alpr, latestFrame, "", outputJson);
+  if (framenum == 0) motiondetector.ResetMotionDetection(&latestFrame);
+  detectandshow(&alpr, latestFrame, "", outputJson);
 }
 // Sleep 10ms
 sleep_ms(10);
 }
+framenum++;
 }
 videoBuffer.disconnect();
@@ -207,8 +215,8 @@ int main( int argc, const char** argv )
   cv::imwrite(LAST_VIDEO_STILL_LOCATION, frame);
 }
 std::cout << "Frame: " << framenum << std::endl;
-detectandshow( &alpr, frame, "", outputJson);
+if (framenum == 0) motiondetector.ResetMotionDetection(&frame);
+detectandshow(&alpr, frame, "", outputJson);
 //create a 1ms delay
 sleep_ms(1);
 framenum++;
@@ -283,9 +291,14 @@ bool detectandshow( Alpr* alpr, cv::Mat frame, std::string region, bool writeJson)
 getTimeMonotonic(&startTime);
 std::vector<AlprRegionOfInterest> regionsOfInterest;
-regionsOfInterest.push_back(AlprRegionOfInterest(0,0, frame.cols, frame.rows));
-AlprResults results = alpr->recognize(frame.data, frame.elemSize(), frame.cols, frame.rows, regionsOfInterest );
+if (do_motiondetection)
+{
+  cv::Rect rectan = motiondetector.MotionDetect(&frame);
+  if (rectan.width>0) regionsOfInterest.push_back(AlprRegionOfInterest(rectan.x, rectan.y, rectan.width, rectan.height));
+}
+else regionsOfInterest.push_back(AlprRegionOfInterest(0, 0, frame.cols, frame.rows));
+AlprResults results;
+if (regionsOfInterest.size()>0) results = alpr->recognize(frame.data, frame.elemSize(), frame.cols, frame.rows, regionsOfInterest);
 timespec endTime;
 getTimeMonotonic(&endTime);
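
The MotionDetector class itself lives in the other changed files (presumably video/motiondetector.h and its implementation), which are not shown in this view. Purely as an illustration of the interface the code above relies on (ResetMotionDetection() storing a reference frame, MotionDetect() returning the bounding box of changed pixels, and an empty rectangle meaning no motion), a minimal frame-differencing sketch could look like this; it is an assumption, not the implementation that ships with this commit:

    // Illustrative sketch only; NOT the MotionDetector added by this commit.
    // It matches the interface used in main.cpp: ResetMotionDetection(&frame)
    // stores a reference frame, MotionDetect(&frame) returns the bounding box
    // of pixels that changed since then, and an empty cv::Rect (width == 0)
    // tells the caller to skip recognition. Assumes BGR frames from cv::VideoCapture.
    #include <opencv2/opencv.hpp>
    #include <vector>

    class MotionDetector
    {
    public:
      void ResetMotionDetection(cv::Mat* frame)
      {
        cv::cvtColor(*frame, reference, cv::COLOR_BGR2GRAY);
      }

      cv::Rect MotionDetect(cv::Mat* frame)
      {
        if (reference.empty())
          return cv::Rect();                 // no reference frame yet

        cv::Mat gray, diff, mask;
        cv::cvtColor(*frame, gray, cv::COLOR_BGR2GRAY);
        cv::absdiff(gray, reference, diff);  // per-pixel change vs. reference
        cv::threshold(diff, mask, 25, 255, cv::THRESH_BINARY);

        std::vector<cv::Point> changed;
        cv::findNonZero(mask, changed);      // coordinates of all changed pixels
        if (changed.empty())
          return cv::Rect();                 // caller checks rectan.width > 0

        return cv::boundingRect(changed);    // ROI handed to alpr->recognize()
      }

    private:
      cv::Mat reference;                     // grayscale frame captured at reset
    };

Note that this sketch never updates the reference frame after the first call, which mirrors how main.cpp only calls ResetMotionDetection() on frame 0; a production detector would more likely maintain a running background model.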