From 91b950df74e6bdec5eb0fcac074be4a967b60505 Mon Sep 17 00:00:00 2001
From: Maurice ONeal
Date: Thu, 14 Apr 2022 09:45:54 -0400
Subject: [PATCH] Initial commit

---
 .gitignore     |  59 +++++++++++
 CMakeLists.txt |   7 ++
 src/main.cpp   | 273 +++++++++++++++++++++++++++++++++++++++++++++++++
 3 files changed, 339 insertions(+)
 create mode 100644 .gitignore
 create mode 100644 CMakeLists.txt
 create mode 100755 src/main.cpp

diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..714f541
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,59 @@
+# C++ objects and libs
+*.slo
+*.lo
+*.o
+*.a
+*.la
+*.lai
+*.so
+*.dll
+*.dylib
+
+# Qt-es
+object_script.*.Release
+object_script.*.Debug
+*_plugin_import.cpp
+/.qmake.cache
+/.qmake.stash
+*.pro.user
+*.pro.user.*
+*.qbs.user
+*.qbs.user.*
+*.moc
+moc_*.cpp
+moc_*.h
+qrc_*.cpp
+ui_*.h
+*.qmlc
+*.jsc
+Makefile*
+*build-*
+/build
+/app_dir
+/release
+/debug
+
+# Qt unit tests
+target_wrapper.*
+
+# QtCreator
+*.autosave
+
+# QtCreator Qml
+*.qmlproject.user
+*.qmlproject.user.*
+
+# QtCreator CMake
+CMakeLists.txt.user*
+
+# QtCreator 4.8< compilation database
+compile_commands.json
+
+# QtCreator local machine specific files for imported projects
+*creator.user*
+
+# VSCode
+/.vscode
+
+# Build folder
+/build
diff --git a/CMakeLists.txt b/CMakeLists.txt
new file mode 100644
index 0000000..c62ca87
--- /dev/null
+++ b/CMakeLists.txt
@@ -0,0 +1,7 @@
+cmake_minimum_required(VERSION 2.8)
+project( MotionWatch )
+find_package( OpenCV REQUIRED )
+SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11 -pthread")
+include_directories( ${OpenCV_INCLUDE_DIRS} )
+add_executable( mow src/main.cpp )
+target_link_libraries( mow ${OpenCV_LIBS} )
diff --git a/src/main.cpp b/src/main.cpp
new file mode 100755
index 0000000..e7cc232
--- /dev/null
+++ b/src/main.cpp
@@ -0,0 +1,273 @@
+#include <iostream>
+#include <string>
+#include <vector>
+#include <thread>
+#include <ctime>
+#include <cstdlib>
+#include <cerrno>
+#include <sys/stat.h>
+
+#include <opencv2/core.hpp>
+#include <opencv2/videoio.hpp>
+#include <opencv2/imgproc.hpp>
+#include <opencv2/video/tracking.hpp>
+
+using namespace cv;
+using namespace std;
+
+// state shared between the capture thread and the motion detection thread
+struct shared_t
+{
+    string detectUrl;
+    string recordUrl;
+    string outDir;
+    string postMoCmd;
+    string postNoMoCmd;
+    string secsStr;
+    string camId;
+    bool   ffRunning;
+    bool   motion;
+    int    secs;
+    int    ignore;
+
+} sharedRes;
+
+string cleanDir(const string &path)
+{
+    if (path[path.size() - 1] == '/')
+    {
+        return path.substr(0, path.size() - 1);
+    }
+    else
+    {
+        return path;
+    }
+}
+
+bool createDir(const string &dir)
+{
+    auto ret = mkdir(dir.c_str(), 0777);
+
+    if (ret == -1)
+    {
+        return errno == EEXIST;
+    }
+    else
+    {
+        return true;
+    }
+}
+
+// creates every directory level in full_path, similar to 'mkdir -p'
+bool createDirTree(const string &full_path)
+{
+    size_t pos = 0;
+    auto   ret = true;
+
+    while (ret == true && pos != string::npos)
+    {
+        pos = full_path.find('/', pos + 1);
+        ret = createDir(full_path.substr(0, pos));
+    }
+
+    return ret;
+}
+
+void vidCap(shared_t *share)
+{
+    time_t rawtime;
+
+    time(&rawtime);
+
+    auto timeinfo = localtime(&rawtime);
+
+    char dirName[20];
+    char fileName[20];
+
+    strftime(dirName, 20, "%Y%m%d", timeinfo);
+    strftime(fileName, 20, "%H%M%S.ts", timeinfo);
+
+    auto tmpFile   = string("/tmp/mow/") + share->camId + ".ts";
+    auto ffmpegCmd = string("ffmpeg -hide_banner -loglevel error -i ") + share->recordUrl + string(" -t ") + share->secsStr + string(" -y -vcodec copy ") + tmpFile;
+
+    // record the high resolution stream into a temp file for -sec seconds
+    createDirTree(string("/tmp/mow"));
+    system(ffmpegCmd.c_str());
+
+    // signal the detection thread that this capture cycle has ended
+    share->ffRunning = false;
+
+    if (share->motion)
+    {
+        if (!share->postMoCmd.empty())
+        {
+            system(share->postMoCmd.c_str());
+        }
+
+        createDirTree(cleanDir(share->outDir) + string("/") + string(dirName));
+
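+        // motion was detected, so move the temp recording into the dated output directory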
+        auto dstPath = cleanDir(share->outDir) + string("/") + string(dirName) + string("/") + string(fileName);
+
+        system(string("mv " + tmpFile + " " + dstPath).c_str());
+    }
+    else
+    {
+        if (!share->postNoMoCmd.empty())
+        {
+            system(share->postNoMoCmd.c_str());
+        }
+
+        system(string("rm " + tmpFile).c_str());
+    }
+}
+
+void detectDiff(const Mat &prev, const Mat &next, shared_t *share)
+{
+    // optical flow calculations are used to detect motion.
+    // reference: https://docs.opencv.org/3.4/d4/dee/tutorial_optical_flow.html
+    vector<Point2f> p0, p1;
+    vector<uchar>   status;
+    vector<float>   err;
+
+    auto criteria = TermCriteria((TermCriteria::COUNT) + (TermCriteria::EPS), 10, 0.03);
+
+    // distance is roughly 0.0578% of the total pixel area of the
+    // frames. this value is used in the loop below.
+    auto distance = ((double) 0.0578 / (double) 100) * (prev.size().height * prev.size().width);
+    auto count    = 0;
+
+    goodFeaturesToTrack(prev, p0, 100, 0.3, 7, Mat(), 7, false, 0.04);
+    calcOpticalFlowPyrLK(prev, next, p0, p1, status, err, Size(10, 10), 2, criteria);
+
+    for (size_t i = 0; (i < p0.size()) && !share->motion; i++)
+    {
+        // select good points
+        if (status[i] == 1)
+        {
+            if (count == 5)
+            {
+                share->motion = true;
+            }
+            else if (norm(p0[i] - p1[i]) > distance)
+            {
+                // any point that moved 0.0578% or more of the total pixel
+                // area can be considered motion.
+                // the count variable makes sure multiple points are calling
+                // out motion. this prevents false positives caused by
+                // insects or other small objects like grass, bushes, etc...
+                count += 1;
+            }
+        }
+    }
+}
+
+void moDetect(shared_t *share)
+{
+    auto cap = VideoCapture(share->detectUrl, CAP_FFMPEG);
+
+    Mat firstFrame, currentFrame, frame;
+
+    while (share->ffRunning && !share->motion)
+    {
+        cap >> frame;
+
+        if (frame.empty())
+        {
+            // broken frames returned from the cameras this was tested with
+            // would cause the entire capture connection to drop, so empty
+            // frames (a sign of a dropped connection) trigger an attempt to
+            // re-connect to the camera.
+            cap.open(share->detectUrl, CAP_FFMPEG);
+        }
+        else if (firstFrame.empty())
+        {
+            cvtColor(frame, firstFrame, COLOR_BGR2GRAY);
+        }
+        else
+        {
+            cvtColor(frame, currentFrame, COLOR_BGR2GRAY);
+            detectDiff(firstFrame, currentFrame, share);
+        }
+    }
+}
+
+string parseForParam(const string &arg, int argc, char** argv)
+{
+    for (int i = 0; i < argc; ++i)
+    {
+        auto argInParams = string(argv[i]);
+
+        if (arg.compare(argInParams) == 0)
+        {
+            // look ahead; make sure i + 1 won't go out of range
+            if ((i + 1) <= (argc - 1))
+            {
+                return string(argv[i + 1]);
+            }
+        }
+    }
+
+    return string();
+}
+
+int main(int argc, char** argv)
+{
+    auto vidRet  = 0;
+    auto moRet   = 0;
+    auto secsStr = parseForParam("-sec", argc, argv);
+    auto highUrl = parseForParam("-rs", argc, argv);
+    auto lowUrl  = parseForParam("-ds", argc, argv);
+    auto outDir  = parseForParam("-dir", argc, argv);
+    auto moCmd   = parseForParam("-mc", argc, argv);
+    auto noMocmd = parseForParam("-nmc", argc, argv);
+    auto camId   = parseForParam("-id", argc, argv);
+    auto secs    = strtol(secsStr.c_str(), NULL, 10);
+
+    if (lowUrl.empty())
+    {
+        cerr << "the detection-stream camera url is empty." << endl;
+    }
+    else if (highUrl.empty())
+    {
+        cerr << "the recording-stream camera url is empty." << endl;
+    }
+    else if (outDir.empty())
+    {
+        cerr << "the output directory is empty." << endl;
+    }
+    else if (camId.empty())
+    {
+        cerr << "the camera id is empty." << endl;
+    }
+    else if (secs == 0)
+    {
+        cerr << "the seconds given in -sec cannot be 0 and must be a valid number." << endl;
+    }
+    else
+    {
+        while (true)
+        {
+            sharedRes.recordUrl   = highUrl;
+            sharedRes.detectUrl   = lowUrl;
+            sharedRes.postMoCmd   = moCmd;
+            sharedRes.postNoMoCmd = noMocmd;
+            sharedRes.secsStr     = secsStr;
+            sharedRes.secs        = secs;
+            sharedRes.outDir      = outDir;
+            sharedRes.camId       = camId;
+            sharedRes.ffRunning   = true;
+            sharedRes.motion      = false;
+            sharedRes.ignore      = 0;
+
+            thread th1(vidCap, &sharedRes);
+            thread th2(moDetect, &sharedRes);
+
+            // wait for both threads to finish before starting the next cycle
+            th1.join();
+            th2.join();
+        }
+
+        return 0;
+    }
+
+    return 1;
+}
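Example invocation of the resulting mow binary (the stream URLs, output directory, and camera id below are placeholders, not values taken from the patch):

    mow -ds rtsp://camera/low_res -rs rtsp://camera/high_res -dir /var/footage -sec 60 -id cam1

The optional -mc and -nmc flags name shell commands to run after each capture cycle, depending on whether motion was or was not detected.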