// JustMotion/src/main.cpp
#include <errno.h>
#include <stdlib.h>
#include <sys/stat.h>
#include <time.h>
#include <unistd.h>

#include <atomic>
#include <iostream>
#include <string>
#include <thread>
#include <vector>

#include <opencv4/opencv2/core/ocl.hpp>
#include <opencv4/opencv2/opencv.hpp>
#include <opencv4/opencv2/video/tracking.hpp>
#include <opencv4/opencv2/videoio.hpp>
using namespace cv;
using namespace std;
// State shared between the timer, detection and writer threads.
struct shared_t
{
    vector<Mat> buff;        // high-res frames waiting to be written to disk
                             // NOTE(review): buff is touched by both moDetect and
                             // vidCap with no lock — consider a mutex; confirm the
                             // timing makes concurrent access impossible first.
    vector<thread> writers;  // NOTE(review): appears unused in this file — verify before removing
    TermCriteria criteria;   // termination criteria passed to calcOpticalFlowPyrLK
    string detectUrl;        // low-res stream url used for motion detection
    string recordUrl;        // high-res stream url used for recording
    string outDir;           // root output directory for recordings
    string postMoCmd;        // shell command run after a cycle that saw motion (may be empty)
    string postNoMoCmd;      // shell command run after a cycle without motion (may be empty)
    string secsStr;          // raw -sec argument text as given on the command line

    // these flags/counters are read and written from multiple threads
    // (timer, moDetect, vidCap), so they are atomic to avoid data races.
    atomic<bool> wrRunning;  // true while a vidCap writer pass is active
    atomic<bool> ffRunning;  // keep-alive flag for the moDetect capture loop
    atomic<int> motion;      // motion score; non-zero means motion seen recently

    int gap;                 // pixel distance threshold, derived once from frame size
    int secs;                // seconds per detection cycle (-sec)
} sharedRes;
// Returns path without a single trailing '/' so callers can safely append
// "/subdir". An empty path is returned unchanged — the original indexed
// path[path.size() - 1], which is undefined behavior on an empty string.
string cleanDir(const string &path)
{
    if (!path.empty() && path.back() == '/')
    {
        return path.substr(0, path.size() - 1);
    }
    return path;
}
// Creates a single directory with mode 0777 (modified by the process umask).
// Returns true on success or when the path already exists; false on any
// other mkdir failure.
bool createDir(const string &dir)
{
    if (mkdir(dir.c_str(), 0777) == 0)
    {
        return true;
    }
    // treat "already exists" as success so repeated runs are idempotent.
    return errno == EEXIST;
}
// Creates every directory along full_path (like `mkdir -p`), one path
// component at a time. Returns false as soon as any component fails.
bool createDirTree(const string &full_path)
{
    auto ok = true;

    for (size_t pos = 0; ok && pos != string::npos;)
    {
        // advance to the next '/' (npos on the final component) and create
        // the prefix up to that point.
        pos = full_path.find('/', pos + 1);
        ok = createDir(full_path.substr(0, pos));
    }

    return ok;
}
// Drains the shared frame buffer into a timestamped AVI file under
// outDir/YYYYMMDD/HHMMSS.avi. Does nothing until at least 100 frames have
// accumulated, so short blips don't produce tiny files.
void vidCap(shared_t *share)
{
    if (share->buff.size() >= 100)
    {
        share->wrRunning = true;

        time_t rawtime;
        time(&rawtime);

        // localtime_r instead of localtime: localtime returns a pointer to a
        // shared static buffer, which is not safe with multiple threads.
        struct tm timeinfo;
        localtime_r(&rawtime, &timeinfo);

        char dirName[20];
        char fileName[20];

        strftime(dirName, 20, "%Y%m%d", &timeinfo);
        strftime(fileName, 20, "%H%M%S.avi", &timeinfo);

        createDirTree(cleanDir(share->outDir) + string("/") + string(dirName));

        auto dstPath = cleanDir(share->outDir) + string("/") + string(dirName) + string("/") + string(fileName);
        auto codec = VideoWriter::fourcc('M', 'J', 'P', 'G');
        auto fps = 25.0;

        VideoWriter writer;
        writer.open(dstPath, codec, fps, share->buff[0].size(), true);

        if (!writer.isOpened())
        {
            cerr << "could not open the output video file for writing: " << dstPath << endl;
        }
        else
        {
            // write every buffered frame, then clear the buffer in one shot
            // instead of erasing from the front each iteration (O(n^2)).
            for (const auto &frame : share->buff)
            {
                writer.write(frame);
            }
            share->buff.clear();
        }
        share->wrRunning = false;
    }
}
// Scores motion between two consecutive grayscale frames using sparse
// optical flow: tracked points that moved farther than share->gap pixels
// bump the motion score; points that stayed put decay it.
// reference: https://docs.opencv.org/3.4/d4/dee/tutorial_optical_flow.html
void detectDiff(Mat prev, Mat next, shared_t *share)
{
    vector<Point2f> p0, p1;
    vector<uchar> status;
    vector<float> err;

    goodFeaturesToTrack(prev, p0, 50, 0.5, 3, Mat(), 3, false, 0.04);

    // a featureless frame (blank wall, heavy noise, lost signal) yields no
    // points, and calcOpticalFlowPyrLK asserts on an empty input point set —
    // bail out early instead of aborting the whole process.
    if (p0.empty())
    {
        return;
    }

    calcOpticalFlowPyrLK(prev, next, p0, p1, status, err, Size(10, 10), 2, share->criteria);

    for (size_t i = 0; i < p0.size(); ++i)
    {
        if (norm(p0[i] - p1[i]) > share->gap)
        {
            // one fast-moving point is enough: bump the score and stop.
            share->motion += 150;
            break;
        }
        else if (share->motion != 0)
        {
            // decay the score a little for each point that stayed put.
            share->motion -= 1;
        }
    }
}
// Sleeps for one detection cycle, then signals the capture loop to wind
// down when no motion is pending, and kicks off a writer pass if one is
// not already running.
// NOTE(review): if motion is still non-zero when the sleep ends, ffRunning
// stays true and moDetect never exits its loop — confirm this is intended.
void timer(shared_t *share)
{
    sleep(share->secs);

    if (share->motion == 0)
    {
        share->ffRunning = false;
    }
    if (!share->wrRunning)
    {
        // detached stack thread instead of `new thread(...)`: the original
        // heap-allocated handle was never joined or deleted (leaked).
        // vidCap flips wrRunning itself, so nothing here needs to wait.
        thread(vidCap, share).detach();
    }
}
void addFramesToBuffer(const vector<Mat> &newFrames, shared_t *share)
{
for (auto &&frame : newFrames)
{
share->buff.push_back(frame);
}
}
// Converts a BGR frame to single-channel grayscale (for optical flow).
Mat toGray(const Mat &src)
{
    Mat gray;
    cvtColor(src, gray, COLOR_BGR2GRAY);
    return gray;
}
2022-04-14 09:45:54 -04:00
// Main capture loop: reads the low-res detection stream and the high-res
// recording stream in lockstep, scores motion on the former and buffers
// frames from the latter while motion is active. When told to stop
// (ffRunning == false), runs the configured post-motion or post-no-motion
// shell command, if one was given.
void moDetect(shared_t *share)
{
    auto dCap = VideoCapture(share->detectUrl, CAP_FFMPEG);
    auto rCap = VideoCapture(share->recordUrl, CAP_FFMPEG);

    vector<Mat> dFrames, rFrames;
    Mat dFrame, rFrame;

    while (share->ffRunning)
    {
        dCap >> dFrame;
        rCap >> rFrame;

        if (dFrame.empty())
        {
            // broken frames returned from the cameras i've tested this with would cause
            // the entire capture connection to drop, hence why this bit of code is here
            // to detect empty frames (signs of a dropped connection) and attempt
            // re-connect to the camera.
            dCap.open(share->detectUrl, CAP_FFMPEG);
        }
        else if (rFrame.empty())
        {
            rCap.open(share->recordUrl, CAP_FFMPEG);
        }
        else if ((dFrames.size() < 2) || (rFrames.size() < 2))
        {
            // collect a pair of consecutive frames before running detection.
            rFrames.push_back(rFrame.clone());
            dFrames.push_back(toGray(dFrame));
        }
        else
        {
            if (share->gap == 0)
            {
                // share->gap is used in detectDiff() to compare how far a
                // point in the optical flow has moved. it is calculated by a
                // certain percentage of the total pixel area of the frames.
                // as of right now it is hard coded to 0.00579% of the total
                // pixel area of the frames and only needs to be calculated
                // once hence why share->gap == 0 is checked.
                share->gap = ((double) 0.00579 / (double) 100) * (dFrame.size().height * dFrame.size().width);
            }

            if (share->motion != 0)
            {
                addFramesToBuffer(rFrames, share);
            }

            detectDiff(dFrames[0], dFrames[1], share);

            rFrames.clear();
            dFrames.clear();
        }
    }

    // -mc / -nmc are optional: don't fork a shell when no command was
    // configured (the original called system("") unconditionally).
    if (share->motion != 0)
    {
        if (!share->postMoCmd.empty())
        {
            system(share->postMoCmd.c_str());
        }
    }
    else if (!share->postNoMoCmd.empty())
    {
        system(share->postNoMoCmd.c_str());
    }
}
// Scans argv for an exact match of arg and returns the token that follows
// it. Returns an empty string when arg is absent or is the final token on
// the command line (so no value can follow it).
string parseForParam(const string &arg, int argc, char** argv)
{
    // stop at argc - 1: a match at the very last slot has no value after it,
    // so it can never produce a result anyway.
    for (int idx = 0; idx + 1 < argc; ++idx)
    {
        if (arg == argv[idx])
        {
            return string(argv[idx + 1]);
        }
    }

    return string();
}
int main(int argc, char** argv)
{
auto vidRet = 0;
auto moRet = 0;
auto secsStr = parseForParam("-sec", argc, argv);
auto highUrl = parseForParam("-rs", argc, argv);
auto lowUrl = parseForParam("-ds", argc, argv);
auto outDir = parseForParam("-dir", argc, argv);
auto moCmd = parseForParam("-mc", argc, argv);
auto noMocmd = parseForParam("-nmc", argc, argv);
auto secs = strtol(secsStr.c_str(), NULL, 10);
if (lowUrl.empty())
{
cerr << "the detection-stream camera url is empty." << endl;
}
else if (highUrl.empty())
{
cerr << "the recording-stream camera url is empty." << endl;
}
else if (outDir.empty())
{
cerr << "the output directory is empty." << endl;
}
else if (secs == 0)
{
cerr << "the amount of seconds in -sec cannot be 0 or an invalid number was given." << endl;
}
else
{
sharedRes.criteria = TermCriteria((TermCriteria::COUNT) + (TermCriteria::EPS), 10, 0.03);
sharedRes.wrRunning = false;
sharedRes.motion = 0;
sharedRes.gap = 0;
2022-04-14 09:45:54 -04:00
while (true)
{
sharedRes.recordUrl = highUrl;
sharedRes.detectUrl = lowUrl;
sharedRes.postMoCmd = moCmd;
sharedRes.postNoMoCmd = noMocmd;
sharedRes.secsStr = secsStr;
sharedRes.secs = secs;
sharedRes.outDir = outDir;
sharedRes.ffRunning = true;
thread th1(timer, &sharedRes);
2022-04-14 09:45:54 -04:00
thread th2(moDetect, &sharedRes);
// Wait for the threads to finish
// Wait for thread t1 to finish
th1.join();
// Wait for thread t2 to finish
th2.join();
}
return 0;
}
return 1;
}