// Detect people wearing or not wearing a mask in a video stream.
/*
* Copyright (C) 2020 Digital Barriers plc. All rights reserved.
* Contact: http://www.digitalbarriers.com/
*
* This file is part of the Papillon SDK.
*
* You can't use, modify or distribute any part of this file without
* the explicit written agreements of Digital Barriers.
*/
#include <queue>
#include <thread>
#include <unordered_map>
#include <vector>
#include <PapillonCore.h>
USING_NAMESPACE_PAPILLON
// Logs Message concatenated with Result at debug level, then returns the
// combined text wrapped in PResult::Error. Only usable inside functions
// returning PResult.
#define RETURN_ERROR(Message, Result) \
    { \
        PString m(Message); \
        m += Result; \
        P_LOG_DEBUG << m; \
        return PResult::Error(m); \
    }
// Creates a CPU-based FaceDetector6 face detector with landmark localisation
// enabled (landmarks are required by the downstream describers).
// @param detector receives the created detector on success.
// @return PResult::C_OK on success, an error PResult otherwise.
PResult CreateFaceDetector(PDetector& detector) {
    P_LOG_INFO << "Creating face detector...";
    PProperties params;
    PString detectorName = "FaceDetector6";
    params.Set("numDetectors", 4);        // number of detector worker threads
    params.Set("numLocalisers", 2);       // number of localiser worker threads
    params.Set("scale", 0.25);            // subsample image 4x before processing (with default model min possible
                                          // detection will be about 60x60 pixels)
    params.Set("minDetectionSize", 60);   // faces smaller than 60x60 pixels will not be detected
    params.Set("maxDetectionSize", 1000); // faces bigger than 1000x1000 pixels will not be detected
    params.Set("maxDetections", 0);       // max detections per frame (0 - unlimited)
    params.Set("threshold", 0.);          // use default threshold of detection score
    params.Set("gpuId", -1);              // use CPU for processing
    params.Set("enableLocaliser", true);  // enable face landmarks localisation (needed for describers)
    PResult res = PDetector::Create(detectorName, params, detector);
    if(!res) {
        RETURN_ERROR("Failed to create face detector:", res);
    }
    return PResult::C_OK;
}
// Creates the face recognition (biometric) describer: a CPU-based DNN
// describer using the "FaceFast" model with a shared network instance.
// @param describer receives the created describer on success.
// @return PResult::C_OK on success, an error PResult otherwise.
PResult CreateBioDescriber(PDescriber& describer) {
    P_LOG_INFO << "Creating face recognition describer...";
    const PProperties dnnParameters = PProperties::CreateFromKeyValueString("type=FaceFast;"
                                                                            "gpuId=-1;"
                                                                            "batchSize=4;"
                                                                            "sharedDnn=true");
    const PResult creationResult = PDescriber::Create("DescriberDnn", dnnParameters, describer);
    if(!creationResult) {
        RETURN_ERROR("Failed to create face recognition describer:", creationResult);
    }
    return PResult::C_OK;
}
// Creates the metadata describer (classifies face attributes, e.g. mask /
// no-mask) as a CPU-based DNN describer with a shared network instance.
// @param describer receives the created describer on success.
// @return PResult::C_OK on success, an error PResult otherwise.
PResult CreateMetaDataDescriber(PDescriber& describer) {
    P_LOG_INFO << "Creating metadata describer...";
    // NOTE(review): the first line of this key/value string (the "type=..."
    // entry and the CreateFromKeyValueString call itself) was missing from the
    // original file; "type=FaceMetaData" is a reconstruction — confirm the
    // exact describer type string against the SDK samples/documentation.
    PProperties dnnParameters = PProperties::CreateFromKeyValueString("type=FaceMetaData;"
                                                                      "gpuId=-1;"
                                                                      "batchSize=1;"
                                                                      "sharedDnn=true");
    PResult res = PDescriber::Create("DescriberDnn", dnnParameters, describer);
    if(!res) {
        RETURN_ERROR("Failed to create metadata describer:", res);
    }
    return PResult::C_OK;
}
// Class for preparing image to be shown
class ShowEventsC {
public:
explicit ShowEventsC(bool drawLandmarks)
: m_drawLandmarks(drawLandmarks) {}
void SetWatchList(const PWatchlist& watchlist) { m_watchlist = watchlist; }
void SetRoi(const PRectanglei& roi) { m_roi = roi; }
void StartTimer() {
m_timer = PTimer();
m_framesTotal = m_framesSkipped = m_totalFaces = 0;
}
void QueueEvents(const PList& events) {
for(int32 i = 0, ni = events.Size(); i < ni; i++) {
PEvent event;
events.Get(i, event);
m_eventQueue.push(event);
}
}
bool ShowEvents() {
bool imageReady = false;
while(!m_eventQueue.empty()) {
PEvent event = m_eventQueue.front();
m_eventQueue.pop();
const PString& eventType = event.GetType();
P_LOG_TRACE << "Processing event:" << eventType;
if(eventType == "Face") {
m_totalFaces++;
const PProperties& eventProperties = event.GetPayload();
PDetection detection;
if(!eventProperties.Get("Detection", detection)) {
P_LOG_ERROR << "Failed to get detection";
continue;
}
// get meta data
PGuid subjectId;
if(!eventProperties.Get("SubjectId", subjectId))
P_LOG_ERROR << "Failed to get subjectId";
double subjectIdConfidence = 0;
if(!eventProperties.Get("SubjectIdConfidence", subjectIdConfidence))
P_LOG_ERROR << "Failed to get subjectIdConfidence";
P_LOG_TRACE << "Subject:" << subjectId << " confidence:" << subjectIdConfidence;
PDescription description;
if(!eventProperties.Get("Description", description))
P_LOG_ERROR << "Failed to get description";
PFaceMetaData faceMetaData = PFaceMetaData::FromDescription(description);
PDescription subjectDescription;
bool subjectKnown = false;
if(m_watchlist.GetDescription(subjectId, subjectDescription).Ok())
subjectKnown = true;
// Draw bounding box
PUtils::DrawDetection(m_workImage, detection, PColour3i::Red(), false, !m_drawLandmarks).LogIfError();
// Draw meta data label
auto metaClassifications = faceMetaData.GetClassificationNames();
if(metaClassifications.Size() == 1) {
const PString& classification = metaClassifications.Get(0);
int32 classIndex{-1};
float classConfidence{0.f};
if(faceMetaData.GetClassIndex(classification, classIndex, classConfidence).Ok()) {
PString classLabel;
faceMetaData.GetClassLabel(classification, classLabel);
m_workImage, detection,
PString("%1 (%2)").Arg(classLabel).Arg(classConfidence, 0, 'g', 2),
(classIndex == 1) ? PUtils::E_BLUE
: ((classIndex == 0) ? PUtils::E_PINK : PUtils::E_BLACK),
1, 1,
(classIndex == 1) ? PColour3i::Blue()
: ((classIndex == 0) ? PColour3i::Pink() : PColour3i::White())))
P_LOG_ERROR << "Failed to draw face metadata label";
}
}
// Draw name label
PColour3i fontColour(0, 0, 0);
PString tagColour = "#95a5a6";
PString message;
if(subjectKnown) {
PString tag;
PString name =
PString("%0: %1").Arg(subjectDescription.GetName()).Arg(subjectIdConfidence, 0, 'g', 2);
if(subjectDescription.GetProperties().Get("Tag", tag).Ok()) {
subjectDescription.GetProperties().Get("TagColour", tagColour);
message = PString("%0 @ %1").Arg(name).Arg(tag);
} else {
message = name;
}
} else {
message = "Unknown";
}
if(!PUtils::DrawLabel(m_workImage, detection, message, PColour3i(tagColour), fontColour,
papillon::PUtils::E_TOP_CENTRE, 1, 0)) {
P_LOG_ERROR << "Failed to draw name label";
}
// add face to tracks
PGuid sightingId;
if(!eventProperties.Get("SightingId", sightingId)) {
P_LOG_ERROR << "Failed to get sightingId";
continue;
}
} else if(eventType == "FrameStart") {
const PProperties& eventProperties = event.GetPayload();
PFrame frame;
if(!eventProperties.Get("Frame", frame)) {
P_LOG_ERROR << "Failed to get frame";
continue;
}
P_LOG_TRACE << "Went on new frame:" << frame.GetFrameNumber();
static int lastFrameNo = 0;
if(frame.GetFrameNumber() - lastFrameNo > 1) {
P_LOG_ERROR << "Missed frames between: " << lastFrameNo << " - " << frame.GetFrameNumber();
}
lastFrameNo = frame.GetFrameNumber();
m_workImage = frame.GetImage().Clone();
// show roi
if(m_roi.IsValid()) {
m_workImage.DrawRectangle(m_roi, PColour3i(255, 0, 0), 1);
}
m_skipImage = false;
} else if(eventType == "FrameEnd") {
// draw tracks
const PProperties& eventProperties = event.GetPayload();
PFrame frame;
if(!eventProperties.Get("Frame", frame)) {
P_LOG_ERROR << "Failed to get frame";
continue;
}
++m_framesTotal;
if(!m_skipImage) {
m_readyImage = m_workImage;
imageReady = true;
break;
}
} else if(eventType == "FrameSkipped") {
++m_framesSkipped;
m_skipImage = true;
}
}
return imageReady;
}
const PImage& GetImage() const { return m_readyImage; }
PString GetFpsString() const {
return PString("fps %0 (%1) faces per sec %3 faces per frame %4")
.Arg(double(m_framesTotal - m_framesSkipped) / m_timer.ElapsedSec(), 4, 'f', 2)
.Arg(double(m_framesTotal) / m_timer.ElapsedSec(), 4, 'f', 1)
.Arg(double(m_totalFaces) / m_timer.ElapsedSec(), 6, 'f', 1)
.Arg(double(m_totalFaces) / double(m_framesTotal - m_framesSkipped), 4, 'f', 1);
}
private:
PImage m_workImage;
bool m_skipImage{false};
PImage m_readyImage;
PWatchlist m_watchlist;
PTimer m_timer;
int64 m_framesTotal{0};
int64 m_framesSkipped{0};
int64 m_totalFaces{0};
PRectanglei m_roi;
bool m_drawLandmarks{false};
std::queue<PEvent> m_eventQueue;
};
class ProcessThreadC : public PRunnable {
public:
ProcessThreadC() = default;
~ProcessThreadC() override = default;
PResult Init(const PProperties& parameters, const PString& videoFile) {
m_displayName =
PString("Result:" + videoFile + " " + PString(std::hash<std::thread::id>{}(std::this_thread::get_id())));
// open video stream
P_LOG_INFO << "Trying to open'" << videoFile << "'";
PResult resVal = PInputVideoStream::Open(videoFile, m_ivs).LogIfError();
if(resVal.Failed()) {
P_LOG_ERROR << "Error in opening video stream:" << videoFile;
return resVal;
}
m_videoFrameRate = std::max(1., m_ivs.GetFrameratePerSecond());
P_LOG_INFO << "Video stream is opened frame rate:" << m_videoFrameRate;
// create FaceLog
P_LOG_INFO << "Creating FaceLog6 instance";
resVal = PAnalytics::Create("FaceLog6", parameters, m_faceLog).LogIfError();
if(resVal.Failed()) {
P_LOG_ERROR << "Failed to create: FaceLog6 with parameters:" << parameters;
return resVal;
}
P_LOG_INFO << "Created FaceLog6 instance";
return PResult::C_OK;
}
void Run() override {
// processing loop
P_LOG_INFO << "Starting main processing loop";
PFrame frame;
PList events;
while(m_ivs.GetFrame(frame).Ok()) {
if(!m_faceLog.Apply(frame, events).LogIfError().Ok()) {
P_LOG_ERROR << "Error in analytics";
break;
}
ProcessEvents(events);
}
if(!m_faceLog.Finish(events).LogIfError().Ok()) {
P_LOG_ERROR << "Error in analytics";
}
ProcessEvents(events);
P_LOG_INFO << "Finished processing";
}
void ProcessEvents(const PList& events) {
m_showEvents.QueueEvents(events);
if(m_showEvents.ShowEvents()) {
const PImage& img = m_showEvents.GetImage();
img.Display(m_displayName, int32(1000 / m_videoFrameRate))
.LogIfError(); // wait to limit display frame rate
}
}
private:
PAnalytics m_faceLog;
ShowEventsC m_showEvents{false};
PString m_displayName;
double m_videoFrameRate{25};
};
// Path to find sample data: $PAPILLON_INSTALL_DIR/Data/Samples
const PString SAMPLE_DIR = PPath::Join(PUtils::GetEnv("PAPILLON_INSTALL_DIR"),
                                       "Data",
                                       "Samples");
void RunDemo() {
// create list of sources
std::vector<std::string> sources;
sources.emplace_back(PPath::Join(SAMPLE_DIR, "busy_office.avi").c_str());
// sources.push_back("/bigbuf/alexey/Projects/Mask detection/videos/shutterstock_1046171242.mov");
PProperties parameters;
parameters.Set("MaxFaceDetectorFR", -1.); // limit frame rate of face detector to 8 fps (to no overwhelm CPU)(if
// processing video files better disable this with -1.)
// create face detector
PDetector faceDetector;
CreateFaceDetector(faceDetector).OrDie();
parameters.Set("FaceDetector", faceDetector);
// create face recognition describer
PDescriber bioDescriber;
CreateBioDescriber(bioDescriber).OrDie();
parameters.Set("FaceRecognitionDescriber", bioDescriber);
// create metadata describer
PDescriber mdDescriber;
CreateMetaDataDescriber(mdDescriber);
parameters.Set("MDDescriber", mdDescriber);
std::vector<ProcessThreadC> tasks; // important! we need to keep tasks until threads finish
std::vector<PConcurrentThread> threads;
// start processing threads
P_LOG_INFO << "-------------------- Initialising tasks";
for(const auto& source : sources) {
tasks.emplace_back(ProcessThreadC());
if(tasks.back().Init(parameters, source).LogIfError().Failed()) {
P_LOG_ERROR << "Failed to init processing thread.";
return;
}
}
P_LOG_INFO << "-------------------- Lunching processing threads";
for(auto& task : tasks) {
threads.push_back(PConcurrentThread(task));
}
// wait for threads to finish
P_LOG_INFO << "-------------------- Waiting for threads";
for(auto& it : threads) {
it.Join();
}
}
// Entry point: runs the demo and exits.
// Fix: removed a dead `const bool debug{false}; if(debug) {}` block that had
// no effect (presumably a leftover logging-setup placeholder).
int main() {
    RunDemo();
    return 0;
}