Commit abe959b8 authored by wenboqiang's avatar wenboqiang

1.add facemask detect

2.更新 classifypost 通过 cls label size 指定分类网络输出label数量,不再写死。
3.更新 classifyprepost 通过modelinfo 传入 inputshape
parent 77cf31ca
...@@ -108,6 +108,7 @@ add_executable(main ...@@ -108,6 +108,7 @@ add_executable(main
inference/onlyDetectInference.cpp inference/onlyDetectInference.cpp
detectPreprocess/helmetPreprocess.cpp detectPreprocess/helmetPreprocess.cpp
inference/inferFactory.cpp inference/inferFactory.cpp
detectPostprocess/centerfacePostprocess.cpp
mqtt/mqttsend.cpp mqtt/mqttsend.cpp
main.cpp ) main.cpp )
......
...@@ -119,7 +119,7 @@ AclLiteError ClassifyPostprocessThread::Init() ...@@ -119,7 +119,7 @@ AclLiteError ClassifyPostprocessThread::Init()
if (ret != ACLLITE_OK) { if (ret != ACLLITE_OK) {
return ACLLITE_ERROR; return ACLLITE_ERROR;
} }
kEachResultTensorNum_ = modelInfo_.clsLabel.size();
if (outputType_ == "video") { if (outputType_ == "video") {
ret = GetOutputFrameResolution(outputFrameWidth_, outputFrameHeight_, channelId_); ret = GetOutputFrameResolution(outputFrameWidth_, outputFrameHeight_, channelId_);
if (ret != ACLLITE_OK) { if (ret != ACLLITE_OK) {
...@@ -321,11 +321,11 @@ AclLiteError ClassifyPostprocessThread::InferOutputProcess(shared_ptr<ObjDetectD ...@@ -321,11 +321,11 @@ AclLiteError ClassifyPostprocessThread::InferOutputProcess(shared_ptr<ObjDetectD
outData = reinterpret_cast<float*>(data); outData = reinterpret_cast<float*>(data);
float maxconf = 0; float maxconf = 0;
for (int i = 0; i < objDetectDataMsg->objInfo.size(); i++) { for (int i = 0; i < objDetectDataMsg->objInfo.size(); i++) {
int maxConfidentIndex = i * kEachResultTensorNum; int maxConfidentIndex = i * kEachResultTensorNum_;
float confsum = 0.0; float confsum = 0.0;
for (int j = 0; j < kEachResultTensorNum; j++) { for (int j = 0; j < kEachResultTensorNum_; j++) {
int index = i * kEachResultTensorNum + j; int index = i * kEachResultTensorNum_ + j;
confsum += std::exp(outData[index]); confsum += std::exp(outData[index]);
if (outData[index] > outData[maxConfidentIndex]) { if (outData[index] > outData[maxConfidentIndex]) {
maxConfidentIndex = index; maxConfidentIndex = index;
...@@ -333,10 +333,10 @@ AclLiteError ClassifyPostprocessThread::InferOutputProcess(shared_ptr<ObjDetectD ...@@ -333,10 +333,10 @@ AclLiteError ClassifyPostprocessThread::InferOutputProcess(shared_ptr<ObjDetectD
} }
maxconf = std::exp(outData[maxConfidentIndex]) / confsum; maxconf = std::exp(outData[maxConfidentIndex]) / confsum;
if (maxconf < 0.25){ // if (maxconf < 0.25){
ACLLITE_LOG_ERROR("cls conf < 0.25"); // ACLLITE_LOG_ERROR("cls conf < 0.25");
} // }
int colorIndex = maxConfidentIndex - i * kEachResultTensorNum; int colorIndex = maxConfidentIndex - i * kEachResultTensorNum_;
objDetectDataMsg->objInfo[i].classify_result = modelInfo_.clsLabel[colorIndex]; objDetectDataMsg->objInfo[i].classify_result = modelInfo_.clsLabel[colorIndex];
objDetectDataMsg->objInfo[i].detect_result = modelInfo_.clsLabel[colorIndex] + '_' +std::to_string(maxconf); objDetectDataMsg->objInfo[i].detect_result = modelInfo_.clsLabel[colorIndex] + '_' +std::to_string(maxconf);
} }
......
...@@ -29,8 +29,8 @@ uint32_t kModelWidth = 256; ...@@ -29,8 +29,8 @@ uint32_t kModelWidth = 256;
uint32_t kModelHeight = 256; uint32_t kModelHeight = 256;
} }
ClassifyPreprocessThread::ClassifyPreprocessThread(aclrtRunMode& runMode) ClassifyPreprocessThread::ClassifyPreprocessThread(aclrtRunMode& runMode,ModelInfo programinfo)
:runMode_(runMode), modelWidth_(kModelWidth), modelHeight_(kModelHeight) :runMode_(runMode), modelWidth_(programinfo.clsifyInputWidth), modelHeight_(programinfo.clsifyInputHeight),programinfo_(programinfo)
{ {
} }
...@@ -46,6 +46,7 @@ AclLiteError ClassifyPreprocessThread::Init() ...@@ -46,6 +46,7 @@ AclLiteError ClassifyPreprocessThread::Init()
ACLLITE_LOG_ERROR("Dvpp init failed, error %d", aclRet); ACLLITE_LOG_ERROR("Dvpp init failed, error %d", aclRet);
return ACLLITE_ERROR; return ACLLITE_ERROR;
} }
return ACLLITE_OK; return ACLLITE_OK;
} }
...@@ -101,7 +102,7 @@ AclLiteError ClassifyPreprocessThread::Resize(vector<ObjInfo> &carImgs) ...@@ -101,7 +102,7 @@ AclLiteError ClassifyPreprocessThread::Resize(vector<ObjInfo> &carImgs)
// ret = dvpp_.Resize(carImgs[i].resizedImgs, carImgs[i].cropedImgs, // ret = dvpp_.Resize(carImgs[i].resizedImgs, carImgs[i].cropedImgs,
// kModelWidth, kModelHeight); // kModelWidth, kModelHeight);
// } // }
ret = dvpp_.CropPaste(carImgs[i].resizedImgs,carImgs[i].cropedImgs, kModelWidth, kModelHeight, ret = dvpp_.CropPaste(carImgs[i].resizedImgs,carImgs[i].cropedImgs, modelWidth_, modelHeight_,
0, 0, carImgs[i].cropedImgs.width,carImgs[i].cropedImgs.height); 0, 0, carImgs[i].cropedImgs.width,carImgs[i].cropedImgs.height);
// if ((int) i ==4){ // if ((int) i ==4){
...@@ -111,7 +112,7 @@ AclLiteError ClassifyPreprocessThread::Resize(vector<ObjInfo> &carImgs) ...@@ -111,7 +112,7 @@ AclLiteError ClassifyPreprocessThread::Resize(vector<ObjInfo> &carImgs)
// cv::Mat yuvimg(padingImage.alignHeight*1.5, padingImage.alignWidth, CV_8UC1, padingImage.data.get()); // cv::Mat yuvimg(padingImage.alignHeight*1.5, padingImage.alignWidth, CV_8UC1, padingImage.data.get());
// cv::Mat saveframe; // cv::Mat saveframe;
// cv::cvtColor(yuvimg, saveframe, CV_YUV2BGR_NV12); // cv::cvtColor(yuvimg, saveframe, CV_YUV2BGR_NV12);
// cv::imwrite("../data/pic/copysleep" + to_string(i) +".jpg",saveframe); // cv::imwrite("../data/pic/copyface" + to_string(i) +".jpg",saveframe);
// } // }
if (ret) { if (ret) {
......
//
// Created by 乾三 on 2023/4/18.
//
#include "../include/centerfacePostprocess.h"
namespace {
// Network input resolution of the CenterFace detection model; the output
// feature maps are at 1/4 of this size (see InferOutputProcess).
const int kwidth = 640;
const int kheight = 448;
// enum BBoxIndex { TOPLEFTX = 0, TOPLEFTY = 1, BOTTOMRIGHTX = 2, BOTTOMRIGHTY = 3, SCORE = 4, LABEL = 5 };
}
// Default construction: no extra state beyond what the base class sets up.
centerfacePostprocessThread::centerfacePostprocessThread() = default;
std::vector<BBoxstr> centerfacePostprocessThread::outPutDecode(float* heatmap, float* scale, float* offset, float* landmark, int width, int height, std::vector<std::vector<float>>& lms,int oriwidth,int oriheight) {
    // Decode CenterFace feature maps into face bounding boxes expressed in
    // original-image coordinates.
    //   heatmap  : width*height face-confidence map
    //   scale    : 2*width*height log-scale map (height plane, then width plane)
    //   offset   : 2*width*height sub-cell center offset map (y plane, then x plane)
    //   landmark : landmark map — currently unused; landmark decoding is disabled
    //   width/height       : feature-map size (network input size / 4)
    //   oriwidth/oriheight : original frame size, used to undo the letterbox
    // Returns boxes after NMS. `lms` is cleared and left empty while landmark
    // decoding stays disabled.
    std::vector<BBoxstr> detectResults;
    const int len = width * height;
    lms.clear();
    (void)landmark; // parameter kept for interface compatibility
    // Letterbox scale factors do not depend on the cell index — compute once
    // instead of once per candidate (was recomputed inside the loop).
    const float widthScale = float(kwidth) / float(oriwidth);
    const float heightScale = float(kheight) / float(oriheight);
    for (int i = 0; i < len; i++) {
        if (heatmap[i] <= threshold_) {
            continue;
        }
        BBoxstr boundBox;
        const int c0 = i / width;  // feature-map row
        const int c1 = i % width;  // feature-map column
        // exp(log-scale) * 4 converts the predicted size back to
        // network-input pixels (feature stride is 4).
        const float s0 = exp(scale[c0 * width + c1]) * 4;
        const float s1 = exp(scale[len + c0 * width + c1]) * 4;
        const float o0 = offset[c0 * width + c1];
        const float o1 = offset[len + c0 * width + c1];
        const float s = heatmap[i];
        // Top-left corner in network-input pixels, clamped to the input frame.
        float x1 = std::max(0.f, (c1 + o1 + 0.5f) * 4 - s1 / 2);
        float y1 = std::max(0.f, (c0 + o0 + 0.5f) * 4 - s0 / 2);
        x1 = std::min(x1, (float)kwidth);
        y1 = std::min(y1, (float)kheight);
        // NOTE(review): the lower clamp of 1 vs 0 and the upper clamp of
        // oriwidth vs oriwidth-1 are asymmetric between the two branches;
        // preserved as-is — confirm whether that asymmetry is intentional.
        if (heightScale > widthScale)
        {
            // Frame was letterboxed vertically: subtract the vertical padding
            // before scaling back to original coordinates.
            boundBox.rect.ltX = std::max((int)(x1 / widthScale),1);
            boundBox.rect.rbX = std::min((int)((x1 + s1) / widthScale),oriwidth) ;
            boundBox.rect.ltY = std::max((int)((((y1) - (kheight - widthScale * oriheight) / 2)) / widthScale),1);
            boundBox.rect.rbY = std::min((int)((((y1 + s0) - (kheight - widthScale * oriheight) / 2)) / widthScale),oriheight - 1);
        }
        else
        {
            // Frame was letterboxed horizontally: subtract the horizontal
            // padding before scaling back to original coordinates.
            boundBox.rect.ltX = std::max((int)(((x1) - (kwidth - heightScale * oriwidth) / 2) / heightScale),1);
            boundBox.rect.rbX = std::min((int)( ((x1 + s1) - (kwidth - heightScale * oriwidth) / 2) / heightScale),oriwidth-1);
            boundBox.rect.ltY = std::max((int)((y1) / heightScale),0);
            boundBox.rect.rbY = std::min((int)((y1 + s0) / heightScale),oriheight);
        }
        boundBox.cls = 0;  // single class: face
        boundBox.score = s;
        detectResults.push_back(boundBox);
    }
    // Suppress overlapping candidates before returning.
    std::vector<BBoxstr> bboxesNew = nms(modelInfo_.nmsThresh, detectResults, modelInfo_.classnum);
    return bboxesNew;
}
AclLiteError centerfacePostprocessThread::InferOutputProcess(std::shared_ptr<ObjDetectDataMsg> objDetectDataMsg)
{
    // Decode the four CenterFace output tensors attached to the message and
    // fill objDetectDataMsg->objInfo with one ObjInfo per detected face.
    // On the last frame there is nothing to decode.
    if (objDetectDataMsg->isLastFrame)
        return ACLLITE_OK;
    float* heatmap = (float *)objDetectDataMsg->detectInferData[0].data.get();
    float* scale = (float *)objDetectDataMsg->detectInferData[1].data.get();
    float* offset = (float *)objDetectDataMsg->detectInferData[2].data.get();
    float* landmark = (float *)objDetectDataMsg->detectInferData[3].data.get();
    // Validate every tensor, not just the heatmap: scale and offset are also
    // dereferenced inside outPutDecode.
    if (heatmap == nullptr || scale == nullptr || offset == nullptr || landmark == nullptr) {
        ACLLITE_LOG_ERROR("detect inferoutput is null\n");
        return ACLLITE_ERROR;
    }
    totalBox_ = modelInfo_.totalBox;
    std::vector<std::vector<float>> lms;
    // Feature maps are produced at 1/4 of the network input resolution.
    std::vector<BBoxstr> bboxesNew = outPutDecode(heatmap, scale, offset, landmark, kwidth/4, kheight/4,lms,objDetectDataMsg->imageFrame.width,objDetectDataMsg->imageFrame.height);
    // Drop previous results. The old code wrote
    //   objInfo = static_cast<const std::vector<ObjInfo>>(NULL);
    // which only worked by accident via the explicit vector(size_type) ctor.
    objDetectDataMsg->objInfo.clear();
    for (auto& bboxesNew_i : bboxesNew)
    {
        ObjInfo objInfo;
        objInfo.rectangle.lt.x = bboxesNew_i.rect.ltX;
        objInfo.rectangle.lt.y = bboxesNew_i.rect.ltY;
        objInfo.rectangle.rb.x = bboxesNew_i.rect.rbX;
        objInfo.rectangle.rb.y = bboxesNew_i.rect.rbY;
        // Keep two decimal places of the score, e.g. "face_0.98".
        auto constr = std::to_string(bboxesNew_i.score);
        constr = constr.substr(0, constr.find(".") + 3);
        objInfo.detect_result = modelInfo_.Label[bboxesNew_i.cls] + '_'+ constr;
        objDetectDataMsg->objInfo.emplace_back(objInfo);
    }
    return ACLLITE_OK;
}
AclLiteError centerfacePostprocessThread::Init()
{
    // No CenterFace-specific setup; delegate to the base-class initialization.
    return DetectPostprocessThread::Init();
}
AclLiteError centerfacePostprocessThread::Process(int msgId, std::shared_ptr<void> data)
{
    // Message dispatch is handled entirely by the base class, which in turn
    // calls the InferOutputProcess override defined in this file.
    return DetectPostprocessThread::Process(msgId, data);
}
// Forward the configuration file, channel id and model description to the
// base class; no additional members need initializing here.
centerfacePostprocessThread::centerfacePostprocessThread(const char *&configFile, int channelId, ModelInfo programinfo)
    : DetectPostprocessThread(configFile, channelId, programinfo)
{
}
// Nothing to release beyond what the base class cleans up.
centerfacePostprocessThread::~centerfacePostprocessThread() = default;
std::vector<BBoxstr> centerfacePostprocessThread::nms(const float nmsThresh, std::vector<BBoxstr> &binfo, const uint numClasses)
{
    // Thin wrapper so outPutDecode can use a short name for the shared
    // multi-class NMS provided by the base class.
    return DetectPostprocessThread::nmsAllClasses(nmsThresh, binfo, numClasses);
}
//std::vector<BBoxstr> centerfacePostprocessThread::nonMaximumSuppression(const float nmsThresh, std::vector<BBoxstr> binfo) {
// return DetectPostprocessThread::nonMaximumSuppression(nmsThresh, binfo);
//}
//std::vector<BBoxstr> centerfacePostprocessThread::nonMaximumSuppression(const float nmsThresh, std::vector<BBoxstr> binfo) {
// return DetectPostprocessThread::nonMaximumSuppression(nmsThresh, binfo);
//}
...@@ -259,10 +259,6 @@ AclLiteError DetectPostprocessThread::InferOutputProcess(shared_ptr<ObjDetectDat ...@@ -259,10 +259,6 @@ AclLiteError DetectPostprocessThread::InferOutputProcess(shared_ptr<ObjDetectDat
return ACLLITE_OK; return ACLLITE_OK;
} }
//DetectPostprocessThread::DetectPostprocessThread(ModelInfo programinfo)
// :modelInfo_(programinfo)
//{
//}
DetectPostprocessThread::DetectPostprocessThread(const char*& configFile, int channelId,ModelInfo programinfo) DetectPostprocessThread::DetectPostprocessThread(const char*& configFile, int channelId,ModelInfo programinfo)
:configFile_(configFile), channelId_(channelId), modelInfo_(programinfo) :configFile_(configFile), channelId_(channelId), modelInfo_(programinfo)
{ {
......
//
// Created by 乾三 on 2023/4/18.
//
#ifndef HUAWEI_ATLAS_DEMO_CENTERFACEPOSTPROCESSTHREAD_H
#define HUAWEI_ATLAS_DEMO_CENTERFACEPOSTPROCESSTHREAD_H
#include "../include/detectPostprocess.h"
#include <iostream>
#include <vector>
#include <opencv2/opencv.hpp>
//#include <opencv2/dnn.hpp>
// Post-processing thread for the CenterFace face detector: turns the
// heatmap/scale/offset output tensors into face bounding boxes attached to
// the ObjDetectDataMsg.
class centerfacePostprocessThread : public DetectPostprocessThread{
public:
centerfacePostprocessThread();
// DetectPostprocessThread(ModelInfo programinfo);
centerfacePostprocessThread(const char *&configFile, int channelId, ModelInfo programinfo);
~centerfacePostprocessThread();
// Thread lifecycle hooks; both delegate straight to DetectPostprocessThread.
AclLiteError Init();
AclLiteError Process(int msgId, std::shared_ptr<void> data);
// Thin wrapper around DetectPostprocessThread::nmsAllClasses.
std::vector<BBoxstr> nms(const float nmsThresh, std::vector<BBoxstr>& binfo, const uint numClasses);
// Decodes the stride-4 feature maps (width x height) into boxes in
// original-image coordinates; `lms` is currently always left empty because
// landmark decoding is disabled in the .cpp.
std::vector<BBoxstr> outPutDecode(float* heatmap, float* scale, float* offset, float* landmark, int width, int height, std::vector<std::vector<float>>& lms,int oriwidth,int oriheight);
// NOTE(review): declared here but no definition appears in the matching
// .cpp of this commit — confirm it is defined elsewhere or remove.
AclLiteError GetThreshold(float &threshold, uint32_t channelId);
AclLiteError InferOutputProcess(std::shared_ptr<ObjDetectDataMsg> objDetectDataMsg) override;
//public:
// float threshold_ = 0.3;
// const char* configFile_;
//// uint32_t totalBox_ = 17640;
// int channelId_;
// ModelInfo modelInfo_;
};
#endif //HUAWEI_ATLAS_DEMO_CENTERFACEPOSTPROCESSTHREAD_H
...@@ -61,6 +61,8 @@ private: ...@@ -61,6 +61,8 @@ private:
int channelId_; int channelId_;
cv::VideoWriter outputVideo_; cv::VideoWriter outputVideo_;
ModelInfo modelInfo_; ModelInfo modelInfo_;
public:
int kEachResultTensorNum_;
}; };
#endif #endif
\ No newline at end of file
...@@ -30,7 +30,7 @@ ...@@ -30,7 +30,7 @@
class ClassifyPreprocessThread : public AclLiteThread { class ClassifyPreprocessThread : public AclLiteThread {
public: public:
ClassifyPreprocessThread(aclrtRunMode& runMode); ClassifyPreprocessThread(aclrtRunMode& runMode,ModelInfo programinfo);
~ClassifyPreprocessThread(); ~ClassifyPreprocessThread();
AclLiteError Init(); AclLiteError Init();
...@@ -46,6 +46,7 @@ private: ...@@ -46,6 +46,7 @@ private:
uint32_t modelWidth_; uint32_t modelWidth_;
uint32_t modelHeight_; uint32_t modelHeight_;
AclLiteImageProc dvpp_; AclLiteImageProc dvpp_;
ModelInfo programinfo_;
}; };
#endif #endif
\ No newline at end of file
...@@ -42,10 +42,11 @@ public: ...@@ -42,10 +42,11 @@ public:
AclLiteError Process(int msgId, std::shared_ptr<void> data); AclLiteError Process(int msgId, std::shared_ptr<void> data);
std::vector<BBoxstr> nmsAllClasses(const float nmsThresh, std::vector<BBoxstr>& binfo, const uint numClasses); std::vector<BBoxstr> nmsAllClasses(const float nmsThresh, std::vector<BBoxstr>& binfo, const uint numClasses);
std::vector<BBoxstr> nonMaximumSuppression(const float nmsThresh, std::vector<BBoxstr> binfo); std::vector<BBoxstr> nonMaximumSuppression(const float nmsThresh, std::vector<BBoxstr> binfo);
std::vector<BBoxstr> outPutDecode(float* detectData, ModelInfo modelinfo, int image_ori_width,int image_ori_height,int image_resize_width,int image_resize_height); virtual std::vector<BBoxstr> outPutDecode(float* detectData, ModelInfo modelinfo, int image_ori_width,int image_ori_height,int image_resize_width,int image_resize_height);
AclLiteError GetThreshold(float &threshold, uint32_t channelId); AclLiteError GetThreshold(float &threshold, uint32_t channelId);
virtual AclLiteError InferOutputProcess(std::shared_ptr<ObjDetectDataMsg> objDetectDataMsg);
private: private:
AclLiteError InferOutputProcess(std::shared_ptr<ObjDetectDataMsg> objDetectDataMsg);
AclLiteError MsgSend(std::shared_ptr<ObjDetectDataMsg> objDetectDataMsg); AclLiteError MsgSend(std::shared_ptr<ObjDetectDataMsg> objDetectDataMsg);
public: public:
......
...@@ -22,6 +22,7 @@ ...@@ -22,6 +22,7 @@
#include "onlyDetectInference.h" #include "onlyDetectInference.h"
#include "inference.h" #include "inference.h"
#include "detectPostprocess.h" #include "detectPostprocess.h"
#include "centerfacePostprocess.h"
...@@ -38,6 +39,11 @@ public: ...@@ -38,6 +39,11 @@ public:
protected: protected:
const char* kConfigFile_; const char* kConfigFile_;
std::string kRtspUrl_; std::string kRtspUrl_;
public:
ModelInfo modelInfo_;
OnlyDetectNameInfo detectNameInfo_;
Detect2clsNameInfo detectclsNameInfo_;
}; };
class carColorInferenceInstance : public GetInferenceInstance{ class carColorInferenceInstance : public GetInferenceInstance{
public: public:
...@@ -45,8 +51,8 @@ public: ...@@ -45,8 +51,8 @@ public:
void Init(const char* kConfigFile) override; void Init(const char* kConfigFile) override;
~carColorInferenceInstance(); ~carColorInferenceInstance();
void getInstance(std::vector<AclLiteThreadParam>& threadTbl, int32_t deviceId,aclrtContext& context, aclrtRunMode& runMode, int rtspNumPerDevice,bool kDisplay, int addrtspNumPerDevice=0); void getInstance(std::vector<AclLiteThreadParam>& threadTbl, int32_t deviceId,aclrtContext& context, aclrtRunMode& runMode, int rtspNumPerDevice,bool kDisplay, int addrtspNumPerDevice=0);
Detect2clsNameInfo nameInfo_; // Detect2clsNameInfo nameInfo_;
ModelInfo carModelInfo_; // ModelInfo carModelInfo_;
}; };
class helmetInferenceInstance : public GetInferenceInstance{ class helmetInferenceInstance : public GetInferenceInstance{
...@@ -58,8 +64,8 @@ public: ...@@ -58,8 +64,8 @@ public:
// const std::string kDetectPreName_ = "helmetdetectPre"; // const std::string kDetectPreName_ = "helmetdetectPre";
// const std::string kDetectPostName_ = "helmetdetectPost"; // const std::string kDetectPostName_ = "helmetdetectPost";
// const std::string kInferName_ = "helmetinference"; // const std::string kInferName_ = "helmetinference";
ModelInfo helmetinfo_; // ModelInfo helmetinfo_;
OnlyDetectNameInfo detectNameInfo_; // OnlyDetectNameInfo detectNameInfo_;
}; };
...@@ -72,8 +78,8 @@ public: ...@@ -72,8 +78,8 @@ public:
// const std::string kDetectPreName_ = "firesmokedetectPre"; // const std::string kDetectPreName_ = "firesmokedetectPre";
// const std::string kDetectPostName_ = "firesmokedetectPost"; // const std::string kDetectPostName_ = "firesmokedetectPost";
// const std::string kInferName_ = "firesmokeinference"; // const std::string kInferName_ = "firesmokeinference";
ModelInfo firesmokeinfo_; // ModelInfo firesmokeinfo_;
OnlyDetectNameInfo detectNameInfo_; // OnlyDetectNameInfo detectNameInfo_;
}; };
class smokerSleepInferenceInstance : public GetInferenceInstance{ class smokerSleepInferenceInstance : public GetInferenceInstance{
public: public:
...@@ -84,8 +90,8 @@ public: ...@@ -84,8 +90,8 @@ public:
const std::string kDetectPreName_ = "smokesleepdetectPre"; const std::string kDetectPreName_ = "smokesleepdetectPre";
const std::string kDetectPostName_ = "smokesleepdetectPost"; const std::string kDetectPostName_ = "smokesleepdetectPost";
const std::string kInferName_ = "smokesleepinference"; const std::string kInferName_ = "smokesleepinference";
ModelInfo smokesleepinfo_; // ModelInfo smokesleepinfo_;
Detect2clsNameInfo nameInfo_; // Detect2clsNameInfo nameInfo_;
}; };
...@@ -99,8 +105,8 @@ public: ...@@ -99,8 +105,8 @@ public:
// const std::string kDetectPreName_ = "smokesleepdetectPre"; // const std::string kDetectPreName_ = "smokesleepdetectPre";
// const std::string kDetectPostName_ = "smokesleepdetectPost"; // const std::string kDetectPostName_ = "smokesleepdetectPost";
// const std::string kInferName_ = "smokesleepinference"; // const std::string kInferName_ = "smokesleepinference";
ModelInfo yangcheninfo_; // ModelInfo yangcheninfo_;
OnlyDetectNameInfo detectNameInfo_; // OnlyDetectNameInfo detectNameInfo_;
}; };
...@@ -113,8 +119,22 @@ public: ...@@ -113,8 +119,22 @@ public:
// const std::string kDetectPreName_ = "smokesleepdetectPre"; // const std::string kDetectPreName_ = "smokesleepdetectPre";
// const std::string kDetectPostName_ = "smokesleepdetectPost"; // const std::string kDetectPostName_ = "smokesleepdetectPost";
// const std::string kInferName_ = "smokesleepinference"; // const std::string kInferName_ = "smokesleepinference";
ModelInfo cocopinfo_; // ModelInfo cocopinfo_;
OnlyDetectNameInfo detectNameInfo_; // OnlyDetectNameInfo detectNameInfo_;
};
class centerfaceMaskInferenceInstance : public GetInferenceInstance{
public:
centerfaceMaskInferenceInstance();
void Init(const char* kConfigFile) override;
~centerfaceMaskInferenceInstance();
void getInstance(std::vector<AclLiteThreadParam>& threadTbl, int32_t deviceId,aclrtContext& context, aclrtRunMode& runMode, int rtspNumPerDevice,bool kDisplay, int addrtspNumPerDevice=0);
const std::string kDetectPreName_ = "centerfacemaskdetectPre";
const std::string kDetectPostName_ = "centerfacemaskdetectPost";
const std::string kInferName_ = "centerfacemaskinference";
// ModelInfo centerfacemaskinfo_;
// Detect2clsNameInfo nameInfo_;
}; };
...@@ -133,6 +153,7 @@ public: ...@@ -133,6 +153,7 @@ public:
case INSTANCENAME::SMOKER_SLEEP_INFERENCE_INSTANCE : return new smokerSleepInferenceInstance; case INSTANCENAME::SMOKER_SLEEP_INFERENCE_INSTANCE : return new smokerSleepInferenceInstance;
case INSTANCENAME::YANGCHEN_INFERENCE_INSTANCE : return new yangChenInferenceInstance; case INSTANCENAME::YANGCHEN_INFERENCE_INSTANCE : return new yangChenInferenceInstance;
case INSTANCENAME::COCO_PERSON_INFERENCE_INSTANCE : return new cocoInferenceInstance; case INSTANCENAME::COCO_PERSON_INFERENCE_INSTANCE : return new cocoInferenceInstance;
case INSTANCENAME::MASK_INFERENCE_INSTANCE : return new centerfaceMaskInferenceInstance;
default : break; default : break;
} }
} }
......
This diff is collapsed.
...@@ -300,6 +300,10 @@ AclLiteError InferenceThread::ModelSwith(int type) { ...@@ -300,6 +300,10 @@ AclLiteError InferenceThread::ModelSwith(int type) {
kDetectModelPath_ = "../model/coco/yolo5s_coco_640x448_yuvaipp448_rgb.om"; kDetectModelPath_ = "../model/coco/yolo5s_coco_640x448_yuvaipp448_rgb.om";
kClassifyModelPath_ = "../model/color_dynamic_batch.om"; kClassifyModelPath_ = "../model/color_dynamic_batch.om";
break; break;
case INSTANCENAME::MASK_INFERENCE_INSTANCE:
kDetectModelPath_ = "../model/facemask/centerface_640x448_aipp.om";
kClassifyModelPath_ = "../model/facemask/sbd_mask_dynamic.om";
break;
default : return ACLLITE_ERROR; default : return ACLLITE_ERROR;
} }
return ACLLITE_OK; return ACLLITE_OK;
......
...@@ -258,7 +258,8 @@ int main(int argc, char** argv) ...@@ -258,7 +258,8 @@ int main(int argc, char** argv)
" 火焰烟雾检测 id = 2" " 火焰烟雾检测 id = 2"
" 吸烟睡岗检测 id = 3" " 吸烟睡岗检测 id = 3"
" coco 人员检测 id = 4" " coco 人员检测 id = 4"
" 扬尘检测 id = 5", true,1); " 扬尘检测 id = 5"
" 口罩检测 id = 6", true,1);
// const int CAR_COLOR_INFERENCE_INSTANCE = 0; // const int CAR_COLOR_INFERENCE_INSTANCE = 0;
// const int HELMET_INFERENCE_INSTANCE = 1; // const int HELMET_INFERENCE_INSTANCE = 1;
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment