Commit abe959b8 authored by wenboqiang

1. Add face mask detection.

2. Update classifypost: take the number of labels output by the classification network from the size of clsLabel instead of hard-coding it.
3. Update classify pre/post-processing: pass the classifier input shape in via modelinfo.
parent 77cf31ca
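A minimal, self-contained sketch of the idea behind points 2 and 3, assuming only the ModelInfo fields that appear in the diff below (clsLabel, clsifyInputWidth, clsifyInputHeight); the main() harness is illustrative and not code from this commit:

    #include <cstdint>
    #include <iostream>
    #include <string>
    #include <vector>

    // Stand-in for the project's ModelInfo, reduced to the fields used here.
    struct ModelInfo {
        std::vector<std::string> clsLabel;   // classification labels, e.g. { "nomask", "mask" }
        uint32_t clsifyInputWidth = 224;     // classifier input shape, now carried by ModelInfo
        uint32_t clsifyInputHeight = 224;
    };

    int main() {
        ModelInfo info;
        info.clsLabel = { "nomask", "mask" };
        // Point 2: the per-object output width follows the label list, not a hard-coded constant.
        int eachResultTensorNum = static_cast<int>(info.clsLabel.size());
        // Point 3: the classify pre/post threads read their input shape from ModelInfo.
        std::cout << eachResultTensorNum << " labels, classifier input "
                  << info.clsifyInputWidth << "x" << info.clsifyInputHeight << std::endl;
        return 0;
    }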
@@ -108,6 +108,7 @@ add_executable(main
inference/onlyDetectInference.cpp
detectPreprocess/helmetPreprocess.cpp
inference/inferFactory.cpp
+detectPostprocess/centerfacePostprocess.cpp
mqtt/mqttsend.cpp
main.cpp )
......
@@ -119,7 +119,7 @@ AclLiteError ClassifyPostprocessThread::Init()
if (ret != ACLLITE_OK) {
return ACLLITE_ERROR;
}
+kEachResultTensorNum_ = modelInfo_.clsLabel.size();
if (outputType_ == "video") {
ret = GetOutputFrameResolution(outputFrameWidth_, outputFrameHeight_, channelId_);
if (ret != ACLLITE_OK) {
@@ -321,11 +321,11 @@ AclLiteError ClassifyPostprocessThread::InferOutputProcess(shared_ptr<ObjDetectD
outData = reinterpret_cast<float*>(data);
float maxconf = 0;
for (int i = 0; i < objDetectDataMsg->objInfo.size(); i++) {
-int maxConfidentIndex = i * kEachResultTensorNum;
+int maxConfidentIndex = i * kEachResultTensorNum_;
float confsum = 0.0;
-for (int j = 0; j < kEachResultTensorNum; j++) {
+for (int j = 0; j < kEachResultTensorNum_; j++) {
-int index = i * kEachResultTensorNum + j;
+int index = i * kEachResultTensorNum_ + j;
confsum += std::exp(outData[index]);
if (outData[index] > outData[maxConfidentIndex]) {
maxConfidentIndex = index;
@@ -333,10 +333,10 @@ AclLiteError ClassifyPostprocessThread::InferOutputProcess(shared_ptr<ObjDetectD
}
maxconf = std::exp(outData[maxConfidentIndex]) / confsum;
-if (maxconf < 0.25){
+// if (maxconf < 0.25){
-ACLLITE_LOG_ERROR("cls conf < 0.25");
+// ACLLITE_LOG_ERROR("cls conf < 0.25");
-}
+// }
-int colorIndex = maxConfidentIndex - i * kEachResultTensorNum;
+int colorIndex = maxConfidentIndex - i * kEachResultTensorNum_;
objDetectDataMsg->objInfo[i].classify_result = modelInfo_.clsLabel[colorIndex];
objDetectDataMsg->objInfo[i].detect_result = modelInfo_.clsLabel[colorIndex] + '_' + std::to_string(maxconf);
}
......
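The confidence computed in the loop above is a per-object softmax over kEachResultTensorNum_ logits: the argmax picks the label and exp(max)/confsum is its probability. A standalone sketch of the same math (the helper name is illustrative, not from the codebase):

    #include <cmath>
    #include <cstddef>
    #include <utility>
    #include <vector>

    // Returns (best class index, softmax confidence) for one object's logits,
    // mirroring the maxConfidentIndex / confsum loop in ClassifyPostprocessThread.
    static std::pair<std::size_t, float> TopClass(const std::vector<float>& logits) {
        std::size_t best = 0;
        float confsum = 0.0f;
        for (std::size_t j = 0; j < logits.size(); ++j) {
            confsum += std::exp(logits[j]);
            if (logits[j] > logits[best]) {
                best = j;
            }
        }
        return { best, std::exp(logits[best]) / confsum };
    }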
@@ -29,8 +29,8 @@ uint32_t kModelWidth = 256;
uint32_t kModelHeight = 256;
}
-ClassifyPreprocessThread::ClassifyPreprocessThread(aclrtRunMode& runMode)
+ClassifyPreprocessThread::ClassifyPreprocessThread(aclrtRunMode& runMode, ModelInfo programinfo)
-:runMode_(runMode), modelWidth_(kModelWidth), modelHeight_(kModelHeight)
+:runMode_(runMode), modelWidth_(programinfo.clsifyInputWidth), modelHeight_(programinfo.clsifyInputHeight), programinfo_(programinfo)
{
}
@@ -46,6 +46,7 @@ AclLiteError ClassifyPreprocessThread::Init()
ACLLITE_LOG_ERROR("Dvpp init failed, error %d", aclRet);
return ACLLITE_ERROR;
}
return ACLLITE_OK;
}
@@ -101,7 +102,7 @@ AclLiteError ClassifyPreprocessThread::Resize(vector<ObjInfo> &carImgs)
// ret = dvpp_.Resize(carImgs[i].resizedImgs, carImgs[i].cropedImgs,
// kModelWidth, kModelHeight);
// }
-ret = dvpp_.CropPaste(carImgs[i].resizedImgs, carImgs[i].cropedImgs, kModelWidth, kModelHeight,
+ret = dvpp_.CropPaste(carImgs[i].resizedImgs, carImgs[i].cropedImgs, modelWidth_, modelHeight_,
0, 0, carImgs[i].cropedImgs.width, carImgs[i].cropedImgs.height);
// if ((int) i ==4){
@@ -111,7 +112,7 @@ AclLiteError ClassifyPreprocessThread::Resize(vector<ObjInfo> &carImgs)
// cv::Mat yuvimg(padingImage.alignHeight*1.5, padingImage.alignWidth, CV_8UC1, padingImage.data.get());
// cv::Mat saveframe;
// cv::cvtColor(yuvimg, saveframe, CV_YUV2BGR_NV12);
-// cv::imwrite("../data/pic/copysleep" + to_string(i) +".jpg",saveframe);
+// cv::imwrite("../data/pic/copyface" + to_string(i) +".jpg",saveframe);
// }
if (ret) {
......
//
// Created by 乾三 on 2023/4/18.
//
#include "../include/centerfacePostprocess.h"
namespace {
const int kwidth = 640;
const int kheight = 448;
// enum BBoxIndex { TOPLEFTX = 0, TOPLEFTY = 1, BOTTOMRIGHTX = 2, BOTTOMRIGHTY = 3, SCORE = 4, LABEL = 5 };
}
centerfacePostprocessThread::centerfacePostprocessThread() {
}
std::vector<BBoxstr> centerfacePostprocessThread::outPutDecode(float* heatmap, float* scale, float* offset, float* landmark, int width, int height, std::vector<std::vector<float>>& lms,int oriwidth,int oriheight) {
std::vector<BBoxstr> detectResults;
int len = width * height;
int c0, c1;
float s0, s1, o0, o1, s, x1, y1,widthScale,heightScale;
lms.clear();
std::vector<float> lm;
for (int i = 0; i < len; i++) {
// every cell of the stride-4 heatmap whose score exceeds threshold_ becomes a face candidate
if (heatmap[i] > threshold_) {
widthScale = float(kwidth) / float(oriwidth);
heightScale = float(kheight) / float(oriheight);
BBoxstr boundBox;
c0 = i / width;
c1 = i % width;
s0 = exp(scale[c0 * width + c1]) * 4 ;
s1 = exp(scale[len + c0 * width + c1]) * 4 ;
o0 = offset[c0 * width + c1];
o1 = offset[len + c0 * width + c1];
s = heatmap[i];
x1 = std::max(0.f, (c1 + o1 + 0.5f) * 4 - s1 / 2);
y1 = std::max(0.f, (c0 + o0 + 0.5f) * 4 - s0 / 2);
x1 = std::min(x1, (float)kwidth);
y1 = std::min(y1, (float)kheight);
// if (widthScale < heightScale){ // the y axis needs to be compensated using the x-axis scale
// boundBox.rect.ltX = x1 / widthScale;
// (1 - (oriheight * widthScale) / kheight)
//// boundBox.rect.ltY = y1;
// boundBox.rect.ltY = (int)(y1 - ((kheight - (oriheight / widthScale))/2));
// boundBox.rect.rbX = std::min(x1 + s1, (float)kwidth);
// boundBox.rect.rbY = std::min(y1 + s0, (float)kheight);
// boundBox.cls = 0;
// boundBox.score = s;
//
// } else{
// boundBox.rect.ltX = x1;
// boundBox.rect.ltY = y1;
// boundBox.rect.rbX = std::min(x1 + s1, (float)kwidth);
// boundBox.rect.rbY = std::min(y1 + s0, (float)kheight);
// boundBox.cls = 0;
// boundBox.score = s;
// }
// map the box from the 640x448 letterboxed input back to original image coordinates
if (heightScale > widthScale)
{
boundBox.rect.ltX = std::max((int)(x1 / widthScale),1);
boundBox.rect.rbX = std::min((int)((x1 + s1) / widthScale),oriwidth) ;
boundBox.rect.ltY = std::max((int)((((y1) - (kheight - widthScale * oriheight) / 2)) / widthScale),1);
boundBox.rect.rbY = std::min((int)((((y1 + s0) - (kheight - widthScale * oriheight) / 2)) / widthScale),oriheight - 1);
}
else
{
boundBox.rect.ltX = std::max((int)(((x1) - (kwidth - heightScale * oriwidth) / 2) / heightScale),1);
boundBox.rect.rbX = std::min((int)( ((x1 + s1) - (kwidth - heightScale * oriwidth) / 2) / heightScale),oriwidth-1);
boundBox.rect.ltY = std::max((int)((y1) / heightScale),0);
boundBox.rect.rbY = std::min((int)((y1 + s0) / heightScale),oriheight);
}
boundBox.cls = 0;
boundBox.score = s;
detectResults.push_back(boundBox);
// if (landmarks) {
// lm.clear();
// for (int j = 0; j < 5; j++) {
// lm.push_back(landmark[j * 2 + 1 * len + c0 * width + c1] * s1 + x1);
// lm.push_back(landmark[j * 2 + c0 * width + c1] * s0 + y1);
// }
// lms.push_back(lm);
// }
}
}
std::vector<BBoxstr> bboxesNew = nms(modelInfo_.nmsThresh, detectResults, modelInfo_.classnum);
return bboxesNew;
}
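The two branches above undo the 640x448 letterbox: the frame is assumed to have been scaled by the smaller of the two axis ratios and centred, so the padded axis has its padding subtracted before dividing by that scale. A compact sketch of the same mapping (the struct and function names here are illustrative only):

    #include <algorithm>

    struct Box { float ltX, ltY, rbX, rbY; };

    // Map a box from the letterboxed model input (modelW x modelH) back to the
    // original frame (oriW x oriH), assuming centre padding as in outPutDecode above.
    static Box UnLetterbox(Box b, int modelW, int modelH, int oriW, int oriH) {
        float scale = std::min(float(modelW) / oriW, float(modelH) / oriH);
        float padX = (modelW - scale * oriW) / 2.0f;   // > 0 only when the width axis is padded
        float padY = (modelH - scale * oriH) / 2.0f;   // > 0 only when the height axis is padded
        b.ltX = std::max((b.ltX - padX) / scale, 0.0f);
        b.ltY = std::max((b.ltY - padY) / scale, 0.0f);
        b.rbX = std::min((b.rbX - padX) / scale, float(oriW));
        b.rbY = std::min((b.rbY - padY) / scale, float(oriH));
        return b;
    }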
AclLiteError centerfacePostprocessThread::InferOutputProcess(std::shared_ptr<ObjDetectDataMsg> objDetectDataMsg)
{
if (objDetectDataMsg->isLastFrame)
return ACLLITE_OK;
float* heatmap = (float *)objDetectDataMsg->detectInferData[0].data.get();
float* scale = (float *)objDetectDataMsg->detectInferData[1].data.get();
float* offset = (float *)objDetectDataMsg->detectInferData[2].data.get();
float* landmark = (float *)objDetectDataMsg->detectInferData[3].data.get();
if (heatmap == nullptr) {
ACLLITE_LOG_ERROR("detect inferoutput is null\n");
return ACLLITE_ERROR;
}
totalBox_ = modelInfo_.totalBox;
std::vector<std::vector<float>> lms;
std::vector<BBoxstr> bboxesNew = outPutDecode(heatmap, scale, offset, landmark, kwidth/4, kheight/4,lms,objDetectDataMsg->imageFrame.width,objDetectDataMsg->imageFrame.height);
objDetectDataMsg->objInfo.clear();
for (auto& bboxesNew_i : bboxesNew)
{
ObjInfo objInfo;
objInfo.rectangle.lt.x = bboxesNew_i.rect.ltX;
objInfo.rectangle.lt.y = bboxesNew_i.rect.ltY;
objInfo.rectangle.rb.x = bboxesNew_i.rect.rbX;
objInfo.rectangle.rb.y = bboxesNew_i.rect.rbY;
auto constr = std::to_string(bboxesNew_i.score);
constr = constr.substr(0, constr.find(".") + 3);
objInfo.detect_result = modelInfo_.Label[bboxesNew_i.cls] + '_'+ constr;
objDetectDataMsg->objInfo.emplace_back(objInfo);
}
return ACLLITE_OK;
}
AclLiteError centerfacePostprocessThread::Init() {
return DetectPostprocessThread::Init();
}
AclLiteError centerfacePostprocessThread::Process(int msgId, std::shared_ptr<void> data) {
return DetectPostprocessThread::Process(msgId, data);
}
centerfacePostprocessThread::centerfacePostprocessThread(const char *&configFile, int channelId, ModelInfo programinfo)
: DetectPostprocessThread(configFile, channelId, programinfo) {
}
centerfacePostprocessThread::~centerfacePostprocessThread() {
}
std::vector<BBoxstr> centerfacePostprocessThread::nms(const float nmsThresh, std::vector<BBoxstr> &binfo, const uint numClasses) {
return DetectPostprocessThread::nmsAllClasses(nmsThresh, binfo, numClasses);
}
//std::vector<BBoxstr> centerfacePostprocessThread::nonMaximumSuppression(const float nmsThresh, std::vector<BBoxstr> binfo) {
// return DetectPostprocessThread::nonMaximumSuppression(nmsThresh, binfo);
//}
@@ -259,10 +259,6 @@ AclLiteError DetectPostprocessThread::InferOutputProcess(shared_ptr<ObjDetectDat
return ACLLITE_OK;
}
-//DetectPostprocessThread::DetectPostprocessThread(ModelInfo programinfo)
-// :modelInfo_(programinfo)
-//{
-//}
DetectPostprocessThread::DetectPostprocessThread(const char*& configFile, int channelId, ModelInfo programinfo)
:configFile_(configFile), channelId_(channelId), modelInfo_(programinfo)
{
......
//
// Created by 乾三 on 2023/4/18.
//
#ifndef HUAWEI_ATLAS_DEMO_CENTERFACEPOSTPROCESSTHREAD_H
#define HUAWEI_ATLAS_DEMO_CENTERFACEPOSTPROCESSTHREAD_H
#include "../include/detectPostprocess.h"
#include <iostream>
#include <vector>
#include <opencv2/opencv.hpp>
//#include <opencv2/dnn.hpp>
class centerfacePostprocessThread : public DetectPostprocessThread{
public:
centerfacePostprocessThread();
// DetectPostprocessThread(ModelInfo programinfo);
centerfacePostprocessThread(const char *&configFile, int channelId, ModelInfo programinfo);
~centerfacePostprocessThread();
AclLiteError Init();
AclLiteError Process(int msgId, std::shared_ptr<void> data);
std::vector<BBoxstr> nms(const float nmsThresh, std::vector<BBoxstr>& binfo, const uint numClasses);
std::vector<BBoxstr> outPutDecode(float* heatmap, float* scale, float* offset, float* landmark, int width, int height, std::vector<std::vector<float>>& lms,int oriwidth,int oriheight);
AclLiteError GetThreshold(float &threshold, uint32_t channelId);
AclLiteError InferOutputProcess(std::shared_ptr<ObjDetectDataMsg> objDetectDataMsg) override;
//public:
// float threshold_ = 0.3;
// const char* configFile_;
//// uint32_t totalBox_ = 17640;
// int channelId_;
// ModelInfo modelInfo_;
};
#endif //HUAWEI_ATLAS_DEMO_CENTERFACEPOSTPROCESSTHREAD_H
@@ -61,6 +61,8 @@ private:
int channelId_;
cv::VideoWriter outputVideo_;
ModelInfo modelInfo_;
+public:
+int kEachResultTensorNum_;
};
#endif
\ No newline at end of file
@@ -30,7 +30,7 @@
class ClassifyPreprocessThread : public AclLiteThread {
public:
-ClassifyPreprocessThread(aclrtRunMode& runMode);
+ClassifyPreprocessThread(aclrtRunMode& runMode, ModelInfo programinfo);
~ClassifyPreprocessThread();
AclLiteError Init();
@@ -46,6 +46,7 @@ private:
uint32_t modelWidth_;
uint32_t modelHeight_;
AclLiteImageProc dvpp_;
+ModelInfo programinfo_;
};
#endif
\ No newline at end of file
@@ -42,10 +42,11 @@ public:
AclLiteError Process(int msgId, std::shared_ptr<void> data);
std::vector<BBoxstr> nmsAllClasses(const float nmsThresh, std::vector<BBoxstr>& binfo, const uint numClasses);
std::vector<BBoxstr> nonMaximumSuppression(const float nmsThresh, std::vector<BBoxstr> binfo);
-std::vector<BBoxstr> outPutDecode(float* detectData, ModelInfo modelinfo, int image_ori_width,int image_ori_height,int image_resize_width,int image_resize_height);
+virtual std::vector<BBoxstr> outPutDecode(float* detectData, ModelInfo modelinfo, int image_ori_width,int image_ori_height,int image_resize_width,int image_resize_height);
AclLiteError GetThreshold(float &threshold, uint32_t channelId);
+virtual AclLiteError InferOutputProcess(std::shared_ptr<ObjDetectDataMsg> objDetectDataMsg);
private:
-AclLiteError InferOutputProcess(std::shared_ptr<ObjDetectDataMsg> objDetectDataMsg);
AclLiteError MsgSend(std::shared_ptr<ObjDetectDataMsg> objDetectDataMsg);
public:
......
@@ -22,6 +22,7 @@
#include "onlyDetectInference.h"
#include "inference.h"
#include "detectPostprocess.h"
+#include "centerfacePostprocess.h"
@@ -38,6 +39,11 @@ public:
protected:
const char* kConfigFile_;
std::string kRtspUrl_;
+public:
+ModelInfo modelInfo_;
+OnlyDetectNameInfo detectNameInfo_;
+Detect2clsNameInfo detectclsNameInfo_;
};
class carColorInferenceInstance : public GetInferenceInstance{
public:
@@ -45,8 +51,8 @@ public:
void Init(const char* kConfigFile) override;
~carColorInferenceInstance();
void getInstance(std::vector<AclLiteThreadParam>& threadTbl, int32_t deviceId,aclrtContext& context, aclrtRunMode& runMode, int rtspNumPerDevice,bool kDisplay, int addrtspNumPerDevice=0);
-Detect2clsNameInfo nameInfo_;
+// Detect2clsNameInfo nameInfo_;
-ModelInfo carModelInfo_;
+// ModelInfo carModelInfo_;
};
class helmetInferenceInstance : public GetInferenceInstance{
@@ -58,8 +64,8 @@ public:
// const std::string kDetectPreName_ = "helmetdetectPre";
// const std::string kDetectPostName_ = "helmetdetectPost";
// const std::string kInferName_ = "helmetinference";
-ModelInfo helmetinfo_;
+// ModelInfo helmetinfo_;
-OnlyDetectNameInfo detectNameInfo_;
+// OnlyDetectNameInfo detectNameInfo_;
};
@@ -72,8 +78,8 @@ public:
// const std::string kDetectPreName_ = "firesmokedetectPre";
// const std::string kDetectPostName_ = "firesmokedetectPost";
// const std::string kInferName_ = "firesmokeinference";
-ModelInfo firesmokeinfo_;
+// ModelInfo firesmokeinfo_;
-OnlyDetectNameInfo detectNameInfo_;
+// OnlyDetectNameInfo detectNameInfo_;
};
class smokerSleepInferenceInstance : public GetInferenceInstance{
public:
@@ -84,8 +90,8 @@ public:
const std::string kDetectPreName_ = "smokesleepdetectPre";
const std::string kDetectPostName_ = "smokesleepdetectPost";
const std::string kInferName_ = "smokesleepinference";
-ModelInfo smokesleepinfo_;
+// ModelInfo smokesleepinfo_;
-Detect2clsNameInfo nameInfo_;
+// Detect2clsNameInfo nameInfo_;
};
@@ -99,8 +105,8 @@ public:
// const std::string kDetectPreName_ = "smokesleepdetectPre";
// const std::string kDetectPostName_ = "smokesleepdetectPost";
// const std::string kInferName_ = "smokesleepinference";
-ModelInfo yangcheninfo_;
+// ModelInfo yangcheninfo_;
-OnlyDetectNameInfo detectNameInfo_;
+// OnlyDetectNameInfo detectNameInfo_;
};
@@ -113,8 +119,22 @@ public:
// const std::string kDetectPreName_ = "smokesleepdetectPre";
// const std::string kDetectPostName_ = "smokesleepdetectPost";
// const std::string kInferName_ = "smokesleepinference";
-ModelInfo cocopinfo_;
+// ModelInfo cocopinfo_;
-OnlyDetectNameInfo detectNameInfo_;
+// OnlyDetectNameInfo detectNameInfo_;
+};
+
+class centerfaceMaskInferenceInstance : public GetInferenceInstance{
+public:
+centerfaceMaskInferenceInstance();
+void Init(const char* kConfigFile) override;
+~centerfaceMaskInferenceInstance();
+void getInstance(std::vector<AclLiteThreadParam>& threadTbl, int32_t deviceId,aclrtContext& context, aclrtRunMode& runMode, int rtspNumPerDevice,bool kDisplay, int addrtspNumPerDevice=0);
+const std::string kDetectPreName_ = "centerfacemaskdetectPre";
+const std::string kDetectPostName_ = "centerfacemaskdetectPost";
+const std::string kInferName_ = "centerfacemaskinference";
+// ModelInfo centerfacemaskinfo_;
+// Detect2clsNameInfo nameInfo_;
};
@@ -133,6 +153,7 @@ public:
case INSTANCENAME::SMOKER_SLEEP_INFERENCE_INSTANCE : return new smokerSleepInferenceInstance;
case INSTANCENAME::YANGCHEN_INFERENCE_INSTANCE : return new yangChenInferenceInstance;
case INSTANCENAME::COCO_PERSON_INFERENCE_INSTANCE : return new cocoInferenceInstance;
+case INSTANCENAME::MASK_INFERENCE_INSTANCE : return new centerfaceMaskInferenceInstance;
default : break;
}
}
......
@@ -54,17 +54,17 @@ carColorInferenceInstance::carColorInferenceInstance() {
void carColorInferenceInstance::Init(const char* kConfigFile) {
kConfigFile_ = kConfigFile;
-nameInfo_.kInferName = "carinference";
+detectclsNameInfo_.kInferName = "carinference";
-nameInfo_.kDetectPreName = "cardetectPre";
+detectclsNameInfo_.kDetectPreName = "cardetectPre";
-nameInfo_.kDetectPostName = "cardetectPost";
+detectclsNameInfo_.kDetectPostName = "cardetectPost";
-nameInfo_.kClassifyPreName = "carclassifyPre";
+detectclsNameInfo_.kClassifyPreName = "carclassifyPre";
-nameInfo_.kClassifyPostName = "carclassifyPost";
+detectclsNameInfo_.kClassifyPostName = "carclassifyPost";
-nameInfo_.kRtspUrl = "carrtspDisplay";
+detectclsNameInfo_.kRtspUrl = "carrtspDisplay";
-nameInfo_.kMqttName = "carcolormqtt";
+detectclsNameInfo_.kMqttName = "carcolormqtt";
-carModelInfo_.clsifyInputWidth = 224;
+modelInfo_.clsifyInputWidth = 224;
-carModelInfo_.clsifyInputHeight = 224;
+modelInfo_.clsifyInputHeight = 224;
-carModelInfo_.modelInputWidth = 640;
+modelInfo_.modelInputWidth = 640;
-carModelInfo_.modelInputHeight = 448;
+modelInfo_.modelInputHeight = 448;
}
@@ -77,36 +77,36 @@ void carColorInferenceInstance::getInstance(std::vector<AclLiteThreadParam> &thr
int channelId = deviceId*rtspNumPerDevice+index;
if (kDisplay) {
detectPreThreadParam.threadInst =
-new DetectPreprocessThread(kConfigFile_, deviceId, channelId, runMode, nameInfo_, true);
+new DetectPreprocessThread(kConfigFile_, deviceId, channelId, runMode, detectclsNameInfo_, true);
} else {
detectPreThreadParam.threadInst =
-new DetectPreprocessThread(kConfigFile_, deviceId, channelId, runMode, nameInfo_);
+new DetectPreprocessThread(kConfigFile_, deviceId, channelId, runMode, detectclsNameInfo_);
}
-std::string DetectPreName = nameInfo_.kDetectPreName + std::to_string(channelId);
+std::string DetectPreName = detectclsNameInfo_.kDetectPreName + std::to_string(channelId);
detectPreThreadParam.threadInstName.assign(DetectPreName.c_str());
detectPreThreadParam.context = context;
detectPreThreadParam.runMode = runMode;
threadTbl.push_back(detectPreThreadParam);
AclLiteThreadParam detectPostThreadParam;
-detectPostThreadParam.threadInst = new DetectPostprocessThread(kConfigFile_,channelId,carModelInfo_);
+detectPostThreadParam.threadInst = new DetectPostprocessThread(kConfigFile_,channelId,modelInfo_);
-std::string DetectPostName = nameInfo_.kDetectPostName + std::to_string(channelId);
+std::string DetectPostName = detectclsNameInfo_.kDetectPostName + std::to_string(channelId);
detectPostThreadParam.threadInstName.assign(DetectPostName.c_str());
detectPostThreadParam.context = context;
detectPostThreadParam.runMode = runMode;
threadTbl.push_back(detectPostThreadParam);
AclLiteThreadParam classifyPreThreadParam;
-classifyPreThreadParam.threadInst = new ClassifyPreprocessThread(runMode);
+classifyPreThreadParam.threadInst = new ClassifyPreprocessThread(runMode,modelInfo_);
-std::string ClassifyPreName = nameInfo_.kClassifyPreName + std::to_string(channelId);
+std::string ClassifyPreName = detectclsNameInfo_.kClassifyPreName + std::to_string(channelId);
classifyPreThreadParam.threadInstName.assign(ClassifyPreName.c_str());
classifyPreThreadParam.context = context;
classifyPreThreadParam.runMode = runMode;
threadTbl.push_back(classifyPreThreadParam);
AclLiteThreadParam classifyPostThreadParam;
-classifyPostThreadParam.threadInst = new ClassifyPostprocessThread(kConfigFile_, channelId,carModelInfo_);
+classifyPostThreadParam.threadInst = new ClassifyPostprocessThread(kConfigFile_, channelId,modelInfo_);
-std::string ClassifyPostName = nameInfo_.kClassifyPostName + std::to_string(channelId);
+std::string ClassifyPostName = detectclsNameInfo_.kClassifyPostName + std::to_string(channelId);
classifyPostThreadParam.threadInstName.assign(ClassifyPostName.c_str());
classifyPostThreadParam.context = context;
classifyPostThreadParam.runMode = runMode;
@@ -114,7 +114,7 @@ void carColorInferenceInstance::getInstance(std::vector<AclLiteThreadParam> &thr
AclLiteThreadParam cocomqttThreadParam;
cocomqttThreadParam.threadInst = new MqttSendThread("0.0.0.0",1883);
-std::string mqttName = nameInfo_.kMqttName + std::to_string(channelId);
+std::string mqttName = detectclsNameInfo_.kMqttName + std::to_string(channelId);
cocomqttThreadParam.threadInstName.assign(mqttName.c_str());
cocomqttThreadParam.context = context;
cocomqttThreadParam.runMode = runMode;
@@ -123,7 +123,7 @@ void carColorInferenceInstance::getInstance(std::vector<AclLiteThreadParam> &thr
if (readOutputTypeConfig(channelId)) {
AclLiteThreadParam rtspDisplayThreadParam;
rtspDisplayThreadParam.threadInst = new PushRtspThread(kRtspUrl_ + std::to_string(channelId));
-std::string RtspDisplayName = nameInfo_.kRtspUrl + std::to_string(channelId);
+std::string RtspDisplayName = detectclsNameInfo_.kRtspUrl + std::to_string(channelId);
rtspDisplayThreadParam.threadInstName.assign(RtspDisplayName.c_str());
rtspDisplayThreadParam.context = context;
rtspDisplayThreadParam.runMode = runMode;
@@ -132,8 +132,8 @@ void carColorInferenceInstance::getInstance(std::vector<AclLiteThreadParam> &thr
}
AclLiteThreadParam InferParam;
-InferParam.threadInst = new InferenceThread(runMode,carModelInfo_,0);
+InferParam.threadInst = new InferenceThread(runMode,modelInfo_,0);
-std::string InferName = nameInfo_.kInferName + std::to_string(deviceId);
+std::string InferName = detectclsNameInfo_.kInferName + std::to_string(deviceId);
InferParam.threadInstName.assign(InferName.c_str());
InferParam.context = context;
InferParam.runMode = runMode;
@@ -151,15 +151,15 @@ helmetInferenceInstance::helmetInferenceInstance() {
void helmetInferenceInstance::Init(const char *kConfigFile) {
kConfigFile_ = kConfigFile;
-helmetinfo_.classnum = 3;
+modelInfo_.classnum = 3;
-helmetinfo_.conf = 0.5;
+modelInfo_.conf = 0.5;
-helmetinfo_.Label = { "person", "nohat", "hat"};
+modelInfo_.Label = { "person", "nohat", "hat"};
detectNameInfo_.kInferName = "helmetinference";
detectNameInfo_.kDetectPreName = "helmetdetectPre";
detectNameInfo_.kDetectPostName = "helmetdetectPost";
detectNameInfo_.kRtspUrl = "helmetrtspDisplay";
detectNameInfo_.kMqttName = "helmetmqtt";
-helmetinfo_.activateLabel = {1};
+modelInfo_.activateLabel = {1};
}
@@ -188,7 +188,7 @@ void helmetInferenceInstance::getInstance(std::vector<AclLiteThreadParam> &threa
threadTbl.push_back(detectPreThreadParam);
AclLiteThreadParam detectPostThreadParam;
-detectPostThreadParam.threadInst = new onlyDetectPostprocessThread(kConfigFile_, channelId, helmetinfo_);
+detectPostThreadParam.threadInst = new onlyDetectPostprocessThread(kConfigFile_, channelId, modelInfo_);
std::string DetectPostName = detectNameInfo_.kDetectPostName + std::to_string(channelId);
detectPostThreadParam.threadInstName.assign(DetectPostName.c_str());
detectPostThreadParam.context = context;
@@ -232,12 +232,12 @@ fireSmokeInferenceInstance::fireSmokeInferenceInstance() {
void fireSmokeInferenceInstance::Init(const char *kConfigFile) {
kConfigFile_ = kConfigFile;
-firesmokeinfo_.modelInputWidth = 640;
+modelInfo_.modelInputWidth = 640;
-firesmokeinfo_.modelInputHeight = 448;
+modelInfo_.modelInputHeight = 448;
-firesmokeinfo_.classnum = 2;
+modelInfo_.classnum = 2;
-firesmokeinfo_.conf = 0.4;
+modelInfo_.conf = 0.4;
-firesmokeinfo_.totalBox = 17640;
+modelInfo_.totalBox = 17640;
-firesmokeinfo_.Label = { "frie", "smoke"};
+modelInfo_.Label = { "frie", "smoke"};
detectNameInfo_.kInferName = "firesmokeinference";
detectNameInfo_.kDetectPreName = "firesmokedetectPre";
detectNameInfo_.kDetectPostName = "firesmokedetectPost";
@@ -272,7 +272,7 @@ void fireSmokeInferenceInstance::getInstance(std::vector<AclLiteThreadParam> &th
threadTbl.push_back(detectPreThreadParam);
AclLiteThreadParam detectPostThreadParam;
-detectPostThreadParam.threadInst = new onlyDetectPostprocessThread(kConfigFile_, channelId, firesmokeinfo_);
+detectPostThreadParam.threadInst = new onlyDetectPostprocessThread(kConfigFile_, channelId, modelInfo_);
std::string DetectPostName = detectNameInfo_.kDetectPostName + std::to_string(channelId);
detectPostThreadParam.threadInstName.assign(DetectPostName.c_str());
detectPostThreadParam.context = context;
@@ -317,22 +317,22 @@ smokerSleepInferenceInstance::smokerSleepInferenceInstance() {
void smokerSleepInferenceInstance::Init(const char *kConfigFile) {
kConfigFile_ = kConfigFile;
-smokesleepinfo_.modelInputWidth = 640;
+modelInfo_.modelInputWidth = 640;
-smokesleepinfo_.modelInputHeight = 448;
+modelInfo_.modelInputHeight = 448;
-smokesleepinfo_.classnum = 80;
+modelInfo_.classnum = 80;
-smokesleepinfo_.conf = 0.4;
+modelInfo_.conf = 0.4;
-smokesleepinfo_.totalBox = 17640;
+modelInfo_.totalBox = 17640;
-smokesleepinfo_.clsifyInputWidth = 256;
+modelInfo_.clsifyInputWidth = 256;
-smokesleepinfo_.clsifyInputHeight = 256;
+modelInfo_.clsifyInputHeight = 256;
-nameInfo_.kInferName = "smokerinference";
+detectclsNameInfo_.kInferName = "smokerinference";
-// smokesleepinfo_.Label = { "frie", "smoke"};
+// modelInfo_.Label = { "frie", "smoke"};
-nameInfo_.kDetectPreName = "smokerdetectPre";
+detectclsNameInfo_.kDetectPreName = "smokerdetectPre";
-nameInfo_.kDetectPostName = "smokerdetectPost";
+detectclsNameInfo_.kDetectPostName = "smokerdetectPost";
-nameInfo_.kClassifyPreName = "smokerclassifyPre";
+detectclsNameInfo_.kClassifyPreName = "smokerclassifyPre";
-nameInfo_.kClassifyPostName = "smokerclassifyPost";
+detectclsNameInfo_.kClassifyPostName = "smokerclassifyPost";
-nameInfo_.kRtspUrl = "smokerrtspDisplay";
+detectclsNameInfo_.kRtspUrl = "smokerrtspDisplay";
-nameInfo_.kMqttName = "smokersleepmqtt";
+detectclsNameInfo_.kMqttName = "smokersleepmqtt";
-smokesleepinfo_.clsLabel = { "calling","normal","sleep","smoking"};
+modelInfo_.clsLabel = { "calling","normal","sleep","smoking"};
}
smokerSleepInferenceInstance::~smokerSleepInferenceInstance() {
@@ -349,36 +349,36 @@ void smokerSleepInferenceInstance::getInstance(std::vector<AclLiteThreadParam> &
int channelId = deviceId*rtspNumPerDevice+index;
if (kDisplay) {
detectPreThreadParam.threadInst =
-new DetectPreprocessThread(kConfigFile_, deviceId, channelId, runMode, nameInfo_, true);
+new DetectPreprocessThread(kConfigFile_, deviceId, channelId, runMode, detectclsNameInfo_, true);
} else {
detectPreThreadParam.threadInst =
-new DetectPreprocessThread(kConfigFile_, deviceId, channelId, runMode, nameInfo_);
+new DetectPreprocessThread(kConfigFile_, deviceId, channelId, runMode, detectclsNameInfo_);
}
-std::string DetectPreName = nameInfo_.kDetectPreName + std::to_string(channelId);
+std::string DetectPreName = detectclsNameInfo_.kDetectPreName + std::to_string(channelId);
detectPreThreadParam.threadInstName.assign(DetectPreName.c_str());
detectPreThreadParam.context = context;
detectPreThreadParam.runMode = runMode;
threadTbl.push_back(detectPreThreadParam);
AclLiteThreadParam detectPostThreadParam;
-detectPostThreadParam.threadInst = new DetectPostprocessThread(kConfigFile_,channelId,smokesleepinfo_);
+detectPostThreadParam.threadInst = new DetectPostprocessThread(kConfigFile_,channelId,modelInfo_);
-std::string DetectPostName = nameInfo_.kDetectPostName + std::to_string(channelId);
+std::string DetectPostName = detectclsNameInfo_.kDetectPostName + std::to_string(channelId);
detectPostThreadParam.threadInstName.assign(DetectPostName.c_str());
detectPostThreadParam.context = context;
detectPostThreadParam.runMode = runMode;
threadTbl.push_back(detectPostThreadParam);
AclLiteThreadParam classifyPreThreadParam;
-classifyPreThreadParam.threadInst = new ClassifyPreprocessThread(runMode);
+classifyPreThreadParam.threadInst = new ClassifyPreprocessThread(runMode,modelInfo_);
-std::string ClassifyPreName = nameInfo_.kClassifyPreName + std::to_string(channelId);
+std::string ClassifyPreName = detectclsNameInfo_.kClassifyPreName + std::to_string(channelId);
classifyPreThreadParam.threadInstName.assign(ClassifyPreName.c_str());
classifyPreThreadParam.context = context;
classifyPreThreadParam.runMode = runMode;
threadTbl.push_back(classifyPreThreadParam);
AclLiteThreadParam classifyPostThreadParam;
-classifyPostThreadParam.threadInst = new ClassifyPostprocessThread(kConfigFile_, channelId,smokesleepinfo_);
+classifyPostThreadParam.threadInst = new ClassifyPostprocessThread(kConfigFile_, channelId,modelInfo_);
-std::string ClassifyPostName = nameInfo_.kClassifyPostName + std::to_string(channelId);
+std::string ClassifyPostName = detectclsNameInfo_.kClassifyPostName + std::to_string(channelId);
classifyPostThreadParam.threadInstName.assign(ClassifyPostName.c_str());
classifyPostThreadParam.context = context;
classifyPostThreadParam.runMode = runMode;
@@ -386,7 +386,7 @@ void smokerSleepInferenceInstance::getInstance(std::vector<AclLiteThreadParam> &
AclLiteThreadParam cocomqttThreadParam;
cocomqttThreadParam.threadInst = new MqttSendThread("0.0.0.0",1883);
-std::string mqttName = nameInfo_.kMqttName + std::to_string(channelId);
+std::string mqttName = detectclsNameInfo_.kMqttName + std::to_string(channelId);
cocomqttThreadParam.threadInstName.assign(mqttName.c_str());
cocomqttThreadParam.context = context;
cocomqttThreadParam.runMode = runMode;
@@ -395,7 +395,7 @@ void smokerSleepInferenceInstance::getInstance(std::vector<AclLiteThreadParam> &
if (readOutputTypeConfig(channelId)) {
AclLiteThreadParam rtspDisplayThreadParam;
rtspDisplayThreadParam.threadInst = new PushRtspThread(kRtspUrl_ + std::to_string(channelId));
-std::string RtspDisplayName = nameInfo_.kRtspUrl + std::to_string(channelId);
+std::string RtspDisplayName = detectclsNameInfo_.kRtspUrl + std::to_string(channelId);
rtspDisplayThreadParam.threadInstName.assign(RtspDisplayName.c_str());
rtspDisplayThreadParam.context = context;
rtspDisplayThreadParam.runMode = runMode;
@@ -404,8 +404,8 @@ void smokerSleepInferenceInstance::getInstance(std::vector<AclLiteThreadParam> &
}
AclLiteThreadParam InferParam;
-InferParam.threadInst = new InferenceThread(runMode,smokesleepinfo_,3);
+InferParam.threadInst = new InferenceThread(runMode,modelInfo_,3);
-std::string InferName = nameInfo_.kInferName + std::to_string(deviceId);
+std::string InferName = detectclsNameInfo_.kInferName + std::to_string(deviceId);
InferParam.threadInstName.assign(InferName.c_str());
InferParam.context = context;
InferParam.runMode = runMode;
@@ -419,12 +419,12 @@ yangChenInferenceInstance::yangChenInferenceInstance() {
void yangChenInferenceInstance::Init(const char *kConfigFile) {
kConfigFile_ = kConfigFile;
-yangcheninfo_.modelInputWidth = 640;
+modelInfo_.modelInputWidth = 640;
-yangcheninfo_.modelInputHeight = 448;
+modelInfo_.modelInputHeight = 448;
-yangcheninfo_.classnum = 1;
+modelInfo_.classnum = 1;
-yangcheninfo_.conf = 0.4;
+modelInfo_.conf = 0.4;
-yangcheninfo_.totalBox = 17640;
+modelInfo_.totalBox = 17640;
-yangcheninfo_.Label = {"yangchen"};
+modelInfo_.Label = {"yangchen"};
detectNameInfo_.kInferName = "yangCheninference";
detectNameInfo_.kDetectPreName = "yangChendetectPre";
detectNameInfo_.kDetectPostName = "yangChendetectPost";
@@ -458,7 +458,7 @@ void yangChenInferenceInstance::getInstance(std::vector<AclLiteThreadParam> &thr
threadTbl.push_back(detectPreThreadParam);
AclLiteThreadParam detectPostThreadParam;
-detectPostThreadParam.threadInst = new onlyDetectPostprocessThread(kConfigFile_, channelId, yangcheninfo_);
+detectPostThreadParam.threadInst = new onlyDetectPostprocessThread(kConfigFile_, channelId, modelInfo_);
std::string DetectPostName = detectNameInfo_.kDetectPostName + std::to_string(channelId);
detectPostThreadParam.threadInstName.assign(DetectPostName.c_str());
detectPostThreadParam.context = context;
@@ -502,17 +502,17 @@ cocoInferenceInstance::cocoInferenceInstance() {
void cocoInferenceInstance::Init(const char *kConfigFile) {
kConfigFile_ = kConfigFile;
-cocopinfo_.modelInputWidth = 640;
+modelInfo_.modelInputWidth = 640;
-cocopinfo_.modelInputHeight = 448;
+modelInfo_.modelInputHeight = 448;
-cocopinfo_.classnum = 80;
+modelInfo_.classnum = 80;
-cocopinfo_.conf = 0.35;
+modelInfo_.conf = 0.35;
-cocopinfo_.totalBox = 17640;
+modelInfo_.totalBox = 17640;
detectNameInfo_.kInferName = "cocopinference";
detectNameInfo_.kDetectPreName = "cocopdetectPre";
detectNameInfo_.kDetectPostName = "cocopdetectPost";
detectNameInfo_.kRtspUrl = "cocopDisplay";
detectNameInfo_.kMqttName = "cocopmqtt";
-cocopinfo_.activateLabel = {0};
+modelInfo_.activateLabel = {0};
}
@@ -540,7 +540,7 @@ void cocoInferenceInstance::getInstance(std::vector<AclLiteThreadParam> &threadT
threadTbl.push_back(detectPreThreadParam);
AclLiteThreadParam detectPostThreadParam;
-detectPostThreadParam.threadInst = new onlyDetectPostprocessThread(kConfigFile_, channelId, cocopinfo_);
+detectPostThreadParam.threadInst = new onlyDetectPostprocessThread(kConfigFile_, channelId, modelInfo_);
std::string DetectPostName = detectNameInfo_.kDetectPostName + std::to_string(channelId);
detectPostThreadParam.threadInstName.assign(DetectPostName.c_str());
detectPostThreadParam.context = context;
@@ -577,4 +577,107 @@ void cocoInferenceInstance::getInstance(std::vector<AclLiteThreadParam> &threadT
}
//----------------------------------------------------------------------------------------------------------------------------------
centerfaceMaskInferenceInstance::centerfaceMaskInferenceInstance() {
}
void centerfaceMaskInferenceInstance::Init(const char *kConfigFile) {
kConfigFile_ = kConfigFile;
modelInfo_.modelInputWidth = 640;
modelInfo_.modelInputHeight = 448;
modelInfo_.classnum = 1;
modelInfo_.conf = 0.4;
modelInfo_.totalBox = 17640;
modelInfo_.clsifyInputWidth = 224;
modelInfo_.clsifyInputHeight = 224;
detectclsNameInfo_.kInferName = "centerfaceinference";
modelInfo_.Label = { "face"};
detectclsNameInfo_.kDetectPreName = "centerfacedetectPre";
detectclsNameInfo_.kDetectPostName = "centerfacedetectPost";
detectclsNameInfo_.kClassifyPreName = "centerfaceclassifyPre";
detectclsNameInfo_.kClassifyPostName = "centerfaceclassifyPost";
detectclsNameInfo_.kRtspUrl = "centerfacertspDisplay";
detectclsNameInfo_.kMqttName = "centerfacesleepmqtt";
modelInfo_.clsLabel = { "nomask","mask",};
}
centerfaceMaskInferenceInstance::~centerfaceMaskInferenceInstance() {
}
void centerfaceMaskInferenceInstance::getInstance(std::vector<AclLiteThreadParam> &threadTbl, int32_t deviceId,
aclrtContext &context, aclrtRunMode &runMode, int rtspNumPerDevice,
bool kDisplay,int addrtspNumPerDevice)
{
AclLiteThreadParam detectPreThreadParam;
for (int index = 0; index < rtspNumPerDevice + addrtspNumPerDevice; index++) {
int channelId = deviceId*rtspNumPerDevice+index;
if (kDisplay) {
detectPreThreadParam.threadInst =
new DetectPreprocessThread(kConfigFile_, deviceId, channelId, runMode, detectclsNameInfo_, true);
} else {
detectPreThreadParam.threadInst =
new DetectPreprocessThread(kConfigFile_, deviceId, channelId, runMode, detectclsNameInfo_);
}
std::string DetectPreName = detectclsNameInfo_.kDetectPreName + std::to_string(channelId);
detectPreThreadParam.threadInstName.assign(DetectPreName.c_str());
detectPreThreadParam.context = context;
detectPreThreadParam.runMode = runMode;
threadTbl.push_back(detectPreThreadParam);
AclLiteThreadParam detectPostThreadParam;
detectPostThreadParam.threadInst = new centerfacePostprocessThread(kConfigFile_,channelId,modelInfo_);
std::string DetectPostName = detectclsNameInfo_.kDetectPostName + std::to_string(channelId);
detectPostThreadParam.threadInstName.assign(DetectPostName.c_str());
detectPostThreadParam.context = context;
detectPostThreadParam.runMode = runMode;
threadTbl.push_back(detectPostThreadParam);
AclLiteThreadParam classifyPreThreadParam;
classifyPreThreadParam.threadInst = new ClassifyPreprocessThread(runMode,modelInfo_);
std::string ClassifyPreName = detectclsNameInfo_.kClassifyPreName + std::to_string(channelId);
classifyPreThreadParam.threadInstName.assign(ClassifyPreName.c_str());
classifyPreThreadParam.context = context;
classifyPreThreadParam.runMode = runMode;
threadTbl.push_back(classifyPreThreadParam);
AclLiteThreadParam classifyPostThreadParam;
classifyPostThreadParam.threadInst = new ClassifyPostprocessThread(kConfigFile_, channelId,modelInfo_);
std::string ClassifyPostName = detectclsNameInfo_.kClassifyPostName + std::to_string(channelId);
classifyPostThreadParam.threadInstName.assign(ClassifyPostName.c_str());
classifyPostThreadParam.context = context;
classifyPostThreadParam.runMode = runMode;
threadTbl.push_back(classifyPostThreadParam);
AclLiteThreadParam cocomqttThreadParam;
cocomqttThreadParam.threadInst = new MqttSendThread("0.0.0.0",1883);
std::string mqttName = detectclsNameInfo_.kMqttName + std::to_string(channelId);
cocomqttThreadParam.threadInstName.assign(mqttName.c_str());
cocomqttThreadParam.context = context;
cocomqttThreadParam.runMode = runMode;
threadTbl.push_back(cocomqttThreadParam);
if (readOutputTypeConfig(channelId)) {
AclLiteThreadParam rtspDisplayThreadParam;
rtspDisplayThreadParam.threadInst = new PushRtspThread(kRtspUrl_ + std::to_string(channelId));
std::string RtspDisplayName = detectclsNameInfo_.kRtspUrl + std::to_string(channelId);
rtspDisplayThreadParam.threadInstName.assign(RtspDisplayName.c_str());
rtspDisplayThreadParam.context = context;
rtspDisplayThreadParam.runMode = runMode;
threadTbl.push_back(rtspDisplayThreadParam);
}
}
AclLiteThreadParam InferParam;
InferParam.threadInst = new InferenceThread(runMode,modelInfo_,6);
std::string InferName = detectclsNameInfo_.kInferName + std::to_string(deviceId);
InferParam.threadInstName.assign(InferName.c_str());
InferParam.context = context;
InferParam.runMode = runMode;
threadTbl.push_back(InferParam);
}
//----------------------------------------------------------------------------------------------------------------------------------
\ No newline at end of file
@@ -300,6 +300,10 @@ AclLiteError InferenceThread::ModelSwith(int type) {
kDetectModelPath_ = "../model/coco/yolo5s_coco_640x448_yuvaipp448_rgb.om";
kClassifyModelPath_ = "../model/color_dynamic_batch.om";
break;
+case INSTANCENAME::MASK_INFERENCE_INSTANCE:
+kDetectModelPath_ = "../model/facemask/centerface_640x448_aipp.om";
+kClassifyModelPath_ = "../model/facemask/sbd_mask_dynamic.om";
+break;
default : return ACLLITE_ERROR;
}
return ACLLITE_OK;
......
@@ -258,7 +258,8 @@ int main(int argc, char** argv)
" 火焰烟雾检测 id = 2"
" 吸烟睡岗检测 id = 3"
" coco 人员检测 id = 4"
-" 扬尘检测 id = 5", true,1);
+" 扬尘检测 id = 5"
+" 口罩检测 id = 6", true,1);
// const int CAR_COLOR_INFERENCE_INSTANCE = 0;
// const int HELMET_INFERENCE_INSTANCE = 1;
......