• 【OpenCV 4.5.0】Pedestrian Detection with HOG+SVM vs. HOG+Cascade


    Starting with OpenCV 3.0, the HOG cascade classifier is no longer supported. Someone on GitHub extracted a standalone hogcascade class from OpenCV 2.x; once added to an OpenCV 4.x project it can be used as before.
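    Before the full listing, a minimal sketch of how the imported class is meant to be used (assuming hogcascade.hpp and hogcascade.cpp have been added to the project, and that the test image 1.jpg and the trained cascade hogcascade_pedestrians.xml from the programs below are in the working directory):

    #include <opencv2/opencv.hpp>
    #include "hogcascade.hpp"

    int main()
    {
        cv::Mat img = cv::imread("1.jpg");
        if (img.empty())
            return -1;
        cv::HOGCascadeClassifier hogclassifier;                 // class provided by hogcascade.hpp/.cpp
        if (!hogclassifier.load("hogcascade_pedestrians.xml"))  // trained HOG cascade in XML form
            return -1;
        std::vector<cv::Rect> found;
        hogclassifier.detectMultiScale(img, found);             // defaults: scaleFactor = 1.1, minNeighbors = 3
        for (size_t i = 0; i < found.size(); i++)
            cv::rectangle(img, found[i], cv::Scalar(0, 255, 0), 2);
        cv::imshow("pedestrians", img);
        cv::waitKey();
        return 0;
    }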

     

    Pedestrian detection result

    Detection result of the HOG cascade classifier trained on wooden blocks with holes

    Main program source code:

    #include <iostream>
    #include <string>
    #include <ctime>
    #include <opencv2/core/core.hpp>
    #include <opencv2/highgui/highgui.hpp>
    #include <opencv2/imgproc/imgproc.hpp>
    #include <opencv2/objdetect/objdetect.hpp>
    #include <opencv2/ml/ml.hpp>
    #include "hogcascade.hpp"

    using namespace std;
    using namespace cv;

    int main()
    {
        cout << "Red: HOG+SVM ------ Green: HOG+cascade" << endl;
        Mat src = imread("1.jpg", 1);
        vector<Rect> found1, found_filtered1, found2, found_filtered2;  // detection rectangles
        clock_t start1, start2;
        double end1, end2;

        // Method 1: HOG + SVM
        start1 = clock();
        HOGDescriptor hog;                                              // HOG feature detector
        hog.setSVMDetector(HOGDescriptor::getDefaultPeopleDetector());  // use the built-in default people detector (SVM coefficients)
        hog.detectMultiScale(src, found1, 0, Size(2, 2), Size(0, 0), 1.05, 2);  // multi-scale detection, window stride (2, 2), scale step 1.05
        end1 = 1000.0 * (clock() - start1) / CLOCKS_PER_SEC;

        // Method 2: HOG + cascade
        start2 = clock();
        cv::HOGCascadeClassifier hogclassifier;
        hogclassifier.load("hogcascade_pedestrians.xml");               // pedestrian HOG cascade
        //hogclassifier.load("blockhogcascade.xml");                    // HOG cascade for wooden blocks with holes
        //hogclassifier.detectMultiScale(src, found2, 1.05, 5, 0, Size(400, 400), Size(800, 800));  // wooden blocks
        hogclassifier.detectMultiScale(src, found2);
        end2 = 1000.0 * (clock() - start2) / CLOCKS_PER_SEC;
        cout << "Hog+svm: " << end1 << "ms" << "  Hog+cascade: " << end2 << "ms" << endl;

        // Keep only rectangles that are not nested inside another one; if a rectangle
        // is contained in a larger one, keep only the outer (larger) rectangle.
        for (size_t i = 0; i < found1.size(); i++)
        {
            Rect r = found1[i];
            size_t j = 0;
            for (; j < found1.size(); j++)
                if (j != i && (r & found1[j]) == r)
                    break;
            if (j == found1.size())
                found_filtered1.push_back(r);
        }
        for (size_t i = 0; i < found2.size(); i++)
        {
            Rect r = found2[i];
            size_t j = 0;
            for (; j < found2.size(); j++)
                if (j != i && (r & found2[j]) == r)
                    break;
            if (j == found2.size())
                found_filtered2.push_back(r);
        }

        // Draw the rectangles. The boxes returned by the HOG detector are somewhat
        // larger than the actual person, so shrink them a little before drawing.
        for (size_t i = 0; i < found_filtered1.size(); i++)
        {
            Rect r = found_filtered1[i];
            r.x += cvRound(r.width * 0.1);
            r.width = cvRound(r.width * 0.8);
            r.y += cvRound(r.height * 0.07);
            r.height = cvRound(r.height * 0.8);
            rectangle(src, r.tl(), r.br(), Scalar(0, 0, 255), 3);  // red: HOG+SVM
        }
        for (size_t i = 0; i < found_filtered2.size(); i++)
        {
            Rect r = found_filtered2[i];
            r.x += cvRound(r.width * 0.1);
            r.width = cvRound(r.width * 0.8);
            r.y += cvRound(r.height * 0.07);
            r.height = cvRound(r.height * 0.8);
            rectangle(src, r.tl(), r.br(), Scalar(0, 255, 0), 3);  // green: HOG+cascade
        }

        //resize(src, src, Size(src.size() / 3));  // downscale for the wooden-block test images
        imshow("src", src);
        waitKey();
        return 0;
    }
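    The two nested-rectangle filtering loops above can also be replaced with OpenCV's cv::groupRectangles, which clusters similar boxes instead of testing strict containment, so the output differs slightly; a minimal sketch of that alternative:

    // Alternative post-processing: cluster the raw detections in place.
    // Rectangles whose positions/sizes differ by less than eps (relative) are merged,
    // and clusters with groupThreshold or fewer members are discarded entirely.
    std::vector<cv::Rect> boxes = found1;              // raw HOG+SVM detections
    cv::groupRectangles(boxes, 1 /*groupThreshold*/, 0.2 /*eps*/);
    // boxes now holds the merged rectangles; draw them as before.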

    hogcascade.hpp

    1. /*M///
    2. //
    3. // IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
    4. //
    5. // By downloading, copying, installing or using the software you agree to this license.
    6. // If you do not agree to this license, do not download, install,
    7. // copy or use the software.
    8. //
    9. //
    10. // Intel License Agreement
    11. // For Open Source Computer Vision Library
    12. //
    13. // Copyright (C) 2000, Intel Corporation, all rights reserved.
    14. // Third party copyrights are property of their respective owners.
    15. //
    16. // Redistribution and use in source and binary forms, with or without modification,
    17. // are permitted provided that the following conditions are met:
    18. //
    19. // * Redistribution's of source code must retain the above copyright notice,
    20. // this list of conditions and the following disclaimer.
    21. //
    22. // * Redistribution's in binary form must reproduce the above copyright notice,
    23. // this list of conditions and the following disclaimer in the documentation
    24. // and/or other materials provided with the distribution.
    25. //
    26. // * The name of Intel Corporation may not be used to endorse or promote products
    27. // derived from this software without specific prior written permission.
    28. //
    29. // This software is provided by the copyright holders and contributors "as is" and
    30. // any express or implied warranties, including, but not limited to, the implied
    31. // warranties of merchantability and fitness for a particular purpose are disclaimed.
    32. // In no event shall the Intel Corporation or contributors be liable for any direct,
    33. // indirect, incidental, special, exemplary, or consequential damages
    34. // (including, but not limited to, procurement of substitute goods or services;
    35. // loss of use, data, or profits; or business interruption) however caused
    36. // and on any theory of liability, whether in contract, strict liability,
    37. // or tort (including negligence or otherwise) arising in any way out of
    38. // the use of this software, even if advised of the possibility of such damage.
    39. //
    40. //M*/
    41. #pragma once
    42. #include <opencv2/opencv.hpp>
    43. #include <string>
    44. namespace cv
    45. {
    46. using std::vector;
    47. #define CCC_CASCADE_PARAMS "cascadeParams"
    48. #define CCC_STAGE_TYPE "stageType"
    49. #define CCC_FEATURE_TYPE "featureType"
    50. #define CCC_HEIGHT "height"
    51. #define CCC_WIDTH "width"
    52. #define CCC_STAGE_NUM "stageNum"
    53. #define CCC_STAGES "stages"
    54. #define CCC_STAGE_PARAMS "stageParams"
    55. #define CCC_BOOST "BOOST"
    56. #define CCC_MAX_DEPTH "maxDepth"
    57. #define CCC_WEAK_COUNT "maxWeakCount"
    58. #define CCC_STAGE_THRESHOLD "stageThreshold"
    59. #define CCC_WEAK_CLASSIFIERS "weakClassifiers"
    60. #define CCC_INTERNAL_NODES "internalNodes"
    61. #define CCC_LEAF_VALUES "leafValues"
    62. #define CCC_FEATURES "features"
    63. #define CCC_FEATURE_PARAMS "featureParams"
    64. #define CCC_MAX_CAT_COUNT "maxCatCount"
    65. #define CCV_SUM_PTRS( p0, p1, p2, p3, sum, rect, step ) \
    66. /* (x, y) */ \
    67. (p0) = sum + (rect).x + (step) * (rect).y, \
    68. /* (x + w, y) */ \
    69. (p1) = sum + (rect).x + (rect).width + (step) * (rect).y, \
    70. /* (x + w, y) */ \
    71. (p2) = sum + (rect).x + (step) * ((rect).y + (rect).height), \
    72. /* (x + w, y + h) */ \
    73. (p3) = sum + (rect).x + (rect).width + (step) * ((rect).y + (rect).height)
    74. //#define CCV_TILTED_PTRS( p0, p1, p2, p3, tilted, rect, step ) \
    75. // /* (x, y) */ \
    76. // (p0) = tilted + (rect).x + (step) * (rect).y, \
    77. // /* (x - h, y + h) */ \
    78. // (p1) = tilted + (rect).x - (rect).height + (step) * ((rect).y + (rect).height), \
    79. // /* (x + w, y + w) */ \
    80. // (p2) = tilted + (rect).x + (rect).width + (step) * ((rect).y + (rect).width), \
    81. // /* (x + w - h, y + w + h) */ \
    82. // (p3) = tilted + (rect).x + (rect).width - (rect).height \
    83. // + (step) * ((rect).y + (rect).width + (rect).height)
    84. #define CCALC_SUM_(p0, p1, p2, p3, offset) \
    85. ((p0)[offset] - (p1)[offset] - (p2)[offset] + (p3)[offset])
    86. #define CCALC_SUM(rect,offset) CCALC_SUM_((rect)[0], (rect)[1], (rect)[2], (rect)[3], offset)
    87. //#define CCC_HAAR "HAAR"
    88. //#define CCC_RECTS "rects"
    89. //#define CCC_TILTED "tilted"
    90. //
    91. //#define CCC_LBP "LBP"
    92. #define CCC_RECT "rect"
    93. #define CCC_HOG "HOG"
    94. class HOGEvaluator
    95. {
    96. public:
    97. static const int HOG = 2;
    98. struct Feature
    99. {
    100. Feature();
    101. float calc( int offset ) const;
    102. void updatePtrs( const vector<Mat>& _hist, const Mat &_normSum );
    103. bool read( const FileNode& node );
    104. enum { CELL_NUM = 4, BIN_NUM = 9 };
    105. Rect rect[CELL_NUM];
    106. int featComponent; //component index from 0 to 35
    107. const float* pF[4]; //for feature calculation
    108. const float* pN[4]; //for normalization calculation
    109. };
    110. HOGEvaluator();
    111. virtual ~HOGEvaluator();
    112. virtual bool read( const FileNode& node );
    113. virtual Ptr<HOGEvaluator> clone() const;
    114. virtual int getFeatureType() const { return HOGEvaluator::HOG; }
    115. virtual bool setImage( const Mat& image, Size winSize );
    116. virtual bool setWindow( Point pt );
    117. double operator()(int featureIdx) const
    118. {
    119. return featuresPtr[featureIdx].calc(offset);
    120. }
    121. virtual double calcOrd( int featureIdx ) const
    122. {
    123. return (*this)(featureIdx);
    124. }
    125. private:
    126. virtual void integralHistogram( const Mat& srcImage, vector<Mat> &histogram, Mat &norm, int nbins ) const;
    127. Size origWinSize;
    128. Ptr<vector<Feature> > features;
    129. Feature* featuresPtr;
    130. vector<Mat> hist;
    131. Mat normSum;
    132. int offset;
    133. };
    134. class CV_EXPORTS_W HOGCascadeClassifier
    135. {
    136. public:
    137. CV_WRAP HOGCascadeClassifier();
    138. CV_WRAP HOGCascadeClassifier(const std::string& filename);
    139. virtual ~HOGCascadeClassifier();
    140. CV_WRAP virtual bool empty() const;
    141. CV_WRAP bool load(const std::string& filename);
    142. virtual bool read( const FileNode& node );
    143. CV_WRAP void detectMultiScale(const Mat& image,
    144. CV_OUT std::vector<Rect>& objects,
    145. double scaleFactor = 1.1,
    146. int minNeighbors = 3, int flags = 0,
    147. Size minSize = Size(),
    148. Size maxSize = Size());
    149. CV_WRAP void detectMultiScale( const Mat& image, vector<Rect>& objects,
    150. vector<int>& rejectLevels,
    151. vector<double>& levelWeights,
    152. double scaleFactor=1.1,
    153. int minNeighbors=3, int flags=0,
    154. Size minSize=Size(),
    155. Size maxSize=Size(),
    156. bool outputRejectLevels=false );
    157. class CV_EXPORTS MaskGenerator
    158. {
    159. public:
    160. virtual ~MaskGenerator() {}
    161. virtual cv::Mat generateMask(const cv::Mat& src)=0;
    162. virtual void initializeMask(const cv::Mat& /*src*/) {};
    163. };
    164. void setMaskGenerator(Ptr<MaskGenerator> maskGenerator);
    165. Ptr<MaskGenerator> getMaskGenerator();
    166. protected:
    167. Ptr<MaskGenerator> maskGenerator;
    168. virtual int runAt( Ptr<HOGEvaluator>& feval, Point pt, double& weight );
    169. virtual bool detectSingleScale( const Mat& image, int stripCount, Size processingRectSize,
    170. int stripSize, int yStep, double factor, vector<Rect>& candidates,
    171. vector<int>& rejectLevels, vector<double>& levelWeights, bool outputRejectLevels=false);
    172. friend class HOGCascadeClassifierInvoker;
    173. friend int HOGpredictOrdered( HOGCascadeClassifier& cascade, Ptr<HOGEvaluator> &featureEvaluator, double& weight);
    174. friend int HOGpredictOrderedStump( HOGCascadeClassifier& cascade, Ptr<HOGEvaluator> &featureEvaluator, double& weight);
    175. class Data
    176. {
    177. public:
    178. struct CV_EXPORTS DTreeNode
    179. {
    180. int featureIdx;
    181. float threshold; // for ordered features only
    182. int left;
    183. int right;
    184. };
    185. struct CV_EXPORTS DTree
    186. {
    187. int nodeCount;
    188. };
    189. struct CV_EXPORTS Stage
    190. {
    191. int first;
    192. int ntrees;
    193. float threshold;
    194. };
    195. bool read(const FileNode &node);
    196. bool isStumpBased;
    197. int stageType;
    198. int featureType;
    199. int ncategories;
    200. Size origWinSize;
    201. vector<Stage> stages;
    202. vector<DTree> classifiers;
    203. vector<DTreeNode> nodes;
    204. vector<float> leaves;
    205. vector<int> subsets;
    206. };
    207. Data data;
    208. Ptr<HOGEvaluator> featureEvaluator;
    209. };
    210. inline HOGEvaluator::Feature :: Feature()
    211. {
    212. rect[0] = rect[1] = rect[2] = rect[3] = Rect();
    213. pF[0] = pF[1] = pF[2] = pF[3] = 0;
    214. pN[0] = pN[1] = pN[2] = pN[3] = 0;
    215. featComponent = 0;
    216. }
    217. inline float HOGEvaluator::Feature :: calc( int _offset ) const
    218. {
    219. float res = CCALC_SUM(pF, _offset);
    220. float normFactor = CCALC_SUM(pN, _offset);
    221. res = (res > 0.001f) ? (res / ( normFactor + 0.001f) ) : 0.f;
    222. return res;
    223. }
    224. inline void HOGEvaluator::Feature :: updatePtrs( const vector<Mat> &_hist, const Mat &_normSum )
    225. {
    226. int binIdx = featComponent % BIN_NUM;
    227. int cellIdx = featComponent / BIN_NUM;
    228. Rect normRect = Rect( rect[0].x, rect[0].y, 2*rect[0].width, 2*rect[0].height );
    229. const float* featBuf = (const float*)_hist[binIdx].data;
    230. size_t featStep = _hist[0].step / sizeof(featBuf[0]);
    231. const float* normBuf = (const float*)_normSum.data;
    232. size_t normStep = _normSum.step / sizeof(normBuf[0]);
    233. CCV_SUM_PTRS( pF[0], pF[1], pF[2], pF[3], featBuf, rect[cellIdx], featStep );
    234. CCV_SUM_PTRS( pN[0], pN[1], pN[2], pN[3], normBuf, normRect, normStep );
    235. }
    236. inline int HOGpredictOrdered( HOGCascadeClassifier& cascade, Ptr<HOGEvaluator> &_featureEvaluator, double& sum )
    237. {
    238. int nstages = (int)cascade.data.stages.size();
    239. int nodeOfs = 0, leafOfs = 0;
    240. HOGEvaluator& featureEvaluator = (HOGEvaluator&)*_featureEvaluator;
    241. float* cascadeLeaves = &cascade.data.leaves[0];
    242. HOGCascadeClassifier::Data::DTreeNode* cascadeNodes = &cascade.data.nodes[0];
    243. HOGCascadeClassifier::Data::DTree* cascadeWeaks = &cascade.data.classifiers[0];
    244. HOGCascadeClassifier::Data::Stage* cascadeStages = &cascade.data.stages[0];
    245. for( int si = 0; si < nstages; si++ )
    246. {
    247. HOGCascadeClassifier::Data::Stage& stage = cascadeStages[si];
    248. int wi, ntrees = stage.ntrees;
    249. sum = 0;
    250. for( wi = 0; wi < ntrees; wi++ )
    251. {
    252. HOGCascadeClassifier::Data::DTree& weak = cascadeWeaks[stage.first + wi];
    253. int idx = 0, root = nodeOfs;
    254. do
    255. {
    256. HOGCascadeClassifier::Data::DTreeNode& node = cascadeNodes[root + idx];
    257. double val = featureEvaluator(node.featureIdx);
    258. idx = val < node.threshold ? node.left : node.right;
    259. }
    260. while( idx > 0 );
    261. sum += cascadeLeaves[leafOfs - idx];
    262. nodeOfs += weak.nodeCount;
    263. leafOfs += weak.nodeCount + 1;
    264. }
    265. if( sum < stage.threshold )
    266. return -si;
    267. }
    268. return 1;
    269. }
    270. inline int HOGpredictOrderedStump( HOGCascadeClassifier& cascade, Ptr<HOGEvaluator> &_featureEvaluator, double& sum )
    271. {
    272. int nodeOfs = 0, leafOfs = 0;
    273. HOGEvaluator& featureEvaluator = (HOGEvaluator&)*_featureEvaluator;
    274. float* cascadeLeaves = &cascade.data.leaves[0];
    275. HOGCascadeClassifier::Data::DTreeNode* cascadeNodes = &cascade.data.nodes[0];
    276. HOGCascadeClassifier::Data::Stage* cascadeStages = &cascade.data.stages[0];
    277. int nstages = (int)cascade.data.stages.size();
    278. for( int stageIdx = 0; stageIdx < nstages; stageIdx++ )
    279. {
    280. HOGCascadeClassifier::Data::Stage& stage = cascadeStages[stageIdx];
    281. sum = 0.0;
    282. int ntrees = stage.ntrees;
    283. for( int i = 0; i < ntrees; i++, nodeOfs++, leafOfs+= 2 )
    284. {
    285. HOGCascadeClassifier::Data::DTreeNode& node = cascadeNodes[nodeOfs];
    286. double value = featureEvaluator(node.featureIdx);
    287. sum += cascadeLeaves[ value < node.threshold ? leafOfs : leafOfs + 1 ];
    288. }
    289. if( sum < stage.threshold )
    290. return -stageIdx;
    291. }
    292. return 1;
    293. }
    294. }
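    A note on what the evaluator computes: setImage (implemented in hogcascade.cpp below) builds nine integral orientation-histogram images plus an integral gradient-magnitude image (normSum). Each Feature corresponds to one orientation bin of one cell of a 2×2-cell block, and Feature::calc returns roughly cellBinSum / (blockMagnitudeSum + 0.001), i.e. the cell's bin energy normalized by the total gradient energy of its block.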

    hogcascade.cpp

    1. /*M///
    2. //
    3. // IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
    4. //
    5. // By downloading, copying, installing or using the software you agree to this license.
    6. // If you do not agree to this license, do not download, install,
    7. // copy or use the software.
    8. //
    9. //
    10. // Intel License Agreement
    11. // For Open Source Computer Vision Library
    12. //
    13. // Copyright (C) 2000, Intel Corporation, all rights reserved.
    14. // Third party copyrights are property of their respective owners.
    15. //
    16. // Redistribution and use in source and binary forms, with or without modification,
    17. // are permitted provided that the following conditions are met:
    18. //
    19. // * Redistribution's of source code must retain the above copyright notice,
    20. // this list of conditions and the following disclaimer.
    21. //
    22. // * Redistribution's in binary form must reproduce the above copyright notice,
    23. // this list of conditions and the following disclaimer in the documentation
    24. // and/or other materials provided with the distribution.
    25. //
    26. // * The name of Intel Corporation may not be used to endorse or promote products
    27. // derived from this software without specific prior written permission.
    28. //
    29. // This software is provided by the copyright holders and contributors "as is" and
    30. // any express or implied warranties, including, but not limited to, the implied
    31. // warranties of merchantability and fitness for a particular purpose are disclaimed.
    32. // In no event shall the Intel Corporation or contributors be liable for any direct,
    33. // indirect, incidental, special, exemplary, or consequential damages
    34. // (including, but not limited to, procurement of substitute goods or services;
    35. // loss of use, data, or profits; or business interruption) however caused
    36. // and on any theory of liability, whether in contract, strict liability,
    37. // or tort (including negligence or otherwise) arising in any way out of
    38. // the use of this software, even if advised of the possibility of such damage.
    39. //
    40. //M*/
    41. #include "hogcascade.hpp"
    42. #include <cassert>
    43. namespace cv
    44. {
    45. using std::string;
    46. class HOGCascadeClassifierInvoker : public ParallelLoopBody
    47. {
    48. public:
    49. HOGCascadeClassifierInvoker( HOGCascadeClassifier& _cc, Size _sz1, int _stripSize, int _yStep, double _factor,
    50. vector<Rect>& _vec, vector<int>& _levels, vector<double>& _weights, bool outputLevels, const Mat& _mask, Mutex* _mtx)
    51. {
    52. classifier = &_cc;
    53. processingRectSize = _sz1;
    54. stripSize = _stripSize;
    55. yStep = _yStep;
    56. scalingFactor = _factor;
    57. rectangles = &_vec;
    58. rejectLevels = outputLevels ? &_levels : 0;
    59. levelWeights = outputLevels ? &_weights : 0;
    60. mask = _mask;
    61. mtx = _mtx;
    62. }
    63. void operator()(const Range& range) const
    64. {
    65. Ptr<HOGEvaluator> evaluator = classifier->featureEvaluator->clone();
    66. Size winSize(cvRound(classifier->data.origWinSize.width * scalingFactor), cvRound(classifier->data.origWinSize.height * scalingFactor));
    67. int y1 = range.start * stripSize;
    68. int y2 = min(range.end * stripSize, processingRectSize.height);
    69. for( int y = y1; y < y2; y += yStep )
    70. {
    71. for( int x = 0; x < processingRectSize.width; x += yStep )
    72. {
    73. if ( (!mask.empty()) && (mask.at<uchar>(Point(x,y))==0)) {
    74. continue;
    75. }
    76. double gypWeight;
    77. int result = classifier->runAt(evaluator, Point(x, y), gypWeight);
    78. if( rejectLevels )
    79. {
    80. if( result == 1 )
    81. result = -(int)classifier->data.stages.size();
    82. if( classifier->data.stages.size() + result < 4 )
    83. {
    84. mtx->lock();
    85. rectangles->push_back(Rect(cvRound(x*scalingFactor), cvRound(y*scalingFactor), winSize.width, winSize.height));
    86. rejectLevels->push_back(-result);
    87. levelWeights->push_back(gypWeight);
    88. mtx->unlock();
    89. }
    90. }
    91. else if( result > 0 )
    92. {
    93. mtx->lock();
    94. rectangles->push_back(Rect(cvRound(x*scalingFactor), cvRound(y*scalingFactor),
    95. winSize.width, winSize.height));
    96. mtx->unlock();
    97. }
    98. if( result == 0 )
    99. x += yStep;
    100. }
    101. }
    102. }
    103. HOGCascadeClassifier* classifier;
    104. vector<Rect>* rectangles;
    105. Size processingRectSize;
    106. int stripSize, yStep;
    107. double scalingFactor;
    108. vector<int> *rejectLevels;
    109. vector<double> *levelWeights;
    110. Mat mask;
    111. Mutex* mtx;
    112. };
    113. bool HOGEvaluator::Feature :: read( const FileNode& node )
    114. {
    115. FileNode rnode = node[CCC_RECT];
    116. FileNodeIterator it = rnode.begin();
    117. it >> rect[0].x >> rect[0].y >> rect[0].width >> rect[0].height >> featComponent;
    118. rect[1].x = rect[0].x + rect[0].width;
    119. rect[1].y = rect[0].y;
    120. rect[2].x = rect[0].x;
    121. rect[2].y = rect[0].y + rect[0].height;
    122. rect[3].x = rect[0].x + rect[0].width;
    123. rect[3].y = rect[0].y + rect[0].height;
    124. rect[1].width = rect[2].width = rect[3].width = rect[0].width;
    125. rect[1].height = rect[2].height = rect[3].height = rect[0].height;
    126. return true;
    127. }
    128. HOGEvaluator::HOGEvaluator()
    129. {
    130. features = Ptr<vector<Feature> >(new vector<Feature>());
    131. }
    132. HOGEvaluator::~HOGEvaluator()
    133. {
    134. }
    135. bool HOGEvaluator::read( const FileNode& node )
    136. {
    137. features->resize(node.size());
    138. featuresPtr = &(*features)[0];
    139. FileNodeIterator it = node.begin(), it_end = node.end();
    140. for(int i = 0; it != it_end; ++it, i++)
    141. {
    142. if(!featuresPtr[i].read(*it))
    143. return false;
    144. }
    145. return true;
    146. }
    147. Ptr<HOGEvaluator> HOGEvaluator::clone() const
    148. {
    149. Ptr<HOGEvaluator> ret = Ptr<HOGEvaluator>(new HOGEvaluator);
    150. ret->origWinSize = origWinSize;
    151. ret->features = features;
    152. ret->featuresPtr = &(*ret->features)[0];
    153. ret->offset = offset;
    154. ret->hist = hist;
    155. ret->normSum = normSum;
    156. return ret;
    157. }
    158. bool HOGEvaluator::setImage( const Mat& image, Size winSize )
    159. {
    160. int rows = image.rows + 1;
    161. int cols = image.cols + 1;
    162. origWinSize = winSize;
    163. if( image.cols < origWinSize.width || image.rows < origWinSize.height )
    164. return false;
    165. hist.clear();
    166. for( int bin = 0; bin < Feature::BIN_NUM; bin++ )
    167. {
    168. hist.push_back( Mat(rows, cols, CV_32FC1) );
    169. }
    170. normSum.create( rows, cols, CV_32FC1 );
    171. integralHistogram( image, hist, normSum, Feature::BIN_NUM );
    172. size_t featIdx, featCount = features->size();
    173. for( featIdx = 0; featIdx < featCount; featIdx++ )
    174. {
    175. featuresPtr[featIdx].updatePtrs( hist, normSum );
    176. }
    177. return true;
    178. }
    179. bool HOGEvaluator::setWindow(Point pt)
    180. {
    181. if( pt.x < 0 || pt.y < 0 ||
    182. pt.x + origWinSize.width >= hist[0].cols-2 ||
    183. pt.y + origWinSize.height >= hist[0].rows-2 )
    184. return false;
    185. offset = pt.y * ((int)hist[0].step/sizeof(float)) + pt.x;
    186. return true;
    187. }
    188. void HOGEvaluator::integralHistogram(const Mat &img, vector<Mat> &histogram, Mat &norm, int nbins) const
    189. {
    190. CV_Assert( img.type() == CV_8U || img.type() == CV_8UC3 );
    191. int x, y, binIdx;
    192. Size gradSize(img.size());
    193. Size histSize(histogram[0].size());
    194. Mat grad(gradSize, CV_32F);
    195. Mat qangle(gradSize, CV_8U);
    196. AutoBuffer<int> mapbuf(gradSize.width + gradSize.height + 4);
    197. int* xmap = (int*)mapbuf + 1;
    198. int* ymap = xmap + gradSize.width + 2;
    199. const int borderType = (int)BORDER_REPLICATE;
    200. for( x = -1; x < gradSize.width + 1; x++ )
    201. xmap[x] = borderInterpolate(x, gradSize.width, borderType);
    202. for( y = -1; y < gradSize.height + 1; y++ )
    203. ymap[y] = borderInterpolate(y, gradSize.height, borderType);
    204. int width = gradSize.width;
    205. AutoBuffer<float> _dbuf(width*4);
    206. float* dbuf = _dbuf;
    207. Mat Dx(1, width, CV_32F, dbuf);
    208. Mat Dy(1, width, CV_32F, dbuf + width);
    209. Mat Mag(1, width, CV_32F, dbuf + width*2);
    210. Mat Angle(1, width, CV_32F, dbuf + width*3);
    211. float angleScale = (float)(nbins/CV_PI);
    212. for( y = 0; y < gradSize.height; y++ )
    213. {
    214. const uchar* currPtr = img.data + img.step*ymap[y];
    215. const uchar* prevPtr = img.data + img.step*ymap[y-1];
    216. const uchar* nextPtr = img.data + img.step*ymap[y+1];
    217. float* gradPtr = (float*)grad.ptr(y);
    218. uchar* qanglePtr = (uchar*)qangle.ptr(y);
    219. for( x = 0; x < width; x++ )
    220. {
    221. dbuf[x] = (float)(currPtr[xmap[x+1]] - currPtr[xmap[x-1]]);
    222. dbuf[width + x] = (float)(nextPtr[xmap[x]] - prevPtr[xmap[x]]);
    223. }
    224. cartToPolar( Dx, Dy, Mag, Angle, false );
    225. for( x = 0; x < width; x++ )
    226. {
    227. float mag = dbuf[x+width*2];
    228. float angle = dbuf[x+width*3];
    229. angle = angle*angleScale - 0.5f;
    230. int bidx = cvFloor(angle);
    231. angle -= bidx;
    232. if( bidx < 0 )
    233. bidx += nbins;
    234. else if( bidx >= nbins )
    235. bidx -= nbins;
    236. qanglePtr[x] = (uchar)bidx;
    237. gradPtr[x] = mag;
    238. }
    239. }
    240. integral(grad, norm, grad.depth());
    241. float* histBuf;
    242. const float* magBuf;
    243. const uchar* binsBuf;
    244. int binsStep = (int)( qangle.step / sizeof(uchar) );
    245. int histStep = (int)( histogram[0].step / sizeof(float) );
    246. int magStep = (int)( grad.step / sizeof(float) );
    247. for( binIdx = 0; binIdx < nbins; binIdx++ )
    248. {
    249. histBuf = (float*)histogram[binIdx].data;
    250. magBuf = (const float*)grad.data;
    251. binsBuf = (const uchar*)qangle.data;
    252. memset( histBuf, 0, histSize.width * sizeof(histBuf[0]) );
    253. histBuf += histStep + 1;
    254. for( y = 0; y < qangle.rows; y++ )
    255. {
    256. histBuf[-1] = 0.f;
    257. float strSum = 0.f;
    258. for( x = 0; x < qangle.cols; x++ )
    259. {
    260. if( binsBuf[x] == binIdx )
    261. strSum += magBuf[x];
    262. histBuf[x] = histBuf[-histStep + x] + strSum;
    263. }
    264. histBuf += histStep;
    265. binsBuf += binsStep;
    266. magBuf += magStep;
    267. }
    268. }
    269. }
    270. HOGCascadeClassifier::HOGCascadeClassifier()
    271. {
    272. }
    273. HOGCascadeClassifier::HOGCascadeClassifier(const std::string& filename)
    274. {
    275. load(filename);
    276. }
    277. HOGCascadeClassifier::~HOGCascadeClassifier()
    278. {
    279. }
    280. bool HOGCascadeClassifier::empty() const
    281. {
    282. return data.stages.empty();
    283. }
    284. bool HOGCascadeClassifier::load(const std::string& filename)
    285. {
    286. data = Data();
    287. featureEvaluator.release();
    288. FileStorage fs(filename, FileStorage::READ);
    289. if( !fs.isOpened() )
    290. return false;
    291. if( read(fs.getFirstTopLevelNode()) )
    292. return true;
    293. fs.release();
    294. return false;
    295. }
    296. bool HOGCascadeClassifier::Data::read(const FileNode &root)
    297. {
    298. static const float THRESHOLD_EPS = 1e-5f;
    299. // load stage params
    300. string stageTypeStr = (string)root[CCC_STAGE_TYPE];
    301. if( stageTypeStr == CCC_BOOST )
    302. stageType = 0;
    303. else
    304. return false;
    305. string featureTypeStr = (string)root[CCC_FEATURE_TYPE];
    306. if( featureTypeStr == CCC_HOG )
    307. featureType = HOGEvaluator::HOG;
    308. else
    309. return false;
    310. origWinSize.width = (int)root[CCC_WIDTH];
    311. origWinSize.height = (int)root[CCC_HEIGHT];
    312. CV_Assert( origWinSize.height > 0 && origWinSize.width > 0 );
    313. isStumpBased = (int)(root[CCC_STAGE_PARAMS][CCC_MAX_DEPTH]) == 1 ? true : false;
    314. // load feature params
    315. FileNode fn = root[CCC_FEATURE_PARAMS];
    316. if( fn.empty() )
    317. return false;
    318. ncategories = fn[CCC_MAX_CAT_COUNT];
    319. int subsetSize = (ncategories + 31)/32,
    320. nodeStep = 3 + ( ncategories>0 ? subsetSize : 1 );
    321. // load stages
    322. fn = root[CCC_STAGES];
    323. if( fn.empty() )
    324. return false;
    325. stages.reserve(fn.size());
    326. classifiers.clear();
    327. nodes.clear();
    328. FileNodeIterator it = fn.begin(), it_end = fn.end();
    329. for( int si = 0; it != it_end; si++, ++it )
    330. {
    331. FileNode fns = *it;
    332. Stage stage;
    333. stage.threshold = (float)fns[CCC_STAGE_THRESHOLD] - THRESHOLD_EPS;
    334. fns = fns[CCC_WEAK_CLASSIFIERS];
    335. if(fns.empty())
    336. return false;
    337. stage.ntrees = (int)fns.size();
    338. stage.first = (int)classifiers.size();
    339. stages.push_back(stage);
    340. classifiers.reserve(stages[si].first + stages[si].ntrees);
    341. FileNodeIterator it1 = fns.begin(), it1_end = fns.end();
    342. for( ; it1 != it1_end; ++it1 ) // weak trees
    343. {
    344. FileNode fnw = *it1;
    345. FileNode internalNodes = fnw[CCC_INTERNAL_NODES];
    346. FileNode leafValues = fnw[CCC_LEAF_VALUES];
    347. if( internalNodes.empty() || leafValues.empty() )
    348. return false;
    349. DTree tree;
    350. tree.nodeCount = (int)internalNodes.size()/nodeStep;
    351. classifiers.push_back(tree);
    352. nodes.reserve(nodes.size() + tree.nodeCount);
    353. leaves.reserve(leaves.size() + leafValues.size());
    354. if( subsetSize > 0 )
    355. subsets.reserve(subsets.size() + tree.nodeCount*subsetSize);
    356. FileNodeIterator internalNodesIter = internalNodes.begin(), internalNodesEnd = internalNodes.end();
    357. for( ; internalNodesIter != internalNodesEnd; ) // nodes
    358. {
    359. DTreeNode node;
    360. node.left = (int)*internalNodesIter; ++internalNodesIter;
    361. node.right = (int)*internalNodesIter; ++internalNodesIter;
    362. node.featureIdx = (int)*internalNodesIter; ++internalNodesIter;
    363. if( subsetSize > 0 )
    364. {
    365. for( int j = 0; j < subsetSize; j++, ++internalNodesIter )
    366. subsets.push_back((int)*internalNodesIter);
    367. node.threshold = 0.f;
    368. }
    369. else
    370. {
    371. node.threshold = (float)*internalNodesIter; ++internalNodesIter;
    372. }
    373. nodes.push_back(node);
    374. }
    375. internalNodesIter = leafValues.begin(), internalNodesEnd = leafValues.end();
    376. for( ; internalNodesIter != internalNodesEnd; ++internalNodesIter ) // leaves
    377. leaves.push_back((float)*internalNodesIter);
    378. }
    379. }
    380. return true;
    381. }
    382. bool HOGCascadeClassifier::read(const FileNode& root)
    383. {
    384. if( !data.read(root) )
    385. return false;
    386. // load features
    387. featureEvaluator = Ptr<HOGEvaluator>(new HOGEvaluator);
    388. FileNode fn = root[CCC_FEATURES];
    389. if( fn.empty() )
    390. return false;
    391. return featureEvaluator->read(fn);
    392. }
    393. bool HOGCascadeClassifier::detectSingleScale( const Mat& image, int stripCount, Size processingRectSize,
    394. int stripSize, int yStep, double factor, vector<Rect>& candidates,
    395. vector<int>& levels, vector<double>& weights, bool outputRejectLevels )
    396. {
    397. if( !featureEvaluator->setImage( image, data.origWinSize ) )
    398. return false;
    399. Mat currentMask;
    400. if (!maskGenerator.empty()) {
    401. currentMask=maskGenerator->generateMask(image);
    402. }
    403. vector<Rect> candidatesVector;
    404. vector<int> rejectLevels;
    405. vector<double> levelWeights;
    406. Mutex mtx;
    407. if( outputRejectLevels )
    408. {
    409. parallel_for_(Range(0, stripCount), HOGCascadeClassifierInvoker( *this, processingRectSize, stripSize, yStep, factor,
    410. candidatesVector, rejectLevels, levelWeights, true, currentMask, &mtx));
    411. levels.insert( levels.end(), rejectLevels.begin(), rejectLevels.end() );
    412. weights.insert( weights.end(), levelWeights.begin(), levelWeights.end() );
    413. }
    414. else
    415. {
    416. parallel_for_(Range(0, stripCount), HOGCascadeClassifierInvoker( *this, processingRectSize, stripSize, yStep, factor,
    417. candidatesVector, rejectLevels, levelWeights, false, currentMask, &mtx));
    418. }
    419. candidates.insert( candidates.end(), candidatesVector.begin(), candidatesVector.end() );
    420. return true;
    421. }
    422. int HOGCascadeClassifier::runAt( Ptr<HOGEvaluator>& evaluator, Point pt, double& weight )
    423. {
    424. assert( data.featureType == HOGEvaluator::HOG );
    425. if( !evaluator->setWindow(pt) )
    426. return -1;
    427. if( data.isStumpBased )
    428. {
    429. return HOGpredictOrderedStump( *this, evaluator, weight );
    430. }
    431. else
    432. {
    433. return HOGpredictOrdered( *this, evaluator, weight );
    434. }
    435. }
    436. void HOGCascadeClassifier::detectMultiScale( const Mat& image, vector<Rect>& objects,
    437. vector<int>& rejectLevels,
    438. vector<double>& levelWeights,
    439. double scaleFactor, int minNeighbors,
    440. int flags, Size minObjectSize, Size maxObjectSize,
    441. bool outputRejectLevels )
    442. {
    443. const double GROUP_EPS = 0.2;
    444. CV_Assert( scaleFactor > 1 && image.depth() == CV_8U );
    445. if( empty() )
    446. return;
    447. objects.clear();
    448. // TODO
    449. if (!maskGenerator.empty()) {
    450. maskGenerator->initializeMask(image);
    451. }
    452. if( maxObjectSize.height == 0 || maxObjectSize.width == 0 )
    453. maxObjectSize = image.size();
    454. Mat grayImage = image;
    455. if( grayImage.channels() > 1 )
    456. {
    457. Mat temp;
    458. cvtColor(grayImage, temp, COLOR_BGR2GRAY);
    459. grayImage = temp;
    460. }
    461. Mat imageBuffer(image.rows + 1, image.cols + 1, CV_8U);
    462. vector<Rect> candidates;
    463. for( double factor = 1; ; factor *= scaleFactor )
    464. {
    465. Size originalWindowSize = data.origWinSize;
    466. Size windowSize( cvRound(originalWindowSize.width*factor), cvRound(originalWindowSize.height*factor) );
    467. Size scaledImageSize( cvRound( grayImage.cols/factor ), cvRound( grayImage.rows/factor ) );
    468. Size processingRectSize( scaledImageSize.width - originalWindowSize.width, scaledImageSize.height - originalWindowSize.height );
    469. if( processingRectSize.width <= 0 || processingRectSize.height <= 0 )
    470. break;
    471. if( windowSize.width > maxObjectSize.width || windowSize.height > maxObjectSize.height )
    472. break;
    473. if( windowSize.width < minObjectSize.width || windowSize.height < minObjectSize.height )
    474. continue;
    475. Mat scaledImage( scaledImageSize, CV_8U, imageBuffer.data );
    476. resize( grayImage, scaledImage, scaledImageSize, 0, 0, INTER_LINEAR );
    477. int yStep = 4;
    478. int stripCount, stripSize;
    479. const int PTS_PER_THREAD = 1000;
    480. stripCount = ((processingRectSize.width/yStep)*(processingRectSize.height + yStep-1)/yStep + PTS_PER_THREAD/2)/PTS_PER_THREAD;
    481. stripCount = std::min(std::max(stripCount, 1), 100);
    482. stripSize = (((processingRectSize.height + stripCount - 1)/stripCount + yStep-1)/yStep)*yStep;
    483. if( !detectSingleScale( scaledImage, stripCount, processingRectSize, stripSize, yStep, factor, candidates,
    484. rejectLevels, levelWeights, outputRejectLevels ) )
    485. break;
    486. }
    487. objects.resize(candidates.size());
    488. std::copy(candidates.begin(), candidates.end(), objects.begin());
    489. if( outputRejectLevels )
    490. {
    491. groupRectangles( objects, rejectLevels, levelWeights, minNeighbors, GROUP_EPS );
    492. }
    493. else
    494. {
    495. groupRectangles( objects, minNeighbors, GROUP_EPS );
    496. }
    497. }
    498. void HOGCascadeClassifier::detectMultiScale( const Mat& image, vector<Rect>& objects,
    499. double scaleFactor, int minNeighbors,
    500. int flags, Size minObjectSize, Size maxObjectSize)
    501. {
    502. vector<int> fakeLevels;
    503. vector<double> fakeWeights;
    504. detectMultiScale( image, objects, fakeLevels, fakeWeights, scaleFactor,
    505. minNeighbors, flags, minObjectSize, maxObjectSize, false );
    506. }
    507. void HOGCascadeClassifier::setMaskGenerator(Ptr<MaskGenerator> _maskGenerator)
    508. {
    509. maskGenerator=_maskGenerator;
    510. }
    511. Ptr<HOGCascadeClassifier::MaskGenerator> HOGCascadeClassifier::getMaskGenerator()
    512. {
    513. return maskGenerator;
    514. }
    515. }
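    The MaskGenerator hook can be used to restrict detection to a region of interest: detectSingleScale calls generateMask on each scaled image, and the invoker skips any window whose top-left corner falls on a zero mask pixel. A minimal sketch, assuming a caller-supplied ROI given as fractions of the image size (the class name RoiMaskGenerator is illustrative, not part of the original code):

    #include <opencv2/opencv.hpp>
    #include "hogcascade.hpp"

    // Hypothetical mask generator that only allows windows whose top-left corner
    // lies inside a rectangle given in relative (0..1) image coordinates.
    class RoiMaskGenerator : public cv::HOGCascadeClassifier::MaskGenerator
    {
    public:
        explicit RoiMaskGenerator(const cv::Rect2d& relRoi) : relRoi_(relRoi) {}
        cv::Mat generateMask(const cv::Mat& src) override
        {
            cv::Mat mask = cv::Mat::zeros(src.size(), CV_8UC1);
            cv::Rect roi(cvRound(relRoi_.x * src.cols), cvRound(relRoi_.y * src.rows),
                         cvRound(relRoi_.width * src.cols), cvRound(relRoi_.height * src.rows));
            mask(roi & cv::Rect(0, 0, src.cols, src.rows)).setTo(255);  // 255 = search here
            return mask;
        }
    private:
        cv::Rect2d relRoi_;
    };

    Usage, e.g. to search only the lower half of the frame: hogclassifier.setMaskGenerator(cv::makePtr<RoiMaskGenerator>(cv::Rect2d(0.0, 0.5, 1.0, 0.5))); passing the ROI in relative coordinates is convenient because generateMask is called on each scaled image, not on the original frame.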

    References:

    https://github.com/Schmetzler/opencv3_CascadeHOG

    Survey of pedestrian detection algorithms (Tencent Cloud Developer Community): https://cloud.tencent.com/developer/article/1526509

    https://github.com/watersink/hog

  • Original article: https://blog.csdn.net/cxyhjl/article/details/125556569