

OpenCV Machine Learning: Quick-Start Sample Programs

2019-11-08 01:56:08

The main algorithms implemented in the OpenCV machine learning library are:

1) Normal Bayes classifier: CvNormalBayesClassifier
2) K-nearest neighbor classifier: CvKNearest
3) Support vector machine: CvSVM
4) Expectation maximization: EM
5) Decision tree: CvDTree
6) Random trees classifier: CvRTrees
7) Extremely randomized trees classifier: CvERTrees
8) Boosted tree classifier: CvBoost
9) Gradient boosted trees: CvGBTrees
10) Artificial neural networks: CvANN_MLP
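All of these classes share the CvStatModel interface, so every sample below follows the same pattern: pack the training samples into a CV_32FC1 matrix with one row per sample, call train, then call predict on a row vector, optionally persisting the model with save/load. As a minimal sketch of that workflow using the K-nearest-neighbor class (assuming OpenCV 2.4; the data is illustrative):

#include "opencv2/core/core.hpp"
#include "opencv2/ml/ml.hpp"
#include <iostream>
using namespace cv;

int main()
{
    // Four 2-D samples; rows are samples, columns are features
    float samples[4][2] = { {1, 1}, {2, 2}, {8, 8}, {9, 9} };
    Mat trainData(4, 2, CV_32FC1, samples);
    float labels[4] = { 1, 1, 2, 2 };                 // class of each sample
    Mat responses(4, 1, CV_32FC1, labels);

    CvKNearest knn;
    knn.train(trainData, responses);                  // fit the model

    float q[2] = { 8.5f, 8.5f };                      // query point near class 2
    Mat query(1, 2, CV_32FC1, q);
    float cls = knn.find_nearest(query, 3);           // vote among the 3 nearest
    std::cout << "predicted class: " << cls << std::endl;
    return 0;
}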

#include "opencv2/core/core.hpp"#include "opencv2/highgui/highgui.hpp"#include "opencv2/imgipt = points.at<Point2f>(i); circle( img, ipt, 2, colorTab[clusterIdx], CV_FILLED, CV_AA ); } imshow("clusters", img); char key = (char)waitKey(); //無限等待 if( key == 27 || key == 'q' || key == 'Q' ) // 'ESC' break; }}#define NTRAINING_SAMPLES 100 // Number of training samples per class#define FRAC_LINEAR_SEP 0.9f // Fraction of samples which compose the linear separable part/************************************************************************//* SVM,support vector machine(支持向量機); *//************************************************************************/void SVM_(){ // Data for visual representation const int WIDTH = 512, HEIGHT = 512; Mat I = Mat::zeros(HEIGHT, WIDTH, CV_8UC3); //--------------------- 1. Set up training data randomly --------------------------------------- Mat trainData(2*NTRAINING_SAMPLES, 2, CV_32FC1); Mat labels (2*NTRAINING_SAMPLES, 1, CV_32FC1); RNG rng(100); // Random value generation class // Set up the linearly separable part of the training data int nLinearSamples = (int) (FRAC_LINEAR_SEP * NTRAINING_SAMPLES); // Generate random points for the class 1 Mat trainClass = trainData.rowRange(0, nLinearSamples); // The x coordinate of the points is in [0, 0.4) Mat c = trainClass.colRange(0, 1); rng.fill(c, RNG::UNIFORM, Scalar(1), Scalar(0.4 * WIDTH)); // The y coordinate of the points is in [0, 1) c = trainClass.colRange(1,2); rng.fill(c, RNG::UNIFORM, Scalar(1), Scalar(HEIGHT)); // Generate random points for the class 2 trainClass = trainData.rowRange(2*NTRAINING_SAMPLES-nLinearSamples, 2*NTRAINING_SAMPLES); // The x coordinate of the points is in [0.6, 1] c = trainClass.colRange(0 , 1); rng.fill(c, RNG::UNIFORM, Scalar(0.6*WIDTH), Scalar(WIDTH)); // The y coordinate of the points is in [0, 1) c = trainClass.colRange(1,2); rng.fill(c, RNG::UNIFORM, Scalar(1), Scalar(HEIGHT)); //------------------ Set up the non-linearly separable part of the training data --------------- // Generate random points for the classes 1 and 2 trainClass = trainData.rowRange( nLinearSamples, 2*NTRAINING_SAMPLES-nLinearSamples); // The x coordinate of the points is in [0.4, 0.6) c = trainClass.colRange(0,1); rng.fill(c, RNG::UNIFORM, Scalar(0.4*WIDTH), Scalar(0.6*WIDTH)); // The y coordinate of the points is in [0, 1) c = trainClass.colRange(1,2); rng.fill(c, RNG::UNIFORM, Scalar(1), Scalar(HEIGHT)); //------------------------- Set up the labels for the classes --------------------------------- labels.rowRange( 0, NTRAINING_SAMPLES).setTo(1); // Class 1 labels.rowRange(NTRAINING_SAMPLES, 2*NTRAINING_SAMPLES).setTo(2); // Class 2 //------------------------ 2. Set up the support vector machines parameters -------------------- CvSVMParams params; params.svm_type = SVM::C_SVC; params.C = 0.1; params.kernel_type = SVM::LINEAR; params.term_crit = TermCriteria(CV_TERMCRIT_ITER, (int)1e7, 1e-6); //------------------------ 3. Train the svm ---------------------------------------------------- cout << "Starting training process" << endl; CvSVM svm; /* svm.train(trainData, labels, Mat(), Mat(), params); svm.save("supportVectorMachine.txt"); */ svm.load("supportVectorMachine.txt"); cout << "Finished training process" << endl; //------------------------ 4. 
/************************************************************************/
/* Normal Bayes classifier                                              */
/************************************************************************/
void NBC()
{
    float trainingData[8][3] = { {6, 180, 12}, {5.92, 190, 11}, {5.58, 170, 12},
                                 {5.92, 165, 10}, {5, 100, 6}, {5.5, 150, 8},
                                 {5.42, 130, 7}, {5.75, 150, 9} };
    Mat trainingDataMat(8, 3, CV_32FC1, trainingData);

    float responses[8] = {'M', 'M', 'M', 'M', 'F', 'F', 'F', 'F'};
    Mat responsesMat(8, 1, CV_32FC1, responses);

    NormalBayesClassifier nbc;
    // Training is enabled so the program runs without a pre-existing model file:
    nbc.train(trainingDataMat, responsesMat);
    nbc.save("normalBayes.txt");
    // nbc.load("normalBayes.txt");

    float myData[3] = {6, 130, 8};
    Mat myDataMat(1, 3, CV_32FC1, myData);
    float r = nbc.predict( myDataMat );
    cout << endl << "result: " << (char)r << endl;

    system("pause");
}

/************************************************************************/
/* Gradient Boosted Trees                                               */
/************************************************************************/
void GBT()
{
    // NOTE: the source declared these arrays as double but wrapped them in
    // CV_32FC1 matrices; they must be float for the Mat header to be valid.
    float trainingData[28][2] = {
        {210.4, 3}, {240.0, 3}, {300.0, 4}, {153.4, 3}, {138.0, 3},
        {194.0, 4}, {189.0, 3}, {126.8, 3}, {132.0, 2}, {260.9, 4},
        {176.7, 3}, {160.4, 3}, {389.0, 3}, {145.8, 3}, {160.0, 3},
        {141.6, 2}, {198.5, 4}, {142.7, 3}, {149.4, 3}, {200.0, 3},
        {447.8, 5}, {230.0, 4}, {123.6, 3}, {303.1, 4}, {188.8, 2},
        {196.2, 4}, {110.0, 3}, {252.6, 3} };
    Mat trainingDataMat(28, 2, CV_32FC1, trainingData);

    float responses[28] = {
        399900, 369000, 539900, 314900, 212000, 239999, 329999,
        259900, 299900, 499998, 252900, 242900, 573900, 464500,
        329900, 232000, 299900, 198999, 242500, 347000, 699900,
        449900, 199900, 599000, 255000, 259900, 249900, 469000 };
    Mat responsesMat(28, 1, CV_32FC1, responses);

    // Set the parameters
    CvGBTreesParams params;
    params.loss_function_type = CvGBTrees::ABSOLUTE_LOSS;
    params.weak_count = 10;
    params.shrinkage = 0.01f;
    params.subsample_portion = 0.8f;
    params.max_depth = 3;
    params.use_surrogates = false;

    CvGBTrees gbt;
    // Train on the samples
    gbt.train(trainingDataMat, CV_ROW_SAMPLE, responsesMat, Mat(), Mat(), Mat(), Mat(), params);

    float sampleData[2] = {185.4, 4};        // sample to predict
    Mat sampleMat(1, 2, CV_32FC1, sampleData); // as a row vector (the source used a 2x1 column)
    float r = gbt.predict(sampleMat);        // predict
    cout << endl << "result: " << r << endl;

    system("pause");
}
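/* Hedged aside (not in the original post): a quick sanity check for the
   boosted regressor is its mean absolute error on the training set itself.
   Inside GBT(), after train(): */
// double mae = 0;
// for (int i = 0; i < trainingDataMat.rows; ++i)
//     mae += fabs(gbt.predict(trainingDataMat.row(i)) - responses[i]);
// cout << "training MAE: " << mae / trainingDataMat.rows << endl;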
/************************************************************************/
/* Extremely Randomized Trees classifier                                */
/************************************************************************/
void ET()
{
    // Same float-vs-double fix as in GBT() above
    float trainingData[28][2] = {
        {210.4, 3}, {240.0, 3}, {300.0, 4}, {153.4, 3}, {138.0, 3},
        {194.0, 4}, {189.0, 3}, {126.8, 3}, {132.0, 2}, {260.9, 4},
        {176.7, 3}, {160.4, 3}, {389.0, 3}, {145.8, 3}, {160.0, 3},
        {141.6, 2}, {198.5, 4}, {142.7, 3}, {149.4, 3}, {200.0, 3},
        {447.8, 5}, {230.0, 4}, {123.6, 3}, {303.1, 4}, {188.8, 2},
        {196.2, 4}, {110.0, 3}, {252.6, 3} };
    CvMat trainingDataCvMat = cvMat( 28, 2, CV_32FC1, trainingData );

    float responses[28] = {
        399900, 369000, 539900, 314900, 212000, 239999, 329999,
        259900, 299900, 499998, 252900, 242900, 573900, 464500,
        329900, 232000, 299900, 198999, 242500, 347000, 699900,
        449900, 199900, 599000, 255000, 259900, 249900, 469000 };
    CvMat responsesCvMat = cvMat( 28, 1, CV_32FC1, responses );

    CvRTParams params = CvRTParams(10, 2, 0, false, 16, 0, true, 0, 100, 0, CV_TERMCRIT_ITER);

    // The source instantiated CvRTrees (plain random trees) here; CvERTrees is
    // the extremely randomized variant this section's title names.
    CvERTrees rtrees;
    rtrees.train(&trainingDataCvMat, CV_ROW_SAMPLE, &responsesCvMat, NULL, NULL, NULL, NULL, params);

    float sampleData[2] = {201.5, 3};
    Mat sampleMat(1, 2, CV_32FC1, sampleData);   // row vector
    float r = rtrees.predict(sampleMat);
    cout << endl << "result: " << r << endl;

    system("pause");
}

/************************************************************************/
/* Expectation-Maximization                                             */
/************************************************************************/
void EM_()
{
    Mat src = imread("4.jpg");                 // read the image
    namedWindow( "my daughter", WINDOW_AUTOSIZE );
    imshow( "my daughter", src );              // show the original image
    waitKey(0);

    // data holds the samples; labels holds each sample's cluster label
    Mat data, labels;
    // Convert the color image into the sample matrix the EM algorithm expects
    for (int i = 0; i < src.rows; i++)
    {
        for (int j = 0; j < src.cols; j++)
        {
            Vec3b point = src.at<Vec3b>(i, j); // current pixel's color value
            // turn the three intensity values into one sample row
            Mat tmp = (Mat_<float>(1, 3) << point[0], point[1], point[2]);
            data.push_back(tmp);               // store the sample
        }
    }

    int clusters = 4;        // number of segments, i.e. 4 classes in total
    EM em = EM(clusters);    // instantiate EM
    // Train on the samples to obtain the per-sample labels
    em.train(data, noArray(), labels, noArray());

    // Replace each class with a distinct color
    Vec3b colorTab[] =
    {
        Vec3b(0, 0, 255),
        Vec3b(0, 255, 0),
        Vec3b(255, 100, 100),
        Vec3b(255, 0, 255),
        Vec3b(0, 255, 255)
    };
    int n = 0;               // sample index
    for (int i = 0; i < src.rows; i++)
    {
        for (int j = 0; j < src.cols; j++)
        {
            int clusterIdx = labels.at<int>(n);         // label of the current pixel
            src.at<Vec3b>(i, j) = colorTab[clusterIdx]; // paint the corresponding color
            n++;
        }
    }
    namedWindow( "EM", WINDOW_AUTOSIZE );
    imshow( "EM", src );     // show the segmentation result
    waitKey(0);
}
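/* Hedged aside (not in the original post): in OpenCV 2.4 the trained cv::EM
   model exposes its mixture parameters through the cv::Algorithm get
   interface, so the learned cluster centers can be printed inside EM_()
   after training (parameter names as documented for 2.4; treat this as an
   assumption if your version differs): */
// Mat means   = em.get<Mat>("means");     // one row per mixture component
// Mat weights = em.get<Mat>("weights");   // mixing weights
// cout << "cluster centers:" << endl << means << endl;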
(false=N, true=Y)", "Credit_Rating (fair=F, good=G, Excellent=E)", 0};/************************************************************************//* Decision Tree(決策樹); *//************************************************************************/void DT(){ //19個訓練樣本 float trainingData[19][5]={ {'Y','L','N','N','F'}, {'Y','L','Y','N','G'}, {'Y','M','Y','N','G'}, {'Y','M','Y','Y','G'}, {'Y','H','Y','Y','G'}, {'Y','M','N','Y','G'}, {'M','L','Y','Y','E'}, {'M','H','Y','Y','G'}, {'M','L','N','Y','G'}, {'M','M','Y','Y','F'}, {'M','H','Y','Y','E'}, {'M','M','N','N','G'}, {'O','L','N','N','G'}, {'O','L','Y','Y','E'}, {'O','L','Y','N','E'}, {'O','M','N','Y','G'}, {'O','L','N','N','E'}, {'O','H','N','Y','F'}, {'O','H','Y','Y','E'} }; Mat trainingDataMat(19, 5, CV_32FC1, trainingData); //樣本的矩陣形式 //樣本的分類結果,即響應值 float responses[19] = {'N','N','Y','Y','Y','N','Y','Y','N','N','Y','N','N','Y','Y','N','N','N','Y'}; Mat responsesMat(19, 1, CV_32FC1, responses); //矩陣形式 float priors[5] = {1, 1, 1, 1, 1}; //先驗概率,這里的每個特征屬性的作用都是相同 //定義決策樹的參數 CvDTreeParams params( 15, // 決策樹的最大深度 1, //決策樹葉節點的最小樣本數 0, //回歸精度,這里不需要 false, //是否使用替代分叉屬性,由于沒有缺失的特征屬性,所以這里不需要替代分叉屬性 25, //最大的類數量 0, // 交叉驗證的子集數,由于樣本太少,這里不需要交叉驗證 false, //使用1SE規則,這里不需要 false, //是否真正的去掉被剪切的分支,這里不需要 priors //先驗概率 ); //類形式的掩碼,這里是分類樹,而且5個特征屬性都是類的形式,因此該變量都為1 Mat varTypeMat(6, 1, CV_8U, Scalar::all(1)); CvDTree* dtree = new CvDTree(); //實例化CvDTree類 //訓練樣本,構建決策樹 dtree->train ( trainingDataMat, //訓練樣本 CV_ROW_SAMPLE, //樣本矩陣的行表示樣本,列表示特征屬性 responsesMat, //樣本的響應值矩陣 Mat(), //應用所有的特征屬性 Mat(), //應用所有的訓練樣本 varTypeMat, //類形式的掩碼 Mat(), //沒有缺失任何特征屬性 params //決策樹參數 ); //調用get_var_importance函數 const CvMat* var_importance = dtree->get_var_importance(); //輸出特征屬性重要性程度 for( int i = 0; i < var_importance->cols*var_importance->rows; i++ ) { double val = var_importance->data.db[i]; char buf[100]; int len = (int)(strchr( var_desc[i], '(' ) - var_desc[i] - 1); strncpy( buf, var_desc[i], len ); buf[len] = '/0'; printf( "%s", buf ); printf( ": %g%%/n", val*100. 
    float myData[5] = {'M','H','Y','N','F'};   // sample to predict
    Mat myDataMat(1, 5, CV_32FC1, myData);     // as a row vector (the source used a 5x1 column)
    double r = dtree->predict( myDataMat, Mat(), false )->value;   // predicted value
    cout << endl << "result: " << (char)r << endl;   // print the prediction

    delete dtree;
    system("pause");
}

/************************************************************************/
/* Boosted tree classifier                                              */
/************************************************************************/
void BT()
{
    // Training samples
    float trainingData[42][2] = {
        {40, 55},{35, 35},{55, 15},{45, 25},{10, 10},{15, 15},{40, 10},
        {30, 15},{30, 50},{100, 20},{45, 65},{20, 35},{80, 20},{90, 5},
        {95, 35},{80, 65},{15, 55},{25, 65},{85, 35},{85, 55},{95, 70},
        {105, 50},{115, 65},{110, 25},{120, 45},{15, 45},
        {55, 30},{60, 65},{95, 60},{25, 40},{75, 45},{105, 35},{65, 10},
        {50, 50},{40, 35},{70, 55},{80, 30},{95, 45},{60, 20},{70, 30},
        {65, 45},{85, 40} };
    Mat trainingDataMat(42, 2, CV_32FC1, trainingData);

    // Responses for the training samples
    float responses[42] = {
        'R','R','R','R','R','R','R','R','R','R','R','R','R','R','R','R',
        'R','R','R','R','R','R','R','R','R','R',
        'B','B','B','B','B','B','B','B','B','B','B','B','B','B','B','B' };
    Mat responsesMat(42, 1, CV_32FC1, responses);

    float priors[2] = {1, 1};   // class priors

    CvBoostParams params(
        CvBoost::REAL,   // boost_type
        10,              // weak_count
        0.95,            // weight_trim_rate
        15,              // max_depth
        false,           // use_surrogates
        priors           // priors
    );

    Boost boost;   // cv::Boost is a typedef for CvBoost in OpenCV 2.x
    boost.train(
        trainingDataMat,
        CV_ROW_SAMPLE,
        responsesMat,
        Mat(), Mat(), Mat(), Mat(),
        params
    );

    // Sample to predict
    float myData[2] = {55, 25};
    Mat myDataMat(1, 2, CV_32FC1, myData);   // row vector (the source used a 2x1 column)
    double r = boost.predict( myDataMat );
    cout << endl << "result: " << (char)r << endl;

    system("pause");
}

int main()
{
    //KNN();      // truncated in the source
    //Kmeans();   // truncated in the source
    //SVM_();
    //NBC();
    GBT();
    //ET();
    //EM_();
    //DT();
    //BT();

    system("pause");
    return 0;
}
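To build these samples against OpenCV 2.x on Linux, something along these lines is typical (a sketch; the source file name is an assumption, and `opencv` is the pkg-config package name shipped with 2.x):

g++ opencv_ml_demo.cpp -o opencv_ml_demo `pkg-config --cflags --libs opencv`

Note that system("pause") is a Windows (cmd.exe) command; on Linux, drop those calls or replace them with std::cin.get().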