[Image Algorithms] Color Image Segmentation Series, Part 8: MeanShift-Based Color Segmentation

》Principle
The mean-shift principle has already been explained in earlier posts on this blog, so it will not be repeated here. For overseas (English-language) material, see: http://people.csail.mit.edu/sparis/#cvpr07
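As a quick refresher, the core of the algorithm is a very simple iteration: place a window in feature space, average the feature vectors that fall inside it, move the window to that average, and repeat until the shift is negligible. Below is a minimal sketch of one such step with a flat (uniform) kernel over 3-D Luv vectors; the names Vec3 and meanShiftOnce are illustrative only and do not appear in the code later in this post.

#include <vector>
#include <cmath>
#include <cstddef>

// Illustrative only: one mean-shift step with a flat (uniform) kernel in a
// 3-D feature space such as (L, u, v). All points within radius r of the
// current mean are averaged and the mean moves to that average.
struct Vec3 { double x, y, z; };

static double dist2(const Vec3& a, const Vec3& b)
{
    return (a.x - b.x) * (a.x - b.x) + (a.y - b.y) * (a.y - b.y) + (a.z - b.z) * (a.z - b.z);
}

// Returns the length of the shift; iterate until it drops below a small threshold.
double meanShiftOnce(const std::vector<Vec3>& pts, double r, Vec3& mean)
{
    Vec3 sum = { 0.0, 0.0, 0.0 };
    int count = 0;
    for (std::size_t i = 0; i < pts.size(); ++i)
    {
        if (dist2(pts[i], mean) <= r * r)
        {
            sum.x += pts[i].x;  sum.y += pts[i].y;  sum.z += pts[i].z;
            ++count;
        }
    }
    if (count == 0)
        return 0.0;                       // empty window: nothing to move towards
    Vec3 next = { sum.x / count, sum.y / count, sum.z / count };
    double shift = std::sqrt(dist2(next, mean));
    mean = next;
    return shift;
}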

》Source code

Core code (adapted from code found online)
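The listing uses two small helper types, meanshiftpoint and Feature, whose definitions are not included in the original post. The following is a plausible reconstruction based purely on how their fields are used below; treat it as an assumption, not the author's original declaration.

// Assumed definitions, reconstructed from how the fields are used in the listing.
struct meanshiftpoint
{
    CvPoint pt;           // randomly chosen pixel position of a seed window
    int     con_f_number; // number of feature vectors falling inside that window
};

struct Feature
{
    CvPoint3D64f pt;      // converged mode in Luv feature space (pt.x = L, pt.y = u, pt.z = v)
    int          number;  // number of feature vectors inside the converged window
    CvMat*       result;  // 8-bit mask of the pixels assigned to this mode
};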

//============================ Meanshift ==============================//
// Requires the OpenCV 1.x C API (cv.h / highgui.h) plus <cstdlib>, <ctime> and <cmath>.
void MyClustering::MeanShiftImg(IplImage* src, IplImage* dst, float r, int Nmin, int Ncon)
{
    int i, j, p, k = 0, run_meanshift_slec_number = 0;
    int pNmin;                              // number of feature vectors inside the search window of the current mode
    IplImage *temp, *gray;                  // image converted to Luv space; single-channel scratch image
    CvMat *distance, *result, *mask;
    CvMat *temp_mat, *temp_mat_sub, *temp_mat_sub2, *final_class_mat;  // Luv image as a matrix; difference between the image matrix and the selected point
    CvMat *cn, *cn1, *cn2, *cn3;
    double avg_img[3];                      // per-channel means of the image
    double r1;                              // search window radius
    int temp_number;
    meanshiftpoint meanpoint[25];           // the 25 randomly chosen seed points
    CvScalar cvscalar1, cvscalar2;
    int order[25];
    Feature feature[100];                   // detected features (modes)
    double shiftor;
    CvMemStorage* storage = NULL;
    CvSeq *seq = 0, *temp_seq = 0, *prev_seq;

    //----------------------------- RGB to Luv conversion, initialization -----------------------------
    temp = cvCreateImage(cvSize(src->width, src->height), IPL_DEPTH_8U, src->nChannels);
    gray = cvCreateImage(cvSize(src->width, src->height), IPL_DEPTH_8U, 1);
    temp_mat        = cvCreateMat(src->height, src->width, CV_8UC3);
    final_class_mat = cvCreateMat(src->height, src->width, CV_8UC3);
    mask            = cvCloneMat(temp_mat);
    temp_mat_sub    = cvCreateMat(src->height, src->width, CV_32FC3);
    temp_mat_sub2   = cvCreateMat(src->height, src->width, CV_32FC3);
    cvZero(temp);
    cvCvtColor(src, temp, CV_RGB2Luv);      // RGB to Luv
    distance = cvCreateMat(src->height, src->width, CV_32FC1);
    result   = cvCreateMat(src->height, src->width, CV_8UC1);
    cvConvert(temp, temp_mat);              // IplImage to CvMat
    cn  = cvCreateMat(src->height, src->width, CV_32FC1);
    cn1 = cvCloneMat(cn);
    cn2 = cvCloneMat(cn);
    cn3 = cvCloneMat(cn);
    storage = cvCreateMemStorage(0);

    //----------------------------- compute the search window radius r -----------------------------
    if (r != 0)                             // a radius was supplied by the caller
        r1 = r;
    else                                    // otherwise estimate it from the spread of the Luv values
    {
        cvscalar1 = cvSum(temp_mat);
        avg_img[0] = cvscalar1.val[0] / (src->width * src->height);
        avg_img[1] = cvscalar1.val[1] / (src->width * src->height);
        avg_img[2] = cvscalar1.val[2] / (src->width * src->height);
        cvscalar1 = cvScalar(avg_img[0], avg_img[1], avg_img[2], 0);
        cvScale(temp_mat, temp_mat_sub, 1.0, 0.0);
        cvSubS(temp_mat_sub, cvscalar1, temp_mat_sub, NULL);
        cvMul(temp_mat_sub, temp_mat_sub, temp_mat_sub2);
        cvscalar1 = cvSum(temp_mat_sub2);
        r1 = 0.4 * cvSqrt((cvscalar1.val[0] + cvscalar1.val[1] + cvscalar1.val[2]) / (src->width * src->height));
    }

    // seed the random number generator
    srand((unsigned)time(NULL));

    //------------- main loop: mean-shift analysis of the feature space; terminates on Nmin -------------
    do
    {
        //----------------------------- initialize the search window positions -----------------------------
        run_meanshift_slec_number++;
        cvSet(distance, cvScalar(r1 * r1, 0, 0, 0), NULL);       // constant matrix holding the squared radius
        for (i = 0; i < 25; i++)                                 // pick 25 random seed positions
        {
            meanpoint[i].pt.x = rand() % src->width;
            meanpoint[i].pt.y = rand() % src->height;
        }

        // for every seed, count how many feature vectors fall inside its search window
        for (i = 0; i < 25; i++)
        {
            cvScale(temp_mat, temp_mat_sub, 1.0, 0.0);           // refresh the Luv copy (it is modified in place below)
            cvSplit(temp_mat_sub, cn, cn1, cn2, NULL);
            cvSubS(temp_mat_sub,
                   cvScalar(cvmGet(cn,  meanpoint[i].pt.y, meanpoint[i].pt.x),
                            cvmGet(cn1, meanpoint[i].pt.y, meanpoint[i].pt.x),
                            cvmGet(cn2, meanpoint[i].pt.y, meanpoint[i].pt.x), 0),
                   temp_mat_sub, NULL);
            cvMul(temp_mat_sub, temp_mat_sub, temp_mat_sub2, 1);
            cvSplit(temp_mat_sub2, cn, cn1, cn2, NULL);
            cvAdd(cn, cn1, cn3, NULL);
            cvAdd(cn2, cn3, cn3, NULL);                          // cn3 holds the squared distance from the current seed to every pixel
            cvCmp(cn3, distance, result, CV_CMP_LE);             // result is 0xFF where the distance is within the search radius
            cvAndS(result, cvScalar(1, 0, 0, 0), result, NULL);  // turn 0xFF into 1 so the sum equals the count
            cvscalar1 = cvSum(result);
            meanpoint[i].con_f_number = (int)cvscalar1.val[0];
        }

        // sort the seeds by decreasing window population (simple bubble sort on the index array)
        for (i = 0; i < 25; i++)
            order[i] = i;
        for (i = 0; i < 25; i++)
            for (j = 0; j < 25 - i - 1; j++)
            {
                if (meanpoint[order[j]].con_f_number < meanpoint[order[j + 1]].con_f_number)
                {
                    temp_number  = order[j];
                    order[j]     = order[j + 1];
                    order[j + 1] = temp_number;
                }
            }

        //----------------------------- mean-shift iterations -----------------------------
        double temp_mean[3];
        for (i = 0; i < 25; i++)
        {
            cvScale(temp_mat, temp_mat_sub, 1.0, 0.0);
            cvSplit(temp_mat_sub, cn, cn1, cn2, NULL);
            // start the mean at the Luv value of the i-th seed (densest first)
            temp_mean[0] = cvmGet(cn,  meanpoint[order[i]].pt.y, meanpoint[order[i]].pt.x);
            temp_mean[1] = cvmGet(cn1, meanpoint[order[i]].pt.y, meanpoint[order[i]].pt.x);
            temp_mean[2] = cvmGet(cn2, meanpoint[order[i]].pt.y, meanpoint[order[i]].pt.x);

            // mean-shift iteration: move the window until the shift vector is small enough
            do
            {
                // find the feature vectors inside the window and build the corresponding mask
                cvScale(temp_mat, temp_mat_sub, 1.0, 0.0);
                cvSubS(temp_mat_sub, cvScalar(temp_mean[0], temp_mean[1], temp_mean[2], 0), temp_mat_sub, NULL);
                cvMul(temp_mat_sub, temp_mat_sub, temp_mat_sub2, 1);
                cvSplit(temp_mat_sub2, cn, cn1, cn2, NULL);
                cvAdd(cn, cn1, cn3, NULL);
                cvAdd(cn2, cn3, cn3, NULL);                      // cn3: squared distance from the current mean to every pixel
                cvCmp(cn3, distance, result, CV_CMP_LE);         // result is 0xFF where the distance is within the search radius

                // compute the shift vector
                cvCopy(temp_mat, final_class_mat, NULL);
                cvMerge(result, result, result, NULL, mask);
                cvAnd(final_class_mat, mask, final_class_mat, NULL);   // zero the pixels outside the window
                cvScale(final_class_mat, temp_mat_sub, 1.0, 0.0);      // convert the in-window pixels from 8U to 32F
                cvAndS(result, cvScalar(1, 0, 0, 0), result, NULL);    // 0xFF -> 1
                cvscalar1 = cvSum(result);                             // number of feature vectors inside the window
                cvSubS(temp_mat_sub, cvScalar(temp_mean[0], temp_mean[1], temp_mean[2], 0), temp_mat_sub, result);
                cvscalar2 = cvSum(temp_mat_sub);
                cvscalar2.val[0] /= cvscalar1.val[0];
                cvscalar2.val[1] /= cvscalar1.val[0];
                cvscalar2.val[2] /= cvscalar1.val[0];
                shiftor = cvSqrt(pow(cvscalar2.val[0], 2) + pow(cvscalar2.val[1], 2) + pow(cvscalar2.val[2], 2));
                temp_mean[0] += cvscalar2.val[0];
                temp_mean[1] += cvscalar2.val[1];
                temp_mean[2] += cvscalar2.val[2];
            } while (shiftor > 0.1);                             // end of the mean-shift iteration for this seed

            //----------------------------- discard unimportant / duplicate features -----------------------------
            if (k == 0)                                          // first converged mode: always keep it
            {
                feature[k].pt.x  = temp_mean[0];
                feature[k].pt.y  = temp_mean[1];
                feature[k].pt.z  = temp_mean[2];
                feature[k].number = (int)cvscalar1.val[0];       // result holds 1 where the "<=" comparison held, 0 elsewhere
                pNmin = (int)cvscalar1.val[0];                   // number of feature vectors inside this mode's window
                feature[k].result = cvCreateMat(src->height, src->width, CV_8UC1);
                cvAndS(result, cvScalar(1, 0, 0, 0), result, NULL);
                cvCopy(result, feature[k].result, NULL);
                k++;
            }
            else                                                 // keep the mode only if it is farther than r1 from every existing mode
            {
                int flag = 0;
                for (j = 0; j < k; j++)
                {
                    if (pow(temp_mean[0] - feature[j].pt.x, 2)
                      + pow(temp_mean[1] - feature[j].pt.y, 2)
                      + pow(temp_mean[2] - feature[j].pt.z, 2) < r1 * r1)
                    {
                        flag = 1;
                        break;
                    }
                }
                if (flag == 0)
                {
                    feature[k].pt.x  = temp_mean[0];
                    feature[k].pt.y  = temp_mean[1];
                    feature[k].pt.z  = temp_mean[2];
                    feature[k].number = (int)cvscalar1.val[0];
                    pNmin = (int)cvscalar1.val[0];               // number of feature vectors inside this mode's window
                    feature[k].result = cvCreateMat(src->height, src->width, CV_8UC1);
                    cvCopy(result, feature[k].result, NULL);
                    k++;
                }
            }                                                    // end of feature pruning
        }                                                        // end of the loop over the 25 seeds
    } while (pNmin > Nmin && run_meanshift_slec_number < 60);    // stop once a window holds no more than Nmin vectors, or after 60 passes

    //--------------------------------------------- post-processing ---------------------------------------------
    // assign every still-unclassified pixel to the nearest mode in Luv space
    cvSetZero(result);
    for (i = 0; i < k; i++)
        cvOr(result, feature[i].result, result, NULL);           // union of all classified pixels
    cvScale(temp_mat, temp_mat_sub, 1.0, 0.0);
    cvSplit(temp_mat_sub, cn, cn1, cn2, NULL);
    for (i = 0; i < src->width; i++)
        for (j = 0; j < src->height; j++)
        {
            if (cvGetReal2D(result, j, i) == 0)                  // pixel not yet assigned to any mode
            {
                double unclass_dis, min_dis;
                int min_dis_index;
                for (p = 0; p < k; p++)
                {
                    unclass_dis = pow(feature[p].pt.x - cvmGet(cn,  j, i), 2)
                                + pow(feature[p].pt.y - cvmGet(cn1, j, i), 2)
                                + pow(feature[p].pt.z - cvmGet(cn2, j, i), 2);
                    if (p == 0 || unclass_dis < min_dis)
                    {
                        min_dis = unclass_dis;
                        min_dis_index = p;
                    }
                }                                                // end of the comparison against every mode
                cvSetReal2D(feature[min_dis_index].result, j, i, 1);
            }
        }                                                        // every pixel is now classified

    // paint each class with a random color
    cvSetZero(final_class_mat);
    for (i = 0; i < k; i++)
    {
        cvSet(temp_mat, cvScalar(rand() % 255, rand() % 255, rand() % 255, rand() % 255), feature[i].result);
        cvCopy(temp_mat, final_class_mat, feature[i].result);
    }
    cvConvert(final_class_mat, dst);

    // remove regions smaller than Ncon
    for (i = 0; i < k; i++)
    {
        cvClearMemStorage(storage);
        if (seq)
            cvClearSeq(seq);
        cvConvert(feature[i].result, gray);
        cvFindContours(gray, storage, &seq, sizeof(CvContour), CV_RETR_LIST);
        for (temp_seq = seq; temp_seq; temp_seq = temp_seq->h_next)
        {
            CvContour* cnt = (CvContour*)temp_seq;               // examine the contour being visited
            if (cnt->rect.width * cnt->rect.height < Ncon)       // bounding box smaller than Ncon: unlink it from the list
            {
                prev_seq = temp_seq->h_prev;
                if (prev_seq)
                {
                    prev_seq->h_next = temp_seq->h_next;
                    if (temp_seq->h_next)
                        temp_seq->h_next->h_prev = prev_seq;
                }
                else
                {
                    seq = temp_seq->h_next;
                    if (temp_seq->h_next)
                        temp_seq->h_next->h_prev = NULL;
                }
            }
        }
        // cvDrawContours(src, seq, CV_RGB(0,0,255), CV_RGB(0,0,255), 1);
    }
    //--------------------------------------------- release resources ---------------------------------------------
    cvReleaseImage(&temp);
    cvReleaseImage(&gray);
    cvReleaseMat(&distance);
    cvReleaseMat(&result);
    cvReleaseMat(&temp_mat);
    cvReleaseMat(&temp_mat_sub);
    cvReleaseMat(&temp_mat_sub2);
    cvReleaseMat(&final_class_mat);
    cvReleaseMat(&cn);
    cvReleaseMat(&cn1);
    cvReleaseMat(&cn2);
    cvReleaseMat(&cn3);
    cvReleaseMat(&mask);                    // also free the merge mask,
    cvReleaseMemStorage(&storage);          // the contour storage,
    for (i = 0; i < k; i++)
        cvReleaseMat(&feature[i].result);   // and the per-mode masks
}
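Before the results, here is a minimal sketch of how the routine might be driven. The file names, the Nmin/Ncon values, the hypothetical MyClustering.h header, and the assumption that MyClustering is default-constructible are illustrative and not taken from the original post; passing 0 for r lets the function estimate the search radius itself, as in the listing above.

#include <opencv/cv.h>
#include <opencv/highgui.h>
#include "MyClustering.h"   // hypothetical header declaring MyClustering::MeanShiftImg

int main()
{
    // load a color image (the flag 1 forces 3 channels)
    IplImage* src = cvLoadImage("input.jpg", 1);
    if (!src)
        return -1;

    // destination for the pseudo-colored segmentation (8-bit, 3 channels)
    IplImage* dst = cvCreateImage(cvGetSize(src), IPL_DEPTH_8U, 3);

    MyClustering clustering;
    // r = 0 -> estimate the radius from the image; Nmin = 50 and Ncon = 20 are example values
    clustering.MeanShiftImg(src, dst, 0.0f, 50, 20);

    cvSaveImage("segmented.jpg", dst);
    cvReleaseImage(&src);
    cvReleaseImage(&dst);
    return 0;
}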
》Results



Running time: 16.5 s
Original image:



Segmentation result:

Original image after being modified by the algorithm:

From: http://www.cnblogs.com/skyseraph/
Sina Weibo: http://weibo.com/u/1645794700/home?wvr=5&c=spr_web_360_hao360_weibo_t001
