Thank you for your comments.
After correcting the location you pointed out, we can now avoid forced termination.thank you very much。
However, the program did not pass the coordinates well.
If you rewrite the program by referring to the comments, comment out and post a new one.
When I entered the above one, the coordinates for matching returned were (y, x)=(45,204).(Image 1 has 225×225 pixels, clearly farther than the target pixel)
I don't know if the program formula is wrong or if the algorithm is used incorrectly in the first place.
I use C language and GCC.
I would like to implement the first pixel with the highest similarity using ZNCC (template matching), based on brightness information, but the program terminates at a certain location where the search is located // The termination has been resolved!Thank you!
The pixels in the image are positioned in a one-dimensional array.
img represents the original large image and temp represents the template image.
Within the NCC function, we find the similarity starting from (x,y) of the original image, and find the coordinates with the highest similarity in the template matching function.
I'm a beginner in programming and can only write a code that doesn't work, but I appreciate your cooperation.
typedef structure{
unsigned int width;
unsigned height;
Rgb*rgb;
Hsv*hsv;
}Image;
int template matching (Image*img, Image*temp)
{
inti,j,k,x,y,index;
double ncc = -1.0, n, mu = 0;
for(k=0;k<temp->height*temp->width;k++){
mu+=temp->hsv[k].v;
}
mu = mu/k;
for(i=0;i<img->height;i++){
for(j=0;j<img->width;j++){
if(ncc<(n=NCC(img,temp,j,i,mu)))
{
ncc = n;
y=i;
x = j;
}
}
}
index=y*img->width+x;
return index;
}
double NCC (Image*img, Image*temp, intx, inty, double mu)
{
inti,j,k = 0,index;
double bunsi = 0, bunbo1 = 0, bunbo2 = 0;
double ncc, mi = 0;
if(img->width-x<temp->width/3||img->height-y<temp->height/3){return-2.0;} // I thought this would work better.
index=y*img->width+x;
for(i=0;i<temp->height-1;i++){
for(j=0;j<temp->width-1;j++){
if((y+(i*img->width))>img->height)||(x+j>img->width))break;
mi+=(img->hsv[index+i*img->width+j].v);
k++;
}
}
mi = mi/k;
for(i=0;i<temp->height-1;i++){
for(j=0;j<temp->width-1;j++){
// if((y+i*img->width>img->height)||(x+j>img->width))break;
if((y+i)>img->height||(x+j>img->width))break;
bunsi+=(img->hsv[index+i*img->width+j].v-mi)*(temp->hsv[i*temp->width+j].v-mu);
// bunbo1+=sqrt((img->hsv[index+i*img->width+j].v-mi)*(img->hsv[index+i*img->width+j].v-mi));
// bunbo2+=sqrt(temp->hsv [i*temp->width+j].v-mu)*(temp->hsv [i*temp->width+j].v-mu));
bunbo1+=fabs(img->hsv[index+i*img->width+j].v-mi);
bunbo2+=fabs(img->hsv[index+i*img->width+j].v-mi);
}
}
if(bunbo1==0||bunbo2==0)return-2.0;//Return -2 if denominator is 0 just in case
ncc=bunsi/(bunbo1*bunbo2);
return ncc;
}
if(y+i)>img->height||(x+j>img->width))break;
results in
correcting out-of-range access
At the edge of the image, the area to be aggregated has changed, making it inappropriate as a measure.
Simply targeting only the extent that the temp range does not extend beyond img is a natural result.
If you want to match a part of the image, I think you need to design a different scale.
double NCC (Image*img, Image*temp, intx, inty, double mu)
{
// Returns -2 if template image is out of range of matching image
if(x+temp->width>=img->width)
return-2.0;
if(y+temp->height>=img->height)
return-2.0;
const int index=y*img->width+x;
double mi = 0;
intk = 0;
for (inti=0; i<temp->height-1;i++) {
for (int j=0; j<temp->width-1;j++) {
if(((y+(i*img->width))>img->height)||(x+j>img->width))break;
mi+=(img->hsv[index+i*img->width+j].v);
k++;
}
}
mi = mi/k;
double bunsi = 0, bunbo1 = 0, bunbo2 = 0;
for (inti=0; i<temp->height-1;i++)
{
for (int j=0; j<temp->width-1;j++)
{
bunsi+=(img->hsv[index+i*img->width+j].v-mi)*(temp->hsv[i*temp->width+j].v-mu);
bunbo1+=fabs(img->hsv[index+i*img->width+j].v-mi);
bunbo2+=fabs(img->hsv[index+i*img->width+j].v-mu);//mi to mu
}
}
if (bunbo1==0||bunbo2==0)
return -2.0; // Returns -2 if denominator is 0 just in case
double ncc=bunsi/(bunbo1*bunbo2);
return ncc;
}
© 2024 OneMinuteCode. All rights reserved.