MATLAB: Undefined symbols for architecture x86_64:

Tags: mex, mex compiler, osx, r2012a

#include "mex.h"
void unravel(uint16_T *hx, double *link, double *x, double xsz,int hxsz)
{
int i=15;
int j=0, k=0, n=0;
while (xsz-k)
{
if (*(link + n) >0) {
if ((*(hx + j) >> i) & 0x0001)
n=*(link + n);
else n=*(link + n) - 1;
if (i) i--; else { j++; i= 15;}
if (j>hxsz)
mexErrMsgTxt("Out of cod ebits ???");
}
else {
*(x + k++) =-*(link +n);
n=0; }
}
if (k== xsz -1)
*(x+ k++) =-*(link +n);
}
void mexFunction(int nlhs, mxArray *plhs[], int nrhs, const mxArray *prhs[])
{
    double *link, *x, xsz;
    uint16_T *hx;
    int hxsz;

    /* Check the input and output arguments */
    if (nrhs != 3)
        mexErrMsgTxt("Three inputs required.");
    else if (nlhs > 1)
        mexErrMSgTxt("Too many output arguments.");
    if (!mxIsDouble(prhs[2]) || mxIsComplex(prhs[2]) ||
        mxGetN(prhs[2]) * mxGetM(prhs[2]) != 1)
        mexErrMsgTxt("Input size must be a scalar");

    /* Get the inputs */
    hx = (uint16_T *) mxGetData(prhs[0]);
    link = (double *) mxGetData(prhs[1]);
    xsz = mxGetScalar(prhs[2]);
    hxsz = mxGetM(prhs[0]);

    /* Create the output array and decode into it */
    plhs[0] = mxCreateDoubleMatrix(xsz, 1, mxREAL);
    x = (double *) mxGetData(plhs[0]);
    unravel(hx, link, x, xsz, hxsz);
}
This is my program for the unravel function from Gonzalez's book Image Processing Using MATLAB, but when I try to compile it with mex in MATLAB I get the following error:
>> mex unrav.c
Undefined symbols for architecture x86_64:
  "_mexErrMSgTxt", referenced from:
      _mexFunction in unrav.o
ld: symbol(s) not found for architecture x86_64
collect2: ld returned 1 exit status
mex: link of ' "unrav.mexmaci64"' failed.

I am using Mac OS X Lion 10.7.2 and my MATLAB version is R2012a.
Please help me; this has me in a panic and I can't work out what to do next.

Best Answer

In the line:

mexErrMSgTxt("Too many output arguments.");

mexErrMSgTxt has the wrong upper/lower case: you need mexErrMsgTxt. C is case-sensitive, so the compiler accepts the misspelled call (typically with only an implicit-declaration warning) and the mistake only surfaces at link time, when ld cannot find a symbol named _mexErrMSgTxt in the MEX library.
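
For reference, here is a minimal sketch of the corrected argument check; only the capitalization of the function name changes, and the rest of unrav.c stays exactly as it is:

/* Corrected call: mexErrMsgTxt (lower-case "s") is the name the
   MEX library actually exports, so the linker can resolve it. */
if (nrhs != 3)
    mexErrMsgTxt("Three inputs required.");
else if (nlhs > 1)
    mexErrMsgTxt("Too many output arguments.");

After saving the change, recompile with mex unrav.c and the "Undefined symbols" link error should go away.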