鼠标轨迹识别

用神经网络的学习过程让一系列鼠标轨迹和具体的含义产生关联,然后就可以进行正常工作了;之后将学习算法替换为SVM,效率更高,效果更好。

以下代码使用了bpnn库,其数学原理为:事先给定一个随机系数的多层交叉累加运算过程,将已知的输入放入这个运算过程中,得到输出与预期输出之间的误差,再将误差按一定比例分配到每层的系数上,逐渐逼近正确参数。

/* Byte-wise copy of `len` bytes from `from` to `to` (memcpy-alike).
   Note: the old `register` qualifiers were removed — `register` is
   ill-formed in C++17 and was a no-op hint long before that. */
#define fastcopy(to,from,len)\
{\
  char *_to = (char *)(to);\
  const char *_from = (const char *)(from);\
  int _l = (len);\
  int _i;\
  for (_i = 0; _i < _l; _i++) *_to++ = *_from++;\
}


/*** Returns double random number between 0.0 and 1.0 ***/
double drnd_bpnn()
{
  return ((double) random() / (double) BIGRND);
}




/*** Return random number between -1.0 and 1.0 ***/
double dpn1()
{
  return ((drnd_bpnn() * 2.0) - 1.0);
}


/*** The squashing function.  Currently, it's a sigmoid. ***/


/*** Activation ("squashing") function: the logistic sigmoid
     1 / (1 + e^-x), mapping any real x into (0, 1). ***/
double squash(double x)
{
  const double e = exp(-x);
  return 1.0 / (1.0 + e);
}


/*
 * Allocate 1d array of doubles as memory addresses
 */
/*
 * Allocate a 1d array of n doubles (uninitialized).
 * Returns NULL (after logging) on allocation failure.
 */
double *alloc_1d_dbl_bpnn(int n)      // length of array
{
  double *pDat;      // the pDat double array


  /* size_t arithmetic: the old (unsigned) cast could truncate the
     byte count on LP64 platforms for large n. */
  pDat = (double *) malloc ((size_t) n * sizeof (double));
  if (pDat == NULL) {
    printf("ALLOC_1D_DBL: Couldn't allocate array of doubles\n");
    return (NULL);
  }
  return (pDat);
}




/*** Allocate 2d array of doubles ***/


double **alloc_2d_dbl(int m, int n)
{
  int i;
  double **pDat;


  pDat = (double **) malloc ((unsigned) (m * sizeof (double *)));
  if (pDat == NULL) {
    printf("ALLOC_2D_DBL: Couldn't allocate array of dbl ptrs\n");
    return (NULL);
  }


  for (i = 0; i < m; i++) {
    pDat[i] = (double*) alloc_1d_dbl_bpnn(n);
  }


  return (pDat);
}




void bpnn_randomize_weights(double **w, int m, int n)
{
  int i, j;


  for (i = 0; i <= m; i++) {
    for (j = 0; j <= n; j++) {
      w[i][j] = dpn1();
    }
  }
}




/*** Clear the full (m+1) x (n+1) weight matrix to 0.0, bias
     row/column included. ***/
void bpnn_zero_weights(double **w, int m, int n)
{
  for (int row = 0; row <= m; row++) {
    double *r = w[row];
    for (int col = 0; col <= n; col++) {
      r[col] = 0.0;
    }
  }
}




/*** Seed the BSD random() generator used for weight initialization,
     so training runs are reproducible for a given seed. ***/
void bpnn_initialize(int seed)
{
  srandom((unsigned) seed);
}




/*** Allocate the network shell: unit/delta/target vectors get one
     extra slot (index 0 is the bias/threshold unit) and weight
     matrices are (src+1) x (dst+1).  Weights are NOT initialized
     here — callers do that (see bpnn_create / bpnn_read). ***/
BPNN *bpnn_internal_create(int n_in, int n_hidden, int n_out)
{
  BPNN *net = (BPNN *) malloc (sizeof *net);
  if (net == NULL) {
    printf("BPNN_CREATE: Couldn't allocate neural network\n");
    return (NULL);
  }

  net->input_n  = n_in;
  net->hidden_n = n_hidden;
  net->output_n = n_out;

  net->input_units  = alloc_1d_dbl_bpnn(n_in + 1);
  net->hidden_units = alloc_1d_dbl_bpnn(n_hidden + 1);
  net->output_units = alloc_1d_dbl_bpnn(n_out + 1);

  net->hidden_delta = alloc_1d_dbl_bpnn(n_hidden + 1);
  net->output_delta = alloc_1d_dbl_bpnn(n_out + 1);
  net->target       = alloc_1d_dbl_bpnn(n_out + 1);

  net->input_weights  = alloc_2d_dbl(n_in + 1, n_hidden + 1);
  net->hidden_weights = alloc_2d_dbl(n_hidden + 1, n_out + 1);

  /* Momentum buffers: previous weight deltas. */
  net->input_prev_weights  = alloc_2d_dbl(n_in + 1, n_hidden + 1);
  net->hidden_prev_weights = alloc_2d_dbl(n_hidden + 1, n_out + 1);

  return net;
}




/*** Release every buffer owned by the network, then the network
     itself.  Safe to call with NULL (no-op), matching free()'s
     contract — the old code crashed on a NULL argument. ***/
void bpnn_free(BPNN *net)
{
  int i;

  if (net == NULL)
    return;

  const int n1 = net->input_n;
  const int n2 = net->hidden_n;

  free(net->input_units);
  free(net->hidden_units);
  free(net->output_units);

  free(net->hidden_delta);
  free(net->output_delta);
  free(net->target);

  /* Weight matrices have n+1 rows (row 0 is the bias row). */
  for (i = 0; i <= n1; i++) {
    free(net->input_weights[i]);
    free(net->input_prev_weights[i]);
  }
  free(net->input_weights);
  free(net->input_prev_weights);

  for (i = 0; i <= n2; i++) {
    free(net->hidden_weights[i]);
    free(net->hidden_prev_weights[i]);
  }
  free(net->hidden_weights);
  free(net->hidden_prev_weights);

  free(net);
}




/*** Creates a pDat fully-connected network from scratch,
     with the given numbers of input, hidden, and output units.
     Threshold units are automatically included.  All weights are
     randomly initialized.


     Space is also allocated for temporary storage (momentum weights,
     error computations, etc).
***/


/*** Creates a fully-connected network from scratch, with the given
     numbers of input, hidden, and output units.  Threshold units are
     automatically included.  Forward weights are randomized in
     [-1, 1]; the momentum (previous-delta) buffers start at zero.

     Returns NULL if allocation fails — the old code dereferenced the
     result of bpnn_internal_create without checking it. ***/
BPNN *bpnn_create(int n_in, int n_hidden, int n_out)
{
  BPNN *newnet = bpnn_internal_create(n_in, n_hidden, n_out);
  if (newnet == NULL)
    return (NULL);

  bpnn_randomize_weights(newnet->input_weights, n_in, n_hidden);
  bpnn_randomize_weights(newnet->hidden_weights, n_hidden, n_out);
  bpnn_zero_weights(newnet->input_prev_weights, n_in, n_hidden);
  bpnn_zero_weights(newnet->hidden_prev_weights, n_hidden, n_out);

  return (newnet);
}






void bpnn_layerforward(double *l1, double *l2, double **conn, int n1, int n2)
{
  double sum;
  int j, k;


  /*** Set up thresholding unit ***/
  l1[0] = 1.0;


  /*** For each unit in second layer ***/
  for (j = 1; j <= n2; j++) {


    /*** Compute weighted sum of its inputs ***/
    sum = 0.0;
    for (k = 0; k <= n1; k++) {
      sum += conn[k][j] * l1[k];
    }
    l2[j] = squash(sum);
  }


}




void bpnn_output_error(double *delta, double *target, double *output, int nj, double *err)
{
  int j;
  double o, t, errsum;


  errsum = 0.0;
  for (j = 1; j <= nj; j++) {
    o = output[j];
    t = target[j];
    delta[j] = o * (1.0 - o) * (t - o);
    errsum += ABS(delta[j]);
  }
  *err = errsum;
}




void bpnn_hidden_error(double *delta_h, int nh, double *delta_o, int no, double **who, double *hidden, double *err)
{
  int j, k;
  double h, sum, errsum;


  errsum = 0.0;
  for (j = 1; j <= nh; j++) {
    h = hidden[j];
    sum = 0.0;
    for (k = 1; k <= no; k++) {
      sum += delta_o[k] * who[j][k];
    }
    delta_h[j] = h * (1.0 - h) * sum;
    errsum += ABS(delta_h[j]);
  }
  *err = errsum;
}




/*** Gradient step with momentum for one weight matrix:
       dw        = eta * delta[j] * ly[k] + momentum * oldw[k][j]
       w[k][j]  += dw
       oldw[k][j] = dw   (remembered for the next momentum term)
     ly[0] is forced to 1.0 so column 0 updates the bias weights. ***/
void bpnn_adjust_weights(double *delta, int ndelta, double *ly, int nly, double **w, double **oldw, double eta, double momentum)
{
  ly[0] = 1.0;

  for (int j = 1; j <= ndelta; j++) {
    for (int k = 0; k <= nly; k++) {
      const double dw = eta * delta[j] * ly[k] + momentum * oldw[k][j];
      w[k][j] += dw;
      oldw[k][j] = dw;
    }
  }
}




/*** Pure forward pass (no learning): input -> hidden -> output.
     Results land in net->output_units[1..output_n]. ***/
void bpnn_feedforward(BPNN *net)
{
  bpnn_layerforward(net->input_units, net->hidden_units,
      net->input_weights, net->input_n, net->hidden_n);
  bpnn_layerforward(net->hidden_units, net->output_units,
      net->hidden_weights, net->hidden_n, net->output_n);
}




/*** One backprop training step on the sample currently loaded into
     net->input_units / net->target:
       1. forward pass,
       2. output- and hidden-layer error terms,
       3. weight updates with learning rate `eta` and `momentum`.
     *eo / *eh receive the summed absolute output / hidden errors. ***/
void bpnn_train(BPNN *net, double eta, double momentum, double *eo, double *eh)
{
  double out_err, hid_err;

  const int in  = net->input_n;
  const int hid = net->hidden_n;
  const int out = net->output_n;

  /* 1. Feed forward input activations. */
  bpnn_layerforward(net->input_units, net->hidden_units,
      net->input_weights, in, hid);
  bpnn_layerforward(net->hidden_units, net->output_units,
      net->hidden_weights, hid, out);

  /* 2. Compute error on output and hidden units. */
  bpnn_output_error(net->output_delta, net->target, net->output_units,
      out, &out_err);
  bpnn_hidden_error(net->hidden_delta, hid, net->output_delta, out,
      net->hidden_weights, net->hidden_units, &hid_err);
  *eo = out_err;
  *eh = hid_err;

  /* 3. Adjust hidden->output, then input->hidden weights. */
  bpnn_adjust_weights(net->output_delta, out, net->hidden_units, hid,
      net->hidden_weights, net->hidden_prev_weights, eta, momentum);
  bpnn_adjust_weights(net->hidden_delta, hid, net->input_units, in,
      net->input_weights, net->input_prev_weights, eta, momentum);

  /* (A "#if 0" ATLTRACE weight-dump block lived here; it was compiled
     out and never executed, so it has been removed.) */
}






/*** Serialize the network to `filename` in the binary layout:
       int n1, int n2, int n3,
       (n1+1)*(n2+1) doubles  (input->hidden weights, row-major),
       (n2+1)*(n3+1) doubles  (hidden->output weights, row-major).
     Each weight row is already a contiguous double array, so rows are
     written directly — byte-identical output to the old fastcopy
     staging buffer, without its unchecked malloc. ***/
void bpnn_save(BPNN *net, char *filename)
{
  int n1, n2, n3, i;
  FILE *fd;

  if ((fd = fopen(filename, "wb")) == NULL) {
    printf("BPNN_SAVE: Cannot create '%s'\n", filename);
    return;
  }

  n1 = net->input_n;  n2 = net->hidden_n;  n3 = net->output_n;
  printf("Saving %dx%dx%d network to '%s'\n", n1, n2, n3, filename);
  fflush(stdout);

  /* Header: the three layer sizes. */
  fwrite(&n1, sizeof(int), 1, fd);
  fwrite(&n2, sizeof(int), 1, fd);
  fwrite(&n3, sizeof(int), 1, fd);

  /* Input->hidden weights, one contiguous row at a time. */
  for (i = 0; i <= n1; i++)
    fwrite(net->input_weights[i], sizeof(double), (size_t)(n2 + 1), fd);

  /* Hidden->output weights. */
  for (i = 0; i <= n2; i++)
    fwrite(net->hidden_weights[i], sizeof(double), (size_t)(n3 + 1), fd);

  fclose(fd);
}






/*** Deserialize a network written by bpnn_save().  Returns NULL if the
     file can't be opened, is truncated in the header, or allocation
     fails — the old code ignored every fread result, so a short or
     corrupt file fed garbage layer sizes straight into the allocator.
     Momentum buffers are zeroed, matching bpnn_create()'s state. ***/
BPNN *bpnn_read(char *filename)
{
  BPNN *new1;
  int n1, n2, n3, i;
  FILE *fd;

  if ((fd = fopen(filename, "rb")) == NULL)
    return (NULL);

  printf("Reading '%s'\n", filename);  fflush(stdout);

  /* Header: layer sizes.  Bail out on a short read instead of
     allocating from uninitialized values. */
  if (fread(&n1, sizeof(int), 1, fd) != 1 ||
      fread(&n2, sizeof(int), 1, fd) != 1 ||
      fread(&n3, sizeof(int), 1, fd) != 1) {
    printf("BPNN_READ: '%s' is truncated\n", filename);
    fclose(fd);
    return (NULL);
  }

  new1 = bpnn_internal_create(n1, n2, n3);
  if (new1 == NULL) {
    fclose(fd);
    return (NULL);
  }

  printf("'%s' contains a %dx%dx%d network\n", filename, n1, n2, n3);
  printf("Reading input weights...");  fflush(stdout);

  /* Weight rows are contiguous double arrays, so read straight into
     them — same file layout as the old fastcopy staging buffer. */
  for (i = 0; i <= n1; i++)
    fread(new1->input_weights[i], sizeof(double), (size_t)(n2 + 1), fd);

  printf("Done\nReading hidden weights...");  fflush(stdout);

  for (i = 0; i <= n2; i++)
    fread(new1->hidden_weights[i], sizeof(double), (size_t)(n3 + 1), fd);

  fclose(fd);
  printf("Done\n");  fflush(stdout);

  bpnn_zero_weights(new1->input_prev_weights, n1, n2);
  bpnn_zero_weights(new1->hidden_prev_weights, n2, n3);

  return (new1);
}

// Button handler: smoke-tests the BP network on a 4-row truth table
// over inputs {0,1}x{0,1}.  Targets are 0.1 for (0,0) and (1,1) and
// 0.0 otherwise — an XNOR-like pattern with a soft "true" target,
// since a sigmoid output never reaches exactly 0 or 1.
LRESULT CMainDlg::OnClickedButton1(WORD wNotifyCode, WORD wID, HWND hWndCtl, BOOL& bHandled)
{
// Local training set: each datitem carries an input vector (indat)
// and target outputs (oudat).
vector<datitem> dats;

{
  datitem item;
item.indat.push_back(0.);
item.indat.push_back(0.);
item.oudat[0] = 0.1;
dats.push_back(item);
}
{
datitem item;
item.indat.push_back(0.);
item.indat.push_back(1.);
item.oudat[0] = 0.;
dats.push_back(item);
}
{
datitem item;
item.indat.push_back(1.);
item.indat.push_back(0.);
item.oudat[0] = 0.;
dats.push_back(item);
}
{
datitem item;
item.indat.push_back(1.);
item.indat.push_back(1.);
item.oudat[0] = 0.1;
dats.push_back(item);
}
// Fixed seed for reproducible weights; topology 2-5-1.
bpnn_initialize(0);
BPNN *bptmp = bpnn_create(2, 5, 1);
int j, i;
double err, eo, eh;
const int epochs = 3000;
TCHAR szBuf[256];
// Training: one bpnn_train() call per sample per epoch
// (eta = 0.8, momentum = 0.03).
for (i=0;i<epochs;i++)
{
err = 0.;
vector<datitem>::iterator it;

for (it=dats.begin(); it!=dats.end(); it++)
{
// Units are 1-based; slot 0 is the bias unit.
for (j=0; j<it->indat.size(); j++)
{
bptmp->input_units[j+1] = it->indat[j];
}
bptmp->target[1] = it->oudat[0];
bpnn_train(bptmp, 0.8, 0.03, &eo, &eh);
err += eo+eh;
//ATLTRACE("%f %f", eo, eh);
}
}

// Evaluation: forward pass per sample; dump input/output/target to
// the debugger.
{
err = 0.;
vector<datitem>::iterator it;

for (it=dats.begin(); it!=dats.end(); it++)
{
for (j=0; j<it->indat.size(); j++)
{
bptmp->input_units[j+1] = it->indat[j];
}

bpnn_feedforward(bptmp);
// NOTE(review): sprintf into a TCHAR buffer is only correct in an
// ANSI build; a UNICODE build would need _stprintf — confirm the
// project's character-set setting.
sprintf(szBuf, "(%f,%f)=%f target:%f", 
bptmp->input_units[1], bptmp->input_units[2], bptmp->output_units[1], 
it->oudat[0]);
OutputDebugStr(szBuf);
}
}

bpnn_free(bptmp);
return 0;
}
    
// Worker-thread entry point: trains the network `bp` (presumably a
// global or class member BPNN* — declared outside this view) on
// ins->dats until the summed output error drops below 0.01 or the
// dialog sets bQuit.
// NOTE(review): there is no epoch cap — the `epochs` constant below is
// never used — so if the error never converges this loop runs until
// bQuit is set.  Confirm whether a cap was intended.
DWORD CMainDlg::trainTrd(void* param)
{
    CMainDlg * ins = reinterpret_cast<CMainDlg*>(param);


    ins->SetDlgItemText(IDC_INFO2, "...");


double eo,eh,err;
int i, j;
eo = 0.;
eh = 0.;
const int epochs = 3000;  // NOTE(review): declared but never used
TCHAR szInfo[256];
for (i=0;;i++)
{
err = 0.;
vector<datitem>::iterator it;
for (it=ins->dats.begin(); it!=ins->dats.end(); it++)
{
// Copy up to 1024 inputs (units are 1-based; slot 0 is the bias)...
for (j=0; j<1024 && j<it->indat.size(); j++)
{
bp->input_units[j+1] = it->indat[j];
}
// ...then zero-pad the rest of the fixed-width input layer.
for (; j<1024; j++)
bp->input_units[j+1] = 0.;
bp->target[1] = it->oudat[0];
bp->target[2] = it->oudat[1];
bpnn_train(bp, 0.4, 0.3, &eo, &eh);
            err += eo;  // only the output-layer error drives the stop test
}
if(err < 0.01) break;
if(ins->bQuit) return 0;
//err/=dats.size();
// Progress display: epoch index and current total error.
sprintf(szInfo, "%5d %.3f ", i, err);
ins->SetDlgItemText(IDC_INFO2, szInfo);


//if((eo<0.0003) && (eh<0.00002)) break;
}
sprintf(szInfo, " Training finished: eo:%.3f ", err);
ins->SetDlgItemText(IDC_INFO2, szInfo);
    ins->hTrainTrd = NULL;
    return 0;
}


// Menu/button handler: load the training set from TRAINDAT and start
// the background training thread.  The file holds pairs of lines —
// first the space-separated input vector, then the two target values.
// Fixes vs. the old version: atof() was called on a NULL pointer when
// the targets line held only one value (UB/crash), and trailing spaces
// on an input line pushed a spurious 0.0 element.
LRESULT CMainDlg::OnTrain(WORD /*wNotifyCode*/, WORD /*wID*/, HWND /*hWndCtl*/, BOOL& /*bHandled*/)
{
    if(hTrainTrd) return 0;   // training already in progress
    DWORD dwTrd;

    ifstream isf(TRAINDAT);
    char szBuf[40960];
    char * p;
    dats.clear();
    while(!isf.eof())
    {
        datitem tmp;
        tmp.indat.clear();
        *szBuf = 0;
        isf.getline(szBuf, sizeof(szBuf));

        if(!*szBuf) break;    // blank line ends the data

        // Parse the input vector: space-separated doubles.
        p = szBuf;
        while(p && *p)
        {
            while(*p == ' ') p++;
            if(!*p) break;    // trailing spaces: no extra element
            tmp.indat.push_back(atof(p));
            p = strchr(p, ' ');
        }

        // Parse the two output targets from the next line.
        isf.getline(szBuf, sizeof(szBuf));
        p = szBuf;
        while(*p == ' ') p++;
        tmp.oudat[0] = atof(p);
        p = strchr(p, ' ');
        tmp.oudat[1] = p ? atof(p) : 0.;  // guard: second value may be absent

        dats.push_back(tmp);
    }
    isf.close();

    hTrainTrd = CreateThread(NULL, 0, trainTrd, (void*)this, 0, &dwTrd);
    return 1;
}

  • 0
    点赞
  • 5
    收藏
    觉得还不错? 一键收藏
  • 0
    评论

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值