Requesting C++ source code for the BP (back-propagation) neural network algorithm

// AnnBP.cpp: implementation of the CAnnBP class.
//
//////////////////////////////////////////////////////////////////////

#include "StdAfx.h"
#include "AnnBP.h"
#include "math.h"

//////////////////////////////////////////////////////////////////////
// Construction/Destruction
//////////////////////////////////////////////////////////////////////

CAnnBP::CAnnBP()
{
    eta1 = 0.3;       /* default learning rate */
    momentum1 = 0.3;  /* default momentum */
}

CAnnBP::~CAnnBP()
{
}

/*** Returns a double-precision random number between 0.0 and 1.0
     (assumes BIGRND, defined in AnnBP.h, matches the range of rand()) ***/
double CAnnBP::drnd()
{
    return ((double) rand() / (double) BIGRND);
}

/*** Returns a double-precision random number between -1.0 and 1.0
     (assumes RAND_MAX == 32767, as in the MSVC runtime) ***/
double CAnnBP::dpn1()
{
    return (double) (rand()) / (32767 / 2) - 1;
}

/*** Activation (squashing) function; currently the sigmoid ***/
double CAnnBP::squash(double x)
{
    return (1.0 / (1.0 + exp(-x)));
}

/*** Allocate a 1-D array of doubles ***/
double* CAnnBP::alloc_1d_dbl(int n)
{
    double *new1;

    new1 = (double *) malloc((unsigned) (n * sizeof(double)));
    if (new1 == NULL) {
        AfxMessageBox("ALLOC_1D_DBL: Couldn't allocate array of doubles\n");
        return (NULL);
    }
    return (new1);
}

/*** Allocate a 2-D array of doubles ***/
double** CAnnBP::alloc_2d_dbl(int m, int n)
{
    int i;
    double **new1;

    new1 = (double **) malloc((unsigned) (m * sizeof(double *)));
    if (new1 == NULL) {
        AfxMessageBox("ALLOC_2D_DBL: Couldn't allocate array of dbl ptrs\n");
        return (NULL);
    }
    for (i = 0; i < m; i++) {
        new1[i] = alloc_1d_dbl(n);
    }
    return (new1);
}

/*** Randomly initialize the weights ***/
void CAnnBP::bpnn_randomize_weights(double **w, int m, int n)
{
    int i, j;

    for (i = 0; i <= m; i++) {
        for (j = 0; j <= n; j++) {
            w[i][j] = dpn1();
        }
    }
}

/*** Initialize the weights to zero ***/
void CAnnBP::bpnn_zero_weights(double **w, int m, int n)
{
    int i, j;

    for (i = 0; i <= m; i++) {
        for (j = 0; j <= n; j++) {
            w[i][j] = 0.0;
        }
    }
}

/*** Set the seed of the random number generator ***/
void CAnnBP::bpnn_initialize(int seed)
{
    CString msg, s;

    msg = "Random number generator seed:";
    s.Format("%d", seed);
    AfxMessageBox(msg + s);
    srand(seed);
}

/*** Create a BP network ***/
BPNN* CAnnBP::bpnn_internal_create(int n_in, int n_hidden, int n_out)
{
    BPNN *newnet;

    newnet = (BPNN *) malloc(sizeof(BPNN));
    if (newnet == NULL) {
        printf("BPNN_CREATE: Couldn't allocate neural network\n");
        return (NULL);
    }

    newnet->input_n = n_in;
    newnet->hidden_n = n_hidden;
    newnet->output_n = n_out;

    /* each layer gets one extra slot: index 0 is the bias (threshold) unit */
    newnet->input_units = alloc_1d_dbl(n_in + 1);
    newnet->hidden_units = alloc_1d_dbl(n_hidden + 1);
    newnet->output_units = alloc_1d_dbl(n_out + 1);

    newnet->hidden_delta = alloc_1d_dbl(n_hidden + 1);
    newnet->output_delta = alloc_1d_dbl(n_out + 1);
    newnet->target = alloc_1d_dbl(n_out + 1);

    newnet->input_weights = alloc_2d_dbl(n_in + 1, n_hidden + 1);
    newnet->hidden_weights = alloc_2d_dbl(n_hidden + 1, n_out + 1);

    newnet->input_prev_weights = alloc_2d_dbl(n_in + 1, n_hidden + 1);
    newnet->hidden_prev_weights = alloc_2d_dbl(n_hidden + 1, n_out + 1);

    return (newnet);
}

/* Free the memory occupied by the BP network */
void CAnnBP::bpnn_free(BPNN *net)
{
    int n1, n2, i;

    n1 = net->input_n;
    n2 = net->hidden_n;

    free((char *) net->input_units);
    free((char *) net->hidden_units);
    free((char *) net->output_units);
    free((char *) net->hidden_delta);
    free((char *) net->output_delta);
    free((char *) net->target);

    for (i = 0; i <= n1; i++) {
        free((char *) net->input_weights[i]);
        free((char *) net->input_prev_weights[i]);
    }
    free((char *) net->input_weights);
    free((char *) net->input_prev_weights);

    for (i = 0; i <= n2; i++) {
        free((char *) net->hidden_weights[i]);
        free((char *) net->hidden_prev_weights[i]);
    }
    free((char *) net->hidden_weights);
    free((char *) net->hidden_prev_weights);

    free((char *) net);
}

/*** Create a BP network and initialize its weights ***/
BPNN* CAnnBP::bpnn_create(int n_in, int n_hidden, int n_out)
{
    BPNN *newnet;

    newnet = bpnn_internal_create(n_in, n_hidden, n_out);

#ifdef INITZERO
    bpnn_zero_weights(newnet->input_weights, n_in, n_hidden);      /* input-to-hidden weights start at zero if INITZERO is defined */
#else
    bpnn_randomize_weights(newnet->input_weights, n_in, n_hidden); /* otherwise they start at random values in [-1, 1] */
#endif
    bpnn_randomize_weights(newnet->hidden_weights, n_hidden, n_out);
    bpnn_zero_weights(newnet->input_prev_weights, n_in, n_hidden);
    bpnn_zero_weights(newnet->hidden_prev_weights, n_hidden, n_out);

    return (newnet);
}

/*** Forward-propagate layer l1 into layer l2 through the weight matrix conn ***/
void CAnnBP::bpnn_layerforward(double *l1, double *l2, double **conn, int n1, int n2)
{
    double sum;
    int j, k;

    /*** Set the bias (threshold) unit ***/
    l1[0] = 1.0;

    /*** For each neuron in the second layer ***/
    for (j = 1; j <= n2; j++) {
        /*** Compute the weighted sum of its inputs ***/
        sum = 0.0;
        for (k = 0; k <= n1; k++) {
            sum += conn[k][j] * l1[k];
        }
        l2[j] = squash(sum);
    }
}

/* Output-layer error */
void CAnnBP::bpnn_output_error(double *delta, double *target, double *output, int nj, double *err)
{
    int j;
    double o, t, errsum;

    errsum = 0.0;
    for (j = 1; j <= nj; j++) {
        o = output[j];
        t = target[j];
        delta[j] = o * (1.0 - o) * (t - o);
        errsum += ABS(delta[j]);
    }
    *err = errsum;
}
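
For reference, the delta computed above is the usual back-propagation term for a sigmoid output unit under a squared-error cost; a brief sketch of the derivation (notation is mine, not from the original posting):

    E = \tfrac{1}{2} \sum_j (t_j - o_j)^2, \qquad o_j = \sigma(net_j), \qquad \sigma'(net_j) = o_j (1 - o_j)

    \delta_j = -\frac{\partial E}{\partial net_j} = (t_j - o_j)\, o_j (1 - o_j)

which is exactly delta[j] = o * (1.0 - o) * (t - o) in the code; the returned *err is simply the sum of |delta_j| and serves as a rough convergence measure.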

/* Hidden-layer error */
void CAnnBP::bpnn_hidden_error(double *delta_h, int nh, double *delta_o, int no, double **who, double *hidden, double *err)
{
    int j, k;
    double h, sum, errsum;

    errsum = 0.0;
    for (j = 1; j <= nh; j++) {
        h = hidden[j];
        sum = 0.0;
        for (k = 1; k <= no; k++) {
            sum += delta_o[k] * who[j][k];
        }
        delta_h[j] = h * (1.0 - h) * sum;
        errsum += ABS(delta_h[j]);
    }
    *err = errsum;
}

/* Adjust the weights */
void CAnnBP::bpnn_adjust_weights(double *delta, int ndelta, double *ly, int nly, double **w, double **oldw, double eta, double momentum)
{
    double new_dw;
    int k, j;

    ly[0] = 1.0;   /* bias unit */
    for (j = 1; j <= ndelta; j++) {
        for (k = 0; k <= nly; k++) {
            new_dw = ((eta * delta[j] * ly[k]) + (momentum * oldw[k][j]));
            w[k][j] += new_dw;
            oldw[k][j] = new_dw;
        }
    }
}
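
In symbols, the update applied above is the classic gradient step with a momentum term (notation mine):

    \Delta w_{kj}(t) = \eta\, \delta_j\, y_k + \alpha\, \Delta w_{kj}(t-1), \qquad w_{kj} \leftarrow w_{kj} + \Delta w_{kj}(t)

where \eta is the learning rate eta, \alpha is momentum, \delta_j is delta[j], y_k is the activation ly[k], and oldw holds the previous update \Delta w_{kj}(t-1).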

/* Run a forward pass through the network */
void CAnnBP::bpnn_feedforward(BPNN *net)
{
    int in, hid, out;

    in = net->input_n;
    hid = net->hidden_n;
    out = net->output_n;

    /*** Feed forward the input activations ***/
    bpnn_layerforward(net->input_units, net->hidden_units,
                      net->input_weights, in, hid);
    bpnn_layerforward(net->hidden_units, net->output_units,
                      net->hidden_weights, hid, out);
}

/* Train the BP network on one pattern: forward pass, error computation, weight update */
void CAnnBP::bpnn_train(BPNN *net, double eta, double momentum, double *eo, double *eh)
{
    int in, hid, out;
    double out_err, hid_err;

    in = net->input_n;
    hid = net->hidden_n;
    out = net->output_n;

    /*** Feed forward the input activations ***/
    bpnn_layerforward(net->input_units, net->hidden_units,
                      net->input_weights, in, hid);
    bpnn_layerforward(net->hidden_units, net->output_units,
                      net->hidden_weights, hid, out);

    /*** Compute the output-layer and hidden-layer errors ***/
    bpnn_output_error(net->output_delta, net->target, net->output_units,
                      out, &out_err);
    bpnn_hidden_error(net->hidden_delta, hid, net->output_delta, out,
                      net->hidden_weights, net->hidden_units, &hid_err);
    *eo = out_err;
    *eh = hid_err;

    /*** Adjust the hidden-to-output and input-to-hidden weights ***/
    bpnn_adjust_weights(net->output_delta, out, net->hidden_units, hid,
                        net->hidden_weights, net->hidden_prev_weights, eta, momentum);
    bpnn_adjust_weights(net->hidden_delta, hid, net->input_units, in,
                        net->input_weights, net->input_prev_weights, eta, momentum);
}

/* Save the BP network to a file */
void CAnnBP::bpnn_save(BPNN *net, char *filename)
{
    CFile file;
    CFileException e;
    char *mem;
    int n1, n2, n3, i, j, memcnt;
    double dvalue, **w;

    n1 = net->input_n; n2 = net->hidden_n; n3 = net->output_n;
    printf("Saving %dx%dx%d network to '%s'\n", n1, n2, n3, filename);

    /* CFile::Open reports failure through its return value and the optional
       CFileException argument rather than by throwing, so check the result */
    if (!file.Open(filename, CFile::modeWrite | CFile::modeCreate | CFile::modeNoTruncate, &e)) {
        e.ReportError();
        return;
    }

    file.Write(&n1, sizeof(int));
    file.Write(&n2, sizeof(int));
    file.Write(&n3, sizeof(int));

    /* input-to-hidden weights */
    memcnt = 0;
    w = net->input_weights;
    mem = (char *) malloc((unsigned) ((n1 + 1) * (n2 + 1) * sizeof(double)));
    for (i = 0; i <= n1; i++) {
        for (j = 0; j <= n2; j++) {
            dvalue = w[i][j];
            fastcopy(&mem[memcnt], &dvalue, sizeof(double));
            memcnt += sizeof(double);
        }
    }
    file.Write(mem, sizeof(double) * (n1 + 1) * (n2 + 1));
    free(mem);

    /* hidden-to-output weights */
    memcnt = 0;
    w = net->hidden_weights;
    mem = (char *) malloc((unsigned) ((n2 + 1) * (n3 + 1) * sizeof(double)));
    for (i = 0; i <= n2; i++) {
        for (j = 0; j <= n3; j++) {
            dvalue = w[i][j];
            fastcopy(&mem[memcnt], &dvalue, sizeof(double));
            memcnt += sizeof(double);
        }
    }
    file.Write(mem, (n2 + 1) * (n3 + 1) * sizeof(double));
    free(mem);   /* this free was commented out in the original, leaking the buffer */

    file.Close();
    return;
}

/* Read a BP network from a file */
BPNN* CAnnBP::bpnn_read(char *filename)
{
    char *mem;
    BPNN *new1;
    int n1, n2, n3, i, j, memcnt;
    CFile file;
    CFileException e;

    /* CFile::Open reports failure through its return value, not an exception */
    if (!file.Open(filename, CFile::modeRead | CFile::modeCreate | CFile::modeNoTruncate, &e)) {
        e.ReportError();
        return (NULL);
    }

    // printf("Reading '%s'\n", filename);
    file.Read(&n1, sizeof(int));
    file.Read(&n2, sizeof(int));
    file.Read(&n3, sizeof(int));

    new1 = bpnn_internal_create(n1, n2, n3);
    // printf("'%s' contains a %dx%dx%d network\n", filename, n1, n2, n3);

    /* input-to-hidden weights */
    // printf("Reading input weights...");
    memcnt = 0;
    mem = (char *) malloc(((n1 + 1) * (n2 + 1) * sizeof(double)));
    file.Read(mem, ((n1 + 1) * (n2 + 1)) * sizeof(double));
    for (i = 0; i <= n1; i++) {
        for (j = 0; j <= n2; j++) {
            fastcopy(&(new1->input_weights[i][j]), &mem[memcnt], sizeof(double));
            memcnt += sizeof(double);
        }
    }
    free(mem);

    /* hidden-to-output weights */
    // printf("Done\nReading hidden weights...");
    memcnt = 0;
    mem = (char *) malloc(((n2 + 1) * (n3 + 1) * sizeof(double)));
    file.Read(mem, (n2 + 1) * (n3 + 1) * sizeof(double));
    for (i = 0; i <= n2; i++) {
        for (j = 0; j <= n3; j++) {
            fastcopy(&(new1->hidden_weights[i][j]), &mem[memcnt], sizeof(double));
            memcnt += sizeof(double);
        }
    }
    free(mem);
    file.Close();
    printf("Done\n");

    bpnn_zero_weights(new1->input_prev_weights, n1, n2);
    bpnn_zero_weights(new1->hidden_prev_weights, n2, n3);
    return (new1);
}

void CAnnBP::CreateBP(int n_in, int n_hidden, int n_out)
{
    net = bpnn_create(n_in, n_hidden, n_out);
}

void CAnnBP::FreeBP()
{
    bpnn_free(net);
}

void CAnnBP::Train(double *input_unit, int input_num, double *target, int target_num, double *eo, double *eh)
{
    /* copy into the 1-based layer arrays (index 0 is the bias unit) */
    for (int i = 1; i <= input_num; i++) {
        net->input_units[i] = input_unit[i - 1];
    }
    for (int j = 1; j <= target_num; j++) {
        net->target[j] = target[j - 1];
    }
    bpnn_train(net, eta1, momentum1, eo, eh);
}

void CAnnBP::Identify(double *input_unit, int input_num, double *target, int target_num)
{
    for (int i = 1; i <= input_num; i++) {
        net->input_units[i] = input_unit[i - 1];
    }
    bpnn_feedforward(net);
    for (int j = 1; j <= target_num; j++) {
        target[j - 1] = net->output_units[j];
    }
}

void CAnnBP::Save(char *filename)
{
    bpnn_save(net, filename);
}

void CAnnBP::Read(char *filename)
{
    net = bpnn_read(filename);
}

void CAnnBP::SetBParm(double eta, double momentum)
{
    eta1 = eta;
    momentum1 = momentum;
}

void CAnnBP::Initialize(int seed)
{
    bpnn_initialize(seed);
}
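
/* End-to-end usage sketch: training the XOR function through the wrapper
   class above. This only illustrates the call sequence; the layer sizes,
   seed, epoch count and file name are assumptions on my part, the network
   may need more epochs (or another seed) to converge, and it presumes the
   member functions used here are declared public in AnnBP.h. */
void DemoXor()
{
    double patterns[4][2] = { {0,0}, {0,1}, {1,0}, {1,1} };
    double targets[4][1]  = { {0},   {1},   {1},   {0}   };
    double result[1];
    double eo, eh;
    char netfile[] = "xor.net";              /* hypothetical file name */
    int epoch, s;
    CAnnBP bp;

    bp.Initialize(1);                        /* seeds srand(); also pops a message box */
    bp.SetBParm(0.3, 0.3);                   /* learning rate and momentum */
    bp.CreateBP(2, 4, 1);                    /* 2 inputs, 4 hidden units, 1 output */

    for (epoch = 0; epoch < 5000; epoch++) { /* assumed epoch count */
        for (s = 0; s < 4; s++) {
            bp.Train(patterns[s], 2, targets[s], 1, &eo, &eh);
        }
    }

    for (s = 0; s < 4; s++) {
        bp.Identify(patterns[s], 2, result, 1);
        printf("%.0f XOR %.0f -> %.3f\n", patterns[s][0], patterns[s][1], result[0]);
    }

    bp.Save(netfile);                        /* persist the trained weights */
    bp.FreeBP();                             /* release the network */
}

A fresh CAnnBP instance could later call Read("xor.net") followed by Identify to reuse the saved weights without retraining.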