Neural Network Classification Based on C#
using System;
using System.IO;
using System.Text;
namespace BpANNet
{
/// <summary>
/// Summary description for BpNet: a three-layer BP (back-propagation) neural network.
/// </summary>
public class BpNet
{
static int err1 = 0; // test samples whose largest output is node 0
static int err2 = 0; // test samples whose largest output is node 1
static int err3 = 0; // test samples whose largest output is node 2
public int inNum;//number of input nodes
int hideNum;//number of hidden nodes
public int outNum;//number of output nodes
public int sampleNum;//total number of training samples
Random R;
double[] x;//input-node values
double[] x1;//hidden-node outputs
double[] x2;//output-node outputs
double[] o1;//hidden-layer net inputs
double[] o2;//output-layer net inputs
public double[,] w;//input-to-hidden weight matrix W
public double[,] v;//hidden-to-output weight matrix V
public double[,] dw;//weight increments for W (declared but unused here)
public double[,] dv;//weight increments for V (declared but unused here)
public double rate;//learning rate
public double[] b1;//hidden-layer thresholds (biases)
public double[] b2;//output-layer thresholds (biases)
public double[] db1;//threshold increments for the hidden layer (unused here)
public double[] db2;//threshold increments for the output layer (unused here)
double[] pp;//hidden-layer error terms
double[] qq;//output-layer error terms
double[] yd;//target (teacher) values for the output layer
public double e;//accumulated output error (square root of the summed squared error)
double in_rate;//normalization factor
public int computeHideNum(int m, int n)
{
double s = Math.Sqrt(m+n)+2;
int ss = Convert.ToInt32(s);
return ss;
}
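// Note (added): computeHideNum implements the common empirical heuristic
// hideNum ≈ sqrt(inNum + outNum) + 2. For the 13-feature, 3-class data set
// used in Main below it yields Convert.ToInt32(Math.Sqrt(13 + 3) + 2) = 6
// hidden nodes.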
public BpNet(double[,] p, double[,] t)
{
// constructor logic
R = new Random();
this.inNum = p.GetLength(1);
this.outNum = t.GetLength(1);
this.hideNum = computeHideNum(inNum, outNum);
// this.hideNum=18;
this.sampleNum = p.GetLength(0);
Console.WriteLine("输入节点数目: " + inNum);
Console.WriteLine("隐层节点数目:" + hideNum);
Console.WriteLine("输出层节点数目:" + outNum);
x = new double[inNum];
x1 = new double[hideNum];
x2 = new double[outNum];
o1 = new double[hideNum];
o2 = new double[outNum];
w = new double[inNum, hideNum];
v = new double[hideNum, outNum];
dw = new double[inNum, hideNum];
dv = new double[hideNum, outNum];
b1 = new double[hideNum];
b2 = new double[outNum];
db1 = new double[hideNum];
db2 = new double[outNum];
pp = new double[hideNum];
qq = new double[outNum];
yd = new double[outNum];
//initialize W with random values in (-0.5, 0.5)
for (int i = 0; i < inNum; i++)
{
for (int j = 0; j < hideNum; j++)
{
w[i, j] = (R.NextDouble() * 2 - 1.0) / 2;
}
}
//initialize V with random values in (-0.5, 0.5)
for (int i = 0; i < hideNum; i++)
{
for (int j = 0; j < outNum; j++)
{
v[i, j] = (R.NextDouble() * 2 - 1.0) / 2;
}
}
rate = 0.2;
e = 0.0;
in_rate = 1.0;
}
//training function: one epoch over all samples with per-sample (online) weight updates
public void train(double[,] p, double[,] t)
{
e = 0.0;
//find the largest absolute value in p and t, used as the normalization factor
double pMax = 0.0;
for (int isamp = 0; isamp < sampleNum; isamp++)
{
for (int i = 0; i < inNum; i++)
{
if (Math.Abs(p[isamp, i]) > pMax)
{
pMax = Math.Abs(p[isamp, i]);
}
}
for (int j = 0; j < outNum; j++)
{
if (Math.Abs(t[isamp, j]) > pMax)
{
pMax = Math.Abs(t[isamp, j]);
}
}
}//end isamp
in_rate = pMax;
for (int isamp = 0; isamp < sampleNum; isamp++)
{
//normalize inputs and targets by the maximum absolute value
for (int i = 0; i < inNum; i++)
{
x[i] = p[isamp, i] / in_rate;
}
for (int i = 0; i < outNum; i++)
{
yd[i] = t[isamp, i] / in_rate;
}
//compute hidden-layer net input and output (sigmoid activation)
for (int j = 0; j < hideNum; j++)
{
o1[j] = 0.0;
for (int i = 0; i < inNum; i++)
{
o1[j] += w[i, j] * x[i];
}
x1[j] = 1.0 / (1.0 + Math.Exp(-o1[j] - b1[j]));
}
//compute output-layer net input and output (sigmoid activation)
for (int k = 0; k < outNum; k++)
{
o2[k] = 0.0;
for (int j = 0; j < hideNum; j++)
{
o2[k] += v[j, k] * x1[j];
}
x2[k] = 1.0 / (1.0 + Math.Exp(-o2[k] - b2[k]));
}
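// Forward pass (added summary): with the sigmoid sigma(z) = 1 / (1 + e^(-z)),
//   x1[j] = sigma( sum_i w[i,j] * x[i] + b1[j] )   hidden output
//   x2[k] = sigma( sum_j v[j,k] * x1[j] + b2[k] )  network output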
//compute output-layer error terms and accumulate the squared error
for (int k = 0; k < outNum; k++)
{
qq[k] = (yd[k] - x2[k]) * x2[k] * (1.0 - x2[k]);
e += (yd[k] - x2[k]) * (yd[k] - x2[k]);
//update V
for (int j = 0; j < hideNum; j++)
{
v[j, k] += rate * qq[k] * x1[j];
}
}
//compute hidden-layer error terms
for (int j = 0; j < hideNum; j++)
{
pp[j] = 0.0;
for (int k = 0; k < outNum; k++)
{
pp[j] += qq[k] * v[j, k];
}
pp[j] = pp[j] * x1[j] * (1 - x1[j]);
//update W
for (int i = 0; i < inNum; i++)
{
w[i, j] += rate * pp[j] * x[i];
}
}
//update output-layer thresholds b2
for (int k = 0; k < outNum; k++)
{
b2[k] += rate * qq[k];
}
//update hidden-layer thresholds b1
for (int j = 0; j < hideNum; j++)
{
b1[j] += rate * pp[j];
}
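// Backpropagation (added summary): the updates above are the standard delta
// rule for sigmoid units with learning rate `rate`:
//   qq[k] = (yd[k] - x2[k]) * x2[k] * (1 - x2[k])        output delta
//   pp[j] = x1[j] * (1 - x1[j]) * sum_k qq[k] * v[j,k]   hidden delta
//   v[j,k] += rate * qq[k] * x1[j];  w[i,j] += rate * pp[j] * x[i];
//   b2[k]  += rate * qq[k];          b1[j]  += rate * pp[j];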
}//end isamp
e = Math.Sqrt(e);
// adjustWV(w,dw);
// adjustWV(v,dv);
}//end train
public void train_test()
{
double[,] pr = new double[,] { { 11.64, 2.06, 2.46, 21.6, 84, 1.95, 1.69, 0.48, 1.35, 2.8, 1, 2.75, 680 }, { 12.08, 1.33, 2.3, 23.6, 70, 2.2, 1.59, 0.42, 1.38, 1.74, 1.07, 3.21, 625 },
{13.11,1.01,1.7,15,78,2.98,3.18,0.26,2.28,5.3,1.12,3.18,502},{12.64,1.36,2.02,16.8,100,2.02,1.41,.53,.62,5.75,.98,1.59,450},{12.37,1.13,2.16,19,87,3.5,3.1,0.19,1.87,4.45,1.22,2.87,420},
{12.17,1.45,2.53,19,104,1.89,1.75,0.45,1.03,2.95,1.45,2.23,355},{12.52,2.43,2.17,21,88,2.55,2.27,0.26,1.22,2,0.9,2.78,325},{11.79,2.13,2.78,28.5,92,2.13,2.24,0.58,1.76,3,0.97,2.44,466},
{12.04,4.3,2.38,22,80,2.1,1.75,0.42,1.35,2.6,0.79,2.57,580},{12.99,1.67,2.6,30,139,3.3,2.89,0.21,1.96,3.35,1.31,3.5,985},{11.96,1.09,2.3,21,101,3.38,2.14,0.13,1.65,3.21,0.99,3.13,886}
,{11.84,0.89,2.58,18,94,2.2,2.21,0.22,2.35,3.05,0.79,3.08,520},{12.67,0.98,2.24,18,99,2.2,1.94,0.3,1.46,2.62,1.23,3.16,450},{12.16,1.61,2.31,22.8,90,1.78,1.69,0.43,1.56,2.45,1.33,2.26,495}
,{11.65,1.67,2.62,26,88,1.92,1.61,0.4,1.34,2.6,1.36,3.21,562},{11.64,2.06,2.46,21.6,84,1.95,1.69,0.48,1.35,2.8,1,2.75,680},{12.08,1.33,2.3,23.6,70,2.2,1.59,0.42,1.38,1.74,1.07,3.21,625},
};
this.sampleNum = pr.GetLength(0);
int[] er = new int[outNum];   // per-class counters (cleared below, not otherwise used)
double[] t = new double[outNum];
for (int m = 0; m < outNum; m++)
{
er[m] = 0;
}
double pMax = 0.0; //find the largest absolute value in the test inputs
for (int isamp = 0; isamp < sampleNum; isamp++)
{
for (int i = 0; i < inNum; i++)
{
if (Math.Abs(pr[isamp, i]) > pMax)
pMax = Math.Abs(pr[isamp, i]);
}
}
in_rate = pMax; //note: test inputs are re-normalized by the test set's own maximum
for (int c = 0; c < sampleNum; c++)
{
for (int i = 0; i < inNum; i++)
{
x[i] = pr[c, i] / in_rate;
}
//compute hidden-layer net input and output
for (int j = 0; j < hideNum; j++)
{
o1[j] = 0.0;
for (int i = 0; i < inNum; i++)
{
o1[j] += w[i, j] * x[i];
}
x1[j] = 1.0 / (1.0 + Math.Exp(-o1[j] - b1[j]));
}
//compute output-layer net input and output
for (int k = 0; k < outNum; k++)
{
o2[k] = 0.0;
for (int j = 0; j < hideNum; j++)
{
o2[k] += v[j, k] * x1[j];
}
x2[k] = 1.0 / (1.0 + Math.Exp(-o2[k] - b2[k]));
}
//pick the winning output node; the test set here is apparently all class 2 (node 1)
if ((x2[0] < x2[1]) && (x2[1] > x2[2]))
err2++;
else if ((x2[0] < x2[2]) && (x2[1] < x2[2]))
err3++;
else if ((x2[0] > x2[1]) && (x2[0] > x2[2]))
err1++;
}
double t2 = (double)err2 / sampleNum;
Console.WriteLine("Accuracy: {0}", t2);
double t3 = (double)err3 / sampleNum;
Console.WriteLine("Misclassification 1: {0}", t3);
double t1 = (double)err1 / sampleNum;
Console.WriteLine("Misclassification 2: {0}", t1);
}
public class Class1
{
[STAThread]
static void Main(string[] args)
{
double[,] p1 = new double[,] { { 13.86, 1.35, 2.27, 16, 98, 2.98, 3.15, 0.22, 1.85, 7.22, 1.01, 3.55, 1045 },
{ 14.1, 2.16, 2.3, 18, 105, 2.95, 3.32, 0.22, 2.38, 5.75, 1.25, 3.17, 1510 }, { 12.2, 3.03, 2.32, 19, 96, 1.25, 0.49, 0.4, 0.73, 5.5, 0.66, 1.83, 510 },
{ 12.52, 2.43, 2.17, 21, 88, 2.55, 2.27, 0.26, 1.22, 2, 0.9, 2.78, 325 }, { 12.2, 3.03, 2.32, 19, 96, 1.25, 0.49, 0.4, 0.73, 5.5, 0.66, 1.83, 510 },
{ 12.72, 1.75, 2.28, 22.5, 84, 1.38, 1.76, 0.48, 1.63, 3.3, 0.88, 2.42, 488 }, { 12.69, 1.53, 2.26, 20.7, 80, 1.38, 1.46, 0.58, 1.62, 3.05, 0.96, 2.06, 495 },
{ 12.29, 2.83, 2.22, 18, 88, 2.45, 2.25, 0.25, 1.99, 2.15, 1.15, 3.3, 290 }, { 11.62, 1.99, 2.28, 18, 98, 3.02, 2.26, 0.17, 1.35, 3.25, 1.16, 2.96, 345 },
{14.23,1.71,2.43,15.6,127,2.8,3.06,0.28,2.29,5.64,1.04,3.92,1065},{13.2,1.78,2.14,11.2,100,2.65,2.76,0.26,1.28,4.38,1.05,3.4,1050},{13.16,2.36,2.67,18.6,101,2.8,3.24,0.3,2.81,5.68,1.03,3.17,1185},
{14.13,4.1,2.74,24.5,96,2.05,0.76,0.56,1.35,9.2,0.61,1.6,560},{13.17,2.59,2.37,20,120,1.65,0.68,0.53,1.46,9.3,0.6,1.62,840},{12.96,3.45,2.35,18.5,106,1.39,0.7,0.4,0.94,5.28,0.68,1.75,675}
,{13.72,1.43,2.5,16.7,108,3.4,3.67,0.19,2.04,6.8,0.89,2.87,1285},{13.24,2.59,2.87,21,118,2.8,2.69,0.39,1.82,4.32,1.04,2.93,735},{13.73,4.36,2.26,22.5,88,1.28,.47,0.52,1.15,6.62,0.78,1.75,520},
{13.45,3.7,2.6,23,111,1.7,0.92,0.43,1.46,10.68,0.85,1.56,695},{12.37,1.63,2.3,24.5,88,2.22,2.45,0.4,1.9,2.12,0.89,2.78,342},{12.81,2.31,2.4,24,98,1.15,1.09,0.27,0.83,5.7,0.66,1.36,560},
{13.78,2.76,2.3,22,90,1.35,0.68,0.41,1.03,9.58,0.7,1.68,615},{13.29,1.97,2.68,16.8,102,3,3.23,0.31,1.66,6,1.07,2.84,1270},{13.11,1.01,1.7,15,78,2.98,3.18,0.26,2.28,5.3,1.12,3.18,502},
{11.76,2.68,2.92,20,103,1.75,2.03,0.6,1.05,3.8,1.23,2.5,607},{11.41,.74,2.5,21,88,2.48,2.01,0.42,1.44,3.08,1.1,2.31,434},{12.08,1.39,2.5,22.5,84,2.56,2.29,0.43,1.04,2.9,0.93,3.19,385}
,{11.03,1.51,2.2,21.5,85,2.46,2.17,0.52,2.01,1.9,1.71,2.87,407},{11.82,1.47,1.99,20.8,86,1.98,1.6,0.3,1.53,1.95,0.95,3.33,495},{13.41,3.84,2.12,18.8,90,2.45,2.68,0.27,1.48,4.28,0.91,3,1035},
{13.56,1.71,2.31,16.2,117,3.15,3.29,0.34,2.34,6.13,0.95,3.38,795},{14.13,4.1,2.74,24.5,96,2.05,0.76,0.56,1.35,9.2,0.61,1.6,560},{11.79,2.13,2.78,28.5,92,2.13,2.24,0.58,1.76,3,0.97,2.44,466},
{12.81,2.31,2.4,24,98,1.15,1.09,0.27,0.83,5.7,0.66,1.36,560},{12.7,3.55,2.36,21.5,106,1.7,1.2,0.17,0.84,5,0.78,1.29,600},{12.87,4.61,2.48,21.5,86,1.7,0.65,0.47,0.86,7.65,0.54,1.86,625},
{13.07,1.5,2.1,15.5,98,2.4,2.64,0.28,1.37,3.7,1.18,2.69,1020},{11.66,1.88,1.92,16,97,1.61,1.57,0.34,1.15,3.8,1.23,2.14,428},{13.03,0.9,1.71,16,86,1.95,2.03,0.24,1.46,4.6,1.19,2.48,392},
{11.84,2.89,2.23,18,112,1.72,1.32,0.43,0.95,2.65,.96,2.52,500},{12.33,0.99,1.95,14.8,136,1.9,1.85,0.35,2.76,3.4,1.06,2.31,750},{12.7,3.87,2.4,23,101,2.83,2.55,0.43,1.95,2.57,1.19,3.13,463},
{12.6,2.46,2.2,18.5,94,1.62,0.66,0.63,0.94,7.1,0.73,1.58,695},{13.41,3.84,2.12,18.8,90,2.45,2.68,0.27,1.48,4.28,0.91,3,1035},{13.88,1.89,2.59,15,101,3.25,3.56,0.17,1.7,5.43,0.88,3.56,1095},
{12.36,3.83,2.38,21,88,2.3,0.92,0.5,1.04,7.65,0.56,1.58,520},{12.96,3.45,2.35,18.5,106,1.39,0.7,0.4,0.94,5.28,0.68,1.75,675},{13.78,2.76,2.3,22,90,1.35,0.68,0.41,1.03,9.58,0.7,1.68,615},
{12.2,3.03,2.32,19,96,1.25,.49,0.4,0.73,5.5,0.66,1.83,510},{14.2,1.76,2.45,15.2,112,3.27,3.39,0.34,1.97,6.75,1.05,2.85,1450},{13.4,4.6,2.86,25,112,1.98,0.96,0.27,1.11,8.5,0.67,1.92,630},
{13.27,4.28,2.26,20,120,1.59,0.69,0.43,1.35,10.2,0.59,1.56,835},{13.17,2.59,2.37,20,120,1.65,0.68,0.53,1.46,9.3,0.6,1.62,840},{14.21,4.04,2.44,18.9,111,2.85,2.65,0.3,1.25,5.24,0.87,3.33,1080},
{13.05,1.73,2.04,12.4,92,2.72,3.27,0.17,2.91,7.2,1.12,2.91,1150},{13.49,1.66,2.24,24,87,1.88,1.84,0.27,1.03,3.74,0.98,2.78,472},{11.96,1.09,2.3,21,101,3.38,2.14,0.13,1.65,3.21,0.99,3.13,886},
{12.93,3.8,2.65,18.6,102,2.41,2.41,0.25,1.98,4.5,1.03,3.52,770},{13.71,1.86,2.36,16.6,101,2.61,2.88,0.27,1.69,3.8,1.11,4,1035},{12.85,1.6,2.52,17.8,95,2.48,2.37,0.26,1.46,3.93,1.09,3.63,1015},
{13.5,1.81,2.61,20,96,2.53,2.61,0.28,1.66,3.52,1.12,3.82,845},{13.05,2.05,3.22,25,124,2.63,2.68,0.47,1.92,3.58,1.13,3.2,830},{14.23,1.71,2.43,15.6,127,2.8,3.06,0.28,2.29,5.64,1.04,3.92,1065},
{13.2,1.78,2.14,11.2,100,2.65,2.76,0.26,1.28,4.38,1.05,3.4,1050}};
double[,] t1 = new double[,] { { 1, 0, 0 }, { 1, 0, 0 }, { 0, 0, 1 }, { 0, 1, 0 }, { 0, 0, 1 }, { 0, 1, 0 },
{ 0, 1, 0 }, { 0, 1, 0 }, { 0, 1, 0 },{1,0,0} ,{1,0,0},{1,0,0},{0,0,1},{0,0,1},{0,0,1},{1,0,0},{1,0,0},{0,0,1},{0,0,1},{0,1,0},{0,0,1},{0,0,1},{1,0,0},{0,1,0},
{0,1,0},{0,1,0},{0,1,0},{0,1,0},{0,1,0},{1,0,0},{1,0,0},{0,0,1},{0,1,0},{0,0,1},{0,0,1},{0,0,1},{1,0,0},{0,1,0},{0,1,0},{0,1,0},{0,1,0},{0,1,0},{0,0,1},{1,0,0},{1,0,0},{0,0,1},
{0,0,1},{0,0,1},{0,0,1},{1,0,0},{0,0,1},{0,0,1},{0,0,1},{1,0,0},{1,0,0},{0,1,0},{0,1,0},{1,0,0},{1,0,0},{1,0,0},{1,0,0},{1,0,0},{1,0,0},{1,0,0}};
BpNet bp = new BpNet(p1, t1);
double study = 0.0;
do
{
study++;
bp.train(p1, t1);
} while (bp.e > 0.005);
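// (added note) bp.e is the square root of the summed squared output error
// over one full pass through the training data; epochs repeat until it
// drops below 0.005.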
Console.WriteLine("study:{0}", study);
bp.train_test();
}
}
}
}
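A minimal way to try the listing, assuming it is saved as BpNet.cs and the classic .NET Framework command-line compiler is available: compile with csc BpNet.cs and run the resulting BpNet.exe (on modern .NET, placing the file in a console project and using dotnet run works as well). The program prints the layer sizes, the number of training epochs needed to reach the error threshold, and the three classification ratios reported by train_test().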