[Tutorial] A simple neural network in C#

This article is a beginner-friendly walkthrough of building a small multilayer perceptron (MLP) completely from scratch in C#, without any machine-learning frameworks. Libraries such as Theano or TensorFlow hide the mechanics behind a high-level API and generally assume a capable GPU; writing the network by hand is the best way to actually understand how backpropagation works.

My own machine only has an old ATI Radeon HD Mobility 4570, so a framework-free CPU implementation was the practical choice anyway. Everything below is plain C# and the standard library.
The task: train a network that, given two binary inputs, computes XOR on its first output and XAND (i.e. XNOR, the negation of XOR) on its second. The code targets Visual Studio 2017 and .NET Framework 4.7.

Network configuration:
- 2 inputs and 2 outputs;
- one hidden layer of 4 neurons (sigmoid activation);
- training by online backpropagation;
- stopping threshold for the average cost: 0.001;
- learning rate: 0.1.

Weights are persisted between runs: each layer reads and writes a file named (fieldname)_memory.xml placed next to the executable (the same files serve both release and debug builds). Each XML file simply holds one element per weight, in row-major order. We start with the using directives:
using System.Xml;
using static System.Math;
using static System.Console;
First, two small enums used throughout the code — one for the weight-file access mode, one for the neuron type:
enum MemoryMode { GET, SET }
enum NeuronType { Hidden, Output }
All of the classes live in a single namespace, NeuralNetwork:
namespace NeuralNetwork
{
//the classes defined below (InputLayer, Neuron, Layer, HiddenLayer, OutputLayer, Network) go here
}
Writing the network top-down, we start with the input layer. Strictly speaking the input layer performs no computation of its own — its only job is to hand the training samples to the hidden layer — so it is modeled as a simple container class, InputLayer. Each training sample is a tuple of an input vector and its expected output vector: the first output is XOR of the two inputs, the second is XAND (XNOR).
class InputLayer
{
    // Training samples: each tuple pairs an input vector with its target vector.
    // Target[0] = XOR of the inputs, Target[1] = XNOR of the inputs.
    private readonly (double[], double[])[] _trainset =
    {
        (new double[] { 0, 0 }, new double[] { 0, 1 }),
        (new double[] { 0, 1 }, new double[] { 1, 0 }),
        (new double[] { 1, 0 }, new double[] { 1, 0 }),
        (new double[] { 1, 1 }, new double[] { 0, 1 })
    };
    // Read-only accessor over the training set (C# 7 expression-bodied property).
    public (double[], double[])[] Trainset => _trainset;
}
Next, the neuron itself. A neuron stores its input vector, its weight vector, and its type (hidden or output); its output is the sigmoid of the weighted sum of its inputs. It also exposes the sigmoid derivative and the local-gradient formula that backpropagation needs.
class Neuron
{
    /// <summary>
    /// A single sigmoid neuron. Inputs and weights are parallel arrays;
    /// the type decides which gradient formula Gradientor applies.
    /// </summary>
    public Neuron(double[] inputs, double[] weights, NeuronType type)
    {
        _type = type;
        _weights = weights;
        _inputs = inputs;
    }
    private readonly NeuronType _type; // fixed at construction — never reassigned
    private double[] _weights;
    private double[] _inputs;
    public double[] Weights { get => _weights; set => _weights = value; }
    public double[] Inputs { get => _inputs; set => _inputs = value; }
    // Recomputed on every access from the current inputs and weights.
    public double Output { get => Activator(_inputs, _weights); }
    private double Activator(double[] i, double[] w) // logistic sigmoid over the weighted input sum
    {
        double sum = 0;
        for (int l = 0; l < i.Length; ++l)
            sum += i[l] * w[l]; // dot product of inputs and weights
        return 1d / (1d + Exp(-sum)); // clearer and cheaper than Pow(1 + Exp(-sum), -1)
    }
    // Sigmoid derivative expressed through the neuron's output: f'(x) = f(x) * (1 - f(x)).
    public double Derivativator(double outsignal) => outsignal * (1 - outsignal);
    // Local gradient: output neurons use (error * f'), hidden neurons use the
    // weighted gradient sum from the next layer (g_sum * f').
    public double Gradientor(double error, double dif, double g_sum) => (_type == NeuronType.Output) ? error * dif : g_sum * dif;
}
Now the layers. Both concrete layers share neuron storage and weight (de)serialization, so that common part lives in an abstract base class, Layer. Its constructor loads the weight matrix [neurons X previous-layer neurons] from the layer's XML file and builds the neuron array; each neuron's inputs start out as null and are attached later through the Data property — so take care not to read a neuron's Output before its inputs are set, or you will get a null reference.
abstract class Layer// fields are protected so derived layers can reach them
{//type names the layer ("hidden_layer"/"output_layer") and selects its XML weight file
    protected Layer(int non, int nopn, NeuronType nt, string type)
    {// loads weights via WeightInitialize and builds the neuron array
        numofneurons = non;
        numofprevneurons = nopn;
        Neurons = new Neuron[non];
        double[,] Weights = WeightInitialize(MemoryMode.GET, type);
        for (int i = 0; i < non; ++i)
        {
            double[] temp_weights = new double[nopn];
            for (int j = 0; j < nopn; ++j)
                temp_weights[j] = Weights[i, j];
            Neurons[i] = new Neuron(null, temp_weights, nt);// inputs attached later via Data
        }
    }
    protected int numofneurons;// neurons in this layer
    protected int numofprevneurons;// neurons in the previous layer (= inputs per neuron)
    protected const double learningrate = 0.1d;// gradient-descent step size
    Neuron[] _neurons;
    public Neuron[] Neurons { get => _neurons; set => _neurons = value; }
    public double[] Data// replaces the null inputs set in the constructor
    {// every neuron of the layer receives the same input vector
        set
        {
            for (int i = 0; i < Neurons.Length; ++i)
                Neurons[i].Inputs = value;
        }
    }
    /// <summary>
    /// GET: reads the weight matrix from {type}_memory.xml and returns it.
    /// SET: writes the neurons' current weights back to the file (and saves it).
    /// Values are always formatted/parsed with the invariant culture so the
    /// file round-trips regardless of the machine's regional settings.
    /// </summary>
    public double[,] WeightInitialize(MemoryMode mm, string type)
    {
        double[,] _weights = new double[numofneurons, numofprevneurons];
        WriteLine($"{type} weights are being initialized...");
        XmlDocument memory_doc = new XmlDocument();
        memory_doc.Load($"{type}_memory.xml");
        XmlElement memory_el = memory_doc.DocumentElement;
        switch (mm)
        {
            case MemoryMode.GET:
                for (int l = 0; l < _weights.GetLength(0); ++l)
                    for (int k = 0; k < _weights.GetLength(1); ++k)
                        // Replace(',', '.') tolerates legacy files written with a comma decimal separator
                        _weights[l, k] = double.Parse(memory_el.ChildNodes.Item(k + _weights.GetLength(1) * l).InnerText.Replace(',', '.'), System.Globalization.CultureInfo.InvariantCulture);
                break;
            case MemoryMode.SET:
                for (int l = 0; l < Neurons.Length; ++l)
                    for (int k = 0; k < numofprevneurons; ++k)
                        // Invariant culture keeps the format parseable by the GET branch above;
                        // the current-culture ToString() previously used here could emit commas.
                        memory_el.ChildNodes.Item(k + numofprevneurons * l).InnerText = Neurons[l].Weights[k].ToString(System.Globalization.CultureInfo.InvariantCulture);
                memory_doc.Save($"{type}_memory.xml");// persist only when actually writing weights
                break;
        }
        WriteLine($"{type} weights have been initialized...");
        return _weights;
    }
    abstract public void Recognize(Network net, Layer nextLayer);// forward pass
    abstract public double[] BackwardPass(double[] stuff);// backward pass
}
Layer's constructor is protected, so it can only run as part of a derived class's construction, invoked through base(...).
There are two concrete layer types — HiddenLayer and OutputLayer — each overriding the forward and backward passes.
class HiddenLayer : Layer
{
    public HiddenLayer(int non, int nopn, NeuronType nt, string type) : base(non, nopn, nt, type) { }

    // Forward pass: compute every neuron's activation and feed the vector to the next layer.
    public override void Recognize(Network net, Layer nextLayer)
    {
        double[] activations = new double[Neurons.Length];
        for (int n = 0; n < Neurons.Length; ++n)
            activations[n] = Neurons[n].Output;
        nextLayer.Data = activations;
    }

    // Backward pass: gr_sums carries, per hidden neuron, the weighted gradient sum
    // coming from the output layer. Updates the weights in place and returns null —
    // there is no earlier layer to propagate gradients to.
    // NOTE: each weight update changes Output (it is recomputed on access), so the
    // gradient seen by later weights of the same neuron reflects the partial update;
    // this ordering is deliberate and matches the original training dynamics.
    public override double[] BackwardPass(double[] gr_sums)
    {
        for (int neuron = 0; neuron < numofneurons; ++neuron)
            for (int w = 0; w < numofprevneurons; ++w)
                Neurons[neuron].Weights[w] += learningrate * Neurons[neuron].Inputs[w]
                    * Neurons[neuron].Gradientor(0, Neurons[neuron].Derivativator(Neurons[neuron].Output), gr_sums[neuron]);
        return null;
    }
}
class OutputLayer : Layer
{
    public OutputLayer(int non, int nopn, NeuronType nt, string type) : base(non, nopn, nt, type) { }

    // Forward pass: the output layer writes its activations straight into net.fact.
    public override void Recognize(Network net, Layer nextLayer)
    {
        for (int i = 0; i < Neurons.Length; ++i)
            net.fact[i] = Neurons[i].Output;
    }

    // Backward pass: returns, per hidden neuron, the weighted sum of the output
    // neurons' local gradients (needed by HiddenLayer.BackwardPass), then updates
    // this layer's weights.
    public override double[] BackwardPass(double[] errors)
    {
        // Weights are unchanged while the sums below are built, so each neuron's
        // local gradient is loop-invariant: compute it once instead of once per
        // hidden neuron (the original recomputed Output for every (j, k) pair).
        double[] gradients = new double[Neurons.Length];
        for (int k = 0; k < Neurons.Length; ++k)
            gradients[k] = Neurons[k].Gradientor(errors[k], Neurons[k].Derivativator(Neurons[k].Output), 0);
        double[] gr_sum = new double[numofprevneurons];
        for (int j = 0; j < gr_sum.Length; ++j)// weighted gradient sum per hidden neuron
        {
            double sum = 0;
            for (int k = 0; k < Neurons.Length; ++k)
                sum += Neurons[k].Weights[j] * gradients[k];
            gr_sum[j] = sum;
        }
        // Weight update kept exactly as before: each update alters Output (and thus
        // the recomputed gradient) for the remaining weights of the same neuron,
        // so the gradient is deliberately NOT hoisted here.
        for (int i = 0; i < numofneurons; ++i)
            for (int n = 0; n < numofprevneurons; ++n)
                Neurons[i].Weights[n] += learningrate * Neurons[i].Inputs[n] * Neurons[i].Gradientor(errors[i], Neurons[i].Derivativator(Neurons[i].Output), 0);
        return gr_sum;
    }
}
Finally, the Network class ties everything together: it owns the three layers and the output vector, computes the per-sample MSE and the average cost, and implements training (backpropagation repeated until the cost falls below 0.001), testing, and the program entry point.
class Network
{
    // Topology: 2 inputs -> 4 hidden neurons -> 2 outputs.
    InputLayer input_layer = new InputLayer();
    public HiddenLayer hidden_layer = new HiddenLayer(4, 2, NeuronType.Hidden, nameof(hidden_layer));
    public OutputLayer output_layer = new OutputLayer(2, 4, NeuronType.Output, nameof(output_layer));
    // Actual outputs of the most recent forward pass (filled by OutputLayer.Recognize).
    public double[] fact = new double[2];

    // Mean squared error of one sample: 0.5 * sum of squared per-output errors.
    double GetMSE(double[] errors)
    {
        double total = 0;
        foreach (double e in errors)
            total += Pow(e, 2);
        return 0.5d * total;
    }

    // Cost of an epoch: the average of the per-sample MSEs.
    double GetCost(double[] mses)
    {
        double total = 0;
        foreach (double m in mses)
            total += m;
        return (total / mses.Length);
    }

    // Online backpropagation: run epochs over the training set until the average
    // cost drops below the threshold, then persist the learned weights to XML.
    static void Train(Network net)
    {
        const double threshold = 0.001d;// stopping criterion
        double[] epochMses = new double[4];// per-sample MSEs of the current epoch
        double cost = 0;
        do
        {
            for (int sample = 0; sample < net.input_layer.Trainset.Length; ++sample)
            {
                // forward pass
                net.hidden_layer.Data = net.input_layer.Trainset[sample].Item1;
                net.hidden_layer.Recognize(null, net.output_layer);
                net.output_layer.Recognize(net, null);
                // per-output errors: target - actual
                double[] errors = new double[net.input_layer.Trainset[sample].Item2.Length];
                for (int o = 0; o < errors.Length; ++o)
                    errors[o] = net.input_layer.Trainset[sample].Item2[o] - net.fact[o];
                epochMses[sample] = net.GetMSE(errors);
                // backward pass: output layer first, then hidden layer
                double[] gsums = net.output_layer.BackwardPass(errors);
                net.hidden_layer.BackwardPass(gsums);
            }
            cost = net.GetCost(epochMses);
            WriteLine($"{cost}");// progress trace
        } while (cost > threshold);
        // save the trained weights back to the XML "memory" files
        net.hidden_layer.WeightInitialize(MemoryMode.SET, nameof(hidden_layer));
        net.output_layer.WeightInitialize(MemoryMode.SET, nameof(output_layer));
    }

    // Run a forward pass on every training sample and print the outputs.
    static void Test(Network net)
    {
        for (int sample = 0; sample < net.input_layer.Trainset.Length; ++sample)
        {
            net.hidden_layer.Data = net.input_layer.Trainset[sample].Item1;
            net.hidden_layer.Recognize(null, net.output_layer);
            net.output_layer.Recognize(net, null);
            foreach (double output in net.fact)
                WriteLine($"{output}");
            WriteLine();
        }
    }

    // Entry point: build the network, train it, then show the results.
    static void Main(string[] args)
    {
        Network net = new Network();
        Train(net);
        Test(net);
        ReadKey();// keep the console window open
    }
}
That is the whole program.

This implementation is deliberately minimal — for anything beyond a toy task you would at least want to shuffle the sample order in InputLayer each epoch and make the architecture configurable. A natural next step is an MLP for MNIST digit classification, in C# or in Python.