This repository has been archived by the owner on May 11, 2023. It is now read-only.

Commit

Removal of LinRegrTrainer, enhancements of the Ridge regression performance and NP Statistics.
okozelsk committed Feb 7, 2019
1 parent 729233f commit 73333ae
Showing 10 changed files with 565 additions and 801 deletions.
1,012 changes: 375 additions & 637 deletions Demo/DemoConsoleApp/DemoSettings.xml

Large diffs are not rendered by default.

6 changes: 5 additions & 1 deletion Demo/DemoConsoleApp/SMDemo.cs
@@ -66,7 +66,11 @@ public static ReadoutUnit.RegressionControlOutArgs RegressionControl(ReadoutUnit
ReadoutUnit.RegressionControlOutArgs outArgs = new ReadoutUnit.RegressionControlOutArgs
{
//Call the default implementation of the judgement.
CurrentIsBetter = ReadoutUnit.IsBetter(inArgs.TaskType, inArgs.CurrReadoutUnit, inArgs.BestReadoutUnit)
CurrentIsBetter = ReadoutUnit.IsBetter(inArgs.TaskType, inArgs.CurrReadoutUnit, inArgs.BestReadoutUnit),
StopRegression = (inArgs.TaskType == CommonEnums.TaskType.Classification &&
inArgs.CurrReadoutUnit.TrainingBinErrorStat.TotalErrStat.Sum == 0 &&
inArgs.CurrReadoutUnit.TestingBinErrorStat.TotalErrStat.Sum == 0
)
};
//Progress info
if (outArgs.CurrentIsBetter ||
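The SMDemo.cs change above adds an early-stop condition to the regression-control callback: for a classification task, regression is stopped as soon as both the training and the testing binary-error sums reach zero. A minimal standalone sketch of the same criterion, using plain C# arguments instead of the RCNet in/out argument types (the method name and parameters below are hypothetical):

// Hypothetical standalone form of the stop criterion shown above; the RCNet
// callback reads the same values from inArgs.CurrReadoutUnit instead.
static bool ShouldStopRegression(bool isClassificationTask,
                                 double trainingBinErrorSum,
                                 double testingBinErrorSum)
{
    // Perfect binary classification on both the training and testing sets,
    // so further regression iterations cannot improve the unit.
    return isClassificationTask && trainingBinErrorSum == 0 && testingBinErrorSum == 0;
}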
50 changes: 30 additions & 20 deletions RCNet/MathTools/MatrixMath/Matrix.cs
@@ -472,19 +472,24 @@ public double EstimateLargestEigenValue(out double[] resultEigenVector, int maxN
}

/// <summary>
/// Method uses LU decomposition to solve system of the linear equations.
/// Method uses LU decomposition to solve system of linear equations.
/// Matrix must be squared.
/// </summary>
/// <param name="rightPart">Desired results (right part of linear equations)</param>
/// <returns>Linear coefficients</returns>
public double[] SolveUsingLU(double[] rightPart)
/// <param name="desired">Vector of desired results (right part of linear equations)</param>
/// <returns>Vector of computed linear coefficients</returns>
public Vector SolveUsingLU(Vector desired)
{
//Firstly check squared matrix
//Checks squared matrix
if (!IsSquared)
{
throw new Exception("Matrix must be squared.");
}
if (NumOfRows != desired.Length)
{
throw new Exception("Number of matrix rows must be equal to length of desired results vector.");
}
int n = NumOfRows;
double[] desiredData = desired.Data;
double[,] lu = new double[n, n];
double sum = 0;
//LU decomposition
@@ -518,7 +523,7 @@ public double[] SolveUsingLU(double[] rightPart)
{
sum += lu[i, k] * y[k];
}
y[i] = rightPart[i] - sum;
y[i] = desiredData[i] - sum;
}
// find solution of Ux = y
double[] x = new double[n];
@@ -531,7 +536,7 @@ public double[] SolveUsingLU(double[] rightPart)
}
x[i] = (1 / lu[i, i]) * (y[i] - sum);
}
return x;
return new Vector(x, false);
}

/// <summary>
@@ -1175,40 +1180,45 @@ public static Matrix Inverse(Matrix A)
}

/// <summary>
/// Creates matrix prepared for weights computation.
/// R = [Inv(X'*X + lambda*I)*X']
/// then
/// Weights = R * Y, where Y is the vector of desired results
/// Computes ridge regression weights
/// </summary>
/// <param name="desired">Desired results vector</param>
/// <param name="lambda">Hyperparameter lambda of Ridge Regression method</param>
public Matrix GetRidgeRegressionMatrix(double lambda)
/// <returns>Vector of computed weights</returns>
public Vector RidgeRegression(Vector desired, double lambda = 0)
{
//Checks
if (NumOfRows != desired.Length)
{
throw new Exception("Number of matrix rows must be equal to length of desired results vector.");
}
//Computation
Matrix Xt = Transpose();
Matrix R = Xt * this;
if (lambda > 0)
{
R.AddScalarToDiagonal(lambda);
}
R.Inverse();
return R * Xt;
//For better performance must be ensured that (Xt * desired) is computed first and after then is
//computed R * (resulting vector).
return R * (Xt * desired);
}

/// <summary>
/// Computes ridge regression weights
/// </summary>
/// <param name="X">Predictor matrix</param>
/// <param name="desired">Desired result vector</param>
/// <param name="desired">Desired results vector</param>
/// <param name="lambda">Hyperparameter lambda of Ridge Regression method</param>
/// <returns>Weight vector</returns>
/// <returns>Vector of computed weights</returns>
public static Vector RidgeRegression(Matrix X, Vector desired, double lambda = 0)
{
if(X.NumOfRows != desired.Length)
{
throw new Exception("Number of matrix rows must be equal to desired vector length.");
}
return X.GetRidgeRegressionMatrix(lambda) * desired;
return X.RidgeRegression(desired, lambda);
}



}//Matrix

} //Namespace
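The Matrix.cs changes above replace the two-step GetRidgeRegressionMatrix approach with a single RidgeRegression call that computes w = Inv(X'X + lambda*I) * (X' * y), and the new comment stresses the evaluation order: X' * desired is formed first, so the final step is a cheap matrix-vector product instead of materializing the full Inv(X'X + lambda*I) * X' matrix. A small self-contained sketch of that ordering with plain arrays (toy values; R simply stands in for the already inverted matrix, this is not the RCNet Matrix/Vector API):

using System;

class RidgeOrderingSketch
{
    // Plain matrix-vector product: result[i] = sum over j of a[i, j] * v[j]
    static double[] MatVec(double[,] a, double[] v)
    {
        int rows = a.GetLength(0), cols = a.GetLength(1);
        double[] result = new double[rows];
        for (int i = 0; i < rows; i++)
        {
            double sum = 0d;
            for (int j = 0; j < cols; j++) sum += a[i, j] * v[j];
            result[i] = sum;
        }
        return result;
    }

    static void Main()
    {
        // R stands in for Inv(X'X + lambda*I) (n x n), Xt for X transposed (n x m),
        // y for the desired results vector (length m). Toy values only.
        double[,] R  = { { 0.5, 0.1 }, { 0.1, 0.5 } };
        double[,] Xt = { { 1, 2, 3 }, { 0, 1, 1 } };
        double[] y   = { 1.0, 0.0, 1.0 };

        // Cheap order: weights = R * (Xt * y) -- two matrix-vector products,
        // roughly n*m + n*n multiplications. Computing (R * Xt) * y instead
        // would first build an n x m intermediate matrix at a cost of about n*n*m.
        double[] weights = MatVec(R, MatVec(Xt, y));
        Console.WriteLine(string.Join(", ", weights)); // expected: 2.1, 0.9
    }
}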
4 changes: 2 additions & 2 deletions RCNet/Neural/Network/FF/QRDRegrTrainer.cs
@@ -12,7 +12,7 @@ namespace RCNet.Neural.Network.FF
/// Implements the QRD regression trainer.
/// Principle is to add each iteration less and less piece of white-noise to predictors
/// and then perform the standard QR decomposition (regression).
/// This technique allows to find more stable weight solution than just a QR decomposition
/// This technique allows to find more stable (generalized) weight solution than just a QR decomposition
/// of pure predictors.
/// FF network has to have only output layer with the Identity activation.
/// </summary>
@@ -86,7 +86,7 @@ Random rand
//Check samples conditions
if(inputVectorCollection.Count < inputVectorCollection[0].Length + 1)
{
throw new Exception("Can´t create trainer. Insufficient number of training samples. Minimum is " + (inputVectorCollection[0].Length + 1).ToString() + ".");
throw new Exception($"Can´t create trainer. Insufficient number of training samples {inputVectorCollection.Count}. Minimum is {(inputVectorCollection[0].Length + 1)}.");
}
//Parameters
_settings = settings;
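The QRDRegrTrainer comment above describes the trainer's principle: in each iteration a progressively smaller amount of white noise is added to the predictors before the QR decomposition, which regularizes the early solutions and lets later iterations converge toward the plain QR solution, giving a more stable (better generalizing) set of weights. A hedged sketch of such a decaying-noise schedule (method and parameter names are illustrative, not the RCNet implementation):

// Illustrative noise schedule: full noise magnitude at iteration 0,
// (almost) none at the last iteration.
static double[] AddDecayingNoise(double[] predictors,
                                 int iteration,
                                 int maxNumOfIterations,
                                 double maxNoiseMagnitude,
                                 Random rand)
{
    double scale = maxNoiseMagnitude * (1.0 - (double)iteration / maxNumOfIterations);
    double[] noisy = new double[predictors.Length];
    for (int i = 0; i < predictors.Length; i++)
    {
        // Uniform white noise in [-scale, +scale] added to each predictor value
        noisy[i] = predictors[i] + scale * (rand.NextDouble() * 2.0 - 1.0);
    }
    return noisy;
}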
6 changes: 3 additions & 3 deletions RCNet/Neural/Network/FF/RidgeRegrTrainer.cs
@@ -170,15 +170,15 @@ public bool Iteration()
Matrix lambdaInvMatrix = new Matrix(_baseSquareMatrix);
//Apply lambda
lambdaInvMatrix.AddScalarToDiagonal(_currLambda);
//Inverse
//Apply inverse
lambdaInvMatrix.Inverse();
//New weights
//New weights buffer
double[] newWeights = new double[_net.NumOfWeights];
//Weights for each output neuron
for (int outputIdx = 0; outputIdx < _net.NumOfOutputValues; outputIdx++)
{
Vector tPredictorsOutputsProduct = _transposedPredictorsMatrix * _outputSingleColVectorCollection[outputIdx];
//Regression
//Weights solution
Vector weights = lambdaInvMatrix * tPredictorsOutputsProduct;
//Store weights
for (int i = 0; i < weights.Length - 1; i++)
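The RidgeRegrTrainer.Iteration() hunk above reuses two cached pieces across the lambda trials: the base square matrix X'X (_baseSquareMatrix) and the transposed predictor matrix (_transposedPredictorsMatrix). Each trial only copies the cached X'X, adds the current lambda to its diagonal, inverts that copy, and then solves one matrix-vector product per output neuron. A minimal sketch of the diagonal step with a plain 2D array (illustrative values only, not the RCNet Matrix type):

using System;

class LambdaTrialSketch
{
    // Clone the cached X'X and add lambda to its diagonal (X'X + lambda*I),
    // leaving the cached matrix untouched for the next lambda trial.
    static double[,] ApplyLambda(double[,] baseSquare, double lambda)
    {
        int n = baseSquare.GetLength(0);
        double[,] result = (double[,])baseSquare.Clone();
        for (int i = 0; i < n; i++)
        {
            result[i, i] += lambda;
        }
        return result;
    }

    static void Main()
    {
        double[,] xtx = { { 4.0, 2.0 }, { 2.0, 3.0 } }; // stands in for the cached X'X
        double[,] regularized = ApplyLambda(xtx, 0.5);
        Console.WriteLine($"{regularized[0, 0]}, {regularized[1, 1]}"); // 4.5, 3.5
    }
}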
12 changes: 12 additions & 0 deletions RCNet/Neural/Network/SM/Preprocessing/NeuralPreprocessor.cs
@@ -59,9 +59,17 @@ Object userObject
/// </summary>
public List<Reservoir.PredictorNeuron> PredictorNeuronCollection { get; }
/// <summary>
/// Number of neurons
/// </summary>
public int NumOfNeurons { get; }
/// <summary>
/// Number of predictors
/// </summary>
public int NumOfPredictors { get; }
/// <summary>
/// Number of internal synapses
/// </summary>
public int NumOfInternalSynapses { get; }

//Constructor
/// <summary>
@@ -106,14 +114,18 @@ public NeuralPreprocessor(NeuralPreprocessorSettings settings, int randomizerSee
//Random generator used for reservoir structure initialization
Random rand = (randomizerSeek < 0 ? new Random() : new Random(randomizerSeek));
PredictorNeuronCollection = new List<Reservoir.PredictorNeuron>();
NumOfNeurons = 0;
NumOfPredictors = 0;
NumOfInternalSynapses = 0;
ReservoirCollection = new List<Reservoir>(_settings.ReservoirInstanceDefinitionCollection.Count);
foreach(NeuralPreprocessorSettings.ReservoirInstanceDefinition instanceDefinition in _settings.ReservoirInstanceDefinitionCollection)
{
Reservoir reservoir = new Reservoir(instanceDefinition, _dataRange, rand);
ReservoirCollection.Add(reservoir);
PredictorNeuronCollection.AddRange(reservoir.PredictorNeuronCollection);
NumOfNeurons += reservoir.Size;
NumOfPredictors += reservoir.NumOfOutputPredictors;
NumOfInternalSynapses += reservoir.NumOfInternalSynapses;
}
if(_settings.InputConfig.RouteExternalInputToReadout)
{
62 changes: 57 additions & 5 deletions RCNet/Neural/Network/SM/Preprocessing/Reservoir.cs
@@ -54,6 +54,10 @@ public class Reservoir
/// Number of reservoir's output predictors
/// </summary>
public int NumOfOutputPredictors { get; }
/// <summary>
/// Number of internal synapses
/// </summary>
public int NumOfInternalSynapses { get; private set; }

//Constructor
/// <summary>
@@ -192,6 +196,7 @@ public Reservoir(NeuralPreprocessorSettings.ReservoirInstanceDefinition instance
//-----------------------------------------------------------------------------
//Interconnections
//-----------------------------------------------------------------------------
NumOfInternalSynapses = 0;
//Connection banks allocations
_neuronInputConnectionsCollection = new SortedList<int, ISynapse>[_reservoirNeuronCollection.Length];
_neuronNeuronConnectionsCollection = new SortedList<int, ISynapse>[_reservoirNeuronCollection.Length];
@@ -489,6 +494,8 @@ select neuron
int seed = rand.Next();
randFarm[i] = new Random(seed);
}
int[] synapsesCounter = new int[sourceNeurons.Count];
synapsesCounter.Populate(0);
Parallel.For(0, sourceNeurons.Count, sourceNeuronIdx =>
//for(int sourceNeuronIdx = 0; sourceNeuronIdx < sourceNeurons.Count; sourceNeuronIdx++)
{
@@ -550,11 +557,19 @@ select neuron
tauDecay: dss.TauDecay
);
}
AddInterconnection(_neuronNeuronConnectionsCollection, synapse);
if(AddInterconnection(_neuronNeuronConnectionsCollection, synapse))
{
++synapsesCounter[sourceNeuronIdx];
}
//Remove targetNeuron from tmp collection
tmpRelTargetNeuronCollection.RemoveAt(targetNeuronIndex);
}//connNum
});
//Increment total number of internal synapses
foreach (int count in synapsesCounter)
{
NumOfInternalSynapses += count;
}
return;
}

@@ -682,11 +697,16 @@ private void SetPool2PoolInterconnections(Random rand, ReservoirSettings.PoolsIn
/// </summary>
public ReservoirStat CollectStatistics()
{
ReservoirStat stats = new ReservoirStat(InstanceDefinition.InstanceName, InstanceDefinition.Settings.SettingsName);
ReservoirStat stats = new ReservoirStat(InstanceDefinition.InstanceName,
InstanceDefinition.Settings.SettingsName,
Size,
NumOfOutputPredictors,
NumOfInternalSynapses
);
int poolID = 0;
foreach (PoolSettings poolSettings in InstanceDefinition.Settings.PoolSettingsCollection)
{
ReservoirStat.PoolStat poolStat = new ReservoirStat.PoolStat(poolSettings);
ReservoirStat.PoolStat poolStat = new ReservoirStat.PoolStat(poolSettings, _poolNeuronsCollection[poolID].Length);
//Neurons statistics
foreach (INeuron neuron in _poolNeuronsCollection[poolID])
{
@@ -927,6 +947,18 @@ public class ReservoirStat
/// </summary>
public string ReservoirSettingsName { get; }
/// <summary>
/// Total number of neurons within the reservoir
/// </summary>
public int TotalNumOfNeurons { get; }
/// <summary>
/// Total number of predictors
/// </summary>
public int TotalNumOfPredictors { get; }
/// <summary>
/// Total number of internal synapses
/// </summary>
public int TotalNumOfInternalSynapses { get; }
/// <summary>
/// Collection of reservoir pools stats
/// </summary>
public List<PoolStat> PoolStatCollection { get; }
@@ -937,10 +969,21 @@ public class ReservoirStat
/// </summary>
/// <param name="reservoirInstanceName">Name of the reservoir instance</param>
/// <param name="reservoirSettingsName">Name of the reservoir configuration settings</param>
public ReservoirStat(string reservoirInstanceName, string reservoirSettingsName)
/// <param name="numOfNeurons">Total number of neurons</param>
/// <param name="numOfPredictors">Total number of predictors</param>
/// <param name="numOfInternalSynapses">Total number of synapses</param>
public ReservoirStat(string reservoirInstanceName,
string reservoirSettingsName,
int numOfNeurons,
int numOfPredictors,
int numOfInternalSynapses
)
{
ReservoirInstanceName = reservoirInstanceName;
ReservoirSettingsName = reservoirSettingsName;
TotalNumOfNeurons = numOfNeurons;
TotalNumOfPredictors = numOfPredictors;
TotalNumOfInternalSynapses = numOfInternalSynapses;
PoolStatCollection = new List<PoolStat>();
return;
}
@@ -957,6 +1000,11 @@ public class PoolStat
/// </summary>
public string PoolName { get; }

/// <summary>
/// Number of neurons within the pool
/// </summary>
public int NumOfNeurons { get; }

/// <summary>
/// Collection of the neuron group statistics
/// </summary>
@@ -977,9 +1025,13 @@ public class PoolStat
/// Creates an uninitialized instance
/// </summary>
/// <param name="poolSettings">Settings of the neuron pool</param>
public PoolStat(PoolSettings poolSettings)
/// <param name="numOfNeurons">Number of neurons within the pool</param>
public PoolStat(PoolSettings poolSettings,
int numOfNeurons
)
{
PoolName = poolSettings.Name;
NumOfNeurons = numOfNeurons;
NeuronGroupStatCollection = new NeuronGroupStat[poolSettings.NeuronGroups.Count];
for(int i = 0; i < poolSettings.NeuronGroups.Count; i++)
{
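The Reservoir.cs interconnection hunk above counts the synapses created inside a Parallel.For by giving every source neuron its own slot in a counter array and summing the slots after the loop, so no shared counter needs locking. A small self-contained illustration of that pattern (toy workload, not the actual reservoir wiring):

using System;
using System.Threading.Tasks;

class ParallelCountSketch
{
    static void Main()
    {
        int numOfSources = 8;
        int[] counters = new int[numOfSources]; // one slot per parallel index

        Parallel.For(0, numOfSources, sourceIdx =>
        {
            // Each iteration writes only counters[sourceIdx], so there is no data race.
            Random rand = new Random(sourceIdx); // per-index RNG, mirroring randFarm[]
            int created = rand.Next(1, 5);       // stands in for synapses actually added
            counters[sourceIdx] += created;
        });

        // Sum the per-source counts into the total (done after the parallel loop).
        int total = 0;
        foreach (int count in counters) total += count;
        Console.WriteLine($"Total synapses: {total}");
    }
}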
1 change: 1 addition & 0 deletions RCNet/Neural/Network/SM/Readout/ReadoutLayer.cs
@@ -445,6 +445,7 @@ public string GetTrainingResultsReport(int margin)
sb.Append(leftMargin + $" Smallest error: {ces.NatPrecissionErrStat.Min.ToString(CultureInfo.InvariantCulture)}" + Environment.NewLine);
sb.Append(leftMargin + $" Average error: {ces.NatPrecissionErrStat.ArithAvg.ToString(CultureInfo.InvariantCulture)}" + Environment.NewLine);
}
sb.Append(Environment.NewLine);
}
return sb.ToString();
}
