Commit cb98ab0

Removed unnecessary StdHandlers module and cleaned up the source code.
okozelsk committed Jun 5, 2023
1 parent 56d4455 commit cb98ab0
Showing 14 changed files with 174 additions and 141 deletions.
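
Two refactorings recur in the diff below: copy-constructor calls of the form new BasicStat(source.X) are replaced by source.X.DeepClone(), and the unused stdTrainingInputs parameter is threaded out of the RandomizeWeights call chain in RVFLPreprocessor.cs. The sketch below illustrates the cloning pattern; BasicStat's internals are not part of this commit, so its fields and constructor bodies here are placeholder assumptions.

    // Sketch only: BasicStat's real fields are not shown in this commit.
    public class BasicStat
    {
        private double _sum;           // placeholder internal state
        private int _numOfSamples;     // placeholder internal state

        public BasicStat() { }

        // The copy constructor that call sites invoked before this commit.
        public BasicStat(BasicStat source)
        {
            _sum = source._sum;
            _numOfSamples = source._numOfSamples;
        }

        // DeepClone() keeps the cloning knowledge inside the type itself,
        // so call sites no longer have to name the concrete type they copy.
        public BasicStat DeepClone()
        {
            return new BasicStat(this);
        }
    }

A call site therefore changes from
    SamplesStat = new BasicStat(source.SamplesStat);
to
    SamplesStat = source.SamplesStat.DeepClone();
which also matches the DeepClone() methods that Layer and Pool already expose.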
EasyMLCore/Data/FeatureFilter/FeatureFilterBase.cs (2 changes: 1 addition & 1 deletion)

@@ -75,7 +75,7 @@ protected FeatureFilterBase(FeatureFilterBase source)
 {
 ValueType = source.ValueType;
 Use = source.Use;
-SamplesStat = new BasicStat(source.SamplesStat);
+SamplesStat = source.SamplesStat.DeepClone();
 return;
 }

EasyMLCore/Data/TaskErrStat/CategoricalErrStat.cs (6 changes: 3 additions & 3 deletions)

@@ -68,9 +68,9 @@ public CategoricalErrStat(IComputableTaskSpecific computableUnit, SampleDataset
 public CategoricalErrStat(CategoricalErrStat source)
 : base(source)
 {
-ClassificationLogLossStat = new BasicStat(source.ClassificationLogLossStat);
-WrongClassificationStat = new BasicStat(source.WrongClassificationStat);
-LowProbabilityClassificationStat = new BasicStat(source.LowProbabilityClassificationStat);
+ClassificationLogLossStat = source.ClassificationLogLossStat.DeepClone();
+WrongClassificationStat = source.WrongClassificationStat.DeepClone();
+LowProbabilityClassificationStat = source.LowProbabilityClassificationStat.DeepClone();
 return;
 }

EasyMLCore/Data/TaskErrStat/MultipleDecisionErrStat.cs (8 changes: 4 additions & 4 deletions)

@@ -91,10 +91,10 @@ public MultipleDecisionErrStat(MultipleDecisionErrStat source)
 FeatureBinDecisionStats[i] = new SingleDecisionErrStat(source.FeatureBinDecisionStats[i]);
 }
 TotalBinFalseFlagStat = new BasicStat[2];
-TotalBinFalseFlagStat[0] = new BasicStat(source.TotalBinFalseFlagStat[0]);
-TotalBinFalseFlagStat[1] = new BasicStat(source.TotalBinFalseFlagStat[1]);
-TotalBinWrongDecisionStat = new BasicStat(source.TotalBinWrongDecisionStat);
-TotalBinLogLossStat = new BasicStat(source.TotalBinLogLossStat);
+TotalBinFalseFlagStat[0] = source.TotalBinFalseFlagStat[0].DeepClone();
+TotalBinFalseFlagStat[1] = source.TotalBinFalseFlagStat[1].DeepClone();
+TotalBinWrongDecisionStat = source.TotalBinWrongDecisionStat.DeepClone();
+TotalBinLogLossStat = source.TotalBinLogLossStat.DeepClone();
 return;
 }

EasyMLCore/Data/TaskErrStat/MultiplePrecisionErrStat.cs (2 changes: 1 addition & 1 deletion)

@@ -69,7 +69,7 @@ public MultiplePrecisionErrStat(MultiplePrecisionErrStat source)
 {
 FeaturePrecisionStats[i] = new SinglePrecisionErrStat(source.FeaturePrecisionStats[i]);
 }
-TotalPrecisionStat = new BasicStat(source.TotalPrecisionStat);
+TotalPrecisionStat = source.TotalPrecisionStat.DeepClone();
 return;
 }

EasyMLCore/Data/TaskErrStat/SingleDecisionErrStat.cs (10 changes: 5 additions & 5 deletions)

@@ -86,12 +86,12 @@ public SingleDecisionErrStat(IComputableTaskSpecific computableUnit, SampleDatas
 public SingleDecisionErrStat(SingleDecisionErrStat source)
 : base(source)
 {
-IdealStat = new BasicStat(source.IdealStat);
+IdealStat = source.IdealStat.DeepClone();
 FalseFlagStat = new BasicStat[2];
-FalseFlagStat[0] = new BasicStat(source.FalseFlagStat[0]);
-FalseFlagStat[1] = new BasicStat(source.FalseFlagStat[1]);
-WrongDecisionStat = new BasicStat(source.WrongDecisionStat);
-LogLossStat = new BasicStat(source.LogLossStat);
+FalseFlagStat[0] = source.FalseFlagStat[0].DeepClone();
+FalseFlagStat[1] = source.FalseFlagStat[1].DeepClone();
+WrongDecisionStat = source.WrongDecisionStat.DeepClone();
+LogLossStat = source.LogLossStat.DeepClone();
 return;
 }

EasyMLCore/Data/TaskErrStat/SinglePrecisionErrStat.cs (2 changes: 1 addition & 1 deletion)

@@ -57,7 +57,7 @@ public SinglePrecisionErrStat(IComputableTaskSpecific computableUnit, SampleData
 public SinglePrecisionErrStat(SinglePrecisionErrStat source)
 : base(source)
 {
-FeaturePrecisionStat = new BasicStat(source.FeaturePrecisionStat);
+FeaturePrecisionStat = source.FeaturePrecisionStat.DeepClone();
 return;
 }

EasyMLCore/MLP/Model/MLPModelConfidenceMetrics.cs (2 changes: 1 addition & 1 deletion)

@@ -77,7 +77,7 @@ BasicStat featureConfidencesStat
 CategoricalAccuracy = categoricalAccuracy;
 BinaryAccuracy = binaryAccuracy;
 FeatureConfidences = (double[])featureConfidences.Clone();
-FeatureConfidencesStat = new BasicStat(featureConfidencesStat);
+FeatureConfidencesStat = featureConfidencesStat.DeepClone();
 return;
 }

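Note the line left untouched just above the edit: FeatureConfidences = (double[])featureConfidences.Clone(). Array.Clone() makes a shallow copy, and for an array of value-type elements such as double a shallow copy is already fully independent, so only the reference-type BasicStat objects need DeepClone(). A short illustration (not code from this repository):

    double[] a = { 1.0, 2.0 };
    double[] b = (double[])a.Clone();  // shallow copy of value-type elements
    b[0] = 9.0;                        // a[0] is still 1.0; the arrays are independent
    // With reference-type elements, Clone() would share the element objects,
    // which is why the stat objects get an explicit DeepClone() instead.
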
EasyMLCore/MLP/Model/Network/MLPEngine.cs (4 changes: 2 additions & 2 deletions)

@@ -90,8 +90,8 @@ public MLPEngine(MLPEngine source)
 LayerCollection.Add(layer.DeepClone());
 }
 _flatWeights = (double[])source._flatWeights.Clone();
-HLWeightsStat = new BasicStat(source.HLWeightsStat);
-OLWeightsStat = new BasicStat(source.OLWeightsStat);
+HLWeightsStat = source.HLWeightsStat.DeepClone();
+OLWeightsStat = source.OLWeightsStat.DeepClone();
 return;
 }

EasyMLCore/MLP/Model/RVFL/RVFLPreprocessor.cs (35 changes: 16 additions & 19 deletions)

@@ -121,8 +121,8 @@ public RVFLPreprocessor(RVFLPreprocessor source)
 {
 _inputFilters[i] = source._inputFilters[i].DeepClone();
 }
-WeightsStat = new BasicStat(source.WeightsStat);
-BiasesStat = new BasicStat(source.BiasesStat);
+WeightsStat = source.WeightsStat.DeepClone();
+BiasesStat = source.BiasesStat.DeepClone();
 Initialized = source.Initialized;
 return;
 }
@@ -185,15 +185,14 @@ RVFLModelConfig modelCfg
 /// <summary>
 /// Randomizes internal weights.
 /// </summary>
-/// <param name="stdTrainingInputs">Standardized training input data.</param>
 /// <param name="rand">The random generator to be used.</param>
-private void RandomizeWeights(double[][] stdTrainingInputs, Random rand)
+private void RandomizeWeights(Random rand)
 {
 WeightsStat.Reset();
 BiasesStat.Reset();
 foreach (Layer layer in LayerCollection)
 {
-layer.RandomizeWeights(stdTrainingInputs, _flatWeights, rand);
+layer.RandomizeWeights(_flatWeights, rand);
 WeightsStat.Merge(layer.WeightsStat);
 BiasesStat.Merge(layer.BiasesStat);
 }
@@ -295,7 +294,7 @@ public SampleDataset Init(SampleDataset trainingData,
 }
 });
 //New weights
-RandomizeWeights(stdInputs, rand);
+RandomizeWeights(rand);
 //Activation statistics
 BasicStat[][][] activationStats = new BasicStat[LayerCollection.Count][][];
 BasicStat[][] weightStats = new BasicStat[LayerCollection.Count][];
@@ -312,8 +311,8 @@
 {
 activationStats[i][j][k] = new BasicStat();
 }
-weightStats[i][j] = new BasicStat(LayerCollection[i].Pools[j].WeightsStat);
-biasStats[i][j] = new BasicStat(LayerCollection[i].Pools[j].BiasesStat);
+weightStats[i][j] = LayerCollection[i].Pools[j].WeightsStat.DeepClone();
+biasStats[i][j] = LayerCollection[i].Pools[j].BiasesStat.DeepClone();
 }
 }
 //Output
@@ -421,8 +420,8 @@ internal Layer(Layer source)
 NumOfLayerNeurons = source.NumOfLayerNeurons;
 NumOfLayerWeights = source.NumOfLayerWeights;
 NumOfPredictors = source.NumOfPredictors;
-WeightsStat = new BasicStat(source.WeightsStat);
-BiasesStat = new BasicStat(source.BiasesStat);
+WeightsStat = source.WeightsStat.DeepClone();
+BiasesStat = source.BiasesStat.DeepClone();
 return;
 }

@@ -474,16 +473,15 @@ internal Layer DeepClone()
 /// <summary>
 /// Randomly initializes layer weights.
 /// </summary>
-/// <param name="stdTrainingInputs">Standardized training input data.</param>
 /// <param name="flatWeights">RVFL's weights in a flat structure.</param>
 /// <param name="rand">Random generator to be used.</param>
-internal void RandomizeWeights(double[][] stdTrainingInputs, double[] flatWeights, Random rand)
+internal void RandomizeWeights(double[] flatWeights, Random rand)
 {
 WeightsStat.Reset();
 BiasesStat.Reset();
 foreach (Pool pool in Pools)
 {
-pool.RandomizeWeights(stdTrainingInputs, flatWeights, rand);
+pool.RandomizeWeights(flatWeights, rand);
 WeightsStat.Merge(pool.WeightsStat);
 BiasesStat.Merge(pool.BiasesStat);
 }
@@ -571,8 +569,8 @@ public class Pool
 public BasicStat BiasesStat { get; }

 //Attributes
-private double _scaleFactorW;
-private double _scaleFactorB;
+private readonly double _scaleFactorW;
+private readonly double _scaleFactorB;

 //Constructor
 /// <summary>
@@ -591,8 +589,8 @@ internal Pool(Pool source)
 BiasesStartFlatIdx = source.BiasesStartFlatIdx;
 NeuronsStartFlatIdx = source.NeuronsStartFlatIdx;
 NumOfAllWeights = source.NumOfAllWeights;
-WeightsStat = new BasicStat(source.WeightsStat);
-BiasesStat = new BasicStat(source.BiasesStat);
+WeightsStat = source.WeightsStat.DeepClone();
+BiasesStat = source.BiasesStat.DeepClone();
 _scaleFactorW = source._scaleFactorW;
 _scaleFactorB = source._scaleFactorB;
 return;
@@ -644,10 +642,9 @@ internal Pool DeepClone()
 /// <summary>
 /// Randomly initializes pool weights.
 /// </summary>
-/// <param name="stdTrainingInputs">Standardized training input data.</param>
 /// <param name="flatWeights">RVFL's weights in a flat structure.</param>
 /// <param name="rand">Random generator to be used.</param>
-internal void RandomizeWeights(double[][] stdTrainingInputs, double[] flatWeights, Random rand)
+internal void RandomizeWeights(double[] flatWeights, Random rand)
 {
 double[] wBuff = new double[NumOfInputNodes * NumOfNeurons];
 double[] bBuff = new double[NumOfNeurons];

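Taken together, the RandomizeWeights hunks in this file remove the stdTrainingInputs parameter from all three levels of the chain. Condensed for readability, the resulting flow looks like this (a sketch assembled from the hunks above, not a verbatim excerpt; Pool.RandomizeWeights continues past the point where the diff is cut off):

    // RVFLPreprocessor: randomizes all layers and aggregates their statistics.
    private void RandomizeWeights(Random rand)
    {
        WeightsStat.Reset();
        BiasesStat.Reset();
        foreach (Layer layer in LayerCollection)
        {
            layer.RandomizeWeights(_flatWeights, rand);
            WeightsStat.Merge(layer.WeightsStat);
            BiasesStat.Merge(layer.BiasesStat);
        }
    }

    // Layer: delegates to its pools and aggregates their statistics.
    internal void RandomizeWeights(double[] flatWeights, Random rand)
    {
        WeightsStat.Reset();
        BiasesStat.Reset();
        foreach (Pool pool in Pools)
        {
            pool.RandomizeWeights(flatWeights, rand);
            WeightsStat.Merge(pool.WeightsStat);
            BiasesStat.Merge(pool.BiasesStat);
        }
    }

    // Pool: generates its slice of the flat weight vector.
    internal void RandomizeWeights(double[] flatWeights, Random rand)
    {
        double[] wBuff = new double[NumOfInputNodes * NumOfNeurons];
        double[] bBuff = new double[NumOfNeurons];
        // ...weight generation continues beyond the visible part of the diff...
    }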
