From 4c97a0de7e0e4d96843cdcd27e426b300d805a5e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Old=C5=99ich=20Ko=C5=BEelsk=C3=BD?= Date: Sun, 3 Jan 2021 19:13:44 +0100 Subject: [PATCH] The redesign of the Readout Layer, introduction of the Cluster Chain as the standard computation unit for the Readout Unit and One Takes All group. Enhanced data bundle folderization. Enhanced and revised code comments. --- Demo/DemoConsoleApp/Examples/ExampleBase.cs | 70 +- Demo/DemoConsoleApp/Examples/FFNetBoolAlg.cs | 20 +- .../LibrasClassificationESNDesigner.cs | 10 +- .../LibrasClassificationLSMDesigner.cs | 37 +- .../LibrasClassificationNPBypassedDesigner.cs | 19 +- .../Examples/TTOOForecastDesigner.cs | 15 +- .../Examples/TTOOForecastFromScratch.cs | 120 +- Demo/DemoConsoleApp/Log/ConsoleLog.cs | 12 +- Demo/DemoConsoleApp/Log/IOutputLog.cs | 6 +- Demo/DemoConsoleApp/Playground.cs | 262 ++- Demo/DemoConsoleApp/Program.cs | 6 +- Demo/DemoConsoleApp/SM/SMDemo.cs | 91 +- Demo/DemoConsoleApp/SM/SMDemoSettings.cs | 27 +- Demo/DemoConsoleApp/SMDemoSettings.xml | 839 +++++---- Demo/DemoConsoleApp/TimeSeriesGenerator.cs | 61 +- RCNet/CsvTools/CsvDataHolder.cs | 67 +- RCNet/CsvTools/DelimitedStringValues.cs | 158 +- .../Imgs/StateMachine_EntityRelationship.png | Bin 47644 -> 53285 bytes RCNet/Extensions/ArrayExtensions.cs | 126 +- RCNet/Extensions/DoubleArrayExtensions.cs | 40 +- RCNet/Extensions/DoubleExtensions.cs | 24 +- RCNet/Extensions/IntArrayExtensions.cs | 20 +- RCNet/Extensions/RandomExtensions.cs | 374 ++-- RCNet/Extensions/StringExtensions.cs | 94 +- RCNet/MathTools/BasicStat.cs | 309 ++-- RCNet/MathTools/BinDistribution.cs | 106 +- RCNet/MathTools/BinErrStat.cs | 87 +- RCNet/MathTools/Bitwise.cs | 111 +- RCNet/MathTools/Combinatorics.cs | 16 +- .../Differential/MovingDifferentiator.cs | 125 -- RCNet/MathTools/Differential/ODENumSolver.cs | 69 +- RCNet/MathTools/Discrete.cs | 54 +- RCNet/MathTools/EuclideanDistance.cs | 32 +- RCNet/MathTools/FnPoint.cs | 49 + 
RCNet/MathTools/Hurst/HurstExpEstim.cs | 58 +- .../{RescalledRange.cs => RescaledRange.cs} | 33 +- RCNet/MathTools/Interval.cs | 210 +-- RCNet/MathTools/LinearFit.cs | 53 +- RCNet/MathTools/MatrixMath/EVD.cs | 27 +- RCNet/MathTools/MatrixMath/LUD.cs | 37 +- RCNet/MathTools/MatrixMath/Matrix.cs | 429 ++--- RCNet/MathTools/MatrixMath/QRD.cs | 57 +- RCNet/MathTools/MatrixMath/SVD.cs | 53 +- RCNet/MathTools/MovingDataWindow.cs | 98 +- .../{PS/ParamSeeker.cs => ParamValFinder.cs} | 46 +- ...rSettings.cs => ParamValFinderSettings.cs} | 63 +- RCNet/MathTools/PhysUnit.cs | 207 +-- RCNet/MathTools/Point2D.cs | 47 - RCNet/MathTools/Probability/PMixer.cs | 39 +- ...reSelector.cs => ProbabilisticSelector.cs} | 49 +- RCNet/MathTools/VectorMath/Vector.cs | 111 +- RCNet/MathTools/WeightedAvg.cs | 98 +- RCNet/Neural/Activation/AFAnalogBase.cs | 42 +- .../Neural/Activation/AFAnalogBentIdentity.cs | 8 +- .../AFAnalogBentIdentitySettings.cs | 14 +- RCNet/Neural/Activation/AFAnalogElliot.cs | 12 +- .../Activation/AFAnalogElliotSettings.cs | 29 +- RCNet/Neural/Activation/AFAnalogGaussian.cs | 8 +- .../Activation/AFAnalogGaussianSettings.cs | 14 +- RCNet/Neural/Activation/AFAnalogISRU.cs | 12 +- .../Neural/Activation/AFAnalogISRUSettings.cs | 31 +- RCNet/Neural/Activation/AFAnalogIdentity.cs | 8 +- .../Activation/AFAnalogIdentitySettings.cs | 14 +- RCNet/Neural/Activation/AFAnalogLeakyReLU.cs | 8 +- .../Activation/AFAnalogLeakyReLUSettings.cs | 23 +- RCNet/Neural/Activation/AFAnalogSQNL.cs | 4 +- .../Neural/Activation/AFAnalogSQNLSettings.cs | 10 +- RCNet/Neural/Activation/AFAnalogSigmoid.cs | 4 +- .../Activation/AFAnalogSigmoidSettings.cs | 14 +- RCNet/Neural/Activation/AFAnalogSinc.cs | 4 +- .../Neural/Activation/AFAnalogSincSettings.cs | 14 +- RCNet/Neural/Activation/AFAnalogSinusoid.cs | 4 +- .../Activation/AFAnalogSinusoidSettings.cs | 14 +- .../Activation/AFAnalogSoftExponential.cs | 6 +- .../AFAnalogSoftExponentialSettings.cs | 23 +- RCNet/Neural/Activation/AFAnalogSoftMax.cs | 
10 +- .../Activation/AFAnalogSoftMaxSettings.cs | 14 +- RCNet/Neural/Activation/AFAnalogSoftPlus.cs | 4 +- .../Activation/AFAnalogSoftPlusSettings.cs | 14 +- RCNet/Neural/Activation/AFAnalogTanH.cs | 4 +- .../Neural/Activation/AFAnalogTanHSettings.cs | 14 +- RCNet/Neural/Activation/AFSpikingAdExpIF.cs | 64 +- .../Activation/AFSpikingAdExpIFSettings.cs | 188 ++- .../AFSpikingAutoIzhikevichIFSettings.cs | 50 +- RCNet/Neural/Activation/AFSpikingBase.cs | 70 +- RCNet/Neural/Activation/AFSpikingExpIF.cs | 32 +- .../Activation/AFSpikingExpIFSettings.cs | 154 +- .../Activation/AFSpikingIzhikevichIF.cs | 30 +- .../AFSpikingIzhikevichIFSettings.cs | 136 +- RCNet/Neural/Activation/AFSpikingLeakyIF.cs | 26 +- .../Activation/AFSpikingLeakyIFSettings.cs | 118 +- RCNet/Neural/Activation/AFSpikingODE.cs | 44 +- RCNet/Neural/Activation/AFSpikingSimpleIF.cs | 18 +- .../Activation/AFSpikingSimpleIFSettings.cs | 84 +- RCNet/Neural/Activation/ActivationCommon.cs | 16 +- RCNet/Neural/Activation/ActivationFactory.cs | 55 +- RCNet/Neural/Activation/IActivation.cs | 10 +- .../Neural/Activation/IActivationSettings.cs | 2 +- .../Coders/AnalogToSpiking/A2SCoderBase.cs | 40 +- .../AnalogToSpiking/A2SCoderDownDirArrows.cs | 27 +- .../A2SCoderDownDirArrowsSettings.cs | 30 +- .../Coders/AnalogToSpiking/A2SCoderFactory.cs | 36 +- .../A2SCoderGaussianReceptors.cs | 23 +- .../A2SCoderGaussianReceptorsSettings.cs | 26 +- .../AnalogToSpiking/A2SCoderSignalStrength.cs | 36 +- .../A2SCoderSignalStrengthSettings.cs | 18 +- .../AnalogToSpiking/A2SCoderUpDirArrows.cs | 27 +- .../A2SCoderUpDirArrowsSettings.cs | 26 +- RCNet/Neural/Data/Filter/BinFeatureFilter.cs | 17 +- .../Data/Filter/BinFeatureFilterSettings.cs | 12 +- RCNet/Neural/Data/Filter/EnumFeatureFIlter.cs | 75 - .../Data/Filter/EnumFeatureFilterSettings.cs | 100 -- RCNet/Neural/Data/Filter/FeatureFilterBase.cs | 42 +- .../Data/Filter/FeatureFilterFactory.cs | 46 +- .../Data/Filter/IFeatureFilterSettings.cs | 12 +- 
RCNet/Neural/Data/Filter/RealFeatureFilter.cs | 28 +- .../Data/Filter/RealFeatureFilterSettings.cs | 28 +- .../Data/Generators/GeneratorFactory.cs | 23 +- RCNet/Neural/Data/Generators/IGenerator.cs | 6 +- .../Data/Generators/MackeyGlassGenerator.cs | 17 +- .../MackeyGlassGeneratorSettings.cs | 38 +- .../Neural/Data/Generators/PulseGenerator.cs | 37 +- .../Data/Generators/PulseGeneratorSettings.cs | 26 +- .../Neural/Data/Generators/RandomGenerator.cs | 20 +- .../Data/Generators/SinusoidalGenerator.cs | 15 +- .../Generators/SinusoidalGeneratorSettings.cs | 38 +- RCNet/Neural/Data/InputPattern.cs | 59 +- RCNet/Neural/Data/ResultBundle.cs | 31 +- .../Data/Transformers/CDivTransformer.cs | 19 +- .../Transformers/CDivTransformerSettings.cs | 24 +- .../Data/Transformers/DiffTransformer.cs | 13 +- .../Transformers/DiffTransformerSettings.cs | 46 +- .../Data/Transformers/DivTransformer.cs | 19 +- .../Transformers/DivTransformerSettings.cs | 20 +- .../Data/Transformers/ExpTransformer.cs | 18 +- .../Transformers/ExpTransformerSettings.cs | 24 +- .../Neural/Data/Transformers/ITransformer.cs | 8 +- .../Data/Transformers/LinearTransformer.cs | 18 +- .../Transformers/LinearTransformerSettings.cs | 36 +- .../Data/Transformers/LogTransformer.cs | 18 +- .../Transformers/LogTransformerSettings.cs | 24 +- .../Data/Transformers/MWStatTransformer.cs | 24 +- .../Transformers/MWStatTransformerSettings.cs | 52 +- .../Data/Transformers/MulTransformer.cs | 18 +- .../Transformers/MulTransformerSettings.cs | 20 +- .../Data/Transformers/PowerTransformer.cs | 20 +- .../Transformers/PowerTransformerSettings.cs | 32 +- .../Data/Transformers/TransformerFactory.cs | 72 +- .../Transformers/YeoJohnsonTransformer.cs | 29 +- .../YeoJohnsonTransformerSettings.cs | 20 +- RCNet/Neural/Data/VectorBundle.cs | 119 +- .../NonRecurrent/CrossvalidationSettings.cs | 38 +- .../NonRecurrent/FF/ElasticRegrTrainer.cs | 46 +- .../FF/ElasticRegrTrainerSettings.cs | 30 +- .../NonRecurrent/FF/FeedForwardNetwork.cs | 132 
+- .../FF/FeedForwardNetworkSettings.cs | 31 +- .../NonRecurrent/FF/HiddenLayerSettings.cs | 24 +- .../NonRecurrent/FF/HiddenLayersSettings.cs | 22 +- .../Network/NonRecurrent/FF/QRDRegrTrainer.cs | 40 +- .../NonRecurrent/FF/QRDRegrTrainerSettings.cs | 63 +- .../Network/NonRecurrent/FF/RPropTrainer.cs | 41 +- .../NonRecurrent/FF/RPropTrainerSettings.cs | 94 +- .../NonRecurrent/FF/RidgeRegrTrainer.cs | 40 +- .../FF/RidgeRegrTrainerSettings.cs | 53 +- .../NonRecurrent/INonRecurrentNetwork.cs | 54 +- .../INonRecurrentNetworkSettings.cs | 2 +- .../INonRecurrentNetworkTrainer.cs | 20 +- .../ITNRNetClusterChainSettings.cs | 32 + .../NonRecurrent/ITNRNetClusterSettings.cs | 65 + .../NetworkClusterSecondLevelCompSettings.cs | 130 -- .../NonRecurrent/NonRecurrentNetUtils.cs | 247 ++- .../NonRecurrent/PP/PDeltaRuleTrainer.cs | 36 +- .../PP/PDeltaRuleTrainerSettings.cs | 62 +- .../NonRecurrent/PP/ParallelPerceptron.cs | 49 +- .../PP/ParallelPerceptronSettings.cs | 35 +- RCNet/Neural/Network/NonRecurrent/TNRNet.cs | 176 ++ .../Network/NonRecurrent/TNRNetBuilder.cs | 521 ++++++ .../Network/NonRecurrent/TNRNetCluster.cs | 447 +++++ .../NonRecurrent/TNRNetClusterBuilder.cs | 134 ++ .../NonRecurrent/TNRNetClusterChain.cs | 114 ++ .../NonRecurrent/TNRNetClusterChainBuilder.cs | 176 ++ ...TNRNetClusterChainProbabilisticSettings.cs | 114 ++ .../TNRNetClusterChainRealSettings.cs | 114 ++ .../TNRNetClusterChainSingleBoolSettings.cs | 114 ++ ...NetClusterProbabilisticNetworksSettings.cs | 140 ++ .../TNRNetClusterProbabilisticSettings.cs | 132 ++ ...RNetClusterProbabilisticWeightsSettings.cs | 253 +++ .../TNRNetClusterRealNetworksSettings.cs | 131 ++ .../NonRecurrent/TNRNetClusterRealSettings.cs | 132 ++ .../TNRNetClusterRealWeightsSettings.cs | 200 +++ ...TNRNetClusterSingleBoolNetworksSettings.cs | 119 ++ .../TNRNetClusterSingleBoolSettings.cs | 132 ++ .../TNRNetClusterSingleBoolWeightsSettings.cs | 253 +++ .../TNRNetClustersProbabilisticSettings.cs | 123 ++ 
.../TNRNetClustersRealSettings.cs | 123 ++ .../TNRNetClustersSingleBoolSettings.cs | 123 ++ .../Network/NonRecurrent/TrainedNetwork.cs | 134 -- .../NonRecurrent/TrainedNetworkBuilder.cs | 502 ------ .../NonRecurrent/TrainedNetworkCluster.cs | 585 ------- .../TrainedNetworkClusterBuilder.cs | 146 -- .../NonRecurrent/TrainedOneTakesAllNetwork.cs | 105 -- .../TrainedOneTakesAllNetworkBuilder.cs | 486 ------ .../TrainedOneTakesAllNetworkCluster.cs | 247 --- ...TrainedOneTakesAllNetworkClusterBuilder.cs | 125 -- .../SM/PM/AllowedInputFieldSettings.cs | 16 +- .../SM/PM/AllowedInputFieldsSettings.cs | 30 +- .../Network/SM/PM/AllowedPoolSettings.cs | 18 +- .../Network/SM/PM/AllowedPoolsSettings.cs | 32 +- .../Network/SM/PM/AllowedPredictorSettings.cs | 18 +- .../SM/PM/AllowedPredictorsSettings.cs | 30 +- RCNet/Neural/Network/SM/PM/MapperSettings.cs | 36 +- .../Network/SM/PM/ReadoutUnitMapSettings.cs | 58 +- .../Input/ExternalFieldSettings.cs | 26 +- .../Input/ExternalFieldsSettings.cs | 36 +- .../Input/FeedingContinuousSettings.cs | 22 +- .../Input/FeedingPatternedSettings.cs | 51 +- .../Input/GeneratedFieldSettings.cs | 32 +- .../Input/GeneratedFieldsSettings.cs | 35 +- .../Preprocessing/Input/IFeedingSettings.cs | 2 +- .../SM/Preprocessing/Input/InputEncoder.cs | 196 ++- .../Input/InputEncoderSettings.cs | 32 +- .../SM/Preprocessing/Input/InputField.cs | 73 +- .../Preprocessing/Input/InputSpikesCoder.cs | 55 +- .../Input/InputSpikesCoderSettings.cs | 70 +- .../Preprocessing/Input/ResamplingSettings.cs | 46 +- .../Input/SteadyFieldSettings.cs | 24 +- .../Input/SteadyFieldsSettings.cs | 36 +- .../Input/TransformedFieldSettings.cs | 32 +- .../Input/TransformedFieldsSettings.cs | 47 +- .../Input/UnificationSettings.cs | 30 +- .../Input/VaryingFieldsSettings.cs | 58 +- .../SM/Preprocessing/NeuralPreprocessor.cs | 204 ++- .../NeuralPreprocessorSettings.cs | 40 +- .../Preprocessing/Neuron/AnalogInputNeuron.cs | 17 +- .../SM/Preprocessing/Neuron/HiddenNeuron.cs | 128 +- 
.../SM/Preprocessing/Neuron/INeuron.cs | 46 +- .../SM/Preprocessing/Neuron/NeuronCommon.cs | 27 +- .../SM/Preprocessing/Neuron/NeuronLocation.cs | 54 +- .../Preprocessing/Neuron/NeuronOutputData.cs | 23 +- .../Preprocessing/Neuron/NeuronStatistics.cs | 47 +- .../Neuron/Predictor/IPredictor.cs | 37 +- .../Neuron/Predictor/IPredictorSettings.cs | 14 +- .../Neuron/Predictor/PredictorActivation.cs | 15 +- .../PredictorActivationDiffLinWAvg.cs | 25 +- .../PredictorActivationDiffLinWAvgSettings.cs | 29 +- ...> PredictorActivationDiffRescaledRange.cs} | 27 +- ...torActivationDiffRescaledRangeSettings.cs} | 37 +- ...s => PredictorActivationDiffStatFigure.cs} | 31 +- ...dictorActivationDiffStatFigureSettings.cs} | 56 +- .../Predictor/PredictorActivationLinWAvg.cs | 25 +- .../PredictorActivationLinWAvgSettings.cs | 29 +- .../Predictor/PredictorActivationPower.cs | 15 +- .../PredictorActivationPowerSettings.cs | 30 +- ...cs => PredictorActivationRescaledRange.cs} | 27 +- ...edictorActivationRescaledRangeSettings.cs} | 37 +- .../Predictor/PredictorActivationSettings.cs | 15 +- ...re.cs => PredictorActivationStatFigure.cs} | 31 +- ... 
PredictorActivationStatFigureSettings.cs} | 58 +- .../Neuron/Predictor/PredictorDescriptor.cs | 26 +- .../Neuron/Predictor/PredictorFactory.cs | 54 +- .../Neuron/Predictor/PredictorFiringTrace.cs | 19 +- .../Predictor/PredictorFiringTraceSettings.cs | 39 +- .../Neuron/Predictor/PredictorsProvider.cs | 81 +- .../Predictor/PredictorsProviderSettings.cs | 27 +- .../Neuron/SpikingInputNeuron.cs | 12 +- .../Reservoir/InputConnSettings.cs | 34 +- .../Reservoir/InputConnsSettings.cs | 26 +- .../Reservoir/InterPoolConnSettings.cs | 36 +- .../Reservoir/InterPoolConnsSettings.cs | 26 +- .../Reservoir/Pool/ChainSchemaSettings.cs | 48 +- .../Pool/DoubleTwistedToroidSchemaSettings.cs | 48 +- .../Reservoir/Pool/EmptySchemaSettings.cs | 13 +- .../Pool/IInterconnSchemaSettings.cs | 4 +- .../Reservoir/Pool/InterconnSettings.cs | 30 +- .../NeuronGroup/AnalogNeuronGroupSettings.cs | 49 +- .../HomogenousExcitabilitySettings.cs | 38 +- .../Pool/NeuronGroup/INeuronGroupSettings.cs | 16 +- .../Pool/NeuronGroup/NeuronGroupsSettings.cs | 41 +- .../Pool/NeuronGroup/RetainmentSettings.cs | 22 +- .../NeuronGroup/SpikingNeuronGroupSettings.cs | 28 +- .../Reservoir/Pool/PoolSettings.cs | 35 +- .../Reservoir/Pool/RandomSchemaSettings.cs | 68 +- .../Preprocessing/Reservoir/PoolsSettings.cs | 36 +- .../Reservoir/ReservoirInstance.cs | 203 ++- .../Reservoir/ReservoirInstanceSettings.cs | 37 +- .../Reservoir/ReservoirInstancesSettings.cs | 34 +- .../Preprocessing/Reservoir/ReservoirStat.cs | 200 +-- .../Reservoir/ReservoirStructureSettings.cs | 26 +- .../Reservoir/ReservoirStructuresSettings.cs | 34 +- .../Reservoir/Space3D/CoordinatesSettings.cs | 42 +- .../Reservoir/Space3D/ProportionsSettings.cs | 26 +- .../Reservoir/Synapse/AnalogSourceSettings.cs | 22 +- .../ConstantDynamicsATIndifferentSettings.cs | 16 +- .../ConstantDynamicsATInputSettings.cs | 16 +- .../ConstantDynamicsSTExcitatorySettings.cs | 16 +- .../ConstantDynamicsSTInhibitorySettings.cs | 16 +- .../ConstantDynamicsSTInputSettings.cs | 
16 +- .../Synapse/ConstantDynamicsSettings.cs | 12 +- .../Reservoir/Synapse/ConstantEfficacy.cs | 6 +- .../Reservoir/Synapse/IDynamicsSettings.cs | 2 +- .../Reservoir/Synapse/IEfficacy.cs | 6 +- .../LinearDynamicsATIndifferentSettings.cs | 28 +- .../Synapse/LinearDynamicsATInputSettings.cs | 30 +- .../LinearDynamicsSTExcitatorySettings.cs | 28 +- .../LinearDynamicsSTInhibitorySettings.cs | 28 +- .../Synapse/LinearDynamicsSTInputSettings.cs | 28 +- .../Synapse/LinearDynamicsSettings.cs | 20 +- .../Reservoir/Synapse/LinearEfficacy.cs | 8 +- .../NonlinearDynamicsATIndifferentSettings.cs | 30 +- .../NonlinearDynamicsATInputSettings.cs | 28 +- .../NonlinearDynamicsSTExcitatorySettings.cs | 30 +- .../NonlinearDynamicsSTInhibitorySettings.cs | 30 +- .../NonlinearDynamicsSTInputSettings.cs | 30 +- .../Synapse/NonlinearDynamicsSettings.cs | 22 +- .../Reservoir/Synapse/NonlinearEfficacy.cs | 8 +- .../PlasticityATIndifferentSettings.cs | 15 +- .../Synapse/PlasticityATInputSettings.cs | 13 +- .../Reservoir/Synapse/PlasticityCommon.cs | 36 +- .../Synapse/PlasticitySTExcitatorySettings.cs | 14 +- .../Synapse/PlasticitySTInhibitorySettings.cs | 12 +- .../Synapse/PlasticitySTInputSettings.cs | 12 +- .../SpikingSourceATIndifferentSettings.cs | 28 +- .../Synapse/SpikingSourceATInputSettings.cs | 28 +- .../SpikingSourceSTExcitatorySettings.cs | 28 +- .../SpikingSourceSTInhibitorySettings.cs | 28 +- .../Synapse/SpikingSourceSTInputSettings.cs | 28 +- .../Reservoir/Synapse/Synapse.cs | 85 +- .../Synapse/SynapseATIndifferentSettings.cs | 40 +- .../Synapse/SynapseATInputSettings.cs | 40 +- .../Reservoir/Synapse/SynapseATSettings.cs | 36 +- .../Synapse/SynapseSTExcitatorySettings.cs | 48 +- .../Synapse/SynapseSTInhibitorySettings.cs | 48 +- .../Synapse/SynapseSTInputSettings.cs | 38 +- .../Reservoir/Synapse/SynapseSTSettings.cs | 30 +- .../Reservoir/Synapse/SynapseSettings.cs | 24 +- .../Readout/ClassificationNetworksSettings.cs | 124 -- .../SM/Readout/ClassificationTaskSettings.cs | 72 
+- .../Network/SM/Readout/ClusterSettings.cs | 137 -- .../Network/SM/Readout/CompositeResult.cs | 90 + .../SM/Readout/DefaultNetworksSettings.cs | 131 -- .../SM/Readout/ForecastNetworksSettings.cs | 129 -- .../SM/Readout/ForecastTaskSettings.cs | 73 +- .../Readout/IOneTakesAllDecisionSettings.cs | 23 + .../Network/SM/Readout/ITaskSettings.cs | 15 +- .../OneTakesAllBasicDecisionSettings.cs | 89 + ...OneTakesAllClusterChainDecisionSettings.cs | 148 ++ .../Network/SM/Readout/OneTakesAllGroup.cs | 231 +++ .../SM/Readout/OneTakesAllGroupSettings.cs | 120 ++ .../SM/Readout/OneTakesAllGroupsSettings.cs | 158 ++ .../SM/Readout/OneWinnerDecisionMaker.cs | 18 - .../Readout/OneWinnerDecisionMakerSettings.cs | 133 -- .../Network/SM/Readout/PredictorsMapper.cs | 39 +- .../Neural/Network/SM/Readout/ReadoutLayer.cs | 486 +++--- .../SM/Readout/ReadoutLayerSettings.cs | 183 +- .../Neural/Network/SM/Readout/ReadoutUnit.cs | 129 +- .../Network/SM/Readout/ReadoutUnitSettings.cs | 28 +- .../SM/Readout/ReadoutUnitsSettings.cs | 118 +- .../SM/Readout/TaskDefaultsSettings.cs | 119 ++ RCNet/Neural/Network/SM/StateMachine.cs | 358 ++-- .../Neural/Network/SM/StateMachineDesigner.cs | 345 ++-- .../Neural/Network/SM/StateMachineSettings.cs | 39 +- RCNet/Queue/SimpleQueue.cs | 77 +- RCNet/RCNet.csproj | 1 - RCNet/RCNetBaseSettings.cs | 44 +- RCNet/RCNetTypes.xsd | 1498 +++++++++++------ RCNet/RandomValue/ExponentialDistrSettings.cs | 18 +- RCNet/RandomValue/GammaDistrSettings.cs | 22 +- RCNet/RandomValue/GaussianDistrSettings.cs | 26 +- RCNet/RandomValue/IDistrSettings.cs | 2 +- RCNet/RandomValue/RandomCommon.cs | 34 +- RCNet/RandomValue/RandomValueSettings.cs | 104 +- .../RandomValue/UExponentialDistrSettings.cs | 18 +- RCNet/RandomValue/UGaussianDistrSettings.cs | 26 +- RCNet/RandomValue/URandomValueSettings.cs | 84 +- RCNet/RandomValue/UniformDistrSettings.cs | 14 +- RCNet/XmlTools/DocValidator.cs | 30 +- Readme.md | 171 +- 377 files changed, 14342 insertions(+), 11979 deletions(-) delete 
mode 100644 RCNet/MathTools/Differential/MovingDifferentiator.cs create mode 100644 RCNet/MathTools/FnPoint.cs rename RCNet/MathTools/Hurst/{RescalledRange.cs => RescaledRange.cs} (65%) rename RCNet/MathTools/{PS/ParamSeeker.cs => ParamValFinder.cs} (80%) rename RCNet/MathTools/{PS/ParamSeekerSettings.cs => ParamValFinderSettings.cs} (67%) delete mode 100644 RCNet/MathTools/Point2D.cs rename RCNet/MathTools/Probability/{RelShareSelector.cs => ProbabilisticSelector.cs} (69%) delete mode 100644 RCNet/Neural/Data/Filter/EnumFeatureFIlter.cs delete mode 100644 RCNet/Neural/Data/Filter/EnumFeatureFilterSettings.cs create mode 100644 RCNet/Neural/Network/NonRecurrent/ITNRNetClusterChainSettings.cs create mode 100644 RCNet/Neural/Network/NonRecurrent/ITNRNetClusterSettings.cs delete mode 100644 RCNet/Neural/Network/NonRecurrent/NetworkClusterSecondLevelCompSettings.cs create mode 100644 RCNet/Neural/Network/NonRecurrent/TNRNet.cs create mode 100644 RCNet/Neural/Network/NonRecurrent/TNRNetBuilder.cs create mode 100644 RCNet/Neural/Network/NonRecurrent/TNRNetCluster.cs create mode 100644 RCNet/Neural/Network/NonRecurrent/TNRNetClusterBuilder.cs create mode 100644 RCNet/Neural/Network/NonRecurrent/TNRNetClusterChain.cs create mode 100644 RCNet/Neural/Network/NonRecurrent/TNRNetClusterChainBuilder.cs create mode 100644 RCNet/Neural/Network/NonRecurrent/TNRNetClusterChainProbabilisticSettings.cs create mode 100644 RCNet/Neural/Network/NonRecurrent/TNRNetClusterChainRealSettings.cs create mode 100644 RCNet/Neural/Network/NonRecurrent/TNRNetClusterChainSingleBoolSettings.cs create mode 100644 RCNet/Neural/Network/NonRecurrent/TNRNetClusterProbabilisticNetworksSettings.cs create mode 100644 RCNet/Neural/Network/NonRecurrent/TNRNetClusterProbabilisticSettings.cs create mode 100644 RCNet/Neural/Network/NonRecurrent/TNRNetClusterProbabilisticWeightsSettings.cs create mode 100644 RCNet/Neural/Network/NonRecurrent/TNRNetClusterRealNetworksSettings.cs create mode 100644 
RCNet/Neural/Network/NonRecurrent/TNRNetClusterRealSettings.cs create mode 100644 RCNet/Neural/Network/NonRecurrent/TNRNetClusterRealWeightsSettings.cs create mode 100644 RCNet/Neural/Network/NonRecurrent/TNRNetClusterSingleBoolNetworksSettings.cs create mode 100644 RCNet/Neural/Network/NonRecurrent/TNRNetClusterSingleBoolSettings.cs create mode 100644 RCNet/Neural/Network/NonRecurrent/TNRNetClusterSingleBoolWeightsSettings.cs create mode 100644 RCNet/Neural/Network/NonRecurrent/TNRNetClustersProbabilisticSettings.cs create mode 100644 RCNet/Neural/Network/NonRecurrent/TNRNetClustersRealSettings.cs create mode 100644 RCNet/Neural/Network/NonRecurrent/TNRNetClustersSingleBoolSettings.cs delete mode 100644 RCNet/Neural/Network/NonRecurrent/TrainedNetwork.cs delete mode 100644 RCNet/Neural/Network/NonRecurrent/TrainedNetworkBuilder.cs delete mode 100644 RCNet/Neural/Network/NonRecurrent/TrainedNetworkCluster.cs delete mode 100644 RCNet/Neural/Network/NonRecurrent/TrainedNetworkClusterBuilder.cs delete mode 100644 RCNet/Neural/Network/NonRecurrent/TrainedOneTakesAllNetwork.cs delete mode 100644 RCNet/Neural/Network/NonRecurrent/TrainedOneTakesAllNetworkBuilder.cs delete mode 100644 RCNet/Neural/Network/NonRecurrent/TrainedOneTakesAllNetworkCluster.cs delete mode 100644 RCNet/Neural/Network/NonRecurrent/TrainedOneTakesAllNetworkClusterBuilder.cs rename RCNet/Neural/Network/SM/Preprocessing/Neuron/Predictor/{PredictorActivationRescalledRange.cs => PredictorActivationDiffRescaledRange.cs} (58%) rename RCNet/Neural/Network/SM/Preprocessing/Neuron/Predictor/{PredictorActivationDiffRescalledRangeSettings.cs => PredictorActivationDiffRescaledRangeSettings.cs} (71%) rename RCNet/Neural/Network/SM/Preprocessing/Neuron/Predictor/{PredictorActivationDiffStatFeature.cs => PredictorActivationDiffStatFigure.cs} (60%) rename RCNet/Neural/Network/SM/Preprocessing/Neuron/Predictor/{PredictorActivationDiffStatFeatureSettings.cs => PredictorActivationDiffStatFigureSettings.cs} (65%) 
rename RCNet/Neural/Network/SM/Preprocessing/Neuron/Predictor/{PredictorActivationDiffRescalledRange.cs => PredictorActivationRescaledRange.cs} (60%) rename RCNet/Neural/Network/SM/Preprocessing/Neuron/Predictor/{PredictorActivationRescalledRangeSettings.cs => PredictorActivationRescaledRangeSettings.cs} (71%) rename RCNet/Neural/Network/SM/Preprocessing/Neuron/Predictor/{PredictorActivationStatFeature.cs => PredictorActivationStatFigure.cs} (62%) rename RCNet/Neural/Network/SM/Preprocessing/Neuron/Predictor/{PredictorActivationStatFeatureSettings.cs => PredictorActivationStatFigureSettings.cs} (65%) delete mode 100644 RCNet/Neural/Network/SM/Readout/ClassificationNetworksSettings.cs delete mode 100644 RCNet/Neural/Network/SM/Readout/ClusterSettings.cs create mode 100644 RCNet/Neural/Network/SM/Readout/CompositeResult.cs delete mode 100644 RCNet/Neural/Network/SM/Readout/DefaultNetworksSettings.cs delete mode 100644 RCNet/Neural/Network/SM/Readout/ForecastNetworksSettings.cs create mode 100644 RCNet/Neural/Network/SM/Readout/IOneTakesAllDecisionSettings.cs create mode 100644 RCNet/Neural/Network/SM/Readout/OneTakesAllBasicDecisionSettings.cs create mode 100644 RCNet/Neural/Network/SM/Readout/OneTakesAllClusterChainDecisionSettings.cs create mode 100644 RCNet/Neural/Network/SM/Readout/OneTakesAllGroup.cs create mode 100644 RCNet/Neural/Network/SM/Readout/OneTakesAllGroupSettings.cs create mode 100644 RCNet/Neural/Network/SM/Readout/OneTakesAllGroupsSettings.cs delete mode 100644 RCNet/Neural/Network/SM/Readout/OneWinnerDecisionMaker.cs delete mode 100644 RCNet/Neural/Network/SM/Readout/OneWinnerDecisionMakerSettings.cs create mode 100644 RCNet/Neural/Network/SM/Readout/TaskDefaultsSettings.cs diff --git a/Demo/DemoConsoleApp/Examples/ExampleBase.cs b/Demo/DemoConsoleApp/Examples/ExampleBase.cs index f52ff4a..6c1371e 100644 --- a/Demo/DemoConsoleApp/Examples/ExampleBase.cs +++ b/Demo/DemoConsoleApp/Examples/ExampleBase.cs @@ -1,17 +1,17 @@ -using System; -using 
Demo.DemoConsoleApp.Log; +using Demo.DemoConsoleApp.Log; using RCNet.CsvTools; using RCNet.Neural.Data; using RCNet.Neural.Network.NonRecurrent; using RCNet.Neural.Network.SM; using RCNet.Neural.Network.SM.Preprocessing; using RCNet.Neural.Network.SM.Preprocessing.Input; +using RCNet.Neural.Network.SM.Readout; namespace Demo.DemoConsoleApp.Examples { /// - /// Base class of the implemented examples + /// Implements the base class of the examples. /// public class ExampleBase { @@ -28,10 +28,10 @@ protected ExampleBase() //Methods //Event handlers /// - /// Displays information about the verification progress. + /// Displays an information about the verification progress. /// - /// Total number of inputs to be processed - /// Number of processed inputs + /// The total number of inputs to be processed. + /// The number of already processed inputs. protected void OnVerificationProgressChanged(int totalNumOfInputs, int numOfProcessedInputs) { //Display progress @@ -43,15 +43,15 @@ protected void OnVerificationProgressChanged(int totalNumOfInputs, int numOfProc } /// - /// Displays information about the preprocessing progress and at the end displays important NeuralPreprocessor's statistics. + /// Displays an information about the preprocessing progress and at the end displays important NeuralPreprocessor's statistics. /// - /// Total number of inputs to be processed - /// Number of processed inputs - /// Final overview of the preprocessing phase + /// The total number of inputs to be processed. + /// The number of already processed inputs. + /// The final overview of the preprocessing. 
protected void OnPreprocessingProgressChanged(int totalNumOfInputs, - int numOfProcessedInputs, - NeuralPreprocessor.PreprocessingOverview finalPreprocessingOverview - ) + int numOfProcessedInputs, + NeuralPreprocessor.PreprocessingOverview finalPreprocessingOverview + ) { if (finalPreprocessingOverview == null) { @@ -72,39 +72,38 @@ NeuralPreprocessor.PreprocessingOverview finalPreprocessingOverview } /// - /// Displays information about the readout unit regression progress. + /// Displays information about the build process progress. /// - /// Current state of the regression process - /// Indicates that the best readout unit was changed as a result of the performed epoch - protected void OnRegressionEpochDone(TrainedNetworkBuilder.BuildingState buildingState, bool foundBetter) + /// The current state of the build process. + /// Indicates that the best network so far was found during the last performed epoch. + protected void OnEpochDone(TNRNetBuilder.BuildProgress buildProgress, bool foundBetter) { int reportEpochsInterval = 5; //Progress info if (foundBetter || - (buildingState.Epoch % reportEpochsInterval) == 0 || - buildingState.Epoch == buildingState.MaxEpochs || - (buildingState.Epoch == 1 && buildingState.RegrAttemptNumber == 1) + (buildProgress.Epoch % reportEpochsInterval) == 0 || + buildProgress.Epoch == buildProgress.MaxEpochs || + (buildProgress.Epoch == 1 && buildProgress.AttemptNumber == 1) ) { //Build progress report message - string progressText = buildingState.GetProgressInfo(4); + string progressText = buildProgress.GetInfo(4); //Report the progress - _log.Write(progressText, !(buildingState.Epoch == 1 && buildingState.RegrAttemptNumber == 1)); + _log.Write(progressText, !(buildProgress.Epoch == 1 && buildProgress.AttemptNumber == 1)); } return; } /// - /// Loads given file and executes StateMachine training. - /// This version of function requires configured NeuralPreprocessor. 
+ /// Loads the specified file and executes the StateMachine training. /// - /// Instance of StateMachine to be trained - /// Name of the csv file containing training data - /// Returned vector to be used for next prediction (relevant only in case of continuous feeding of the input) + /// An instance of StateMachine to be trained. + /// The name of the csv file containing the training data. + /// The vector to be used for next prediction (relevant only in case of continuous feeding of the input). protected void TrainStateMachine(StateMachine stateMachine, string trainingDataFileName, out double[] predictionInputVector) { - //Register to RegressionEpochDone event - stateMachine.RL.RegressionEpochDone += OnRegressionEpochDone; + //Register to EpochDone event + stateMachine.RL.EpochDone += OnEpochDone; //Load csv data CsvDataHolder trainingCsvData = new CsvDataHolder(trainingDataFileName); //Convert csv data to VectorBundle useable for StateMachine training @@ -153,13 +152,12 @@ out predictionInputVector } /// - /// Loads given file and executes StateMachine verification. - /// This version of function requires configured NeuralPreprocessor. + /// Loads the specified file and executes the StateMachine verification. /// - /// Instance of StateMachine to be trained - /// Name of the csv file containing verification data - /// Remaining input vector from training phase (relevant only in case of continuous feeding of the input) - /// Returned vector to be used for next prediction (relevant only in case of continuous feeding of the input) + /// An instance of StateMachine to be verified. + /// The name of the csv file containing the verification data. + /// Remaining input vector from training phase (relevant only in case of continuous feeding of the input). + /// The vector to be used for next prediction (relevant only in case of continuous feeding of the input). 
protected void VerifyStateMachine(StateMachine stateMachine, string verificationDataFileName, double[] omittedInputVector, out double[] predictionInputVector) { //Load csv data @@ -175,7 +173,7 @@ protected void VerifyStateMachine(StateMachine stateMachine, string verification //Continuous input feeding //Last known input values from training (predictionInputVector) must be pushed into the reservoirs to keep time series continuity //(first input data in verification.csv is output of the last data in training.csv) - double[] tmp = stateMachine.Compute(omittedInputVector); + double[] tmp = stateMachine.Compute(omittedInputVector, out ReadoutLayer.ReadoutData readoutData); //Load verification data and get new predictionInputVector for final prediction verificationData = VectorBundle.Load(verificationCsvData, stateMachine.Config.NeuralPreprocessorCfg.InputEncoderCfg.VaryingFieldsCfg.ExternalFieldsCfg.GetFieldNames(), diff --git a/Demo/DemoConsoleApp/Examples/FFNetBoolAlg.cs b/Demo/DemoConsoleApp/Examples/FFNetBoolAlg.cs index 635b6ad..6439e89 100644 --- a/Demo/DemoConsoleApp/Examples/FFNetBoolAlg.cs +++ b/Demo/DemoConsoleApp/Examples/FFNetBoolAlg.cs @@ -1,20 +1,22 @@ -using System; -using System.Globalization; -using RCNet.Neural.Activation; +using RCNet.Neural.Activation; using RCNet.Neural.Data; using RCNet.Neural.Network.NonRecurrent.FF; +using System; +using System.Globalization; namespace Demo.DemoConsoleApp.Examples { /// - /// This "Hello world" example shows how to use implemented FF network as the independent component. - /// It trains the multilayer Feed Forward network to solve AND, OR and XOR. + /// Trains a multilayer Feed Forward network to solve AND, OR and XOR. /// + /// + /// This "Hello world" example shows how to use the feed forward network component independently. + /// public class FFNetBoolAlg : ExampleBase { /// - /// Creates training data. 
- /// Input vector contains 0/1 combination and output vector contains appropriate results of the AND, OR and XOR operation + /// Creates the training data. + /// Input vector contains 0/1 combination and output vector contains appropriate results of the AND, OR and XOR operation. /// private VectorBundle CreateTrainingData() { @@ -48,7 +50,7 @@ public void Run() //Training _log.Write("Training"); _log.Write("--------"); - //Create trainer instance + //Create the trainer instance RPropTrainer trainer = new RPropTrainer(ffNet, trainingData.InputVectorCollection, trainingData.OutputVectorCollection, @@ -63,7 +65,7 @@ public void Run() _log.Write(string.Empty); //Training is done - //Display network computation results + //Display the network computation results _log.Write("Trained network computations:"); _log.Write("-----------------------------"); foreach (double[] input in trainingData.InputVectorCollection) diff --git a/Demo/DemoConsoleApp/Examples/LibrasClassificationESNDesigner.cs b/Demo/DemoConsoleApp/Examples/LibrasClassificationESNDesigner.cs index ae7610b..a1a4434 100644 --- a/Demo/DemoConsoleApp/Examples/LibrasClassificationESNDesigner.cs +++ b/Demo/DemoConsoleApp/Examples/LibrasClassificationESNDesigner.cs @@ -1,5 +1,4 @@ -using System; -using RCNet.Neural.Activation; +using RCNet.Neural.Activation; using RCNet.Neural.Data.Filter; using RCNet.Neural.Network.NonRecurrent; using RCNet.Neural.Network.SM; @@ -7,11 +6,12 @@ using RCNet.Neural.Network.SM.Preprocessing.Input; using RCNet.Neural.Network.SM.Preprocessing.Neuron.Predictor; using RCNet.Neural.Network.SM.Readout; +using System; namespace Demo.DemoConsoleApp.Examples { /// - /// Example code shows how to setup StateMachine as a pure ESN for multivariate timeseries classification using StateMachineDesigner. + /// Example code shows how to use StateMachineDesigner and setup StateMachine as a pure ESN for multivariate timeseries classification. 
/// Example uses LibrasMovement_train.csv and LibrasMovement_verify.csv from ./Data subfolder. /// The dataset is from "Anthony Bagnall, Jason Lines, William Vickers and Eamonn Keogh, The UEA & UCR Time Series Classification Repository, www.timeseriesclassification.com" /// https://timeseriesclassification.com/description.php?Dataset=Libras @@ -44,9 +44,9 @@ public void Run() ); //Simplified readout layer configuration ReadoutLayerSettings readoutCfg = StateMachineDesigner.CreateClassificationReadoutCfg(new CrossvalidationSettings(0.0825d, CrossvalidationSettings.AutoFolds, 1), - StateMachineDesigner.CreateSingleLayerRegrNet(new AFAnalogIdentitySettings(), 5, 400), + StateMachineDesigner.CreateSingleLayerFFNetCfg(new AFAnalogIdentitySettings(), 5, 400), + 1, "Hand movement", - new NetworkClusterSecondLevelCompSettings(new CrossvalidationSettings(0.25d, CrossvalidationSettings.AutoFolds, 2), StateMachineDesigner.CreateMultiLayerRegrNet(10, new AFAnalogLeakyReLUSettings(), 1, 5, 400)), "curved swing", "horizontal swing", "vertical swing", diff --git a/Demo/DemoConsoleApp/Examples/LibrasClassificationLSMDesigner.cs b/Demo/DemoConsoleApp/Examples/LibrasClassificationLSMDesigner.cs index d17dd60..a712369 100644 --- a/Demo/DemoConsoleApp/Examples/LibrasClassificationLSMDesigner.cs +++ b/Demo/DemoConsoleApp/Examples/LibrasClassificationLSMDesigner.cs @@ -1,5 +1,4 @@ -using System; -using RCNet.Neural.Activation; +using RCNet.Neural.Activation; using RCNet.Neural.Data.Coders.AnalogToSpiking; using RCNet.Neural.Data.Filter; using RCNet.Neural.Network.NonRecurrent; @@ -9,12 +8,13 @@ using RCNet.Neural.Network.SM.Preprocessing.Neuron.Predictor; using RCNet.Neural.Network.SM.Preprocessing.Reservoir.Pool.NeuronGroup; using RCNet.Neural.Network.SM.Readout; +using System; namespace Demo.DemoConsoleApp.Examples { /// - /// Example code shows how to setup StateMachine as a pure LSM for classification using StateMachineDesigner and various - /// ways of input encoding for LSM 
spiking hidden neurons. + /// Example code shows how to use StateMachineDesigner to setup StateMachine as a pure LSM for classification. + /// Example also shows various ways of input spikes coding. /// Example uses LibrasMovement_train.csv and LibrasMovement_verify.csv from ./Data subfolder. /// The dataset is from "Anthony Bagnall, Jason Lines, William Vickers and Eamonn Keogh, The UEA & UCR Time Series Classification Repository, www.timeseriesclassification.com" /// https://timeseriesclassification.com/description.php?Dataset=Libras @@ -35,22 +35,21 @@ public class LibrasClassificationLSMDesigner : ExampleBase /// /// Runs the example code. /// - public void Run(InputEncoder.SpikingInputEncodingRegime spikesEncodingRegime) + public void Run(InputEncoder.InputSpikesCoding spikesCoding) { //Create StateMachine configuration //Simplified input configuration and homogenous excitability InputEncoderSettings inputCfg; HomogenousExcitabilitySettings homogenousExcitability; - switch (spikesEncodingRegime) + switch (spikesCoding) { /* - * Horizontal spikes encoding means that every spike position in the spike-train has related its own input neuron. - * So all the spikes are encoded at once, during one reservoir computation cycle. + * Horizontal coding. 
*/ - case InputEncoder.SpikingInputEncodingRegime.Horizontal: + case InputEncoder.InputSpikesCoding.Horizontal: inputCfg = StateMachineDesigner.CreateInputCfg(new FeedingPatternedSettings(1, NeuralPreprocessor.BidirProcessing.Continuous, RCNet.Neural.Data.InputPattern.VariablesSchema.Groupped, new UnificationSettings(false, false)), //136 spiking input neurons per input field - coding at once - new InputSpikesCoderSettings(InputEncoder.SpikingInputEncodingRegime.Horizontal, + new InputSpikesCoderSettings(InputEncoder.InputSpikesCoding.Horizontal, new A2SCoderSignalStrengthSettings(8), //8 neurons (spike-train length = 1) new A2SCoderUpDirArrowsSettings(8, 8), //64 neurons (spike-train length = 1) new A2SCoderDownDirArrowsSettings(8, 8) //64 neurons (spike-train length = 1) @@ -59,16 +58,15 @@ public void Run(InputEncoder.SpikingInputEncodingRegime spikesEncodingRegime) new ExternalFieldSettings("coord_abcissa", new RealFeatureFilterSettings(), true), new ExternalFieldSettings("coord_ordinate", new RealFeatureFilterSettings(), true) ); - homogenousExcitability = new HomogenousExcitabilitySettings(1d, 0.7d, 0.2d); + homogenousExcitability = new HomogenousExcitabilitySettings(1d, 0.7d, 0.2d); break; /* - * Vertical spikes encoding means that every coder generating spike-train has related its own input neuron. - * So all the spikes are encoded in several reservoir computation cycles, depending on largest coder's code (number of code time-points). + * Vertical coding. 
*/ - case InputEncoder.SpikingInputEncodingRegime.Vertical: + case InputEncoder.InputSpikesCoding.Vertical: inputCfg = StateMachineDesigner.CreateInputCfg(new FeedingPatternedSettings(1, NeuralPreprocessor.BidirProcessing.Continuous, RCNet.Neural.Data.InputPattern.VariablesSchema.Groupped), //17 spiking input neurons per input field- coding in 10 cycles - new InputSpikesCoderSettings(InputEncoder.SpikingInputEncodingRegime.Vertical, + new InputSpikesCoderSettings(InputEncoder.InputSpikesCoding.Vertical, new A2SCoderSignalStrengthSettings(10), //1 neuron (spike-train length = 10) new A2SCoderUpDirArrowsSettings(8, 10), //8 neurons (spike-train length = 10) new A2SCoderDownDirArrowsSettings(8, 10) //8 neurons (spike-train length = 10) @@ -80,13 +78,12 @@ public void Run(InputEncoder.SpikingInputEncodingRegime spikesEncodingRegime) homogenousExcitability = new HomogenousExcitabilitySettings(1d, 0.7d, 0.2d); break; /* - * Forbidden spikes encoding means no input spikes. Analog values from input fields are directly routed through synapses to hidden neurons. - * So all the input values are encoded at once, during one reservoir computation cycle. + * Forbidden - no spikes coding. 
*/ default: //1 analog input neuron per input field inputCfg = StateMachineDesigner.CreateInputCfg(new FeedingPatternedSettings(1, NeuralPreprocessor.BidirProcessing.Continuous, RCNet.Neural.Data.InputPattern.VariablesSchema.Groupped, new UnificationSettings(false, false)), - new InputSpikesCoderSettings(InputEncoder.SpikingInputEncodingRegime.Forbidden), + new InputSpikesCoderSettings(InputEncoder.InputSpikesCoding.Forbidden), true, //Route the input pattern as the predictors to a readout layer new ExternalFieldSettings("coord_abcissa", new RealFeatureFilterSettings(), true), new ExternalFieldSettings("coord_ordinate", new RealFeatureFilterSettings(), true) @@ -97,9 +94,9 @@ public void Run(InputEncoder.SpikingInputEncodingRegime spikesEncodingRegime) //Simplified readout layer configuration ReadoutLayerSettings readoutCfg = StateMachineDesigner.CreateClassificationReadoutCfg(new CrossvalidationSettings(0.0825d, 0, 1), - StateMachineDesigner.CreateMultiLayerRegrNet(10, new AFAnalogLeakyReLUSettings(), 2, 5, 400), + StateMachineDesigner.CreateMultiLayerFFNetCfg(10, new AFAnalogLeakyReLUSettings(), 2, 5, 400), + 1, "Hand movement", - null, "curved swing", "horizontal swing", "vertical swing", diff --git a/Demo/DemoConsoleApp/Examples/LibrasClassificationNPBypassedDesigner.cs b/Demo/DemoConsoleApp/Examples/LibrasClassificationNPBypassedDesigner.cs index 12cc5e8..98fc917 100644 --- a/Demo/DemoConsoleApp/Examples/LibrasClassificationNPBypassedDesigner.cs +++ b/Demo/DemoConsoleApp/Examples/LibrasClassificationNPBypassedDesigner.cs @@ -1,14 +1,8 @@ -using System; -using RCNet.Neural.Activation; -using RCNet.Neural.Data.Coders.AnalogToSpiking; -using RCNet.Neural.Data.Filter; +using RCNet.Neural.Activation; using RCNet.Neural.Network.NonRecurrent; using RCNet.Neural.Network.SM; -using RCNet.Neural.Network.SM.Preprocessing; -using RCNet.Neural.Network.SM.Preprocessing.Input; -using RCNet.Neural.Network.SM.Preprocessing.Neuron.Predictor; -using 
RCNet.Neural.Network.SM.Preprocessing.Reservoir.Pool.NeuronGroup; using RCNet.Neural.Network.SM.Readout; +using System; namespace Demo.DemoConsoleApp.Examples { @@ -39,12 +33,9 @@ public void Run() //Create StateMachine configuration //Simplified readout layer configuration using FF-network having 2 hidden layers as the classifier ReadoutLayerSettings readoutCfg = StateMachineDesigner.CreateClassificationReadoutCfg(new CrossvalidationSettings(0.0825d, 0, 1), - StateMachineDesigner.CreateMultiLayerRegrNet(10, new AFAnalogLeakyReLUSettings(), 2, 5, 400), + StateMachineDesigner.CreateMultiLayerFFNetCfg(10, new AFAnalogLeakyReLUSettings(), 2, 5, 400), + 2, "Hand movement", - new NetworkClusterSecondLevelCompSettings(new CrossvalidationSettings(0.25d, CrossvalidationSettings.AutoFolds, 2), - StateMachineDesigner.CreateMultiLayerRegrNet(10, new AFAnalogLeakyReLUSettings(), 1, 5, 400), - TrainedNetworkCluster.SecondLevelCompMode.SecondLevelOutputOnly - ), "curved swing", "horizontal swing", "vertical swing", @@ -64,7 +55,7 @@ public void Run() //Create designer instance StateMachineDesigner smd = new StateMachineDesigner(readoutCfg); //Create StateMachine configuration without preprocessing - StateMachineSettings stateMachineCfg = smd.CreateBypassedCfg(); + StateMachineSettings stateMachineCfg = smd.CreateBypassedPreprocessingCfg(); //Display StateMachine xml configuration string xmlConfig = stateMachineCfg.GetXml(true).ToString(); diff --git a/Demo/DemoConsoleApp/Examples/TTOOForecastDesigner.cs b/Demo/DemoConsoleApp/Examples/TTOOForecastDesigner.cs index 9de631e..1543772 100644 --- a/Demo/DemoConsoleApp/Examples/TTOOForecastDesigner.cs +++ b/Demo/DemoConsoleApp/Examples/TTOOForecastDesigner.cs @@ -1,17 +1,16 @@ -using System; -using RCNet.Neural.Activation; -using RCNet.Neural.Data.Coders.AnalogToSpiking; +using RCNet.Neural.Activation; using RCNet.Neural.Data.Filter; using RCNet.Neural.Network.NonRecurrent; using RCNet.Neural.Network.SM; using 
RCNet.Neural.Network.SM.Preprocessing.Input; using RCNet.Neural.Network.SM.Preprocessing.Neuron.Predictor; using RCNet.Neural.Network.SM.Readout; +using System; namespace Demo.DemoConsoleApp.Examples { /// - /// Example code shows how to setup StateMachine using StateMachineDesigner. + /// Example code shows how to use StateMachineDesigner and setup StateMachine as a pure ESN for multivariate timeseries forecast. /// Example uses TTOO.csv from ./Data subfolder. /// Time series contains real share prices of TTOO title from https://finance.yahoo.com/quote/TTOO/history?p=TTOO. /// The last recorded prices are from 2018/03/02 so StateMachine is predicting next High and Low prices for the following @@ -35,8 +34,8 @@ public void Run() ); //Simplified readout layer configuration ReadoutLayerSettings readoutCfg = StateMachineDesigner.CreateForecastReadoutCfg(new CrossvalidationSettings(0.1d, 0, 1), - StateMachineDesigner.CreateSingleLayerRegrNet(new AFAnalogIdentitySettings(), 2, 1000), - null, + StateMachineDesigner.CreateSingleLayerFFNetCfg(new AFAnalogIdentitySettings(), 2, 1000), + 1, "High", "Low" ); @@ -76,9 +75,9 @@ public void Run() TrainStateMachine(stateMachine, "./Data/TTOO.csv", out double[] predictionInputVector); //Forecasting - ReadoutLayer.ReadoutData readoutData = stateMachine.ComputeReadoutData(predictionInputVector); + double[] outputVector = stateMachine.Compute(predictionInputVector, out ReadoutLayer.ReadoutData readoutData); _log.Write(" Forecasted next High and Low TTOO prices (real prices on 2018/03/05 are High=6.58$ and Low=5.99$):", false); - _log.Write(stateMachine.RL.GetForecastReport(readoutData.DataVector, 6)); + _log.Write(stateMachine.RL.GetForecastReport(readoutData.NatDataVector, 6)); _log.Write(string.Empty); return; diff --git a/Demo/DemoConsoleApp/Examples/TTOOForecastFromScratch.cs b/Demo/DemoConsoleApp/Examples/TTOOForecastFromScratch.cs index cc2d939..81f629e 100644 --- a/Demo/DemoConsoleApp/Examples/TTOOForecastFromScratch.cs +++ 
b/Demo/DemoConsoleApp/Examples/TTOOForecastFromScratch.cs @@ -22,21 +22,18 @@ namespace Demo.DemoConsoleApp.Examples { /// + /// Example code shows how to: setup StateMachine from scratch, store configuration xml (writes in Examples sub-folder), + /// train and serialize StateMachine (writes in Examples sub-folder), load serialized StateMachine and forecast next values. /// Example uses TTOO.csv from ./Data subfolder. /// Time series contains real share prices of TTOO title from https://finance.yahoo.com/quote/TTOO/history?p=TTOO. /// The last recorded prices are from 2018/03/02 so StateMachine is predicting next High and Low prices for the following /// business day 2018/03/05 (where real prices were High = 6.58$ and Low=5.99$). - /// Example code shows how to: - /// setup StateMachine from scratch, - /// store configuration xml (writes in Examples sub-folder), - /// train and serialize StateMachine (writes in Examples sub-folder), - /// load serialized StateMachine and forecast next values /// public class TTOOForecastFromScratch : ExampleBase { //Methods /// - /// Creates input part of the neural preprocessor's configuration. + /// Creates the InputEncoder configuration. /// private InputEncoderSettings CreateInputCfg() { @@ -55,17 +52,17 @@ private InputEncoderSettings CreateInputCfg() //We use FeedingContinuousSettings.AutoBootCyclesNum so necessary number of boot cycles will be automatically determined //based on neural preprocessor structure FeedingContinuousSettings feedingContinuousCfg = new FeedingContinuousSettings(FeedingContinuousSettings.AutoBootCyclesNum); - //Create and return input configuration + //Create and return the InputEncoder configuration return new InputEncoderSettings(feedingContinuousCfg, new VaryingFieldsSettings(new InputSpikesCoderSettings(), externalFieldsCfg, null, null, RouteToReadout) ); } /// - /// Creates configuration of group of analog neurons having TanH activation. 
+ /// Creates configuration of the group of analog neurons having a TanH activation. /// - /// Name of the group - /// Relative share. It determines how big part of the pool will be occupied by this neuron group + /// The name of the group + /// The relative share. It determines how big part of the pool neurons will be occupied by this neuron group. private AnalogNeuronGroupSettings CreateTanHGroup(string groupName, double relShare) { //Each neuron within the group will have its own constant bias @@ -78,14 +75,14 @@ private AnalogNeuronGroupSettings CreateTanHGroup(string groupName, double relSh URandomValueSettings retainmentStrengthCfg = new URandomValueSettings(0, 0.75); RetainmentSettings retainmentCfg = new RetainmentSettings(RetainmentDensity, retainmentStrengthCfg); //Predictors configuration - //We will use Activation and ActivationSquare predictors + //We will use the Activation and ActivationPower predictors PredictorsProviderSettings predictorsCfg = new PredictorsProviderSettings(new PredictorActivationSettings(), new PredictorActivationPowerSettings() ); - //Create neuron group configuration + //Create the neuron group configuration AnalogNeuronGroupSettings groupCfg = new AnalogNeuronGroupSettings(groupName, relShare, new AFAnalogTanHSettings(), @@ -99,55 +96,55 @@ private AnalogNeuronGroupSettings CreateTanHGroup(string groupName, double relSh } /// - /// Creates 3D pool of analog neurons + /// Creates the 3D pool of analog neurons. /// - /// Name of the pool - /// Size on X dimension - /// Size on Y dimension - /// Size on Z dimension - /// Random schema interconnection density + /// The name of the pool. + /// Size on X dimension. + /// Size on Y dimension. + /// Size on Z dimension. + /// The density of the random interconnection. 
private PoolSettings CreateAnalogPoolCfg(string poolName, int dimX, int dimY, int dimZ, double randomInterconnectionDensity) { //Create TanH group of neurons AnalogNeuronGroupSettings grpCfg = CreateTanHGroup("Exc-TanH-Grp", 1); //We use two interconnection schemas + //Chain schema (circle shaped). We use ratio 1 so all the neurons within the pool will be connected into the circle shaped chain. + ChainSchemaSettings chainSchemaCfg = new ChainSchemaSettings(1d, true); //Random schema RandomSchemaSettings randomSchemaCfg = new RandomSchemaSettings(randomInterconnectionDensity); - //Chain circle shaped schema. We use ratio 1 so all neurons within the pool will be connected into the circle shaped chain. - ChainSchemaSettings chainSchemaCfg = new ChainSchemaSettings(1d, true); //Create pool configuration PoolSettings poolCfg = new PoolSettings(poolName, new ProportionsSettings(dimX, dimY, dimZ), new NeuronGroupsSettings(grpCfg), - new InterconnSettings(randomSchemaCfg, chainSchemaCfg) + new InterconnSettings(chainSchemaCfg, randomSchemaCfg ) ); return poolCfg; } /// - /// Creates reservoir structure configuration consisting of two interconnected pools. + /// Creates the reservoir structure configuration consisting of two interconnected pools. /// - /// Name of the reservoir structure - /// Name of the first pool - /// Name of the second pool + /// The name of the reservoir structure. + /// The name of the first pool. + /// The name of the second pool. private ReservoirStructureSettings CreateResStructCfg(string structName, string pool1Name, string pool2Name) { //Our pools will have the 5x5x5 cube shape each. So 125 neurons in each pool and 250 neurons in total. const int DimX = 5, DimY = 5, DimZ = 5; //Each pool will have random internal interconnection of the density = 0.05. In our case it means that - //each neuron will receive synapses from 0.05 * 125 = 6 other neurons within the pool. 
+ //each neuron will receive synapses from 0.05 * 125 = 6 other randomly selected neurons. const double RandomInterconnectionDensity = 0.05; //Create pools PoolSettings pool1Cfg = CreateAnalogPoolCfg(pool1Name, DimX, DimY, DimZ, RandomInterconnectionDensity); PoolSettings pool2Cfg = CreateAnalogPoolCfg(pool2Name, DimX, DimY, DimZ, RandomInterconnectionDensity); //Pool to pool interconnection - //Connections from Pool1 to Pool2. We use targetPoolDensity=1 and sourcePoolDensity-0.02, so each neuron from + //Connections from Pool1 to Pool2. We use targetPoolDensity=1 and sourcePoolDensity=0.02, so each neuron from //Pool2 will be randomly connected to 125 * 0.02 = 3 neurons from Pool1 InterPoolConnSettings pool1To2ConnCfg = new InterPoolConnSettings(pool2Name, 1d, pool1Name, 0.02d); //Connections from Pool2 to Pool1. We use targetPoolDensity=1 and sourcePoolDensity=0.02, so each neuron from //Pool1 will be randomly connected to 125 * 0.02 = 3 neurons from Pool2 InterPoolConnSettings pool2To1ConnCfg = new InterPoolConnSettings(pool1Name, 1d, pool2Name, 0.02d); - //Create named reservoir structure configuration + //Create the reservoir structure configuration ReservoirStructureSettings resStructCfg = new ReservoirStructureSettings(structName, new PoolsSettings(pool1Cfg, pool2Cfg), new InterPoolConnsSettings(pool1To2ConnCfg, pool2To1ConnCfg) @@ -156,9 +153,10 @@ private ReservoirStructureSettings CreateResStructCfg(string structName, string } /// - /// Creates configuration of input connection (from input unit to target pool) + /// Creates the configuration of an input connection. /// - /// Target pool name + /// The name of the input field. + /// The name of the target pool. 
private InputConnSettings CreateInputConnCfg(string inputFieldName, string poolName) { //Create connection configuration @@ -171,14 +169,14 @@ private InputConnSettings CreateInputConnCfg(string inputFieldName, string poolN } /// - /// Creates reservoir instance configuration + /// Creates the reservoir instance configuration. /// - /// Name of the reservoir instance - /// Name of the associated reservoir structure configuration - /// Name of the first pool - /// Name of the second pool - /// Maximum delay of input synapses - /// Maximum delay of internal synapses + /// The name of the reservoir instance. + /// The name of the reservoir structure configuration. + /// The name of the first pool. + /// The name of the second pool. + /// The maximum delay of input synapses. + /// The maximum delay of internal synapses. private ReservoirInstanceSettings CreateResInstCfg(string instName, string structName, string pool1Name, @@ -203,7 +201,7 @@ int internalMaxDelay SynapseATIndifferentSettings synapseATIndifferentSettings = new SynapseATIndifferentSettings(Synapse.SynapticDelayMethod.Random, internalMaxDelay); SynapseATSettings synapseATCfg = new SynapseATSettings(SynapseATSettings.DefaultSpectralRadiusNum, synapseATInputSettings, synapseATIndifferentSettings); SynapseSettings synapseCfg = new SynapseSettings(null, synapseATCfg); - //Create reservoir instance configuration + //Create the reservoir instance configuration ReservoirInstanceSettings resInstCfg = new ReservoirInstanceSettings(instName, structName, new InputConnsSettings(inpConnHighCfg, inpConnLowCfg, inpConnAdjCloseP1Cfg, inpConnAdjCloseP2Cfg), @@ -213,12 +211,12 @@ int internalMaxDelay } /// - /// Creates neural preprocessor configuration + /// Creates the neural preprocessor configuration. /// - /// Reservoir instance name - /// Reservoir structure name - /// Name of the pool1 - /// Name of the pool2 + /// The reservoir instance name. + /// The reservoir structure configuration name. 
+ /// The name of the pool1. + /// The name of the pool2. NeuralPreprocessorSettings CreatePreprocessorCfg(string resInstName, string resStructName, string pool1Name, string pool2Name) { //Create input configuration @@ -237,11 +235,11 @@ NeuralPreprocessorSettings CreatePreprocessorCfg(string resInstName, string resS } /// - /// Creates readout layer configuration + /// Creates the readout layer configuration. /// - /// Specifies what part of available data to be used as the fold data - /// Number of regression attempts. Each readout network will try to learn numOfAttempts times - /// Number of training epochs within an attempt + /// Specifies what part of available data to be used as the fold data. + /// Number of regression attempts. Each readout network will try to learn numOfAttempts times. + /// Number of training epochs within an attempt. ReadoutLayerSettings CreateReadoutLayerCfg(double foldDataRatio, int numOfAttempts, int numOfEpochs) { //For each output field we will use prediction of two networks @@ -257,13 +255,17 @@ ReadoutLayerSettings CreateReadoutLayerCfg(double foldDataRatio, int numOfAttemp new HiddenLayersSettings(hiddenLayerCfg), new RPropTrainerSettings(numOfAttempts, numOfEpochs) ); - //Create default networks configuration for forecasting - DefaultNetworksSettings defaultNetworksCfg = new DefaultNetworksSettings(null, new ForecastNetworksSettings(ffNet1Cfg, ffNet2Cfg)); - //Create readout units. We will forecast next High and Low prices. Both fields are real numbers. - ReadoutUnitSettings highReadoutUnitCfg = new ReadoutUnitSettings("High", new ForecastTaskSettings(new RealFeatureFilterSettings())); - ReadoutUnitSettings lowReadoutUnitCfg = new ReadoutUnitSettings("Low", new ForecastTaskSettings(new RealFeatureFilterSettings())); + //Create the cluster chain configuration for the forecast and the default configuration for the forecast task. 
+ CrossvalidationSettings crossvalidationCfg = new CrossvalidationSettings(foldDataRatio); + TNRNetClusterRealNetworksSettings networksCfg = new TNRNetClusterRealNetworksSettings(ffNet1Cfg, ffNet2Cfg); + TNRNetClusterRealSettings realClusterCfg = new TNRNetClusterRealSettings(networksCfg, new TNRNetClusterRealWeightsSettings()); + TNRNetClusterChainRealSettings realClusterChainCfg = new TNRNetClusterChainRealSettings(crossvalidationCfg, new TNRNetClustersRealSettings(realClusterCfg)); + TaskDefaultsSettings taskDefaultsCfg = new TaskDefaultsSettings(null, realClusterChainCfg); + //Create readout unit configurations. We will forecast next High and Low prices. + ReadoutUnitSettings highReadoutUnitCfg = new ReadoutUnitSettings("High", new ForecastTaskSettings()); + ReadoutUnitSettings lowReadoutUnitCfg = new ReadoutUnitSettings("Low", new ForecastTaskSettings()); //Create readout layer configuration - ReadoutLayerSettings readoutLayerCfg = new ReadoutLayerSettings(new ClusterSettings(new CrossvalidationSettings(foldDataRatio), defaultNetworksCfg), + ReadoutLayerSettings readoutLayerCfg = new ReadoutLayerSettings(taskDefaultsCfg, new ReadoutUnitsSettings(highReadoutUnitCfg, lowReadoutUnitCfg ), @@ -273,7 +275,7 @@ ReadoutLayerSettings CreateReadoutLayerCfg(double foldDataRatio, int numOfAttemp } /// - /// Creates state machine configuration + /// Creates the state machine configuration. 
/// private StateMachineSettings CreateStateMachineCfg() { @@ -327,21 +329,21 @@ public void Run() //Serialize StateMachine string serializationFileName = Path.Combine(examplesDir, "TTOOForecastFromScratchSM.dat"); - stateMachine.SaveToFile(serializationFileName); + stateMachine.Serialize(serializationFileName); //Forecasting - ReadoutLayer.ReadoutData readoutData = stateMachine.ComputeReadoutData(predictionInputVector); + double[] outputVector = stateMachine.Compute(predictionInputVector, out ReadoutLayer.ReadoutData readoutData); _log.Write(" Forecasted next High and Low TTOO prices (real prices on 2018/03/05 are High=6.58$ and Low=5.99$):", false); - _log.Write(stateMachine.RL.GetForecastReport(readoutData.DataVector, 6)); + _log.Write(stateMachine.RL.GetForecastReport(outputVector, 6)); _log.Write(string.Empty); //Create new StateMachine instance from the file //Instance was serialized before forecasting of the next values StateMachine stateMachineNewInstance = StateMachine.LoadFromFile(serializationFileName); //Forecasting of the deserialized instance (exactly the same results as in previous forecasting) - readoutData = stateMachineNewInstance.ComputeReadoutData(predictionInputVector); + outputVector = stateMachineNewInstance.Compute(predictionInputVector, out readoutData); _log.Write(" Forecast of the new StateMachine instance:", false); - _log.Write(stateMachineNewInstance.RL.GetForecastReport(readoutData.DataVector, 6)); + _log.Write(stateMachineNewInstance.RL.GetForecastReport(outputVector, 6)); _log.Write(string.Empty); diff --git a/Demo/DemoConsoleApp/Log/ConsoleLog.cs b/Demo/DemoConsoleApp/Log/ConsoleLog.cs index 42ad705..2585192 100644 --- a/Demo/DemoConsoleApp/Log/ConsoleLog.cs +++ b/Demo/DemoConsoleApp/Log/ConsoleLog.cs @@ -4,8 +4,7 @@ namespace Demo.DemoConsoleApp.Log { /// - /// Implements a simple output journal (IOutputLog interface). - /// Messages are written to the system console. + /// Implements the simple output log using system console. 
/// public class ConsoleLog : IOutputLog { @@ -31,7 +30,6 @@ public ConsoleLog() Console.WindowTop = 0; Console.WindowWidth = Console.LargestWindowWidth; #endif - //Clear the console Console.Clear(); //Store current cursor position @@ -43,7 +41,7 @@ public ConsoleLog() //Methods /// - /// Restores the cursor position + /// Restores the cursor position. /// private void RestoreCursor() { @@ -53,7 +51,7 @@ private void RestoreCursor() } /// - /// Stores the current cursor position + /// Stores the current cursor position. /// private void StoreCursor() { @@ -63,9 +61,9 @@ private void StoreCursor() } /// - /// Writes a message to the system console + /// Writes a message to the system console. /// - /// Message to be written to console + /// The message to be written to system console. /// Specifies whether to replace text of the previous message. public void Write(string message, bool replaceLastMessage = false) { diff --git a/Demo/DemoConsoleApp/Log/IOutputLog.cs b/Demo/DemoConsoleApp/Log/IOutputLog.cs index 34d6fe1..1b35c22 100644 --- a/Demo/DemoConsoleApp/Log/IOutputLog.cs +++ b/Demo/DemoConsoleApp/Log/IOutputLog.cs @@ -1,14 +1,14 @@ namespace Demo.DemoConsoleApp.Log { /// - /// Interface for a simple output journal + /// The interface of a simple output journal. /// public interface IOutputLog { /// - /// Writes the given message to output. + /// Writes the message to output. /// - /// The message to be written to output + /// The message to be written to output. /// Indicates whether to replace a text of the last message by the new one. 
void Write(string message, bool replaceLastMessage = false); diff --git a/Demo/DemoConsoleApp/Playground.cs b/Demo/DemoConsoleApp/Playground.cs index b15d858..43e0561 100644 --- a/Demo/DemoConsoleApp/Playground.cs +++ b/Demo/DemoConsoleApp/Playground.cs @@ -1,20 +1,19 @@ -using System; -using System.Collections.Generic; -using System.Globalization; -using System.IO; -using System.Text; -using System.Linq; +using RCNet.CsvTools; using RCNet.Extensions; +using RCNet.MathTools; using RCNet.Neural.Activation; -using RCNet.Neural.Data.Transformers; -using RCNet.Neural.Data.Generators; +using RCNet.Neural.Data; using RCNet.Neural.Data.Coders.AnalogToSpiking; -using RCNet.CsvTools; -using RCNet.MathTools; using RCNet.Neural.Data.Filter; -using RCNet.Neural.Data; +using RCNet.Neural.Data.Generators; +using RCNet.Neural.Data.Transformers; using RCNet.Neural.Network.NonRecurrent; using RCNet.Neural.Network.NonRecurrent.FF; +using System; +using System.Collections.Generic; +using System.Globalization; +using System.IO; +using System.Text; namespace Demo.DemoConsoleApp { @@ -122,7 +121,7 @@ private void TestTransformers() transformer = new YeoJohnsonTransformer(singleFieldList, new YeoJohnsonTransformerSettings(singleFieldList[0], 0.5d)); TestSingleFieldTransformer(transformer); //MWStat transformer - transformer = new MWStatTransformer(singleFieldList, new MWStatTransformerSettings(singleFieldList[0], 5, BasicStat.OutputFeature.RootMeanSquare)); + transformer = new MWStatTransformer(singleFieldList, new MWStatTransformerSettings(singleFieldList[0], 5, BasicStat.StatisticalFigure.RootMeanSquare)); TestSingleFieldTransformer(transformer); //Mul transformer transformer = new MulTransformer(twoFieldsList, new MulTransformerSettings(twoFieldsList[0], twoFieldsList[1])); @@ -146,11 +145,11 @@ private void GenSteadyPatternedMGData(int minTau, int maxTau, int tauSamples, in MackeyGlassGenerator mgg = new MackeyGlassGenerator(new MackeyGlassGeneratorSettings(tau)); int 
neededDataLength = 1 + patternLength + (tauSamples - 1); double[] mggData = new double[neededDataLength]; - for(int i = 0; i < neededDataLength; i++) + for (int i = 0; i < neededDataLength; i++) { mggData[i] = mgg.Next(); } - for(int i = 0; i < tauSamples; i++) + for (int i = 0; i < tauSamples; i++) { DelimitedStringValues patternData = new DelimitedStringValues(); //Steady data @@ -185,7 +184,7 @@ private void GenSteadyPatternedMGData(int minTau, int maxTau, int tauSamples, in private string ByteArrayToString(byte[] arr) { StringBuilder builder = new StringBuilder(arr.Length); - for(int i = 0; i < arr.Length; i++) + for (int i = 0; i < arr.Length; i++) { builder.Append(arr[i].ToString()); } @@ -248,33 +247,6 @@ private void TestA2SCoder() return; } - private void TestEnumFeatureFilter() - { - int enumerations = 10; - EnumFeatureFilter filter = new EnumFeatureFilter(Interval.IntZP1, new EnumFeatureFilterSettings(enumerations)); - Random rand = new Random(); - for(int i = 0; i < 200; i++) - { - filter.Update((double)rand.Next(1, enumerations)); - } - - Console.WriteLine($"{filter.GetType().Name} ApplyFilter"); - for (int i = 1; i <= 10; i++) - { - Console.WriteLine($" {i.ToString(CultureInfo.InvariantCulture),-20} {filter.ApplyFilter(i)}"); - } - - Console.WriteLine($"{filter.GetType().Name} ApplyReverse"); - int pieces = 100; - for(int i = 0; i <= pieces; i++) - { - double value = (double)i * (1d / pieces); - Console.WriteLine($" {value.ToString(CultureInfo.InvariantCulture),-20} {filter.ApplyReverse(value)}"); - } - Console.ReadLine(); - - } - private void TestBinFeatureFilter() { BinFeatureFilter filter = new BinFeatureFilter(Interval.IntZP1, new BinFeatureFilterSettings()); @@ -300,7 +272,7 @@ private void TestBinFeatureFilter() Console.ReadLine(); } - private void TestVectorBundleFolderization(string dataFile, int numOfClasses) + private void TestDataBundleFolderization(string dataFile, int numOfClasses) { //Load csv data CsvDataHolder csvData = new 
CsvDataHolder(dataFile); @@ -322,9 +294,9 @@ private void TestVectorBundleFolderization(string dataFile, int numOfClasses) classesBin1Counts.Populate(0); for (int sampleIdx = 0; sampleIdx < numOfFoldSamples; sampleIdx++) { - for(int classIdx = 0; classIdx < numOfClasses; classIdx++) + for (int classIdx = 0; classIdx < numOfClasses; classIdx++) { - if(folds[foldIdx].OutputVectorCollection[sampleIdx][classIdx] >= binBorder) + if (folds[foldIdx].OutputVectorCollection[sampleIdx][classIdx] >= binBorder) { ++classesBin1Counts[classIdx]; } @@ -333,7 +305,7 @@ private void TestVectorBundleFolderization(string dataFile, int numOfClasses) Console.WriteLine($" Number of positive samples per class"); for (int classIdx = 0; classIdx < numOfClasses; classIdx++) { - Console.WriteLine($" ClassID={classIdx.ToString(CultureInfo.InvariantCulture), -3}, Bin1Samples={classesBin1Counts[classIdx].ToString(CultureInfo.InvariantCulture)}"); + Console.WriteLine($" ClassID={classIdx.ToString(CultureInfo.InvariantCulture),-3}, Bin1Samples={classesBin1Counts[classIdx].ToString(CultureInfo.InvariantCulture)}"); } } Console.ReadLine(); @@ -344,22 +316,22 @@ private void TestVectorBundleFolderization(string dataFile, int numOfClasses) /// /// Displays information about the readout unit regression progress. 
/// - /// Current state of the regression process + /// Current state of the regression process /// Indicates that the best readout unit was changed as a result of the performed epoch - private void OnRegressionEpochDone(TrainedOneTakesAllNetworkBuilder.BuildingState buildingState, bool foundBetter) + private void OnRegressionEpochDone(TNRNetBuilder.BuildProgress buildProgress, bool foundBetter) { int reportEpochsInterval = 5; //Progress info if (foundBetter || - (buildingState.Epoch % reportEpochsInterval) == 0 || - buildingState.Epoch == buildingState.MaxEpochs || - (buildingState.Epoch == 1 && buildingState.RegrAttemptNumber == 1) + (buildProgress.Epoch % reportEpochsInterval) == 0 || + buildProgress.Epoch == buildProgress.MaxEpochs || + (buildProgress.Epoch == 1 && buildProgress.AttemptNumber == 1) ) { //Build progress report message - string progressText = buildingState.GetProgressInfo(4); + string progressText = buildProgress.GetInfo(4); //Report the progress - if((buildingState.Epoch == 1 && buildingState.RegrAttemptNumber == 1)) + if ((buildProgress.Epoch == 1 && buildProgress.AttemptNumber == 1)) { Console.WriteLine(); } @@ -369,10 +341,10 @@ private void OnRegressionEpochDone(TrainedOneTakesAllNetworkBuilder.BuildingStat } - private void TestTrainedOneTakesAllClusterAndBuilder(string trainDataFile, string verifyDataFile, int numOfClasses, double foldDataRatio = 0.1d) + private void TestProbabilisticClusterAndBuilder(string trainDataFile, string verifyDataFile, int numOfClasses, double foldDataRatio = 0.1d) { Console.BufferWidth = 320; - Console.WriteLine("One Takes All - Cluster and Cluster builder test"); + Console.WriteLine("Probabilistic cluster and builder test"); //Load csv data and create vector bundle Console.WriteLine($"Loading {trainDataFile}..."); CsvDataHolder trainCsvData = new CsvDataHolder(trainDataFile); @@ -388,15 +360,159 @@ private void TestTrainedOneTakesAllClusterAndBuilder(string trainDataFile, strin new RPropTrainerSettings(5, 750) ) 
); + ITNRNetClusterSettings clusterCfg = new TNRNetClusterProbabilisticSettings(new TNRNetClusterProbabilisticNetworksSettings(netCfgs), + new TNRNetClusterProbabilisticWeightsSettings() + ); + TNRNetClusterBuilder builder = + new TNRNetClusterBuilder("Probabilistic", + "Cluster", + new CrossvalidationSettings(foldDataRatio), + clusterCfg, + null, + null + ); + builder.EpochDone += OnRegressionEpochDone; + FeatureFilterBase[] filters = new BinFeatureFilter[numOfClasses]; + for (int i = 0; i < numOfClasses; i++) + { + filters[i] = new BinFeatureFilter(Interval.IntZP1); + } + TNRNetCluster tc = builder.Build(trainData, filters); + + //VERIFICATION + Console.WriteLine(); + Console.WriteLine(); + Console.WriteLine($"Cluster verification on {verifyDataFile}..."); + Console.WriteLine(); + int numOfErrors = 0; + for (int i = 0; i < verifyData.InputVectorCollection.Count; i++) + { + double[] computed = tc.Compute(verifyData.InputVectorCollection[i], out _); + int computedWinnerIdx = computed.MaxIdx(); + int realWinnerIdx = verifyData.OutputVectorCollection[i].MaxIdx(); + if (computedWinnerIdx != realWinnerIdx) ++numOfErrors; + Console.Write("\x0d" + $"({i + 1}/{verifyData.InputVectorCollection.Count}) Errors:{numOfErrors}..."); + } + Console.WriteLine(); + Console.WriteLine($"Accuracy {(1d - (double)numOfErrors / (double)verifyData.InputVectorCollection.Count).ToString(CultureInfo.InvariantCulture)}"); + Console.WriteLine(); + + return; + } + + private void TestProbabilisticClusterChainAndBuilder(string trainDataFile, string verifyDataFile, int numOfClasses, double foldDataRatio = 0.1d) + { + Console.BufferWidth = 320; + Console.WriteLine("Probabilistic cluster chain and builder test"); + //Load csv data and create vector bundle + Console.WriteLine($"Loading {trainDataFile}..."); + CsvDataHolder trainCsvData = new CsvDataHolder(trainDataFile); + VectorBundle trainData = VectorBundle.Load(trainCsvData, numOfClasses); + Console.WriteLine($"Loading {verifyDataFile}..."); + 
CsvDataHolder verifyCsvData = new CsvDataHolder(verifyDataFile); + VectorBundle verifyData = VectorBundle.Load(verifyCsvData, numOfClasses); + Console.WriteLine($"Chain training on {trainDataFile}..."); + //Common crossvalidation configuration + CrossvalidationSettings crossvalidationCfg = new CrossvalidationSettings(foldDataRatio, 0, 2); + //TRAINING + List netCfgs1 = new List(); + netCfgs1.Add(new FeedForwardNetworkSettings(new AFAnalogSoftMaxSettings(), + new HiddenLayersSettings(new HiddenLayerSettings(20, new AFAnalogTanHSettings())), + new RPropTrainerSettings(5, 750) + ) + ); + netCfgs1.Add(new FeedForwardNetworkSettings(new AFAnalogSoftMaxSettings(), + new HiddenLayersSettings(new HiddenLayerSettings(20, new AFAnalogLeakyReLUSettings())), + new RPropTrainerSettings(5, 750) + ) + ); + TNRNetClusterProbabilisticSettings clusterCfg1 = new TNRNetClusterProbabilisticSettings(new TNRNetClusterProbabilisticNetworksSettings(netCfgs1), + new TNRNetClusterProbabilisticWeightsSettings() + ); + List netCfgs2 = new List(); + netCfgs2.Add(new FeedForwardNetworkSettings(new AFAnalogSoftMaxSettings(), + new HiddenLayersSettings(new HiddenLayerSettings(20, new AFAnalogTanHSettings())), + new RPropTrainerSettings(5, 750) + ) + ); + TNRNetClusterProbabilisticSettings clusterCfg2 = new TNRNetClusterProbabilisticSettings(new TNRNetClusterProbabilisticNetworksSettings(netCfgs2), + new TNRNetClusterProbabilisticWeightsSettings() + ); + List clusterCfgCollection = new List() { clusterCfg1, clusterCfg2 }; + TNRNetClusterChainBuilder builder = + new TNRNetClusterChainBuilder("Probabilistic", + "Chain", + new TNRNetClusterChainProbabilisticSettings(crossvalidationCfg, new TNRNetClustersProbabilisticSettings(clusterCfgCollection)), + null, + null + ); + builder.EpochDone += OnRegressionEpochDone; + FeatureFilterBase[] filters = new BinFeatureFilter[numOfClasses]; + for (int i = 0; i < numOfClasses; i++) + { + filters[i] = new BinFeatureFilter(Interval.IntZP1); + } + TNRNetClusterChain 
chain = builder.Build(trainData, filters); + + //VERIFICATION + Console.WriteLine(); + Console.WriteLine(); + Console.WriteLine($"Chain verification on {verifyDataFile}..."); + Console.WriteLine(); + int numOfErrors = 0; + for (int i = 0; i < verifyData.InputVectorCollection.Count; i++) + { + double[] computed = chain.Compute(verifyData.InputVectorCollection[i], out _); + int computedWinnerIdx = computed.MaxIdx(); + int realWinnerIdx = verifyData.OutputVectorCollection[i].MaxIdx(); + if (computedWinnerIdx != realWinnerIdx) ++numOfErrors; + Console.Write("\x0d" + $"({i + 1}/{verifyData.InputVectorCollection.Count}) Errors:{numOfErrors}..."); + } + Console.WriteLine(); + Console.WriteLine($"Accuracy {(1d - (double)numOfErrors / (double)verifyData.InputVectorCollection.Count).ToString(CultureInfo.InvariantCulture)}"); + Console.WriteLine(); + + return; + } - TrainedOneTakesAllNetworkClusterBuilder builder = - new TrainedOneTakesAllNetworkClusterBuilder("Test", - netCfgs, - null, - null - ); - builder.RegressionEpochDone += OnRegressionEpochDone; - TrainedOneTakesAllNetworkCluster tc = builder.Build(trainData, new CrossvalidationSettings(foldDataRatio)); + + private void TestRealClusterAndBuilder(string trainDataFile, string verifyDataFile, int numOfClasses, double foldDataRatio = 0.1d) + { + Console.BufferWidth = 320; + Console.WriteLine("Real cluster and builder test"); + //Load csv data and create vector bundle + Console.WriteLine($"Loading {trainDataFile}..."); + CsvDataHolder trainCsvData = new CsvDataHolder(trainDataFile); + VectorBundle trainData = VectorBundle.Load(trainCsvData, numOfClasses); + Console.WriteLine($"Loading {verifyDataFile}..."); + CsvDataHolder verifyCsvData = new CsvDataHolder(verifyDataFile); + VectorBundle verifyData = VectorBundle.Load(verifyCsvData, numOfClasses); + Console.WriteLine($"Cluster training on {trainDataFile}..."); + //TRAINING + List netCfgs = new List(); + netCfgs.Add(new FeedForwardNetworkSettings(new 
AFAnalogIdentitySettings(), + new HiddenLayersSettings(new HiddenLayerSettings(30, new AFAnalogTanHSettings())), + new RPropTrainerSettings(5, 750) + ) + ); + ITNRNetClusterSettings clusterCfg = new TNRNetClusterRealSettings(new TNRNetClusterRealNetworksSettings(netCfgs), + new TNRNetClusterRealWeightsSettings() + ); + TNRNetClusterBuilder builder = + new TNRNetClusterBuilder("Real", + "Cluster", + new CrossvalidationSettings(foldDataRatio), + clusterCfg, + null, + null + ); + builder.EpochDone += OnRegressionEpochDone; + FeatureFilterBase[] filters = new BinFeatureFilter[numOfClasses]; + for (int i = 0; i < numOfClasses; i++) + { + filters[i] = new BinFeatureFilter(Interval.IntZP1); + } + TNRNetCluster tc = builder.Build(trainData, filters); //VERIFICATION Console.WriteLine(); @@ -404,16 +520,16 @@ private void TestTrainedOneTakesAllClusterAndBuilder(string trainDataFile, strin Console.WriteLine($"Cluster verification on {verifyDataFile}..."); Console.WriteLine(); int numOfErrors = 0; - for(int i = 0; i < verifyData.InputVectorCollection.Count; i++) + for (int i = 0; i < verifyData.InputVectorCollection.Count; i++) { - double[] computed = tc.Compute(verifyData.InputVectorCollection[i]); + double[] computed = tc.Compute(verifyData.InputVectorCollection[i], out _); int computedWinnerIdx = computed.MaxIdx(); int realWinnerIdx = verifyData.OutputVectorCollection[i].MaxIdx(); if (computedWinnerIdx != realWinnerIdx) ++numOfErrors; - Console.Write("\x0d" + $"({i+1}/{verifyData.InputVectorCollection.Count}) Errors:{numOfErrors}..."); + Console.Write("\x0d" + $"({i + 1}/{verifyData.InputVectorCollection.Count}) Errors:{numOfErrors}..."); } Console.WriteLine(); - Console.WriteLine($"Accuracy {(1d - (double)numOfErrors/(double)verifyData.InputVectorCollection.Count).ToString(CultureInfo.InvariantCulture)}"); + Console.WriteLine($"Accuracy {(1d - (double)numOfErrors / (double)verifyData.InputVectorCollection.Count).ToString(CultureInfo.InvariantCulture)}"); 
Console.WriteLine(); return; @@ -426,9 +542,15 @@ public void Run() { Console.Clear(); //TODO - place your code here - //TestVectorBundleFolderization("./Data/ProximalPhalanxOutlineAgeGroup_train.csv", 3); - TestTrainedOneTakesAllClusterAndBuilder("./Data/LibrasMovement_train.csv", "./Data/LibrasMovement_verify.csv", 15, 0.1d); - TestTrainedOneTakesAllClusterAndBuilder("./Data/ProximalPhalanxOutlineAgeGroup_train.csv", "./Data/ProximalPhalanxOutlineAgeGroup_verify.csv", 3, 0.1d); + //TestDataBundleFolderization("./Data/ProximalPhalanxOutlineAgeGroup_train.csv", 3); + + TestProbabilisticClusterChainAndBuilder("./Data/LibrasMovement_train.csv", "./Data/LibrasMovement_verify.csv", 15, 0.1d); + TestProbabilisticClusterChainAndBuilder("./Data/ProximalPhalanxOutlineAgeGroup_train.csv", "./Data/ProximalPhalanxOutlineAgeGroup_verify.csv", 3, 0.1d); + + TestProbabilisticClusterAndBuilder("./Data/LibrasMovement_train.csv", "./Data/LibrasMovement_verify.csv", 15, 0.1d); + TestProbabilisticClusterAndBuilder("./Data/ProximalPhalanxOutlineAgeGroup_train.csv", "./Data/ProximalPhalanxOutlineAgeGroup_verify.csv", 3, 0.1d); + TestRealClusterAndBuilder("./Data/LibrasMovement_train.csv", "./Data/LibrasMovement_verify.csv", 15, 0.1d); + TestRealClusterAndBuilder("./Data/ProximalPhalanxOutlineAgeGroup_train.csv", "./Data/ProximalPhalanxOutlineAgeGroup_verify.csv", 3, 0.1d); return; } diff --git a/Demo/DemoConsoleApp/Program.cs b/Demo/DemoConsoleApp/Program.cs index 2cf94ff..b4edeb9 100644 --- a/Demo/DemoConsoleApp/Program.cs +++ b/Demo/DemoConsoleApp/Program.cs @@ -90,7 +90,7 @@ static void Main() case "6": try { - (new LibrasClassificationLSMDesigner()).Run(InputEncoder.SpikingInputEncodingRegime.Horizontal); + (new LibrasClassificationLSMDesigner()).Run(InputEncoder.InputSpikesCoding.Horizontal); } catch (Exception e) { @@ -101,7 +101,7 @@ static void Main() case "7": try { - (new LibrasClassificationLSMDesigner()).Run(InputEncoder.SpikingInputEncodingRegime.Vertical); + (new 
LibrasClassificationLSMDesigner()).Run(InputEncoder.InputSpikesCoding.Vertical); } catch (Exception e) { @@ -112,7 +112,7 @@ static void Main() case "8": try { - (new LibrasClassificationLSMDesigner()).Run(InputEncoder.SpikingInputEncodingRegime.Forbidden); + (new LibrasClassificationLSMDesigner()).Run(InputEncoder.InputSpikesCoding.Forbidden); } catch (Exception e) { diff --git a/Demo/DemoConsoleApp/SM/SMDemo.cs b/Demo/DemoConsoleApp/SM/SMDemo.cs index 9550249..5619602 100644 --- a/Demo/DemoConsoleApp/SM/SMDemo.cs +++ b/Demo/DemoConsoleApp/SM/SMDemo.cs @@ -4,13 +4,14 @@ using RCNet.Neural.Network.NonRecurrent; using RCNet.Neural.Network.SM; using RCNet.Neural.Network.SM.Preprocessing; +using RCNet.Neural.Network.SM.Readout; using System; using System.Diagnostics; namespace Demo.DemoConsoleApp.SM { /// - /// Demonstrates the State Machine usage, performing demo cases defined in xml file. + /// Performs the demo cases defined in xml file, demonstrates the State Machine usage. /// public class SMDemo { @@ -26,10 +27,10 @@ public SMDemo(IOutputLog log) //Event handlers /// - /// Displays information about the verification progress. + /// Displays an information about the verification progress. /// - /// Total number of inputs to be processed - /// Number of processed inputs + /// The total number of inputs to be processed. + /// The number of already processed inputs. private void OnVerificationProgressChanged(int totalNumOfInputs, int numOfProcessedInputs) { //Display progress @@ -41,11 +42,11 @@ private void OnVerificationProgressChanged(int totalNumOfInputs, int numOfProces } /// - /// Displays information about the preprocessing progress and at the end displays important NeuralPreprocessor's statistics. + /// Displays an information about the preprocessing progress and at the end displays important NeuralPreprocessor's statistics. 
/// - /// Total number of inputs to be processed - /// Number of processed inputs - /// Final overview of the preprocessing phase + /// The total number of inputs to be processed. + /// The number of already processed inputs. + /// The final overview of the preprocessing. private void OnPreprocessingProgressChanged(int totalNumOfInputs, int numOfProcessedInputs, NeuralPreprocessor.PreprocessingOverview finalPreprocessingOverview @@ -70,49 +71,49 @@ NeuralPreprocessor.PreprocessingOverview finalPreprocessingOverview } /// - /// Displays information about the readout unit regression progress. + /// Displays information about the build process progress. /// - /// Current state of the regression process - /// Indicates that the best readout unit was changed as a result of the performed epoch - private void OnRegressionEpochDone(TrainedNetworkBuilder.BuildingState buildingState, bool foundBetter) + /// The current state of the build process. + /// Indicates that the best network so far was found during the last performed epoch. + private void OnEpochDone(TNRNetBuilder.BuildProgress buildProgress, bool foundBetter) { int reportEpochsInterval = 5; //Progress info if (foundBetter || - (buildingState.Epoch % reportEpochsInterval) == 0 || - buildingState.Epoch == buildingState.MaxEpochs || - (buildingState.Epoch == 1 && buildingState.RegrAttemptNumber == 1) + (buildProgress.Epoch % reportEpochsInterval) == 0 || + buildProgress.Epoch == buildProgress.MaxEpochs || + (buildProgress.Epoch == 1 && buildProgress.AttemptNumber == 1) ) { //Build progress report message - string progressText = buildingState.GetProgressInfo(4); + string progressText = buildProgress.GetInfo(4); //Report the progress - _log.Write(progressText, !(buildingState.Epoch == 1 && buildingState.RegrAttemptNumber == 1)); + _log.Write(progressText, !(buildProgress.Epoch == 1 && buildProgress.AttemptNumber == 1)); } return; } //Methods /// - /// Performs specified demo case. + /// Performs the demo case. 
/// - /// An instance of DemoSettings.CaseSettings to be performed - public void PerformDemoCase(SMDemoSettings.CaseSettings demoCaseParams) + /// The configuration of the demo case to be performed. + public void PerformDemoCase(SMDemoSettings.CaseSettings demoCaseCfg) { bool continuousFeedingDataFormat = false; //Prediction input vector (relevant only for input continuous feeding) double[] predictionInputVector = null; //Log start - _log.Write(" Performing demo case " + demoCaseParams.Name, false); + _log.Write(" Performing demo case " + demoCaseCfg.Name, false); _log.Write(" ", false); //Instantiate the StateMachine - StateMachine stateMachine = new StateMachine(demoCaseParams.StateMachineCfg); + StateMachine stateMachine = new StateMachine(demoCaseCfg.StateMachineCfg); ////////////////////////////////////////////////////////////////////////////////////// //Train StateMachine - //Register to RegressionEpochDone event - stateMachine.RL.RegressionEpochDone += OnRegressionEpochDone; + //Register to EpochDone event + stateMachine.RL.EpochDone += OnEpochDone; StateMachine.TrainingResults trainingResults; - CsvDataHolder trainingCsvData = new CsvDataHolder(demoCaseParams.TrainingDataFileName); + CsvDataHolder trainingCsvData = new CsvDataHolder(demoCaseCfg.TrainingDataFileName); VectorBundle trainingData; if (trainingCsvData.ColNameCollection.NumOfStringValues > 0) { @@ -124,15 +125,15 @@ public void PerformDemoCase(SMDemoSettings.CaseSettings demoCaseParams) throw new InvalidOperationException($"Incorrect file format. 
When NeuralPreprocessor is bypassed, only patterned data are allowed."); } trainingData = VectorBundle.Load(trainingCsvData, - demoCaseParams.StateMachineCfg.NeuralPreprocessorCfg.InputEncoderCfg.VaryingFieldsCfg.ExternalFieldsCfg.GetFieldNames(), - demoCaseParams.StateMachineCfg.ReadoutLayerCfg.OutputFieldNameCollection, + demoCaseCfg.StateMachineCfg.NeuralPreprocessorCfg.InputEncoderCfg.VaryingFieldsCfg.ExternalFieldsCfg.GetFieldNames(), + demoCaseCfg.StateMachineCfg.ReadoutLayerCfg.OutputFieldNameCollection, out predictionInputVector ); } else { //Patterned feeding data format - trainingData = VectorBundle.Load(trainingCsvData, demoCaseParams.StateMachineCfg.ReadoutLayerCfg.OutputFieldNameCollection.Count); + trainingData = VectorBundle.Load(trainingCsvData, demoCaseCfg.StateMachineCfg.ReadoutLayerCfg.OutputFieldNameCollection.Count); } if (stateMachine.NP != null) { @@ -150,29 +151,29 @@ out predictionInputVector ////////////////////////////////////////////////////////////////////////////////////// //Verification of training quality on verification data - if (demoCaseParams.VerificationDataFileName.Length > 0) + if (demoCaseCfg.VerificationDataFileName.Length > 0) { stateMachine.VerificationProgressChanged += OnVerificationProgressChanged; StateMachine.VerificationResults verificationResults; - CsvDataHolder verificationCsvData = new CsvDataHolder(demoCaseParams.VerificationDataFileName); + CsvDataHolder verificationCsvData = new CsvDataHolder(demoCaseCfg.VerificationDataFileName); VectorBundle verificationData; if (continuousFeedingDataFormat) { //Continuous input feeding //Last known input values from training (predictionInputVector) must be pushed into the reservoirs to keep time series continuity //(first input data in verification.csv is output of the last data in training.csv) - double[] tmp = stateMachine.Compute(predictionInputVector); + double[] tmp = stateMachine.Compute(predictionInputVector, out ReadoutLayer.ReadoutData readoutData); //Load 
verification data and get new predictionInputVector for final prediction verificationData = VectorBundle.Load(verificationCsvData, - demoCaseParams.StateMachineCfg.NeuralPreprocessorCfg.InputEncoderCfg.VaryingFieldsCfg.ExternalFieldsCfg.GetFieldNames(), - demoCaseParams.StateMachineCfg.ReadoutLayerCfg.OutputFieldNameCollection, + demoCaseCfg.StateMachineCfg.NeuralPreprocessorCfg.InputEncoderCfg.VaryingFieldsCfg.ExternalFieldsCfg.GetFieldNames(), + demoCaseCfg.StateMachineCfg.ReadoutLayerCfg.OutputFieldNameCollection, out predictionInputVector ); } else { //Patterned feeding data format - verificationData = VectorBundle.Load(verificationCsvData, demoCaseParams.StateMachineCfg.ReadoutLayerCfg.OutputFieldNameCollection.Count); + verificationData = VectorBundle.Load(verificationCsvData, demoCaseCfg.StateMachineCfg.ReadoutLayerCfg.OutputFieldNameCollection.Count); } verificationResults = stateMachine.Verify(verificationData); _log.Write(string.Empty); @@ -182,10 +183,10 @@ out predictionInputVector _log.Write(string.Empty); } - //Perform prediction in case the input feeding is continuous (we know the input but we don't know the ideal output) + //Perform prediction in case of input feeding is continuous (we know the input but we don't know the ideal output) if (continuousFeedingDataFormat) { - double[] predictionOutputVector = stateMachine.Compute(predictionInputVector); + double[] predictionOutputVector = stateMachine.Compute(predictionInputVector, out ReadoutLayer.ReadoutData readoutData); string predictionReport = stateMachine.RL.GetForecastReport(predictionOutputVector, 6); _log.Write(" Forecasts", false); _log.Write(predictionReport); @@ -196,23 +197,23 @@ out predictionInputVector } /// - /// Runs State Machine demo. This is the main function. - /// Executes demo cases defined in xml file. + /// Runs the State Machine demo. + /// Executes the demo cases defined in xml file one by one. 
/// - /// Xml file containing definitions of demo cases to be performed - public void RunDemo(string demoSettingsXmlFile) + /// The name of the xml file containing the definitions of demo cases to be performed. + public void RunDemo(string demoCasesXmlFile) { _log.Write("State Machine demo started"); - //Instantiate demo settings from the xml file - SMDemoSettings demoSettings = new SMDemoSettings(demoSettingsXmlFile); - //Loop through all demo cases + //Instantiate the demo configuration from the xml file + SMDemoSettings demoCfg = new SMDemoSettings(demoCasesXmlFile); + //Loop through the demo cases Stopwatch sw = new Stopwatch(); - foreach (SMDemoSettings.CaseSettings demoCaseParams in demoSettings.CaseCfgCollection) + foreach (SMDemoSettings.CaseSettings caseCfg in demoCfg.CaseCfgCollection) { sw.Reset(); sw.Start(); //Execute the demo case - PerformDemoCase(demoCaseParams); + PerformDemoCase(caseCfg); sw.Stop(); TimeSpan ts = sw.Elapsed; _log.Write(" Run time of demo case: " + String.Format("{0:00}:{1:00}:{2:00}.{3:00}", ts.Hours, ts.Minutes, ts.Seconds, ts.Milliseconds / 10)); diff --git a/Demo/DemoConsoleApp/SM/SMDemoSettings.cs b/Demo/DemoConsoleApp/SM/SMDemoSettings.cs index 7a865cb..363569e 100644 --- a/Demo/DemoConsoleApp/SM/SMDemoSettings.cs +++ b/Demo/DemoConsoleApp/SM/SMDemoSettings.cs @@ -10,27 +10,26 @@ namespace Demo.DemoConsoleApp.SM { /// - /// Holds the StateMachine demo cases configurations. + /// Configuration of the StateMachine demo cases. /// public class SMDemoSettings { //Constants //Attribute properties /// - /// Location where the csv sample data files are stored. + /// The data location /// public string DataFolder { get; } /// - /// Collection of demo case definitions. + /// The collection of the demo case configurations. /// public List CaseCfgCollection { get; } //Constructor /// - /// Creates initialized instance based on given xml file. - /// This is the only way to instantiate StateMachine demo settings. 
+ /// Creates initialized instance from the specified xml file. /// - /// Xml file consisting of demo cases definitions + /// The name of the xml file consisting of demo cases configurations. public SMDemoSettings(string fileName) { //Validate xml file and load the document @@ -60,26 +59,26 @@ public SMDemoSettings(string fileName) //Inner classes /// - /// Holds the configuration of the single demo case. + /// Implements the configuration of the single demo case. /// public class CaseSettings { //Constants //Attribute properties /// - /// Demo case descriptive name + /// The name of the demo case. /// public string Name { get; } /// - /// Demo case training data file (appropriate csv format) + /// The name of the file containing the training data. /// public string TrainingDataFileName { get; } /// - /// Demo case verification data file (appropriate csv format) + /// The name of the file containing the verification data. /// public string VerificationDataFileName { get; } /// - /// State machine configuration + /// The configuration of the State Machine. 
/// public StateMachineSettings StateMachineCfg { get; } @@ -91,17 +90,17 @@ public CaseSettings(XElement demoCaseElem, string dir) Name = demoCaseElem.Attribute("name").Value; //Samples XElement samplesElem = demoCaseElem.Elements("samples").First(); - //Full path to training csv file + //Training data file (full path) TrainingDataFileName = Path.Combine(dir, samplesElem.Attribute("trainingData").Value); //Verification data file if (samplesElem.Attribute("verificationData").Value.Trim().Length > 0) { - //Full path to verification csv file + //Full path VerificationDataFileName = Path.Combine(dir, samplesElem.Attribute("verificationData").Value); } else { - //Empty - no verification data + //Empty - no verification data specified VerificationDataFileName = string.Empty; } //State Machine configuration diff --git a/Demo/DemoConsoleApp/SMDemoSettings.xml b/Demo/DemoConsoleApp/SMDemoSettings.xml index 0b189e4..967bbaf 100644 --- a/Demo/DemoConsoleApp/SMDemoSettings.xml +++ b/Demo/DemoConsoleApp/SMDemoSettings.xml @@ -69,55 +69,67 @@ - - - - - - - - - - - + + + + + + + + + + + + + + + + + + - + - + - + - + - + - + - + - + - + - + - + - + + + + + + @@ -212,34 +224,46 @@ - - - - - - - - - - - + + + + + + + + + + + + + + + + + + - + - + - + - + - + + + + + + @@ -348,19 +372,24 @@ - - - - + - - - - - - + + + + + + + + + + + + + + @@ -430,19 +459,24 @@ - - - - + - - - - - - + + + + + + + + + + + + + + @@ -453,7 +487,7 @@ - + @@ -527,45 +561,78 @@ - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + - + - + - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + - + - + - - - - + + + The uniform distribution. + + + + + The gaussian distribution. + + + + + The exponential distribution. + + + + + The gamma distribution. + + - - + + - - - - + + + The period of the pulses is constant. + + + + + The period of the pulses follows the Uniform distribution. + + + + + The period of the pulses follows the Gaussian distribution. 
+ + + + + The period of the pulses follows the Poisson (Exponential) distribution. + + - + - Bi-directional processing without hidden neurons' reset when input data time direction to be turned. + Enabled bi-directional processing without reservoir reset when the direction to be turned. - Bi-directional processing with hidden neurons' reset when input data time direction to be turned. + Enabled bi-directional processing with reservoir reset when the direction to be turned. - Bi-directional processing is forbidden. + The bi-directional processing is forbidden. - - + + - - - - - - - - - - - - - - + + + The sum of all samples. + + + + + The sum of negative samples. + + + + + The sum of positive samples. + + + + + The sum of squared samples. + + + + + The min sample. + + + + + The max sample. + + + + + The center value between the Min and Max. + + + + + The span of the Min and Max (Max - Min). + + + + + The arithmetic average. + + + + + The mean of the squared samples. + + + + + The root of the mean of the squared samples. + + + + + The variance of the samples. + + + + + The standard deviation of the samples. + + + + + The span multiplied by the standard deviation of the samples. + + - - + + - - + + + [v1(t1),v2(t1),v1(t2),v2(t2),v1(t3),v2(t3)] where "v" means variable and "t" means time point. + + + + + [v1(t1),v1(t2),v1(t3),v2(t1),v2(t2),v2(t3)] where "v" means variable and "t" means time point. + + - - + + - - + + + The synaptic delay is decided randomly. + + + + + The synaptic delay depends on an Euclidean distance. + + - - + + - - - - + + + An input synapse. + + + + + An excitatory synapse. + + + + + An inhibitory synapse. + + + + + An indifferent synapse. + + - + - - - - - - - - - - - - - - - - - - - - - + + + A spiking activation function. 
Attempts to simulate the behavior of a biological neuron that accumulates (integrates) input stimulation on its membrane potential and when the critical threshold is exceeded, fires a short pulse (spike), resets membrane and the cycle starts from the beginning. In other words, the function implements one of the so-called Integrate and Fire neuron models. + + + + + An analog activation function. It has no similarity to behavior of the biological neuron. It is always stateless, which means that the output value (signal) does not depend on the previous inputs but only on current input at the time T and particular transformation equation (usually non-linear). + + - - + + - - + + + The result of the activation function. + + + + + The powered absolute value of the result of the activation function. + + + + + The statistical figure computed from the activation function results. + + + + + The rescaled range computed from the activation function results. + + + + + The linearly weighted average computed from the activation function results. + + + + + The statistical figure computed from the differences of the activation function results (A[T] - A[T-1]). + + + + + The rescaled range computed from the differences of the activation function results (A[T] - A[T-1]). + + + + + The linearly weighted average computed from the differences of the activation function results (A[T] - A[T-1]). + + + + + The traced neuron's firing. + + - - + + - + - Used is only output based on 2nd level networks. + The Euler 1st order method. - + - Used is average value of the first level output and the second level output. + The Runge-Kutta 4th order method. - - + + - - - + + + The horizontal coding. + + + + + The vertical coding. + + + + + The coding of input spikes is not allowed. + + @@ -390,26 +554,38 @@ - Default value is 0 + The X coordinate. Default value is 0. - Default value is 0 + The Y coordinate. Default value is 0. - Default value is 0 + The Z coordinate. Default value is 0. 
- - - + + + The X dimension. + + + + + The Y dimension. + + + + + The Z dimension. + + @@ -422,25 +598,37 @@ - Default value is 0 + The mean. Default value is 0. - Default value is 1 + The standard deviation. Default value is 1. - + + + The mean. + + - - + + + The alpha (shape parameter). + + + + + The beta (rate parameter). + + @@ -451,11 +639,19 @@ - - + + + The min value (inclusive). + + + + + The max value (exclusive). + + - Default value is false + Specifies whether to randomize the value sign. Default value is false. @@ -464,19 +660,23 @@ - Default value is 0.5 + The mean. Default value is 0.5. - Default value is 1 + The standard deviation. Default value is 1. - + + + The mean. + + @@ -487,34 +687,49 @@ - - + + + The min value (inclusive). + + + + + The max value (exclusive). + + - - - - + + + + + The min value of the parameter. + + + + + The max value of the parameter. + + - Default value is Auto + The number of sub-intervals of the currently focused interval. Default value is Auto. - - + - Default value is 8 + The number of receptors. Default value is 8. - Default value is 8 + The number of code time points per receptor. Default value is 8. @@ -523,7 +738,7 @@ - Default value is 8 + The number of code time-points. Default value is 8. @@ -532,12 +747,12 @@ - Default value is 8 + The number of receptors. Default value is 8. - Default value is 8 + The number of code time points per receptor. Default value is 8. @@ -545,26 +760,38 @@ - - - + + + The pulse signal value. + + + + + The pulse average leak. + + + + + The pulse timing mode. + + - Default value is 0 + The phase shift. Default value is 0. - Default value is 1 + The frequency. Default value is 1. - Default value is 1 + The amplitude. Default value is 1. @@ -573,17 +800,17 @@ - Default value is 18 + The tau (the backward deepness 2..18). Default value is 18. - Default value is 0.1 + The b coefficient. Default value is 0.1. - Default value is 0.2 + The c coefficient. Default value is 0.2. 
@@ -591,22 +818,34 @@ - + + + The name of the input field to be transformed. + + - - + + + The name of the first (X) input field. + + + + + The name of the second (Y) input field. + + - + - Default value is 1 + Specifies the interval between the current and the past value. Default value is 1. @@ -619,7 +858,7 @@ - Default value is 1 + The constant numerator. Default value is 1. @@ -632,7 +871,7 @@ - Default value is (e) 2.7182818284590451 + The base. Default value is (e) 2.7182818284590451. @@ -645,7 +884,7 @@ - Default value is (e) 2.7182818284590451 + The base. Default value is (e) 2.7182818284590451. @@ -658,12 +897,12 @@ - Default value is 0.5 + The exponent. Default value is 0.5. - Default value is true + Specifies whether to keep the original value sign. Default value is true. @@ -674,7 +913,11 @@ - + + + The lambda exponent. + + @@ -683,8 +926,16 @@ - - + + + The recent history window size. + + + + + The output statistical figure. + + @@ -709,12 +960,12 @@ - Default value is 1 + The A coefficient. Default value is 1. - Default value is 1 + The B coefficient. Default value is 1. @@ -727,28 +978,28 @@ - Typical value is 15 + The membrane resistance (Mohm). Typical value is 15. - Typical value is 0.05 + The membrane potential decay rate. Typical value is 0.05. - Typical value is 5 + The membrane reset potential (mV). Typical value is 5. - Typical value is 7.5 + The membrane firing threshold (mV). Typical value is 7.5. - Default value is 1 + The number of after-spike computation cycles while an input stimuli to be ignored (cycles). Default value is 1. @@ -758,48 +1009,48 @@ - Typical value is 8 + The membrane time scale (ms). Typical value is 8. - Typical value is 20 + The membrane resistance (Mohm). Typical value is 20. - Typical value is -70 + The membrane rest potential (mV). Typical value is -70. - Typical value is -65 + The membrane reset potential (mV). Typical value is -65. - Typical value is -50 + The membrane firing threshold (mV). Typical value is -50. 
- Default value is 1 + The number of after-spike computation cycles while an input stimuli to be ignored (cycles). Default value is 1. - + - Default value is Euler + The ODE numerical solver method. Default value is Euler. - Default value is 2 + The number of computation sub-steps of the ODE numerical solver. Default value is 2. - Default value is 1 (ms) + The duration of the membrane stimulation (ms). Default value is 1. @@ -809,58 +1060,58 @@ - Typical value is 12 + The membrane time scale (ms). Typical value is 12. - Typical value is 20 + The membrane resistance (Mohm). Typical value is 20. - Typical value is -65 + The membrane rest potential (mV). Typical value is -65. - Typical value is -60 + The membrane reset potential (mV). Typical value is -60. - Typical value is -55 + The membrane rheobase threshold (mV). Typical value is -55. - Typical value is -30 + The membrane firing threshold (mV). Typical value is -30. - Typical value is 2 + The sharpness of membrane potential change (mV). Typical value is 2. - Default value is 1 + The number of after-spike computation cycles while an input stimuli to be ignored (cycles). Default value is 1. - + - Default value is Euler + The ODE numerical solver method. Default value is Euler. - Default value is 2 + The number of computation sub-steps of the ODE numerical solver. Default value is 2. - Default value is 1 (ms) + The duration of the membrane stimulation (ms). Default value is 1. @@ -870,68 +1121,68 @@ - Typical value is 5 + The membrane time scale (ms). Typical value is 5. - Typical value is 500 + The membrane resistance (Mohm). Typical value is 500. - Typical value is -70 + The membrane rest potential (mV). Typical value is -70. - Typical value is -51 + The membrane reset potential (mV). Typical value is -51. - Typical value is -50 + The membrane rheobase threshold (mV). Typical value is -50. - Typical value is -30 + The membrane firing threshold (mV). Typical value is -30. 
- Typical value is 2 + The sharpness of membrane potential change (mV). Typical value is 2. - Typical value is 0.5 + The adaptation voltage coupling (nS). Typical value is 0.5. - Typical value is 100 + The adaptation time constant (ms). Typical value is 100. - Typical value is 7 + The spike triggered adaptation increment (pA). Typical value is 7. - + - Default value is Euler + The ODE numerical solver method. Default value is Euler. - Default value is 2 + The number of computation sub-steps of the ODE numerical solver. Default value is 2. - Default value is 1 (ms) + The duration of the membrane stimulation (ms). Default value is 1. @@ -941,53 +1192,53 @@ - Typical value is 0.02 + The dimensionless parameter "a" in the original Izhikevich model. Describes the time scale of the recovery variable. Smaller values result in slower recovery. Typical value is 0.02. - Typical value is 0.2 + The dimensionless parameter "b" in the original Izhikevich model. Describes the sensitivity of the recovery variable to the subthreshold fluctuations of the membrane potential. Typical value is 0.2. - Typical value is 2 + The dimensionless parameter "d" in the original Izhikevich model. Describes after-spike reset of the recovery variable. Typical value is 2. - Typical value is -70 + The membrane rest potential (mV). Typical value is -70. - Typical value is -65 + The membrane reset potential (mV). The parameter "c" in the original Izhikevich model. Typical value is -65. - Typical value is 30 + The membrane firing threshold (mV). Typical value is 30. - Default value is 1 + The number of after-spike computation cycles while an input stimuli to be ignored (cycles). Default value is 1. - + - Default value is Euler + The ODE numerical solver method. Default value is Euler. - Default value is 2 + The number of computation sub-steps of the ODE numerical solver. Default value is 2. - Default value is 1 (ms) + The duration of the membrane stimulation (ms). Default value is 1. 
@@ -996,22 +1247,22 @@ - Default value is 1 + The number of after-spike computation cycles while an input stimuli to be ignored (cycles). Default value is 1. - + - Default value is Euler + The ODE numerical solver method. Default value is Euler. - Default value is 2 + The number of computation sub-steps of the ODE numerical solver. Default value is 2. - Default value is 1 (ms) + The duration of the membrane stimulation (ms). Default value is 1. @@ -1030,7 +1281,7 @@ - Typical value is 1 + The slope of the curve. Typical value is 1. @@ -1049,7 +1300,7 @@ - Typical value is 1 + The Alpha. Typical value is 1. @@ -1060,7 +1311,7 @@ - Typical value is 0.05 + The negative slope. Typical value is 0.05. @@ -1083,7 +1334,7 @@ - Typical value is 1 + The Alpha. Typical value is 1. @@ -1120,7 +1371,11 @@ - + + + The number of layer neurons. + + @@ -1134,17 +1389,25 @@ - + - The usual values are: min=0, max=0.1, steps=10 + The noise parameter. Typical finder parameters are: min=0, max=0.1, subIntervals=10. - - + + + The number of attempts. + + + + + The number of attempt epochs. + + - Default value is 0.75 + The zero-margin of the noise. Default value is 0.75. @@ -1152,62 +1415,78 @@ - + - The usual values are: min=0, max=0.5, steps=10 + The lambda parameter. Typical finder parameters are: min=0, max=0.5, subIntervals=10. - + + + The number of attempt epochs. + + - + + + The number of attempt epochs. + + - The penalty to be applied. Default value is 1e-6 + The ridge lambda hyperparameter. Default value is 1e-6. - Ratio between Ridge and Lasso approach. Values between 0 and 1 inclusive, where 0 leads to full Ridge and 1 leads to full Lasso. Default value is 0.5 + The trade-off ratio between the Ridge (0) and the Lasso (1) approach. Default value is 0.5. - - + + + The number of attempts. + + + + + The number of attempt epochs. + + - Default value is 1E-17 + An absolute value that is still considered as zero. Default value is 1E-17. - Default value is 1.2 + The positive Eta. 
Default value is 1.2. - Default value is 0.5 + The negative Eta.Default value is 0.5. - Default value is 0.1 + The initial Delta. Default value is 0.1. - Default value is 1E-6 + The minimum Delta. Default value is 1E-6. - Default value is 50 + The maximum Delta. Default value is 50. @@ -1243,8 +1522,8 @@ - - + + @@ -1258,16 +1537,73 @@ + + + + + The number of attempts. + + + + + The number of attempt epochs. + + + + + The initial learning rate. Default value is 0.01. + + + + + The learning rate increment. Default value is 1.1. + + + + + The learning rate decrement. Default value is 0.5. + + + + + The min learning rate. Default value is 1E-4. + + + + + The max learning rate. Default value is 0.1. + + + + + + + + + + + + The number of the threshold gates. Default value is 3. + + + + + The output resolution. Default value is 2 (binary resolution). + + + + + - Required ratio of samples constituting one fold. Default value is 0.1. + Specifies the ratio of samples constituting one fold. Default value is 0.1. - Number of folds to be used. Default value is Auto (all available folds). + Specifies the number of folds to be used. Default value is Auto (all available folds). @@ -1277,98 +1613,222 @@ - - + + - - + - + + + + + - Computation mode of the cluster. Default value is AveragedOutputs. + Specifies the weight of the group of metrics related to training. Default value is 1. - - - - - - - + - Default value is 0.01 + Specifies the weight of the group of metrics related to testing. Default value is 1. - + - Default value is 1.1 + Specifies the weight of the number of samples metric. Default value is 1. - + - Default value is 0.5 + Specifies the weight of the numerical precision metric. Default value is 1. - + - Default value is 1E-4 + Specifies the weight of the misrecognized false metric. Default value is 1. - + - Default value is 0.1 + Specifies the weight of the unrecognized true metric. Default value is 0. 
- - + + - + + - + + + + + + + + + + + + + + + + + + + + + + + + + + + + - Default value is 3 + Specifies the weight of the group of metrics related to training. Default value is 1. - + + + Specifies the weight of the group of metrics related to testing. Default value is 1. + + + - Default value is 2 (binary resolution) + Specifies the weight of the number of samples metric. Default value is 1. + + + Specifies the weight of the numerical precision metric. Default value is 1. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Specifies the weight of the group of metrics related to training. Default value is 1. + + + + + Specifies the weight of the group of metrics related to testing. Default value is 1. + + + + + Specifies the weight of the number of samples metric. Default value is 1. + + + + + Specifies the weight of the numerical precision metric. Default value is 1. + + + + + Specifies the weight of the misrecognized false metric. Default value is 1. + + + + + Specifies the weight of the unrecognized true metric. Default value is 0. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + - - - - - - Default value is true + Specifies whether to apply the data standardization. Default value is true. - Default value is true + Specifies whether to keep the range reserve for possible unseen data. Default value is true. - + - Default value is 1 + The synapse's constant efficacy. Default value is 1. @@ -1377,7 +1837,7 @@ - Default value is 1 + The synapse's constant efficacy. Default value is 1. @@ -1386,7 +1846,7 @@ - Default value is 1 + The synapse's constant efficacy. Default value is 1. @@ -1395,7 +1855,7 @@ - Default value is 1 + The synapse's constant efficacy. Default value is 1. @@ -1404,7 +1864,7 @@ - Default value is 1 + The synapse's constant efficacy. Default value is 1. 
@@ -1414,17 +1874,17 @@ - Default value is 0.007 (the alpha argument in the linear expression efficacy = alpha * (spike - beta)) + The value of the alpha argument in the linear expression: efficacy = alpha * (spike - beta). Default value is 0.007. - Default value is 0.739 (the beta argument in the linear expression efficacy = alpha * (spike - beta)) + The value of the beta argument in the linear expression: efficacy = alpha * (spike - beta). Default value is 0.739. - Default value is 0.75 + The value of the synapse's initial efficacy. Default value is 0.75. @@ -1433,17 +1893,17 @@ - Default value is 0.007 (the alpha argument in the linear expression efficacy = alpha * (spike - beta)) + The value of the alpha argument in the linear expression: efficacy = alpha * (spike - beta). Default value is 0.007. - Default value is 0.739 (the beta argument in the linear expression efficacy = alpha * (spike - beta)) + The value of the beta argument in the linear expression: efficacy = alpha * (spike - beta). Default value is 0.739. - Default value is 0.75 + The value of the synapse's initial efficacy. Default value is 0.75. @@ -1452,17 +1912,17 @@ - Default value is 0.007 (the alpha argument in the linear expression efficacy = alpha * (spike - beta)) + The value of the alpha argument in the linear expression: efficacy = alpha * (spike - beta). Default value is 0.007. - Default value is 0.739 (the beta argument in the linear expression efficacy = alpha * (spike - beta)) + The value of the beta argument in the linear expression: efficacy = alpha * (spike - beta). Default value is 0.739. - Default value is 0.75 + The value of the synapse's initial efficacy. Default value is 0.75. @@ -1471,17 +1931,17 @@ - Default value is 0.007 (the alpha argument in the linear expression efficacy = alpha * (spike - beta)) + The value of the alpha argument in the linear expression: efficacy = alpha * (spike - beta). Default value is 0.007. 
- Default value is 0.739 (the beta argument in the linear expression efficacy = alpha * (spike - beta)) + The value of the beta argument in the linear expression: efficacy = alpha * (spike - beta). Default value is 0.739. - Default value is 0.75 + The value of the synapse's initial efficacy. Default value is 0.75. @@ -1490,17 +1950,17 @@ - Default value is 0.007 (the alpha argument in the linear expression efficacy = alpha * (spike - beta)) + The value of the alpha argument in the linear expression: efficacy = alpha * (spike - beta). Default value is 0.007. - Default value is 0.739 (the beta argument in the linear expression efficacy = alpha * (spike - beta)) + The value of the beta argument in the linear expression: efficacy = alpha * (spike - beta). Default value is 0.739. - Default value is 0.75 + The value of the synapse's initial efficacy. Default value is 0.75. @@ -1511,17 +1971,17 @@ - Default value is 0.99 + The value of the resting efficacy. Default value is 0.99. - Default value is 3 ms + The value of the tau depression (ms). Default value is 3. - Default value is 1 ms + The value of the tau facilitation (ms). Default value is 1. @@ -1530,17 +1990,17 @@ - Default value is 0.99 + The value of the resting efficacy. Default value is 0.99. - Default value is 3 ms + The value of the tau depression (ms). Default value is 3. - Default value is 1 ms + The value of the tau facilitation (ms). Default value is 1. @@ -1549,17 +2009,17 @@ - Default value is 0.99 + The value of the resting efficacy. Default value is 0.99. - Default value is 3 ms + The value of the tau depression (ms). Default value is 3. - Default value is 1 ms + The value of the tau facilitation (ms). Default value is 1. @@ -1568,17 +2028,17 @@ - Default value is 0.99 + The value of the resting efficacy. Default value is 0.99. - Default value is 3 ms + The value of the tau depression (ms). Default value is 3. - Default value is 1 ms + The value of the tau facilitation (ms). Default value is 1. 
@@ -1587,17 +2047,17 @@ - Default value is 0.99 + The value of the resting efficacy. Default value is 0.99. - Default value is 3 ms + The value of the tau depression (ms). Default value is 3. - Default value is 1 ms + The value of the tau facilitation (ms). Default value is 1. @@ -1704,14 +2164,14 @@ - + - Default value is Random + The synaptic delay method. Default value is Random. - Default value is 0. No delay. + The maximum synaptic delay. Default value is 0 (no delay). @@ -1722,19 +2182,19 @@ - + - Default value is Random + The synaptic delay method. Default value is Random. - Default value is 0. No delay. + The maximum synaptic delay. Default value is 0 (no delay). - Default value is 4 + The relative share. Default value is 4. @@ -1745,19 +2205,19 @@ - + - Default value is Random + The synaptic delay method. Default value is Random. - Default value is 0. No delay. + The maximum synaptic delay. Default value is 0 (no delay). - Default value is 1 + The relative share. Default value is 1. @@ -1769,14 +2229,14 @@ - + - Default value is Random + The synaptic delay method. Default value is Random. - Default value is 0. No delay. + The maximum synaptic delay. Default value is 0 (no delay). @@ -1787,14 +2247,14 @@ - + - Default value is Random + The synaptic delay method. Default value is Random. - Default value is 0. No delay. + The maximum synaptic delay. Default value is 0 (no delay). @@ -1816,7 +2276,7 @@ - Default value is 0.9999. Use NA to disable spectral radius application. + The spectral radius (use NA to disable spectral radius application). Default value is 0.9999. @@ -1838,35 +2298,35 @@ - Default value is 2. + The exponent. Default value is 2. - Default value is true. + Specifies whether to keep the original sign of the activation value. Default value is true. - - + + - Specifies whether a predictor is computed continuously (NA) or from the moving data window (size 2-1024). Default value is NA. + Specifies the data window size. Default value is NA. 
- + - Specifies requiered statistical feature to be used. + The statistical figure. - - + + - Specifies moving data window size (2-1024). + Specifies the data window size. @@ -1875,30 +2335,30 @@ - Specifies whether a predictor is computed continuously (NA) or from the moving data window (size 2-1024). Default value is NA. + Specifies the data window size. Default value is NA. - - + + - Specifies whether a predictor is computed continuously (NA) or from the moving data window (size 2-1024). Default value is NA. + Specifies the data window size. Default value is NA. - + - Specifies requiered statistical feature to be used. + The statistical figure. - - + + - Specifies moving data window size (2-1024). + Specifies the data window size. @@ -1907,7 +2367,7 @@ - Specifies whether a predictor is computed continuously (NA) or from the moving data window (size 2-1024). Default value is NA. + Specifies the data window size. Default value is NA. @@ -1917,12 +2377,12 @@ - Specifies whether a predictor is computed continuously (NA) or from the moving data window (size 2-1024). Default value is NA. + Specifies the data window size. Default value is NA. - Trace fading strength (GE0..LE1). Default value is 0.005 + Specifies the strength of trace fading. Default value is 0.005. @@ -1933,47 +2393,47 @@ - Result of the activation function. + The result of the activation function. - Powered absolute value of the result of the activation function. + The powered absolute value of the result of the activation function. - + - Statistical feature computed from the activation function results. + The statistical feature computed from the activation function results. - + - Rescalled range computed from the activation function results. + The rescaled range computed from the activation function results. - Linearly weighted average computed from the activation function results. + The linearly weighted average computed from the activation function results. 
- + - Statistical feature computed from the differences of the activation function results (A[T] - A[T-1]). + The statistical figure computed from the differences of the activation function results (A[T] - A[T-1]). - + - Rescalled range computed from the differences of the activation function results (A[T] - A[T-1]). + The rescaled range computed from the differences of the activation function results (A[T] - A[T-1]). - Linearly weighted average computed from the differences of the activation function results (A[T] - A[T-1]). + The linearly weighted average computed from the differences of the activation function results (A[T] - A[T-1]). - Traced neuron's firing. + The traced neuron's firing. @@ -1989,32 +2449,32 @@ - Default value is 0.1 + The density of interconnected neurons. Default value is 0.1. - Default value is NA + The average distance of interconnected neurons (NA means the random distance). Default value is NA. - Default value is true + Specifies whether to allow neurons to be self connected. Default value is true. - Default value is false + Specifies whether to keep the constant number of synapses. Default value is false. - Default value is true + Specifies whether the connections of this schema will replace the existing connections. Default value is true. - Default value is 1 + The number of applications of this schema. Default value is 1. @@ -2023,22 +2483,22 @@ - Default value is 1 + The ratio of involved neurons. Default value is 1. - Default value is true + Specifies whether the chain will be closed to a circle. Default value is true. - Default value is true + Specifies whether the connections of this schema will replace the existing connections. Default value is true. - Default value is 1 + The number of applications of this schema. Default value is 1. @@ -2047,27 +2507,27 @@ - Default value is 1 + The ratio of involved neurons. Default value is 1. - Default value is false + Specifies whether the left diagonal neurons to be self connected. 
Default value is false. - Default value is false + Specifies whether the right diagonal neurons to be self connected. Default value is false. - Default value is true + Specifies whether the connections of this schema will replace the existing connections. Default value is true. - Default value is 1 + The number of applications of this schema. Default value is 1. @@ -2092,17 +2552,17 @@ - Default value is 0.75 + The total excitatory strength. Default value is 0.75. - Default value is 0.75 + The input strength ratio. Default value is 0.75. - Default value is 0.25 + The inhibitory strength ratio. Default value is 0.25. @@ -2123,8 +2583,16 @@ - - + + + The name of the neuron group. + + + + + Specifies how big relative portion of pool's neurons is formed by this group of the neurons. + + @@ -2133,7 +2601,11 @@ - + + + Specifies the ratio of the neurons having the Retainment property. + + @@ -2153,16 +2625,24 @@ - - + + + The name of the neuron group. + + + + + Specifies how big relative portion of pool's neurons is formed by this group of the neurons. + + - A number between 0 and 1 (LT1). Every time the new normalized activation value is higher than the previous normalized activation value by at least the threshold, it is evaluated as a firing event. Default value is 0.00125. + The firing threshold value. Every time the current normalized activation is higher than the normalized past reference activation by at least this threshold, it is evaluated as a firing event. Default value is 0.00125. - Maximum deepness of historical normalized activation value to be compared with current normalized activation value when evaluating firing event. Default value is 1. + Maximum age of the past activation for the evaluation of the firing event. Default value is 1. @@ -2190,7 +2670,11 @@ - + + + The name of the pool. + + @@ -2204,13 +2688,29 @@ - - - - + + + The name of the target pool. + + + + + Determines how many neurons in the target pool get connected source pool neurons. 
+ + + + + The name of the source pool. + + + + + Determines how many neurons from the source pool to be connected to one neuron from target pool. + + - Default value is false + Specifies whether to keep constant number of connections from source neurons to target neuron. Default value is false. @@ -2239,7 +2739,11 @@ - + + + The name of the reservoir structure configuration. + + @@ -2248,7 +2752,7 @@ - Default value is Auto + Specifies the number of the boot cycles. Default value is Auto. @@ -2258,22 +2762,22 @@ - Threshold of signal begin detection. Default value is 0 (maximum sensitivity) + The threshold of the signal begin detection. Default value is 0 (maximum sensitivity). - Threshold of signal end detection. Default value is 0 (maximum sensitivity) + Threshold of signal end detection. Default value is 0 (maximum sensitivity). - Default value is true. + Specifies whether all the variables in the input pattern should have the same signal begin/end. Default value is true. - Number of time-points of the resampled pattern (resampled pattern length). Default value is Auto (keeps original time points) + Specifies whether the input pattern variable's data will be upsampled and/or downsampled to have specified fixed length (GT 0). Default value is Auto (keeps original time points). @@ -2285,12 +2789,12 @@ - Default value is false. + Specifies whether to detrend the input pattern data. Default value is false. - Default value is false. + Specifies whether to unify an amplitude of the input pattern data. Default value is false. @@ -2300,7 +2804,7 @@ - Default value is true + Specifies whether to route the steady input field to the readout layer. Default value is true. @@ -2320,17 +2824,17 @@ - Specifies how many times to collect predictors during pattern data preprocessing. Default value is 1 + Specifies how many times to collect predictors during the pattern preprocessing. Default value is 1. 
- Specifies whether and how to preprocess time series pattern in both time directions (doubles predictors in total). Default value is Forbidden + Specifies whether and how to preprocess pattern in both time directions. Default value is Forbidden. - + - Default value is Groupped. + Specifies the variables organization schema in the pattern. Default value is Groupped. @@ -2343,9 +2847,9 @@ - + - Default value is Forbidden. + The way of input spikes coding. Default value is Forbidden. @@ -2355,14 +2859,17 @@ - - + + + The name of the input field. + + - Default value is true + Specifies whether to route the input field to the readout layer. Default value is true. @@ -2391,10 +2898,14 @@ - + + + The name of the transformed input field. + + - Default value is true + Specifies whether to route the transformed input field to the readout layer. Default value is true. @@ -2417,10 +2928,14 @@ - + + + The name of the generated field. + + - Default value is false + Specifies whether to route the generated field to the readout layer. Default value is false. @@ -2442,7 +2957,7 @@ - Default value is false + Specifies whether to route the varying input fields to the readout layer. Default value is false. @@ -2474,16 +2989,24 @@ - - + + + The name of the input field. + + + + + The name of the target pool. + + - Default value is 1 + The density on the target spiking neurons. Default value is 1. - Default value is 1 + The density on the target analog neurons. Default value is 1. @@ -2507,8 +3030,16 @@ - - + + + The name of the reservoir instance. + + + + + The name of the reservoir structure configuration. + + @@ -2547,126 +3078,133 @@ - Specifies minimum acceptable predictor's value-span. Default value is 1e-6. + Specifies the minimum acceptable predictor's value-span. Default value is 1e-6. - - - - - - - - - - - - + - + - - + - - + - - + - - - - - - + + - - + + + - + - + - Specifies the membership in a group of classes where only one can win. Default value is NA (not applicable, ie. 
stand-alone class). + Specifies the membership in "One Takes All" group of the specified name or no membership if NA keyword is used. Default value is NA. - - - + - - - - + - + + + + + + + + + + + The name of the readout unit. + + - + - + + + + - - - - + - - + - + + + Specifies whether to use the group readout units final results as an input into the cluster. Default value is true. + + + - Specifies how rich will be an input for the final probabilities network. True means to use all available sub-predictions from clusters members and False means to use only already aggregated predictions from clusters. Default value is true. + Specifies whether to use the group readout units sub-results as an input into the cluster. Default value is true. - - + + + + + + + + The name of the "One Takes All" group. + + + + + - - - + + - - + + - + - - + + + The name of the predictor. + + @@ -2678,8 +3216,16 @@ - - + + + The name of the reservoir instance. + + + + + The name of the pool. + + @@ -2691,7 +3237,11 @@ - + + + The name of the input field. + + @@ -2724,7 +3274,11 @@ - + + + The name of the readout unit. + + @@ -2765,7 +3319,7 @@ - Default value is 0 + Specifies the random number generator initial seek. A value greater than or equal to 0 will always ensure the same initialization of the internal random number generator and therefore also the same internal configuration each time the state machine to be instantiated. A value less than 0 causes different internal configuration each time the state machine to be instantiated. Default value is 0. diff --git a/RCNet/RandomValue/ExponentialDistrSettings.cs b/RCNet/RandomValue/ExponentialDistrSettings.cs index dd31db9..00d9875 100644 --- a/RCNet/RandomValue/ExponentialDistrSettings.cs +++ b/RCNet/RandomValue/ExponentialDistrSettings.cs @@ -5,28 +5,28 @@ namespace RCNet.RandomValue { /// - /// Configuration of the Exponential random distribution + /// Configuration of the Exponential random distribution. 
/// [Serializable] public class ExponentialDistrSettings : RCNetBaseSettings, IDistrSettings { //Constants /// - /// Name of the associated xsd type + /// The name of the associated xsd type. /// public const string XsdTypeName = "ExponentialDistrType"; //Attributes /// - /// Mean + /// The mean. /// public double Mean { get; } //Constructors /// - /// Creates an initialized instance + /// Creates an initialized instance. /// - /// Mean + /// The mean. public ExponentialDistrSettings(double mean) { Mean = mean; @@ -35,9 +35,9 @@ public ExponentialDistrSettings(double mean) } /// - /// Copy constructor + /// The copy constructor. /// - /// Source instance + /// The source instance. public ExponentialDistrSettings(ExponentialDistrSettings source) { Mean = source.Mean; @@ -45,9 +45,9 @@ public ExponentialDistrSettings(ExponentialDistrSettings source) } /// - /// Creates an instance and initializes it from given xml element. + /// Creates an initialized instance. /// - /// Xml element containing the initialization settings. + /// A xml element containing the configuration data. public ExponentialDistrSettings(XElement elem) { //Validation diff --git a/RCNet/RandomValue/GammaDistrSettings.cs b/RCNet/RandomValue/GammaDistrSettings.cs index 5ac405e..fe3952c 100644 --- a/RCNet/RandomValue/GammaDistrSettings.cs +++ b/RCNet/RandomValue/GammaDistrSettings.cs @@ -5,34 +5,34 @@ namespace RCNet.RandomValue { /// - /// Configuration of the Gamma random distribution + /// Configuration of the Gamma random distribution. /// [Serializable] public class GammaDistrSettings : RCNetBaseSettings, IDistrSettings { //Constants /// - /// Name of the associated xsd type + /// The name of the associated xsd type. /// public const string XsdTypeName = "GammaDistrType"; //Attributes /// - /// Alpha, the shape parameter + /// The alpha (shape parameter). /// public double Alpha { get; } /// - /// Beta, the rate parameter + /// The beta (rate parameter). 
/// public double Beta { get; } //Constructors /// - /// Creates an initialized instance + /// Creates an initialized instance. /// - /// Shape parameter (alpha) - /// Rate parameter (beta) + /// The alpha (shape parameter). + /// The beta (rate parameter). public GammaDistrSettings(double alpha, double beta) { Alpha = alpha; @@ -42,9 +42,9 @@ public GammaDistrSettings(double alpha, double beta) } /// - /// Copy constructor + /// The copy constructor. /// - /// Source instance + /// The source instance. public GammaDistrSettings(GammaDistrSettings source) { Alpha = source.Alpha; @@ -53,9 +53,9 @@ public GammaDistrSettings(GammaDistrSettings source) } /// - /// Creates an instance and initializes it from given xml element. + /// Creates an initialized instance. /// - /// Xml element containing the initialization settings. + /// A xml element containing the configuration data. public GammaDistrSettings(XElement elem) { //Validation diff --git a/RCNet/RandomValue/GaussianDistrSettings.cs b/RCNet/RandomValue/GaussianDistrSettings.cs index b6b4d6c..5188a19 100644 --- a/RCNet/RandomValue/GaussianDistrSettings.cs +++ b/RCNet/RandomValue/GaussianDistrSettings.cs @@ -5,43 +5,43 @@ namespace RCNet.RandomValue { /// - /// Configuration of the Gaussian random distribution + /// Configuration of the Gaussian random distribution. /// [Serializable] public class GaussianDistrSettings : RCNetBaseSettings, IDistrSettings { //Constants /// - /// Name of the associated xsd type + /// The name of the associated xsd type. /// public const string XsdTypeName = "GaussianDistrType"; //Default values /// - /// Default value of Mean + /// The default value of the mean. /// public const double DefaultMeanValue = 0d; /// - /// Default value of StdDev + /// The default value of the standard deviation. /// public const double DefaultStdDevValue = 1d; //Attributes /// - /// Mean + /// The mean. /// public double Mean { get; } /// - /// Standard deviation + /// The standard deviation. 
/// public double StdDev { get; } //Constructors /// - /// Creates an initialized instance + /// Creates an initialized instance. /// - /// Mean - /// Standard deviation + /// The mean. + /// The standard deviation. public GaussianDistrSettings(double mean = DefaultMeanValue, double stdDev = DefaultStdDevValue) { Mean = mean; @@ -51,9 +51,9 @@ public GaussianDistrSettings(double mean = DefaultMeanValue, double stdDev = Def } /// - /// Copy constructor + /// The copy constructor. /// - /// Source instance + /// The source instance. public GaussianDistrSettings(GaussianDistrSettings source) { Mean = source.Mean; @@ -62,9 +62,9 @@ public GaussianDistrSettings(GaussianDistrSettings source) } /// - /// Creates an instance and initializes it from given xml element. + /// Creates an initialized instance. /// - /// Xml element containing the initialization settings. + /// A xml element containing the configuration data. public GaussianDistrSettings(XElement elem) { //Validation diff --git a/RCNet/RandomValue/IDistrSettings.cs b/RCNet/RandomValue/IDistrSettings.cs index 5b6e276..a112848 100644 --- a/RCNet/RandomValue/IDistrSettings.cs +++ b/RCNet/RandomValue/IDistrSettings.cs @@ -1,7 +1,7 @@ namespace RCNet.RandomValue { /// - /// Common interface of random distributions configurations + /// The common interface of random distribution configurations. /// public interface IDistrSettings { diff --git a/RCNet/RandomValue/RandomCommon.cs b/RCNet/RandomValue/RandomCommon.cs index 0348368..5ff57a5 100644 --- a/RCNet/RandomValue/RandomCommon.cs +++ b/RCNet/RandomValue/RandomCommon.cs @@ -4,39 +4,39 @@ namespace RCNet.RandomValue { /// - /// Helper class supporting random values concept + /// Implements the enumerations and helper methods related to random values. /// public static class RandomCommon { //Enums /// - /// Type of the random distribution + /// The type of random distribution. 
/// public enum DistributionType { /// - /// Uniform distribution + /// The uniform distribution. /// Uniform, /// - /// Gaussian distribution + /// The gaussian distribution. /// Gaussian, /// - /// Exponential distribution + /// The exponential distribution. /// Exponential, /// - /// Gamma distribution + /// The gamma distribution. /// Gamma } //Static methods /// - /// Returns default name of the xml element containing settings for given distribution type + /// Gets the default name of a xml element holding the configuration of specified distribution type. /// - /// Distribution type + /// The distribution type. public static string GetDistrElemName(DistributionType distrType) { switch (distrType) @@ -55,11 +55,10 @@ public static string GetDistrElemName(DistributionType distrType) } /// - /// Creates appropriate instance of DistributionSettings based on given xml element + /// Loads the configuration of random distribution. /// - /// Xml element containing distribution settings - /// Appropriate instance of DistributionSettings - public static IDistrSettings CreateDistrSettings(XElement elem) + /// A xml element containing the configuration data. + public static IDistrSettings LoadDistrCfg(XElement elem) { switch (elem.Name.LocalName) { @@ -72,16 +71,15 @@ public static IDistrSettings CreateDistrSettings(XElement elem) case "gammaDistr": return new GammaDistrSettings(elem); default: - throw new InvalidOperationException($"Unexpected element {elem.Name.LocalName}"); + throw new ArgumentException($"Unexpected element name {elem.Name.LocalName}.", "elem"); } } /// - /// Creates appropriate instance of DistributionSettings based on given xml element (unsigned) + /// Loads the configuration of random distribution (unsigned version). /// - /// Xml element containing distribution settings - /// Appropriate instance of DistributionSettings - public static IDistrSettings CreateUDistrSettings(XElement elem) + /// A xml element containing the configuration data. 
+ public static IDistrSettings LoadUDistrCfg(XElement elem) { switch (elem.Name.LocalName) { @@ -94,7 +92,7 @@ public static IDistrSettings CreateUDistrSettings(XElement elem) case "gammaDistr": return new GammaDistrSettings(elem); default: - throw new InvalidOperationException($"Unexpected element {elem.Name.LocalName}"); + throw new ArgumentException($"Unexpected element name {elem.Name.LocalName}.", "elem"); } } diff --git a/RCNet/RandomValue/RandomValueSettings.cs b/RCNet/RandomValue/RandomValueSettings.cs index f5cf197..a81d811 100644 --- a/RCNet/RandomValue/RandomValueSettings.cs +++ b/RCNet/RandomValue/RandomValueSettings.cs @@ -6,59 +6,59 @@ namespace RCNet.RandomValue { /// - /// Configuration of the random value + /// Configuration of the random value. /// [Serializable] public class RandomValueSettings : RCNetBaseSettings { //Constants /// - /// Name of the associated xsd type + /// The name of the associated xsd type. /// public const string XsdTypeName = "RandomValueType"; //Default values /// - /// Default value of RandomSign + /// The default value of the parameter specifying whether to randomize the value sign. /// - public const bool DefaultRandomSignValue = false; + public const bool DefaultRandomSign = false; /// - /// Default type of distribution + /// The default type of random distribution. /// public const RandomCommon.DistributionType DefaultDistributionType = RandomCommon.DistributionType.Uniform; //Attribute properties /// - /// Min random value + /// The min value (inclusive). /// public double Min { get; } /// - /// Max random value + /// The max value (exclusive). /// public double Max { get; } /// - /// Specifies whether to randomize value sign + /// Specifies whether to randomize the value sign. /// public bool RandomSign { get; } /// - /// Distribution parameters + /// The configuration of the distribution. 
/// public IDistrSettings DistrCfg { get; } //Constructors /// - /// Creates an initialized instance + /// Creates an initialized instance. /// - /// Min random value - /// Max random value - /// Specifies whether to randomize value sign - /// Specific parameters of the distribution to be used + /// The min value (inclusive). + /// The max value (exclusive). + /// Specifies whether to randomize the value sign. + /// The configuration of the distribution. public RandomValueSettings(double min, double max, - bool randomSign = DefaultRandomSignValue, + bool randomSign = DefaultRandomSign, IDistrSettings distrCfg = null ) { @@ -75,9 +75,9 @@ public RandomValueSettings(double min, } /// - /// Copy constructor + /// The copy constructor. /// - /// Source instance + /// The source instance. public RandomValueSettings(RandomValueSettings source) { Min = source.Min; @@ -88,9 +88,9 @@ public RandomValueSettings(RandomValueSettings source) } /// - /// Creates an instance and initializes it from given xml element. + /// Creates an initialized instance. /// - /// Xml data containing RandomValueSettings settings. + /// A xml element containing the configuration data. public RandomValueSettings(XElement elem) { //Validation @@ -106,7 +106,7 @@ public RandomValueSettings(XElement elem) } else { - DistrCfg = RandomCommon.CreateDistrSettings(distrParamsElem); + DistrCfg = RandomCommon.LoadDistrCfg(distrParamsElem); } Check(); return; @@ -114,7 +114,7 @@ public RandomValueSettings(XElement elem) //Properties /// - /// Checks the defaults + /// Checks the defaults. /// public bool IsDefaultDistrType { get { return DistrType == RandomCommon.DistributionType.Uniform; } } @@ -129,10 +129,18 @@ public RandomValueSettings(XElement elem) //Methods //Static methods /// - /// If exists descendant element within the root element then function creates instance of the RandomValueSettings using - /// descendant's xml settings. 
If not, function creates instance of the RandomValueSettings using specified default parameters. - /// - public static RandomValueSettings LoadOrDefault(XElement rootElem, string descendant, double defaultMin, double defaultMax, bool randomSign = false) + /// Loads or creates the configuration of the random value. + /// + /// + /// Checks whether exists the specified descendant element under the root element and if so, loads the configuration. + /// If the specified descendant element does not exist, creates the configuration according to specified parameters. + /// + /// The root xml element. + /// The name of descendant element containing the configuration data. + /// The min value. + /// The max value. + /// Specifies whether to randomize the value sign. + public static RandomValueSettings LoadOrCreate(XElement rootElem, string descendant, double min, double max, bool randomSign = false) { XElement descendantElement = rootElem.Elements(descendant).FirstOrDefault(); if (descendantElement != null) @@ -141,27 +149,42 @@ public static RandomValueSettings LoadOrDefault(XElement rootElem, string descen } else { - return new RandomValueSettings(defaultMin, defaultMax, randomSign); + return new RandomValueSettings(min, max, randomSign); } } /// - /// If exists descendant element within the root element then function creates instance of the RandomValueSettings using - /// descendant's xml settings. If not, function creates instance of the RandomValueSettings using specified default parameters. + /// Loads or creates the configuration of the random value. /// - public static RandomValueSettings LoadOrDefault(XElement rootElem, string descendant, double defaultConst, bool randomSign = false) + /// + /// Checks whether exists the specified descendant element under the root element and if so, loads the configuration. + /// If the specified descendant element does not exist, creates the configuration according to specified parameters. + /// + /// The root xml element. 
+ /// The name of descendant element containing the configuration data. + /// The constant value (the same min and max values). + /// Specifies whether to randomize the value sign. + public static RandomValueSettings LoadOrCreate(XElement rootElem, string descendant, double constValue, bool randomSign = false) { - return LoadOrDefault(rootElem, descendant, defaultConst, defaultConst, randomSign); + return LoadOrCreate(rootElem, descendant, constValue, constValue, randomSign); } /// - /// If source is not null then function creates it's clone. If not, function creates instance of the RandomValueSettings using specified default parameters. + /// Clones the existing configuration or creates the new configuration of the random value. /// - public static RandomValueSettings CloneOrDefault(RandomValueSettings source, double defaultMin, double defaultMax, bool randomSign = false) + /// + /// Checks whether the specified source configuration instance is not null and if so, creates its clone. + /// If the source configuration instance is null, creates the configuration according to specified parameters. + /// + /// The source configuration instance. + /// The min value. + /// The max value. + /// Specifies whether to randomize the value sign. + public static RandomValueSettings CloneOrCreate(RandomValueSettings source, double min, double max, bool randomSign = false) { if (source == null) { - return new RandomValueSettings(defaultMin, defaultMax, randomSign); + return new RandomValueSettings(min, max, randomSign); } else { @@ -170,11 +193,18 @@ public static RandomValueSettings CloneOrDefault(RandomValueSettings source, dou } /// - /// If source is not null then function creates it's clone. If not, function creates instance of the RandomValueSettings using specified default parameters. + /// Clones the existing configuration or creates the new configuration of the random value. 
/// - public static RandomValueSettings CloneOrDefault(RandomValueSettings source, double defaultConst, bool randomSign = false) + /// + /// Checks whether the specified source configuration instance is not null and if so, creates its clone. + /// If the source configuration instance is null, creates the configuration according to specified parameters. + /// + /// The source configuration instance. + /// The constant value (the same min and max values). + /// Specifies whether to randomize the value sign. + public static RandomValueSettings CloneOrCreate(RandomValueSettings source, double constValue, bool randomSign = false) { - return CloneOrDefault(source, defaultConst, defaultConst, randomSign); + return CloneOrCreate(source, constValue, constValue, randomSign); } //Methods @@ -199,7 +229,7 @@ public override XElement GetXml(string rootElemName, bool suppressDefaults) { XElement rootElem = new XElement(rootElemName, new XAttribute("min", Min.ToString(CultureInfo.InvariantCulture)), new XAttribute("max", Max.ToString(CultureInfo.InvariantCulture))); - if (!suppressDefaults || RandomSign != DefaultRandomSignValue) + if (!suppressDefaults || RandomSign != DefaultRandomSign) { rootElem.Add(new XAttribute("randomSign", RandomSign.ToString(CultureInfo.InvariantCulture).ToLowerInvariant())); } diff --git a/RCNet/RandomValue/UExponentialDistrSettings.cs b/RCNet/RandomValue/UExponentialDistrSettings.cs index 920367e..671b320 100644 --- a/RCNet/RandomValue/UExponentialDistrSettings.cs +++ b/RCNet/RandomValue/UExponentialDistrSettings.cs @@ -5,28 +5,28 @@ namespace RCNet.RandomValue { /// - /// Configuration of the Exponential random distribution (unsigned) + /// Configuration of the Exponential random distribution (unsigned version). /// [Serializable] public class UExponentialDistrSettings : RCNetBaseSettings, IDistrSettings { //Constants /// - /// Name of the associated xsd type + /// The name of the associated xsd type. 
/// public const string XsdTypeName = "UExponentialDistrType"; //Attributes /// - /// Mean + /// The mean. /// public double Mean { get; } //Constructors /// - /// Creates an initialized instance + /// Creates an initialized instance. /// - /// Mean + /// The mean. public UExponentialDistrSettings(double mean) { Mean = mean; @@ -35,9 +35,9 @@ public UExponentialDistrSettings(double mean) } /// - /// Copy constructor + /// The copy constructor. /// - /// Source instance + /// The source instance. public UExponentialDistrSettings(UExponentialDistrSettings source) { Mean = source.Mean; @@ -45,9 +45,9 @@ public UExponentialDistrSettings(UExponentialDistrSettings source) } /// - /// Creates an instance and initializes it from given xml element. + /// Creates an initialized instance. /// - /// Xml element containing the initialization settings. + /// A xml element containing the configuration data. public UExponentialDistrSettings(XElement elem) { //Validation diff --git a/RCNet/RandomValue/UGaussianDistrSettings.cs b/RCNet/RandomValue/UGaussianDistrSettings.cs index 32f8c7c..47c6d3a 100644 --- a/RCNet/RandomValue/UGaussianDistrSettings.cs +++ b/RCNet/RandomValue/UGaussianDistrSettings.cs @@ -5,43 +5,43 @@ namespace RCNet.RandomValue { /// - /// Configuration of the Gaussian random distribution (unsigned) + /// Configuration of the Gaussian random distribution (unsigned version). /// [Serializable] public class UGaussianDistrSettings : RCNetBaseSettings, IDistrSettings { //Constants /// - /// Name of the associated xsd type + /// The name of the associated xsd type. /// public const string XsdTypeName = "UGaussianDistrType"; //Default values /// - /// Default value of Mean + /// The default value of mean. /// public const double DefaultMeanValue = 0.5d; /// - /// Default value of StdDev + /// The default value of standard deviation. /// public const double DefaultStdDevValue = 1d; //Attributes /// - /// Mean + /// The mean. 
/// public double Mean { get; } /// - /// Standard deviation + /// The standard deviation. /// public double StdDev { get; } //Constructors /// - /// Creates an initialized instance + /// Creates an initialized instance. /// - /// Mean - /// Standard deviation + /// The mean. + /// The standard deviation. public UGaussianDistrSettings(double mean = DefaultMeanValue, double stdDev = DefaultStdDevValue) { Mean = mean; @@ -51,9 +51,9 @@ public UGaussianDistrSettings(double mean = DefaultMeanValue, double stdDev = De } /// - /// Copy constructor + /// The copy constructor. /// - /// Source instance + /// The source instance. public UGaussianDistrSettings(UGaussianDistrSettings source) { Mean = source.Mean; @@ -62,9 +62,9 @@ public UGaussianDistrSettings(UGaussianDistrSettings source) } /// - /// Creates an instance and initializes it from given xml element. + /// Creates an initialized instance. /// - /// Xml element containing the initialization settings. + /// A xml element containing the configuration data. public UGaussianDistrSettings(XElement elem) { //Validation diff --git a/RCNet/RandomValue/URandomValueSettings.cs b/RCNet/RandomValue/URandomValueSettings.cs index d1403c6..713d935 100644 --- a/RCNet/RandomValue/URandomValueSettings.cs +++ b/RCNet/RandomValue/URandomValueSettings.cs @@ -6,14 +6,14 @@ namespace RCNet.RandomValue { /// - /// Configuration of the unsigned random value + /// Configuration of the unsigned random value. /// [Serializable] public class URandomValueSettings : RCNetBaseSettings { //Constants /// - /// Name of the associated xsd type + /// The name of the associated xsd type. /// public const string XsdTypeName = "URandomValueType"; @@ -25,27 +25,27 @@ public class URandomValueSettings : RCNetBaseSettings //Attribute properties /// - /// Min random value + /// The min value. /// public double Min { get; } /// - /// Max random value + /// The max value. 
/// public double Max { get; } /// - /// Distribution parameters + /// The configuration of the distribution. /// public IDistrSettings DistrCfg { get; } //Constructors /// - /// Creates an initialized instance + /// Creates an initialized instance. /// - /// Min random value - /// Max random value - /// Specific parameters of the distribution + /// The min value (inclusive). + /// The max value (exclusive). + /// The configuration of the distribution. public URandomValueSettings(double min, double max, IDistrSettings distrCfg = null @@ -63,9 +63,9 @@ public URandomValueSettings(double min, } /// - /// Copy constructor + /// The copy constructor. /// - /// Source instance + /// The source instance. public URandomValueSettings(URandomValueSettings source) { Min = source.Min; @@ -75,9 +75,9 @@ public URandomValueSettings(URandomValueSettings source) } /// - /// Creates an instance and initializes it from given xml element. + /// Creates an initialized instance. /// - /// Xml data containing RandomValueSettings settings. + /// A xml element containing the configuration data. public URandomValueSettings(XElement elem) { //Validation @@ -92,7 +92,7 @@ public URandomValueSettings(XElement elem) } else { - DistrCfg = RandomCommon.CreateUDistrSettings(distrParamsElem); + DistrCfg = RandomCommon.LoadUDistrCfg(distrParamsElem); } Check(); return; @@ -100,7 +100,7 @@ public URandomValueSettings(XElement elem) //Properties /// - /// Checks the defaults + /// Checks the defaults. /// public bool IsDefaultDistrType { get { return DistrType == RandomCommon.DistributionType.Uniform; } } @@ -113,10 +113,17 @@ public URandomValueSettings(XElement elem) //Methods //Static methods /// - /// If exists descendant element within the root element then function creates instance of the RandomValueSettings using - /// descendant's xml settings. If not, function creates an instance of the URandomValueSettings using specified default parameters. 
+ /// Loads or creates the configuration of the unsigned random value. /// - public static URandomValueSettings LoadOrDefault(XElement rootElem, string descendant, double defaultMin, double defaultMax) + /// + /// Checks whether exists the specified descendant element under the root element and if so, loads the configuration. + /// If the specified descendant element does not exist, creates the configuration according to specified parameters. + /// + /// The root xml element. + /// The name of descendant element containing the configuration data. + /// The min value. + /// The max value. + public static URandomValueSettings LoadOrCreate(XElement rootElem, string descendant, double min, double max) { XElement descendantElement = rootElem.Elements(descendant).FirstOrDefault(); if (descendantElement != null) @@ -125,27 +132,40 @@ public static URandomValueSettings LoadOrDefault(XElement rootElem, string desce } else { - return new URandomValueSettings(defaultMin, defaultMax); + return new URandomValueSettings(min, max); } } /// - /// If exists descendant element within the root element then function creates instance of the URandomValueSettings using - /// descendant's xml settings. If not, function creates an instance of the URandomValueSettings using specified default parameters. + /// Loads or creates the configuration of the unsigned random value. /// - public static URandomValueSettings LoadOrDefault(XElement rootElem, string descendant, double defaultConst) + /// + /// Checks whether exists the specified descendant element under the root element and if so, loads the configuration. + /// If the specified descendant element does not exist, creates the configuration according to specified parameters. + /// + /// The root xml element. + /// The name of descendant element containing the configuration data. + /// The constant value (the same min and max values). 
+ public static URandomValueSettings LoadOrCreate(XElement rootElem, string descendant, double constValue) { - return LoadOrDefault(rootElem, descendant, defaultConst, defaultConst); + return LoadOrCreate(rootElem, descendant, constValue, constValue); } /// - /// If source is not null then function creates it's clone. If not, function creates instance of the URandomValueSettings using specified default parameters. + /// Clones the existing configuration or creates the new configuration of the unsigned random value. /// - public static URandomValueSettings CloneOrDefault(URandomValueSettings source, double defaultMin, double defaultMax) + /// + /// Checks whether the specified source configuration instance is not null and if so, creates its clone. + /// If the source configuration instance is null, creates the configuration according to specified parameters. + /// + /// The source configuration instance. + /// The min value. + /// The max value. + public static URandomValueSettings CloneOrCreate(URandomValueSettings source, double min, double max) { if (source == null) { - return new URandomValueSettings(defaultMin, defaultMax); + return new URandomValueSettings(min, max); } else { @@ -154,11 +174,17 @@ public static URandomValueSettings CloneOrDefault(URandomValueSettings source, d } /// - /// If source is not null then function creates it's clone. If not, function creates instance of the URandomValueSettings using specified default parameters. + /// Clones the existing configuration or creates the new configuration of the unsigned random value. /// - public static URandomValueSettings CloneOrDefault(URandomValueSettings source, double defaultConst) + /// + /// Checks whether the specified source configuration instance is not null and if so, creates its clone. + /// If the source configuration instance is null, creates the configuration according to specified parameters. + /// + /// The source configuration instance. 
+ /// The constant value (the same min and max values). + public static URandomValueSettings CloneOrCreate(URandomValueSettings source, double constValue) { - return CloneOrDefault(source, defaultConst, defaultConst); + return CloneOrCreate(source, constValue, constValue); } //Methods diff --git a/RCNet/RandomValue/UniformDistrSettings.cs b/RCNet/RandomValue/UniformDistrSettings.cs index 197fd8b..7a535c1 100644 --- a/RCNet/RandomValue/UniformDistrSettings.cs +++ b/RCNet/RandomValue/UniformDistrSettings.cs @@ -4,20 +4,20 @@ namespace RCNet.RandomValue { /// - /// Configuration of the Uniform random distribution + /// Configuration of the Uniform random distribution. /// [Serializable] public class UniformDistrSettings : RCNetBaseSettings, IDistrSettings { //Constants /// - /// Name of the associated xsd type + /// The name of the associated xsd type. /// public const string XsdTypeName = "UniformDistrType"; //Constructors /// - /// Creates an initialized instance + /// Creates an initialized instance. /// public UniformDistrSettings() { @@ -26,18 +26,18 @@ public UniformDistrSettings() } /// - /// Copy constructor + /// The copy constructor. /// - /// Source instance + /// The source instance. public UniformDistrSettings(UniformDistrSettings source) { return; } /// - /// Creates an instance and initializes it from given xml element. + /// Creates an initialized instance. /// - /// Xml element containing the initialization settings. + /// A xml element containing the configuration data. public UniformDistrSettings(XElement elem) { //Validation diff --git a/RCNet/XmlTools/DocValidator.cs b/RCNet/XmlTools/DocValidator.cs index d236ffa..944d0b9 100644 --- a/RCNet/XmlTools/DocValidator.cs +++ b/RCNet/XmlTools/DocValidator.cs @@ -6,7 +6,7 @@ namespace RCNet.XmlTools { /// - /// The class provides helper xml document loading/validation functionalities + /// Implements the xml document loader and validator. 
/// public class DocValidator { @@ -16,7 +16,7 @@ public class DocValidator //Constructor /// - /// Instantiates a XmlValidator + /// Creates an uninitialized instance. /// public DocValidator() { @@ -26,9 +26,9 @@ public DocValidator() //Methods /// - /// Adds given xml schema into the schema set. + /// Adds the specified xml schema into the schema set. /// - /// Xml schema to be added + /// The xml schema to be added. public void AddSchema(XmlSchema xmlSchema) { //Add the schema into the schema set @@ -37,9 +37,9 @@ public void AddSchema(XmlSchema xmlSchema) } /// - /// Loads xml schema from a given stream and adds it into the schema set. + /// Loads the xml schema from a stream and adds it into the schema set. /// - /// A stream from which to load the xml schema + /// The stream to load from. public void AddSchema(Stream schemaStream) { //Load the schema @@ -50,10 +50,12 @@ public void AddSchema(Stream schemaStream) } /// - /// Creates a new XDocument and loads its content from a given file. - /// Xml document is validated against the stored SchemaSet + /// Loads the xml document from file. /// - /// File containing the xml content + /// + /// The xml document is validated against the internal SchemaSet. + /// + /// The name of the xml file. public XDocument LoadXDocFromFile(string filename) { var binDir = System.IO.Path.GetDirectoryName(System.Reflection.Assembly.GetExecutingAssembly().Location); @@ -64,10 +66,12 @@ public XDocument LoadXDocFromFile(string filename) /// - /// Creates a new XDocument and loads its content from a given string. - /// Xml document is validated against the stored SchemaSet + /// Loads the xml document from string. /// - /// A string containing the xml content + /// + /// The xml document is validated against the internal SchemaSet. + /// + /// The xml content. 
public XDocument LoadXDocFromString(string xmlContent) { @@ -77,7 +81,7 @@ public XDocument LoadXDocFromString(string xmlContent) } /// - /// Callback function called during validations. + /// The callback function called during the xml validation. /// private void XmlValidationCallback(object sender, ValidationEventArgs args) { diff --git a/Readme.md b/Readme.md index 5e90d87..c44f846 100644 --- a/Readme.md +++ b/Readme.md @@ -66,127 +66,128 @@ Input data is standardly located in the "Data" sub-folder relative to the locati ### Code metrics |Maintenance index|Cyclomatic complexity|Depth of inheritance|Code lines|Executable code lines| |--|--|--|--|--| -|82 (green)|7010|3|50199|11177| +|82 (green)|7379|3|53090|11767| Follows list of components in logical order from basic to composite and complex. ### Math |Component|Description| |--|--| -|[BasicStat](./RCNet/MathTools/BasicStat.cs)|Provides basic statistics of given data (averages, sum of squares, standard deviation, etc.)| -|[WeightedAvg](./RCNet/MathTools/WeightedAvg.cs)|Computes weighted average of given value/weight data pairs| -|[MovingDataWindow](./RCNet/MathTools/MovingDataWindow.cs)|Implements moving data window providing additional functions such as statistics, weighted average, etc.| -|[ODENumSolver](./RCNet/MathTools/Differential/ODENumSolver.cs)|Implements ordinary differential equations (ODE) numerical solver supporting Euler and RK4 methods| -|[Vector](./RCNet/MathTools/VectorMath/Vector.cs)|Implements vector of double values supporting basic mathematical operations| -|[Matrix](./RCNet/MathTools/MatrixMath/Matrix.cs)|Implements matrix of double values supporting basic mathematical operations. 
Contains buit-in Power Iteration method for the largest eigen value quick estimation| -|[EVD](./RCNet/MathTools/MatrixMath/EVD.cs)|Full eigen values and vectors decomposition of a squared matrix| -|[SVD](./RCNet/MathTools/MatrixMath/SVD.cs)|Singular values decomposition of a matrix| -|[QRD](./RCNet/MathTools/MatrixMath/QRD.cs)|QR decomposition of a matrix| -|[LUD](./RCNet/MathTools/MatrixMath/LUD.cs)|LU decomposition of a squared matrix| -|[ParamSeeker](./RCNet/MathTools/PS/ParamSeeker.cs)|Implements an error driven iterative search for the best value of a given parameter| -|[HurstExpEstim](./RCNet/MathTools/Hurst/HurstExpEstim.cs)|Implements Rescalled range and Hurst exponent estimator. It can be used to evaluate level of data randomness| -|["RandomValue"](https://github.com/okozelsk/NET/tree/master/RCNet/RandomValue)|Supports Uniform, Gaussian, Exponential and Gamma distributions. Here is [extension code](./RCNet/Extensions/RandomExtensions.cs)| +|[BasicStat](./RCNet/MathTools/BasicStat.cs)|Implements the basic statistics of sample data.| +|[WeightedAvg](./RCNet/MathTools/WeightedAvg.cs)|Implements the weighted average.| +|[MovingDataWindow](./RCNet/MathTools/MovingDataWindow.cs)|Implements the moving data window providing additional functions such as statistics, weighted average, etc.| +|[ODENumSolver](./RCNet/MathTools/Differential/ODENumSolver.cs)|Implements a simple numerical solver of the Ordinary Differential Equation(s).| +|[Vector](./RCNet/MathTools/VectorMath/Vector.cs)|Implements the vector.| +|[Matrix](./RCNet/MathTools/MatrixMath/Matrix.cs)|Implements the real matrix. 
It does not support the sparse matrix format.| +|[EVD](./RCNet/MathTools/MatrixMath/EVD.cs)|Implements the Eigenvalue decomposition of a square matrix.| +|[SVD](./RCNet/MathTools/MatrixMath/SVD.cs)|Implements the Singular Value decomposition of a matrix.| +|[QRD](./RCNet/MathTools/MatrixMath/QRD.cs)|Implements the QR decomposition of a matrix.| +|[LUD](./RCNet/MathTools/MatrixMath/LUD.cs)|Implements the LU (Lowed-Upper) decomposition of a square matrix.| +|[ParamValFinder](./RCNet/MathTools/ParamValFinder.cs)|Implements a simple iterative error-driven search for the parameter's optimal value.| +|[HurstExpEstim](./RCNet/MathTools/Hurst/HurstExpEstim.cs)|Implements the Hurst Exponent estimator using the rescaled range analysis.| +|["RandomValue"](https://github.com/okozelsk/NET/tree/master/RCNet/RandomValue)|Implements the random value. Supports Uniform, Gaussian, Exponential and Gamma distributions. Here is an [extension code](./RCNet/Extensions/RandomExtensions.cs)| |[Others](https://github.com/okozelsk/NET/tree/master/RCNet/MathTools)|Set of small additional helper components like PhysUnit, Interval, Bitwise, Combinatorics, Discrete,...| ### XML handling |Component|Description| |--|--| -|[DocValidator](./RCNet/XmlTools/DocValidator.cs)|Helper class for xml document loading and validation| +|[DocValidator](./RCNet/XmlTools/DocValidator.cs)|Implements the xml document loader and validator.| ### Data generators |Component|Description| |--|--| -|[PulseGenerator](./RCNet/Neural/Data/Generators/PulseGenerator.cs)|Generates constant pulses having specified average period. 
Pulse leaks follow specified random distribution or the constant.| -|[MackeyGlassGenerator](./RCNet/Neural/Data/Generators/MackeyGlassGenerator.cs)|Generates Mackey-Glass chaotic signal| -|[RandomGenerator](./RCNet/Neural/Data/Generators/RandomGenerator.cs)|Generates random signal following specified distribution| -|[SinusoidalGenerator](./RCNet/Neural/Data/Generators/SinusoidalGenerator.cs)|Generates sinusoidal signal| +|[PulseGenerator](./RCNet/Neural/Data/Generators/PulseGenerator.cs)|Implements the constant pulse generator.| +|[MackeyGlassGenerator](./RCNet/Neural/Data/Generators/MackeyGlassGenerator.cs)|Implements the Mackey-Glass generator.| +|[RandomGenerator](./RCNet/Neural/Data/Generators/RandomGenerator.cs)|Implements the random signal generator.| +|[SinusoidalGenerator](./RCNet/Neural/Data/Generators/SinusoidalGenerator.cs)|Implements the sinusoidal signal generator.| ### Data Filtering |Component|Description| |--|--| -|[BinFeatureFilter](./RCNet/Neural/Data/Filter/BinFeatureFilter.cs)|Binary (0/1) feature filter| -|[EnumFeatureFilter](./RCNet/Neural/Data/Filter/EnumFeatureFIlter.cs)|Enumeration (1..N) feature filter| -|[RealFeatureFilter](./RCNet/Neural/Data/Filter/RealFeatureFilter.cs)|Real number feature filter supporting standardization and range reserve for handling of unseen data in the future| +|[BinFeatureFilter](./RCNet/Neural/Data/Filter/BinFeatureFilter.cs)|Implements the binary feature filter.| +|[RealFeatureFilter](./RCNet/Neural/Data/Filter/RealFeatureFilter.cs)|Implements the real number feature filter.| ### Chainable Input Data Transformations |Component|Description| |--|--| -|[CDivTransformer](./RCNet/Neural/Data/Transformers/CDivTransformer.cs)|Provides "constant divided by an input field value" transformation| -|[DiffTransformer](./RCNet/Neural/Data/Transformers/DiffTransformer.cs)|Transforms input field value as a difference between current value and a past value| 
-|[DivTransformer](./RCNet/Neural/Data/Transformers/DivTransformer.cs)|Divides the value of the first input field by the value of the second input field| -|[ExpTransformer](./RCNet/Neural/Data/Transformers/ExpTransformer.cs)|Specified base powered by an input field value| -|[LinearTransformer](./RCNet/Neural/Data/Transformers/LinearTransformer.cs)|Two input fields linear transformation (a*X + b*Y)| -|[LogTransformer](./RCNet/Neural/Data/Transformers/LogTransformer.cs)|Transforms input field value to its logarithm of specified base| -|[MulTransformer](./RCNet/Neural/Data/Transformers/MulTransformer.cs)|Multiplies the value of the first input field by the value of the second input field| -|[MWStatTransformer](./RCNet/Neural/Data/Transformers/MWStatTransformer.cs)|Keeps stat of input field recent values and provides statistical features as a transformed values (Sum, NegSum, PosSum, SumOfSquares, Min, Max, Mid, Span, ArithAvg, MeanSquare, RootMeanSquare, Variance, StdDev, SpanDev)| -|[PowerTransformer](./RCNet/Neural/Data/Transformers/PowerTransformer.cs)|Transforms input field value to value^exponent| -|[YeoJohnsonTransformer](./RCNet/Neural/Data/Transformers/YeoJohnsonTransformer.cs)|Applies Yeo-Johnson transformation to input field value. 
See the [wiki pages](https://en.wikipedia.org/wiki/Power_transform#Yeo%E2%80%93Johnson_transformation).| +|[CDivTransformer](./RCNet/Neural/Data/Transformers/CDivTransformer.cs)|Implements the "constant divided by an input value" transformation.| +|[DiffTransformer](./RCNet/Neural/Data/Transformers/DiffTransformer.cs)|Implements the transformation of the input field value as the difference between the current field value and the past value.| +|[DivTransformer](./RCNet/Neural/Data/Transformers/DivTransformer.cs)|Implements the "two input fields division" transformation.| +|[ExpTransformer](./RCNet/Neural/Data/Transformers/ExpTransformer.cs)|Implements the exponential transformation of the input field ("Base^Input field value").| +|[LinearTransformer](./RCNet/Neural/Data/Transformers/LinearTransformer.cs)|Implements the linear transformation. Uses the values of the two input fields and computes (a*X + b*Y).| +|[LogTransformer](./RCNet/Neural/Data/Transformers/LogTransformer.cs)|Implements transformation of the input field value to its logarithm of the specified base.| +|[MulTransformer](./RCNet/Neural/Data/Transformers/MulTransformer.cs)|Implements the multiplication transformation. It multiplies the value of the first field by the value of the second field.| +|[MWStatTransformer](./RCNet/Neural/Data/Transformers/MWStatTransformer.cs)|Implements the statistical transformation. It keeps statistics of the input field recent values and provides specified statistical figure as the transformed value.| +|[PowerTransformer](./RCNet/Neural/Data/Transformers/PowerTransformer.cs)|Implements the power transformation. It computes value^exponent.| +|[YeoJohnsonTransformer](./RCNet/Neural/Data/Transformers/YeoJohnsonTransformer.cs)|Implements the Yeo-Johnson transformation. 
See the [wiki pages](https://en.wikipedia.org/wiki/Power_transform#Yeo%E2%80%93Johnson_transformation).|

### Analog to spikes data coding
|Component|Description|
|--|--|
-|[A2SCoderSignalStrength](./RCNet/Neural/Data/Coders/AnalogToSpiking/A2SCoderSignalStrength.cs)|Implements signal strength coder meeting two important spike-train conditions together: 1. Frequency - as stronger value as higher spiking frequency. 2. Time to first spike - as stronger value as earlier spike.|
-|[A2SCoderGaussianReceptors](./RCNet/Neural/Data/Coders/AnalogToSpiking/A2SCoderGaussianReceptors.cs)|Implements Gussian Receptive Fields coder.|
-|[A2SCoderUpDirArrows](./RCNet/Neural/Data/Coders/AnalogToSpiking/A2SCoderUpDirArrows.cs)|Implements a signal direction receptor, sensitive to upward direction against a historical value at time T-1..number of receptors.|
-|[A2SCoderDownDirArrows](./RCNet/Neural/Data/Coders/AnalogToSpiking/A2SCoderDownDirArrows.cs)|Implements a signal direction receptor, sensitive to downward direction against a historical value at time T-1..number of receptors.|
+|[A2SCoderSignalStrength](./RCNet/Neural/Data/Coders/AnalogToSpiking/A2SCoderSignalStrength.cs)|Implements the signal strength coder. Uses a novel coding algorithm to have met the two important spike-train conditions together: 1. The frequency - as stronger value as higher spiking frequency. 2. The time to a first spike - as stronger value as earlier spike.|
+|[A2SCoderGaussianReceptors](./RCNet/Neural/Data/Coders/AnalogToSpiking/A2SCoderGaussianReceptors.cs)|Implements the Gaussian Receptive Fields coder.|
+|[A2SCoderUpDirArrows](./RCNet/Neural/Data/Coders/AnalogToSpiking/A2SCoderUpDirArrows.cs)|Implements the upward signal direction receptor. The receptor is sensitive to upward direction against a past value at the time T-1...T-number of receptors.|
+|[A2SCoderDownDirArrows](./RCNet/Neural/Data/Coders/AnalogToSpiking/A2SCoderDownDirArrows.cs)|Implements the downward signal direction receptor. 
The receptor is sensitive to downward direction against a past value at the time T-1...T-number of receptors.| ### Data holding |Component|Description| |--|--| -|[SimpleQueue](./RCNet/Queue/SimpleQueue.cs)|Implements quick and simple FIFO queue (template). Supports access to enqueued elements.| -|[DelimitedStringValues](./RCNet/CsvTools/DelimitedStringValues.cs)|Helper encoder and decoder of data line in csv format| -|[CsvDataHolder](./RCNet/CsvTools/CsvDataHolder.cs)|Provides simple loading and saving of csv data| -|[VectorBundle](./RCNet/Neural/Data/VectorBundle.cs)|Bundle of input data vectors and corresponding desired output vectors (1:1). Supports upload from csv file| -|[InputPattern](./RCNet/Neural/Data/InputPattern.cs)|Input pattern supporting signal detection, unification and resampling features| -|[ResultBundle](./RCNet/Neural/Data/ResultBundle.cs)|Bundle of input, computed and desired output vectors (1:1:1)| +|[SimpleQueue](./RCNet/Queue/SimpleQueue.cs)|Implements a simple FIFO queue template. Supports access to enqueued elements so it can be also used as the moving data window.| +|[DelimitedStringValues](./RCNet/CsvTools/DelimitedStringValues.cs)|Implements the single row of the delimited string values (csv format).| +|[CsvDataHolder](./RCNet/CsvTools/CsvDataHolder.cs)|Implements the simple loading and saving of csv data.| +|[VectorBundle](./RCNet/Neural/Data/VectorBundle.cs)|Implements the bundle of the input and output vector pairs.| +|[InputPattern](./RCNet/Neural/Data/InputPattern.cs)|Implements an input pattern. Pattern can be both univariate or multivariate. 
Supports data resampling (including simple detection of signal begin/end) and amplitude unification.| +|[ResultBundle](./RCNet/Neural/Data/ResultBundle.cs)|Implements the bundle of input, computed and ideal (desired) data vectors.| ### Analog activation functions (stateless) See the [wiki pages.](https://en.wikipedia.org/wiki/Activation_function) |Component|Description| |--|--| -|[AFAnalogBentIdentity](./RCNet/Neural/Activation/AFAnalogBentIdentity.cs)|Bent identity activation function| -|[AFAnalogSQNL](./RCNet/Neural/Activation/AFAnalogSQNL.cs)|Square nonlinearity activation function| -|[AFAnalogElliot](./RCNet/Neural/Activation/AFAnalogElliot.cs)|Elliot activation function (aka Softsign)| -|[AFAnalogGaussian](./RCNet/Neural/Activation/AFAnalogGaussian.cs)|Gaussian activation function| -|[AFAnalogIdentity](./RCNet/Neural/Activation/AFAnalogIdentity.cs)|Identity activation function (aka Linear)| -|[AFAnalogISRU](./RCNet/Neural/Activation/AFAnalogISRU.cs)|ISRU (Inverse Square Root Unit) activation function| -|[AFAnalogLeakyReLU](./RCNet/Neural/Activation/AFAnalogLeakyReLU.cs)|Leaky ReLU (Leaky Rectified Linear Unit) activation function| -|[AFAnalogSigmoid](./RCNet/Neural/Activation/AFAnalogSinusoid.cs)|Sigmoid activation function| -|[AFAnalogSinc](./RCNet/Neural/Activation/AFAnalogSinc.cs)|Sinc activation function| -|[AFAnalogSinusoid](./RCNet/Neural/Activation/AFAnalogSinusoid.cs)|Sinusoid activation function| -|[AFAnalogSoftExponential](./RCNet/Neural/Activation/AFAnalogSoftExponential.cs)|Soft exponential activation function| -|[AFAnalogSoftMax](./RCNet/Neural/Activation/AFAnalogSoftMax.cs)|Soft Max activation function| -|[AFAnalogSoftPlus](./RCNet/Neural/Activation/AFAnalogSoftPlus.cs)|Soft Plus activation function| -|[AFAnalogTanH](./RCNet/Neural/Activation/AFAnalogTanH.cs)|TanH activation function| +|[AFAnalogBentIdentity](./RCNet/Neural/Activation/AFAnalogBentIdentity.cs)|Implements the Bent Identity activation function.| 
+|[AFAnalogElliot](./RCNet/Neural/Activation/AFAnalogElliot.cs)|Implements the Elliot (aka Softsign) activation function.|
+|[AFAnalogGaussian](./RCNet/Neural/Activation/AFAnalogGaussian.cs)|Implements the Gaussian activation function.|
+|[AFAnalogIdentity](./RCNet/Neural/Activation/AFAnalogIdentity.cs)|Implements the Identity activation function (aka Linear).|
+|[AFAnalogISRU](./RCNet/Neural/Activation/AFAnalogISRU.cs)|Implements the ISRU (Inverse Square Root Unit) activation function.|
+|[AFAnalogLeakyReLU](./RCNet/Neural/Activation/AFAnalogLeakyReLU.cs)|Implements the LeakyReLU (Leaky Rectified Linear Unit) activation function.|
+|[AFAnalogSigmoid](./RCNet/Neural/Activation/AFAnalogSigmoid.cs)|Implements the Sigmoid activation function.|
+|[AFAnalogSinc](./RCNet/Neural/Activation/AFAnalogSinc.cs)|Implements the Sinc activation function.|
+|[AFAnalogSinusoid](./RCNet/Neural/Activation/AFAnalogSinusoid.cs)|Implements the Sinusoid activation function.|
+|[AFAnalogSoftExponential](./RCNet/Neural/Activation/AFAnalogSoftExponential.cs)|Implements the Soft Exponential activation function.|
+|[AFAnalogSoftMax](./RCNet/Neural/Activation/AFAnalogSoftMax.cs)|Implements the Soft Max activation function.|
+|[AFAnalogSoftPlus](./RCNet/Neural/Activation/AFAnalogSoftPlus.cs)|Implements the Soft Plus activation function.|
+|[AFAnalogSQNL](./RCNet/Neural/Activation/AFAnalogSQNL.cs)|Implements the Square Nonlinearity activation function.|
+|[AFAnalogTanH](./RCNet/Neural/Activation/AFAnalogTanH.cs)|Implements the Hyperbolic Tangent activation function.|

### Spiking activation functions (stateful)
See the [wiki pages.](https://en.wikipedia.org/wiki/Biological_neuron_model)

|Component|Description|
|--|--|
-|[AFSpikingSimpleIF](./RCNet/Neural/Activation/AFSpikingSimpleIF.cs)|Simple Integrate and Fire activation function|
-|[AFSpikingLeakyIF](./RCNet/Neural/Activation/AFSpikingLeakyIF.cs)|Leaky Integrate and Fire activation function| 
-|[AFSpikingExpIF](./RCNet/Neural/Activation/AFSpikingExpIF.cs)|Exponential Integrate and Fire activation function| -|[AFSpikingAdExpIF](./RCNet/Neural/Activation/AFSpikingAdExpIF.cs)|Adaptive Exponential Integrate and Fire activation function| -|[AFSpikingIzhikevichIF](./RCNet/Neural/Activation/AFSpikingIzhikevichIF.cs)|Izhikevich Integrate and Fire activation function (model "one fits all")| +|[AFSpikingAdExpIF](./RCNet/Neural/Activation/AFSpikingAdExpIF.cs)|Implements the Adaptive Exponential Integrate and Fire neuron model.| +|[AFSpikingExpIF](./RCNet/Neural/Activation/AFSpikingExpIF.cs)|Implements the Exponential Integrate and Fire neuron model.| +|[AFSpikingIzhikevichIF](./RCNet/Neural/Activation/AFSpikingIzhikevichIF.cs)|Implements the Izhikevich Integrate and Fire neuron model.| +|[AFSpikingLeakyIF](./RCNet/Neural/Activation/AFSpikingLeakyIF.cs)|Implements the Leaky Integrate and Fire neuron model.| +|[AFSpikingSimpleIF](./RCNet/Neural/Activation/AFSpikingSimpleIF.cs)|Implements a very simple form of Integrate and Fire neuron model.| ### Non-recurrent networks and trainers |Component|Description| |--|--| -|[FeedForwardNetwork](./RCNet/Neural/Network/NonRecurrent/FF/FeedForwardNetwork.cs)|Implements the feed forward network supporting multiple hidden layers| -|[RPropTrainer](./RCNet/Neural/Network/NonRecurrent/FF/RPropTrainer.cs)|Resilient propagation (iRPROP+) trainer of the feed forward network| -|[QRDRegrTrainer](./RCNet/Neural/Network/NonRecurrent/FF/QRDRegrTrainer.cs)|Implements the linear regression (QR decomposition) trainer of the feed forward network. This is the special case trainer for FF network having no hidden layers and Identity output activation function| -|[RidgeRegrTrainer](./RCNet/Neural/Network/NonRecurrent/FF/RidgeRegrTrainer.cs)|Implements the ridge linear regression trainer of the feed forward network. 
This is the special case trainer for FF network having no hidden layers and Identity output activation function|
-|[ElasticRegrTrainer](./RCNet/Neural/Network/NonRecurrent/FF/ElasticRegrTrainer.cs)|Implements the elastic net trainer of the feed forward network. This is the special case trainer for FF network having no hidden layers and Identity output activation function|
+|[FeedForwardNetwork](./RCNet/Neural/Network/NonRecurrent/FF/FeedForwardNetwork.cs)|Implements the feed forward network supporting multiple hidden layers.|
+|[RPropTrainer](./RCNet/Neural/Network/NonRecurrent/FF/RPropTrainer.cs)|Implements the Resilient Backpropagation iRPROP+ trainer of the feed forward network.|
+|[QRDRegrTrainer](./RCNet/Neural/Network/NonRecurrent/FF/QRDRegrTrainer.cs)|Implements the QRD regression trainer of the feed forward network. The feed forward network to be trained must have no hidden layers and the Identity output activation.|
+|[RidgeRegrTrainer](./RCNet/Neural/Network/NonRecurrent/FF/RidgeRegrTrainer.cs)|Implements the ridge regression trainer of the feed forward network. The feed forward network to be trained must have no hidden layers and the Identity output activation.|
+|[ElasticRegrTrainer](./RCNet/Neural/Network/NonRecurrent/FF/ElasticRegrTrainer.cs)|Implements the elastic linear regression trainer of the feed forward network. 
The feed forward network to be trained must have no hidden layers and the Identity output activation.| ||| -|[ParallelPerceptron](./RCNet/Neural/Network/NonRecurrent/PP/ParallelPerceptron.cs)|Implements the parallel perceptron network| -|[PDeltaRuleTrainer](./RCNet/Neural/Network/NonRecurrent/PP/PDeltaRuleTrainer.cs)|P-Delta rule trainer of the parallel perceptron network| +|[ParallelPerceptron](./RCNet/Neural/Network/NonRecurrent/PP/ParallelPerceptron.cs)|Implements the parallel perceptron network.| +|[PDeltaRuleTrainer](./RCNet/Neural/Network/NonRecurrent/PP/PDeltaRuleTrainer.cs)|Implements the p-delta rule trainer of the parallel perceptron network.| ||| -|[TrainedNetwork](./RCNet/Neural/Network/NonRecurrent/TrainedNetwork.cs)|Encapsulates trained non-recurrent (Feed forward or Parallel perceptron) network and related error statistics.| -|[TrainedNetworkBuilder](./RCNet/Neural/Network/NonRecurrent/TrainedNetworkBuilder.cs)|Builds single trained (Feed forward or Parallel perceptron) network. Performs training epochs and offers control to user to evaluate the network.| -|[TrainedNetworkCluster](./RCNet/Neural/Network/NonRecurrent/TrainedNetworkCluster.cs)|Encapsulates set of trained non-recurrent networks (cluster of TrainedNetwork instances) and related error statistics. Offers sub-predictions of inner member networks, weighted prediction and also prediction of the 2nd level network.| -|[TrainedNetworkClusterBuilder](./RCNet/Neural/Network/NonRecurrent/TrainedNetworkClusterBuilder.cs)|Builds cluster of trained networks based on x-fold cross validation approach. 
Each fold can have associated number of various networks.| +|[TNRNet](./RCNet/Neural/Network/NonRecurrent/TNRNet.cs)|Implements the holder of trained non-recurrent network and its error statistics.| +|[TNRNetBuilder](./RCNet/Neural/Network/NonRecurrent/TNRNetBuilder.cs)|Implements the builder of the trained non-recurrent network.| +|[TNRNetCluster](./RCNet/Neural/Network/NonRecurrent/TNRNetCluster.cs)|Implements the cluster of the trained non-recurrent networks. The cluster is based on the cross-validation approach.| +|[TNRNetClusterBuilder](./RCNet/Neural/Network/NonRecurrent/TNRNetClusterBuilder.cs)|Implements the builder of the cluster of the trained non-recurrent networks.| +|[TNRNetClusterChain](./RCNet/Neural/Network/NonRecurrent/TNRNetClusterChain.cs)|Implements the chain of the cooperating clusters. The chain can contain one or more clusters.| +|[TNRNetClusterChainBuilder](./RCNet/Neural/Network/NonRecurrent/TNRNetClusterChainBuilder.cs)|Implements the builder of the chain of cooperating clusters.| ### State Machine components @@ -194,18 +195,20 @@ See the [wiki pages.](https://en.wikipedia.org/wiki/Biological_neuron_model) |Component|Description| |--|--| -|[InputEncoder](./RCNet/Neural/Network/SM/Preprocessing/Input/InputEncoder.cs)|Processes given natural external input data and provides it's representation on analog and spiking input neurons for the input synapses of the reservoirs. Supports set of various realtime input chainable data transformations and data generators as additional computed input fields. Supports two main input feeding regimes: Continuous (one input is a variables data vector at time T) and Patterned (one input is an InputPattern containing variables data for all timepoints). 
Supports three ways how to encode analog input value as the spikes: Horizontal (fast - simultaneous spiking activity of the large neuronal population), Vertical (slow - spike-train on single input neuron) or Forbidden (fast - spiking represetantion is then forbidden and analog values are directly used instead).| -|[AnalogInputNeuron](./RCNet/Neural/Network/SM/Preprocessing/Neuron/AnalogInputNeuron.cs)|Provides analog signal for input synapses.| -|[SpikingInputNeuron](./RCNet/Neural/Network/SM/Preprocessing/Neuron/SpikingInputNeuron.cs)|Provides spiking signal for input synapses.| -|[HiddenNeuron](./RCNet/Neural/Network/SM/Preprocessing/Neuron/HiddenNeuron.cs)|Supports engagement of both analog and spiking activation functions and provides unified set of predictors.| -|[Synapse](./RCNet/Neural/Network/SM/Preprocessing/Reservoir/Synapse/Synapse.cs)|Computes weighted signal from source to target neuron. It supports signal delaying and short-term plasticity (Constant, Linear and non-Linear Facilitation x Depression dynamics models of the efficacy).| -|[ReservoirInstance](./RCNet/Neural/Network/SM/Preprocessing/Reservoir/ReservoirInstance.cs)|Provides recurrent network of hidden neurons. Supports SpectralRadius for weights of analog neurons, Homogenous excitability of spiking neurons, Multiple 3D pools of neurons, Pool to pool connections. It can be configured as the Echo State Network reservoir, Liquid State Machine reservoir or hybrid reservoir| -|[NeuralPreprocessor](./RCNet/Neural/Network/SM/Preprocessing/NeuralPreprocessor.cs)|Encaptulates InputEncoder and reservoirs. 
Provides encaptulated data preprocessing to predictors for the readout layer| -|[ReadoutUnit](./RCNet/Neural/Network/SM/Readout/ReadoutUnit.cs)|Readout unit does the Forecast or Classification and encapsulates TrainedNetworkCluster.| -|[ReadoutLayer](./RCNet/Neural/Network/SM/Readout/ReadoutLayer.cs)|Implements independent readout layer consisting of trained readout units.| - -### State Machine component -The main serializable [StateMachine](./RCNet/Neural/Network/SM/StateMachine.cs) component encapsulates independent NeuralPreprocessor and ReadoutLayer components into the single component and adds support for routing specific predictors and input fields to the specific readout units. Allows to bypass NeuralPreprocessor and to use input data directly as a predictors for the readout layer. +|[AnalogInputNeuron](./RCNet/Neural/Network/SM/Preprocessing/Neuron/AnalogInputNeuron.cs)|Implements the input analog neuron. The input analog neuron is a special case of the neuron without an activation function. Its purpose is to provide an analog input for the reservoir's synapses.| +|[SpikingInputNeuron](./RCNet/Neural/Network/SM/Preprocessing/Neuron/SpikingInputNeuron.cs)|Implements the input spiking neuron. The input spiking neuron is a special case of the neuron without an activation function. Its purpose is to provide a spiking input for the reservoir's synapses.| +|[InputEncoder](./RCNet/Neural/Network/SM/Preprocessing/Input/InputEncoder.cs)|Implements a mediation layer between the external input data and the internal reservoirs of the neural preprocessor. Processes the external input data in the natural form and provides it's representation on analog and spiking input neurons for the next processing in the reservoirs. Allows to create new computed input fields using chainable transformations of existing external input fields, as well as adding independently generated input fields using various generators. Supports two input feeding modes: Continuous and Patterned. 
The Continuous feeding mode processes an input vector as the variable(s) values at the single time-point T. The Patterned feeding mode processes an input vector as an alone input pattern consisting of a time series of the variable(s) values. Supports three ways how to represent an analog value as the spikes: Horizontal, Vertical or Forbidden. The Horizontal way of coding means a simultaneous activity of the neuronal population where every input field is coded by several spiking input neurons (a horizontal spike-train). It is fast, it leads to a single computation cycle of the reservoirs per the input field value. The Vertical way of coding means that the input field value is coded as a spike-train on a single spiking input neuron. It is slower, it leads to multiple computation cycles of the reservoirs according to the spike-train length. The Forbidden way of coding means there is no coding of an analog value as the spikes. It is fast, it leads to a single computation cycle of the reservoirs per the input field value and it does not utilize any spiking input neuron(s).| +|[Synapse](./RCNet/Neural/Network/SM/Preprocessing/Reservoir/Synapse/Synapse.cs)|Implements the synapse. Supports the signal delaying and the short-term plasticity.| +|[PredictorsProvider](./RCNet/Neural/Network/SM/Preprocessing/Neuron/Predictor/PredictorsProvider.cs)|Implements the provider of the unified set of computed predictors.| +|[HiddenNeuron](./RCNet/Neural/Network/SM/Preprocessing/Neuron/HiddenNeuron.cs)|Implements the hidden neuron. 
Supports engagement of both analog and spiking activation functions and provides unified set of available predictors.| +|[ReservoirInstance](./RCNet/Neural/Network/SM/Preprocessing/Reservoir/ReservoirInstance.cs)|Implements the reservoir.| +|[NeuralPreprocessor](./RCNet/Neural/Network/SM/Preprocessing/NeuralPreprocessor.cs)|Implements the neural preprocessor supporting multiple reservoirs.| +|[ReadoutUnit](./RCNet/Neural/Network/SM/Readout/ReadoutUnit.cs)|Implements the readout unit of the readout layer. It can do the Forecast or Classification.| +|[OneTakesAllGroup](./RCNet/Neural/Network/SM/Readout/OneTakesAllGroup.cs)|Implements the "One Takes All" group of the readout layer. Supports the basic winner decision or advanced decision by the trained cluster chain.| +|[ReadoutLayer](./RCNet/Neural/Network/SM/Readout/ReadoutLayer.cs)|Implements the readout layer consisting of trained readout units (and "One Takes All" groups).| + +### The State Machine component +The main serializable [StateMachine](./RCNet/Neural/Network/SM/StateMachine.cs) component encapsulates the NeuralPreprocessor and ReadoutLayer components into the single component and adds support for routing of specific predictors and input fields to the specific readout units. State Machine also allows to bypass NeuralPreprocessor and to use input data directly as a predictors for the readout layer. #### Setup Each executive component that makes up StateMachine (including StateMachine itself) has its own related settings class providing configuration, which is required by the executive component's constructor.