Reimplement Adaline
This commit is contained in:
93
src/test/java/adaline/AdalineTest.java
Normal file
@@ -0,0 +1,93 @@
|
||||
package adaline;
|
||||
|
||||
|
||||
import com.naaturel.ANN.domain.abstraction.Neuron;
|
||||
import com.naaturel.ANN.domain.abstraction.TrainingStep;
|
||||
import com.naaturel.ANN.domain.model.dataset.DataSet;
|
||||
import com.naaturel.ANN.domain.model.dataset.DatasetExtractor;
|
||||
import com.naaturel.ANN.domain.model.neuron.*;
|
||||
import com.naaturel.ANN.domain.model.training.TrainingPipeline;
|
||||
import com.naaturel.ANN.implementation.adaline.AdalineTrainingContext;
|
||||
import com.naaturel.ANN.implementation.gradientDescent.*;
|
||||
import com.naaturel.ANN.implementation.neuron.SimplePerceptron;
|
||||
import com.naaturel.ANN.implementation.simplePerceptron.SimpleCorrectionStrategy;
|
||||
import com.naaturel.ANN.implementation.simplePerceptron.SimpleDeltaStrategy;
|
||||
import com.naaturel.ANN.implementation.simplePerceptron.SimpleErrorRegistrationStrategy;
|
||||
import com.naaturel.ANN.implementation.simplePerceptron.SimplePredictionStrategy;
|
||||
import com.naaturel.ANN.implementation.training.steps.*;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
|
||||
public class AdalineTest {
|
||||
|
||||
private DataSet dataset;
|
||||
private AdalineTrainingContext context;
|
||||
|
||||
private List<Synapse> synapses;
|
||||
private Bias bias;
|
||||
private Network network;
|
||||
|
||||
private TrainingPipeline pipeline;
|
||||
|
||||
@BeforeEach
|
||||
public void init(){
|
||||
dataset = new DatasetExtractor()
|
||||
.extract("C:/Users/Laurent/Desktop/ANN-framework/src/main/resources/assets/and-gradient.csv");
|
||||
|
||||
List<Synapse> syns = new ArrayList<>();
|
||||
syns.add(new Synapse(new Input(0), new Weight(0)));
|
||||
syns.add(new Synapse(new Input(0), new Weight(0)));
|
||||
|
||||
bias = new Bias(new Weight(0));
|
||||
|
||||
Neuron neuron = new SimplePerceptron(syns, bias, new Linear());
|
||||
Layer layer = new Layer(List.of(neuron));
|
||||
network = new Network(List.of(layer));
|
||||
|
||||
context = new AdalineTrainingContext();
|
||||
context.dataset = dataset;
|
||||
context.model = network;
|
||||
|
||||
List<TrainingStep> steps = List.of(
|
||||
new PredictionStep(new SimplePredictionStrategy(context)),
|
||||
new DeltaStep(new SimpleDeltaStrategy(context)),
|
||||
new LossStep(new SquareLossStrategy(context)),
|
||||
new ErrorRegistrationStep(new SimpleErrorRegistrationStrategy(context)),
|
||||
new WeightCorrectionStep(new SimpleCorrectionStrategy(context))
|
||||
);
|
||||
|
||||
pipeline = new TrainingPipeline(steps)
|
||||
.stopCondition(ctx -> ctx.globalLoss <= 0.1329F || ctx.epoch > 10000)
|
||||
.beforeEpoch(ctx -> {
|
||||
ctx.globalLoss = 0.0F;
|
||||
});
|
||||
}
|
||||
|
||||
@Test
|
||||
public void test_the_whole_algorithm(){
|
||||
|
||||
List<Float> expectedGlobalLosses = List.of(
|
||||
0.501522F,
|
||||
0.498601F
|
||||
);
|
||||
|
||||
context.learningRate = 0.03F;
|
||||
pipeline.afterEpoch(ctx -> {
|
||||
ctx.globalLoss /= context.dataset.size();
|
||||
|
||||
int index = ctx.epoch-1;
|
||||
if(index >= expectedGlobalLosses.size()) return;
|
||||
|
||||
//assertEquals(expectedGlobalLosses.get(index), context.globalLoss, 0.00001f);
|
||||
});
|
||||
|
||||
pipeline.run(context);
|
||||
assertEquals(214, context.epoch);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -49,7 +49,7 @@ public class GradientDescentTest {
|
||||
context = new GradientDescentTrainingContext();
|
||||
context.dataset = dataset;
|
||||
context.model = network;
|
||||
context.correctorTerms = new ArrayList<>();
|
||||
context.correctorTerms = new ArrayList<>();
|
||||
|
||||
List<TrainingStep> steps = List.of(
|
||||
new PredictionStep(new SimplePredictionStrategy(context)),
|
||||
@@ -92,7 +92,9 @@ public class GradientDescentTest {
|
||||
assertEquals(expectedGlobalLosses.get(index), context.globalLoss, 0.00001f);
|
||||
});
|
||||
|
||||
pipeline.run(context);
|
||||
pipeline
|
||||
.withVerbose(true)
|
||||
.run(context);
|
||||
assertEquals(67, context.epoch);
|
||||
}
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user