Initial commit

This commit is contained in:
2026-03-17 21:28:37 +01:00
commit dccb0e3607
28 changed files with 969 additions and 0 deletions

43
.gitignore vendored Normal file
View File

@@ -0,0 +1,43 @@
.gradle
build/
!gradle/wrapper/gradle-wrapper.jar
!**/src/main/**/build/
!**/src/test/**/build/
.kotlin
### IntelliJ IDEA ###
.idea/modules.xml
.idea/jarRepositories.xml
.idea/compiler.xml
.idea/libraries/
*.iws
*.iml
*.ipr
out/
!**/src/main/**/out/
!**/src/test/**/out/
### Eclipse ###
.apt_generated
.classpath
.factorypath
.project
.settings
.springBeans
.sts4-cache
bin/
!**/src/main/**/bin/
!**/src/test/**/bin/
### NetBeans ###
/nbproject/private/
/nbbuild/
/dist/
/nbdist/
/.nb-gradle/
### VS Code ###
.vscode/
### Mac OS ###
.DS_Store

10
.idea/.gitignore generated vendored Normal file
View File

@@ -0,0 +1,10 @@
# Default ignored files
/shelf/
/workspace.xml
# Ignored default folder with query files
/queries/
# Datasource local storage ignored files
/dataSources/
/dataSources.local.xml
# Editor-based HTTP Client requests
/httpRequests/

16
.idea/gradle.xml generated Normal file
View File

@@ -0,0 +1,16 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="GradleSettings">
<option name="linkedExternalProjectsSettings">
<GradleProjectSettings>
<option name="externalProjectPath" value="$PROJECT_DIR$" />
<option name="gradleHome" value="" />
<option name="modules">
<set>
<option value="$PROJECT_DIR$" />
</set>
</option>
</GradleProjectSettings>
</option>
</component>
</project>

10
.idea/misc.xml generated Normal file
View File

@@ -0,0 +1,10 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ExternalStorageConfigurationManager" enabled="true" />
<component name="FrameworkDetectionExcludesConfiguration">
<file type="web" url="file://$PROJECT_DIR$" />
</component>
<component name="ProjectRootManager" version="2" languageLevel="JDK_21" default="true" project-jdk-name="21" project-jdk-type="JavaSDK">
<output url="file://$PROJECT_DIR$/out" />
</component>
</project>

20
build.gradle.kts Normal file
View File

@@ -0,0 +1,20 @@
// Gradle build script (Kotlin DSL) for the ANN project.
plugins {
    id("java")
}

group = "be.naaturel"
version = "1.0-SNAPSHOT"

repositories {
    // All dependencies are resolved from Maven Central.
    mavenCentral()
}

dependencies {
    // JUnit 5 (Jupiter) for tests; the BOM pins consistent artifact versions.
    testImplementation(platform("org.junit:junit-bom:5.10.0"))
    testImplementation("org.junit.jupiter:junit-jupiter")
    testRuntimeOnly("org.junit.platform:junit-platform-launcher")
}

tasks.test {
    useJUnitPlatform()
}

BIN
gradle/wrapper/gradle-wrapper.jar vendored Normal file

Binary file not shown.

View File

@@ -0,0 +1,6 @@
#Tue Mar 17 15:36:58 CET 2026
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-9.0.0-bin.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists

234
gradlew vendored Normal file
View File

@@ -0,0 +1,234 @@
#!/bin/sh
#
# Copyright © 2015-2021 the original authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
##############################################################################
#
# Gradle start up script for POSIX generated by Gradle.
#
# Important for running:
#
# (1) You need a POSIX-compliant shell to run this script. If your /bin/sh is
# noncompliant, but you have some other compliant shell such as ksh or
# bash, then to run this script, type that shell name before the whole
# command line, like:
#
# ksh Gradle
#
# Busybox and similar reduced shells will NOT work, because this script
# requires all of these POSIX shell features:
# * functions;
# * expansions «$var», «${var}», «${var:-default}», «${var+SET}»,
# «${var#prefix}», «${var%suffix}», and «$( cmd )»;
# * compound commands having a testable exit status, especially «case»;
# * various built-in commands including «command», «set», and «ulimit».
#
# Important for patching:
#
# (2) This script targets any POSIX shell, so it avoids extensions provided
# by Bash, Ksh, etc; in particular arrays are avoided.
#
# The "traditional" practice of packing multiple parameters into a
# space-separated string is a well documented source of bugs and security
# problems, so this is (mostly) avoided, by progressively accumulating
# options in "$@", and eventually passing that to Java.
#
# Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS,
# and GRADLE_OPTS) rely on word-splitting, this is performed explicitly;
# see the in-line comments for details.
#
# There are tweaks for specific operating systems such as AIX, CygWin,
# Darwin, MinGW, and NonStop.
#
# (3) This script is generated from the Groovy template
# https://github.com/gradle/gradle/blob/master/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt
# within the Gradle project.
#
# You can find Gradle at https://github.com/gradle/gradle/.
#
##############################################################################
# Attempt to set APP_HOME
# Resolve links: $0 may be a link
app_path=$0
# Need this for daisy-chained symlinks.
while
APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path
[ -h "$app_path" ]
do
ls=$( ls -ld "$app_path" )
link=${ls#*' -> '}
case $link in #(
/*) app_path=$link ;; #(
*) app_path=$APP_HOME$link ;;
esac
done
APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit
APP_NAME="Gradle"
APP_BASE_NAME=${0##*/}
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD=maximum
warn () {
echo "$*"
} >&2
die () {
echo
echo "$*"
echo
exit 1
} >&2
# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
nonstop=false
case "$( uname )" in #(
CYGWIN* ) cygwin=true ;; #(
Darwin* ) darwin=true ;; #(
MSYS* | MINGW* ) msys=true ;; #(
NONSTOP* ) nonstop=true ;;
esac
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
# IBM's JDK on AIX uses strange locations for the executables
JAVACMD=$JAVA_HOME/jre/sh/java
else
JAVACMD=$JAVA_HOME/bin/java
fi
if [ ! -x "$JAVACMD" ] ; then
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
else
JAVACMD=java
which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
# Increase the maximum file descriptors if we can.
if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then
case $MAX_FD in #(
max*)
MAX_FD=$( ulimit -H -n ) ||
warn "Could not query maximum file descriptor limit"
esac
case $MAX_FD in #(
'' | soft) :;; #(
*)
ulimit -n "$MAX_FD" ||
warn "Could not set maximum file descriptor limit to $MAX_FD"
esac
fi
# Collect all arguments for the java command, stacking in reverse order:
# * args from the command line
# * the main class name
# * -classpath
# * -D...appname settings
# * --module-path (only if needed)
# * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables.
# For Cygwin or MSYS, switch paths to Windows format before running java
if "$cygwin" || "$msys" ; then
APP_HOME=$( cygpath --path --mixed "$APP_HOME" )
CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" )
JAVACMD=$( cygpath --unix "$JAVACMD" )
# Now convert the arguments - kludge to limit ourselves to /bin/sh
for arg do
if
case $arg in #(
-*) false ;; # don't mess with options #(
/?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath
[ -e "$t" ] ;; #(
*) false ;;
esac
then
arg=$( cygpath --path --ignore --mixed "$arg" )
fi
# Roll the args list around exactly as many times as the number of
# args, so each arg winds up back in the position where it started, but
# possibly modified.
#
# NB: a `for` loop captures its iteration list before it begins, so
# changing the positional parameters here affects neither the number of
# iterations, nor the values presented in `arg`.
shift # remove old arg
set -- "$@" "$arg" # push replacement arg
done
fi
# Collect all arguments for the java command;
# * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of
# shell script including quotes and variable substitutions, so put them in
# double quotes to make sure that they get re-expanded; and
# * put everything else in single quotes, so that it's not re-expanded.
set -- \
"-Dorg.gradle.appname=$APP_BASE_NAME" \
-classpath "$CLASSPATH" \
org.gradle.wrapper.GradleWrapperMain \
"$@"
# Use "xargs" to parse quoted args.
#
# With -n1 it outputs one arg per line, with the quotes and backslashes removed.
#
# In Bash we could simply go:
#
# readarray ARGS < <( xargs -n1 <<<"$var" ) &&
# set -- "${ARGS[@]}" "$@"
#
# but POSIX shell has neither arrays nor command substitution, so instead we
# post-process each arg (as a line of input to sed) to backslash-escape any
# character that might be a shell metacharacter, then use eval to reverse
# that process (while maintaining the separation between arguments), and wrap
# the whole thing up as a single "set" statement.
#
# This will of course break if any of these variables contains a newline or
# an unmatched quote.
#
eval "set -- $(
printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" |
xargs -n1 |
sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' |
tr '\n' ' '
)" '"$@"'
exec "$JAVACMD" "$@"

89
gradlew.bat vendored Normal file
View File

@@ -0,0 +1,89 @@
@rem
@rem Copyright 2015 the original author or authors.
@rem
@rem Licensed under the Apache License, Version 2.0 (the "License");
@rem you may not use this file except in compliance with the License.
@rem You may obtain a copy of the License at
@rem
@rem https://www.apache.org/licenses/LICENSE-2.0
@rem
@rem Unless required by applicable law or agreed to in writing, software
@rem distributed under the License is distributed on an "AS IS" BASIS,
@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@rem See the License for the specific language governing permissions and
@rem limitations under the License.
@rem
@if "%DEBUG%" == "" @echo off
@rem ##########################################################################
@rem
@rem Gradle startup script for Windows
@rem
@rem ##########################################################################
@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal
set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%
@rem Resolve any "." and ".." in APP_HOME to make it shorter.
for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"
@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome
set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if "%ERRORLEVEL%" == "0" goto execute
echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:findJavaFromJavaHome
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
if exist "%JAVA_EXE%" goto execute
echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:execute
@rem Setup the command line
set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %*
:end
@rem End local scope for the variables with windows NT shell
if "%ERRORLEVEL%"=="0" goto mainEnd
:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
exit /b 1
:mainEnd
if "%OS%"=="Windows_NT" endlocal
:omega

1
settings.gradle.kts Normal file
View File

@@ -0,0 +1 @@
rootProject.name = "ANN"

View File

@@ -0,0 +1,57 @@
package com.naaturel.ANN;
import com.naaturel.ANN.domain.abstraction.Neuron;
import com.naaturel.ANN.domain.model.dataset.DataSet;
import com.naaturel.ANN.domain.model.dataset.DataSetEntry;
import com.naaturel.ANN.domain.model.dataset.Label;
import com.naaturel.ANN.domain.model.neuron.Bias;
import com.naaturel.ANN.domain.model.neuron.Input;
import com.naaturel.ANN.domain.model.neuron.Synapse;
import com.naaturel.ANN.domain.model.neuron.Weight;
import com.naaturel.ANN.implementation.activationFunction.Heaviside;
import com.naaturel.ANN.implementation.activationFunction.Linear;
import com.naaturel.ANN.implementation.neuron.SimplePerceptron;
import com.naaturel.ANN.implementation.training.GradientDescentTraining;
import java.util.*;
public class Main {

    /**
     * Demo entry point: builds a small 2-D toy data set, wires up a single
     * perceptron with two inputs and a bias, and trains it with batch
     * gradient descent.
     */
    public static void main(String[] args) {
        DataSet dataSet = new DataSet(Map.ofEntries(
                labelled(1.0F, 6.0F, 1.0F),
                labelled(7.0F, 9.0F, -1.0F),
                labelled(1.0F, 9.0F, 1.0F),
                labelled(7.0F, 10.0F, -1.0F),
                labelled(2.0F, 5.0F, -1.0F),
                labelled(2.0F, 7.0F, 1.0F),
                labelled(2.0F, 8.0F, 1.0F),
                labelled(6.0F, 8.0F, -1.0F),
                labelled(6.0F, 9.0F, -1.0F),
                labelled(3.0F, 5.0F, -1.0F),
                labelled(3.0F, 6.0F, -1.0F),
                labelled(3.0F, 8.0F, 1.0F),
                labelled(3.0F, 9.0F, 1.0F),
                labelled(5.0F, 7.0F, -1.0F),
                labelled(5.0F, 8.0F, -1.0F),
                labelled(5.0F, 10.0F, 1.0F),
                labelled(5.0F, 11.0F, 1.0F),
                labelled(4.0F, 6.0F, -1.0F),
                labelled(4.0F, 7.0F, -1.0F),
                labelled(4.0F, 9.0F, 1.0F),
                labelled(4.0F, 10.0F, 1.0F)
        ));

        // Two input synapses with randomly initialised weights; the input
        // values (0 here) are overwritten for every entry during training.
        List<Synapse> synapses = new ArrayList<>();
        synapses.add(new Synapse(new Input(0), new Weight()));
        synapses.add(new Synapse(new Input(0), new Weight()));

        Bias bias = new Bias(new Weight());
        Neuron perceptron = new SimplePerceptron(synapses, bias, new Linear());
        new GradientDescentTraining().train(perceptron, 0.0003F, dataSet);
    }

    /** Builds one labelled data set entry from the feature pair (x1, x2). */
    private static Map.Entry<DataSetEntry, Label> labelled(float x1, float x2, float label) {
        return Map.entry(new DataSetEntry(List.of(x1, x2)), new Label(label));
    }
}

View File

@@ -0,0 +1,7 @@
package com.naaturel.ANN.domain.abstraction;
/**
 * Maps a neuron's raw weighted sum to its output value.
 */
public interface ActivationFunction {
    /** Returns the activation for the given neuron's current weighted sum. */
    float accept(Neuron n);
}

View File

@@ -0,0 +1,55 @@
package com.naaturel.ANN.domain.abstraction;
import com.naaturel.ANN.domain.model.neuron.Bias;
import com.naaturel.ANN.domain.model.neuron.Input;
import com.naaturel.ANN.domain.model.neuron.Synapse;
import com.naaturel.ANN.domain.model.neuron.Weight;
import java.util.ArrayList;
import java.util.List;
/**
 * Base class for a single artificial neuron: a list of weighted input
 * synapses plus a bias term, evaluated through an activation function.
 */
public abstract class Neuron {

    protected List<Synapse> synapses;
    protected Bias bias;
    protected ActivationFunction activationFunction;

    public Neuron(List<Synapse> synapses, Bias bias, ActivationFunction func) {
        this.synapses = synapses;
        this.bias = bias;
        this.activationFunction = func;
    }

    /** Applies the activation function to the current weighted sum. */
    public abstract float predict();

    /** Computes the raw weighted sum over the neuron's inputs. */
    public abstract float calculateWeightedSum();

    /** Number of input synapses (the bias is not counted). */
    public int getSynCount() {
        return this.synapses.size();
    }

    /** Copies the given input's value into the synapse at {@code index}. */
    public void setInput(int index, Input input) {
        this.synapses.get(index).setInput(input.getValue());
    }

    public Bias getBias() {
        return this.bias;
    }

    /** Overwrites the bias weight with the given weight's value. */
    public void updateBias(Weight weight) {
        this.bias.setWeight(weight.getValue());
    }

    public Synapse getSynapse(int index) {
        return this.synapses.get(index);
    }

    /** Returns a defensive copy of the synapse list. */
    public List<Synapse> getSynapses() {
        return new ArrayList<>(this.synapses);
    }

    /** Copies the given weight's value into the synapse at {@code index}. */
    public void setWeight(int index, Weight weight) {
        this.synapses.get(index).setWeight(weight.getValue());
    }
}

View File

@@ -0,0 +1,13 @@
package com.naaturel.ANN.domain.abstraction;
/**
 * Base class for training strategies that operate on a fixed trainable unit.
 */
public abstract class NeuronTrainer {

    // Fixed at construction; was non-final and invisible to subclasses,
    // which made the stored reference unusable by any training strategy.
    private final Trainable trainable;

    public NeuronTrainer(Trainable trainable) {
        this.trainable = trainable;
    }

    /** Gives subclasses access to the trainable supplied at construction. */
    protected Trainable getTrainable() {
        return this.trainable;
    }

    /** Runs the training procedure on the configured trainable. */
    public abstract void train();
}

View File

@@ -0,0 +1,7 @@
package com.naaturel.ANN.domain.abstraction;
/**
 * Marker interface for neurons/networks whose weights can be adjusted by a
 * training procedure.
 */
public interface Trainable {
}

View File

@@ -0,0 +1,30 @@
package com.naaturel.ANN.domain.model.dataset;
import java.util.*;
/**
 * A labelled training set mapping each entry (feature vector) to its label.
 * Iteration order over entries is unspecified.
 */
public class DataSet implements Iterable<DataSetEntry> {

    private final Map<DataSetEntry, Label> data;

    /** Creates an empty data set. */
    public DataSet() {
        this(new HashMap<>());
    }

    /**
     * @param data entry-to-label mapping; copied defensively so later
     *             mutation of the caller's map cannot corrupt this set
     */
    public DataSet(Map<DataSetEntry, Label> data) {
        this.data = new HashMap<>(data);
    }

    /** Returns a copy of all entries. */
    public List<DataSetEntry> getData() {
        return new ArrayList<>(this.data.keySet());
    }

    /** Returns the label for {@code entry}, or null if the entry is unknown. */
    public Label getLabel(DataSetEntry entry) {
        return this.data.get(entry);
    }

    @Override
    public Iterator<DataSetEntry> iterator() {
        return this.data.keySet().iterator();
    }
}

View File

@@ -0,0 +1,40 @@
package com.naaturel.ANN.domain.model.dataset;
import java.util.*;
public class DataSetEntry implements Iterable<Float> {
private List<Float> data;
public DataSetEntry(List<Float> data){
this.data = data;
}
public List<Float> getData() {
return new ArrayList<>(data);
}
@Override
public int hashCode() {
return Objects.hash(this.data);
}
@Override
public boolean equals(Object obj) {
if (this == obj) return true;
if (!(obj instanceof DataSetEntry dataSetEntry)) return false;
return Objects.equals(this.data, dataSetEntry.data);
}
@Override
public Iterator<Float> iterator() {
return this.data.iterator();
}
@Override
public String toString() {
return Arrays.toString(this.data.toArray());
}
}

View File

@@ -0,0 +1,15 @@
package com.naaturel.ANN.domain.model.dataset;
/**
 * A training label: the expected output for one data set entry.
 * Immutable value type (field was previously mutable for no reason).
 */
public class Label {

    private final float value;

    public Label(float value) {
        this.value = value;
    }

    public float getValue() {
        return value;
    }
}

View File

@@ -0,0 +1,8 @@
package com.naaturel.ANN.domain.model.neuron;
/**
 * The bias term of a neuron, modelled as a synapse whose input is fixed to
 * 1 so that only its weight varies during training.
 */
public class Bias extends Synapse {
    public Bias(Weight weight) {
        // Constant input of 1: the effective bias contribution is the weight.
        super(new Input(1), weight);
    }
}

View File

@@ -0,0 +1,20 @@
package com.naaturel.ANN.domain.model.neuron;
/**
 * A mutable scalar value fed into a synapse.
 */
public class Input {

    private float value;

    public Input(float value) {
        this.value = value;
    }

    /** Current input value. */
    public float getValue() {
        return this.value;
    }

    /** Replaces the current input value. */
    public void setValue(float value) {
        this.value = value;
    }
}

View File

@@ -0,0 +1,30 @@
package com.naaturel.ANN.domain.model.neuron;
/**
 * A weighted connection into a neuron: pairs one input with one weight.
 * The references are fixed at construction (now final); the values the
 * referenced objects hold remain mutable via the setters.
 */
public class Synapse {

    private final Input input;
    private final Weight weight;

    public Synapse(Input input, Weight weight) {
        this.input = input;
        this.weight = weight;
    }

    /** Current input value on this synapse. */
    public float getInput() {
        return this.input.getValue();
    }

    /** Replaces the input value (the Input object itself is retained). */
    public void setInput(float value) {
        this.input.setValue(value);
    }

    /** Current weight value on this synapse. */
    public float getWeight() {
        return this.weight.getValue();
    }

    /** Replaces the weight value (the Weight object itself is retained). */
    public void setWeight(float value) {
        this.weight.setValue(value);
    }
}

View File

@@ -0,0 +1,25 @@
package com.naaturel.ANN.domain.model.neuron;
import java.util.Random;
/**
 * A mutable synaptic weight.
 */
public class Weight {

    // Shared RNG: the original allocated a new Random per no-arg
    // construction, which is wasteful and can yield poorly mixed values
    // when many weights are created in a tight loop.
    private static final Random RANDOM = new Random();

    private float value;

    /** Initialises the weight uniformly at random in [-1, 1). */
    public Weight() {
        this(RANDOM.nextFloat() * 2 - 1);
    }

    public Weight(float value) {
        this.value = value;
    }

    public void setValue(float value) {
        this.value = value;
    }

    public float getValue() {
        return this.value;
    }
}

View File

@@ -0,0 +1,17 @@
package com.naaturel.ANN.implementation.activationFunction;
import com.naaturel.ANN.domain.abstraction.ActivationFunction;
import com.naaturel.ANN.domain.abstraction.Neuron;
/**
 * Heaviside step activation: 0 for a weighted sum {@code <= 0}, otherwise 1.
 */
public class Heaviside implements ActivationFunction {

    public Heaviside() {
    }

    @Override
    public float accept(Neuron n) {
        // Only strictly positive sums fire.
        return n.calculateWeightedSum() > 0 ? 1 : 0;
    }
}

View File

@@ -0,0 +1,13 @@
package com.naaturel.ANN.implementation.activationFunction;
import com.naaturel.ANN.domain.abstraction.ActivationFunction;
import com.naaturel.ANN.domain.abstraction.Neuron;
/**
 * Identity activation: passes the weighted sum through unchanged, which is
 * the form gradient descent on the raw output expects.
 */
public class Linear implements ActivationFunction {
    @Override
    public float accept(Neuron n) {
        return n.calculateWeightedSum();
    }
}

View File

@@ -0,0 +1,32 @@
package com.naaturel.ANN.implementation.neuron;
import com.naaturel.ANN.domain.abstraction.ActivationFunction;
import com.naaturel.ANN.domain.abstraction.Neuron;
import com.naaturel.ANN.domain.abstraction.Trainable;
import com.naaturel.ANN.domain.model.neuron.Bias;
import com.naaturel.ANN.domain.model.neuron.Synapse;
import java.util.List;
public class SimplePerceptron extends Neuron implements Trainable {
public SimplePerceptron(List<Synapse> synapses, Bias b, ActivationFunction func) {
super(synapses, b, func);
}
@Override
public float predict() {
return activationFunction.accept(this);
}
@Override
public float calculateWeightedSum() {
float res = 0;
for(Synapse syn : super.synapses){
res += syn.getWeight() * syn.getInput();
}
return res;
}
}

View File

@@ -0,0 +1,4 @@
package com.naaturel.ANN.implementation.training;
/**
 * Placeholder for an ADALINE (adaptive linear neuron) training procedure.
 * Not implemented yet.
 */
public class AdalineTraining {
}

View File

@@ -0,0 +1,99 @@
package com.naaturel.ANN.implementation.training;
import com.naaturel.ANN.domain.abstraction.Neuron;
import com.naaturel.ANN.domain.model.dataset.DataSet;
import com.naaturel.ANN.domain.model.dataset.DataSetEntry;
import com.naaturel.ANN.domain.model.neuron.Bias;
import com.naaturel.ANN.domain.model.neuron.Input;
import com.naaturel.ANN.domain.model.neuron.Synapse;
import com.naaturel.ANN.domain.model.neuron.Weight;
import java.util.ArrayList;
import java.util.List;
/**
 * Batch gradient descent on the squared-error loss: weight corrections are
 * accumulated over the whole data set and applied once per epoch.
 */
public class GradientDescentTraining {

    public GradientDescentTraining() {
    }

    /**
     * Trains the neuron until the summed loss over the data set drops below
     * a fixed threshold, or a maximum number of epochs is reached.
     *
     * @param n            neuron to train (its weights and bias are mutated)
     * @param learningRate step size for each weight correction
     * @param dataSet      labelled training examples
     */
    public void train(Neuron n, float learningRate, DataSet dataSet) {
        int epoch = 1;
        int maxEpoch = 10000;
        float errorThreshold = 0.125F;
        float currentError;
        do {
            if (epoch > maxEpoch) break;
            float biasCorrector = 0;
            currentError = 0;
            List<Float> correctorTerms = this.initCorrectorTerms(n.getSynCount());
            for (DataSetEntry entry : dataSet) {
                this.updateInputs(n, entry);
                float prediction = n.predict();
                float expectation = dataSet.getLabel(entry).getValue();
                float delta = this.calculateDelta(expectation, prediction);
                float loss = this.calculateLoss(delta);
                currentError += loss;
                Bias b = n.getBias();
                biasCorrector += this.calculateWeightCorrection(learningRate, b.getInput(), delta);
                for (int i = 0; i < correctorTerms.size(); i++) {
                    Synapse syn = n.getSynapse(i);
                    float c = correctorTerms.get(i);
                    c += this.calculateWeightCorrection(learningRate, syn.getInput(), delta);
                    correctorTerms.set(i, c);
                }
                System.out.printf("Epoch : %d ", epoch);
                System.out.printf("predicted : %.2f, ", prediction);
                System.out.printf("expected : %.2f, ", expectation);
                System.out.printf("delta : %.2f, ", delta);
                System.out.printf("loss : %.2f\n", loss);
            }
            System.out.printf("[Total error : %.2f]\n", currentError);
            // BUG FIX: the accumulated correction must be ADDED to the current
            // bias weight. Previously the bias weight was overwritten with the
            // correction alone, discarding all earlier bias learning (compare
            // SimpleTraining.updateWeights, which adds).
            n.updateBias(new Weight(n.getBias().getWeight() + biasCorrector));
            for (int i = 0; i < correctorTerms.size(); i++) {
                Synapse syn = n.getSynapse(i);
                syn.setWeight(syn.getWeight() + correctorTerms.get(i));
            }
            epoch++;
        } while (currentError > errorThreshold);
    }

    /** Returns a list of {@code number} zero-valued correction accumulators. */
    private List<Float> initCorrectorTerms(int number) {
        List<Float> res = new ArrayList<>();
        for (int i = 0; i < number; i++) {
            res.add(0F);
        }
        return res;
    }

    /** Copies the entry's feature values into the neuron's input synapses. */
    private void updateInputs(Neuron n, DataSetEntry entry) {
        int index = 0;
        for (float value : entry) {
            n.setInput(index, new Input(value));
            index++;
        }
    }

    /** Prediction error: expected minus predicted. */
    private float calculateDelta(float expected, float predicted) {
        return expected - predicted;
    }

    /** Squared-error loss: delta^2 / 2 (the 1/2 cancels in the derivative). */
    private float calculateLoss(float delta) {
        return ((float) Math.pow(delta, 2)) / 2;
    }

    /** Delta-rule correction: learningRate * input * delta. */
    private float calculateWeightCorrection(float lr, float value, float delta) {
        return lr * value * delta;
    }
}

View File

@@ -0,0 +1,68 @@
package com.naaturel.ANN.implementation.training;
import com.naaturel.ANN.domain.abstraction.Neuron;
import com.naaturel.ANN.domain.model.dataset.DataSet;
import com.naaturel.ANN.domain.model.dataset.DataSetEntry;
import com.naaturel.ANN.domain.model.neuron.Input;
import com.naaturel.ANN.domain.model.neuron.Synapse;
import com.naaturel.ANN.domain.model.neuron.Weight;
public class SimpleTraining {
public SimpleTraining() {
}
public void train(Neuron n, float learningRate, DataSet dataSet) {
int epoch = 1;
int errorCount;
do {
errorCount = 0;
System.out.printf("Epoch : %d\n", epoch);
for(DataSetEntry entry : dataSet) {
this.updateInputs(n, entry);
float prediction = n.predict();
float expectation = dataSet.getLabel(entry).getValue();
float delta = this.calculateDelta(expectation, prediction);
float loss = this.calculateLoss(delta);
if(delta > 1e-6f) {
this.updateWeights(n, learningRate, delta);
errorCount += 1;
}
System.out.printf("predicted : %.2f, ", prediction);
System.out.printf("expected : %.2f, ", expectation);
System.out.printf("delta : %.2f\n", this.calculateDelta(expectation, prediction));
}
System.out.print("====================================\n");
epoch++;
} while (errorCount != 0);
}
private void updateInputs(Neuron n, DataSetEntry entry){
int index = 0;
for(float value : entry){
n.setInput(index, new Input(value));
index++;
}
}
private void updateWeights(Neuron n, float rate, float delta){
Weight biasCorrection = new Weight(n.getBias().getWeight() + (rate * delta * n.getBias().getInput()));
n.updateBias(biasCorrection);
for(Synapse syn : n.getSynapses()){
syn.setWeight(syn.getWeight() + (rate * delta * syn.getInput()));
}
}
private float calculateDelta(float expected, float predicted){
return expected - predicted;
}
private float calculateLoss(float delta){
return Math.abs(delta);
}
}