NAME
AI::jNeural::nets::backprop - Backprop nets via libjneural
SYNOPSIS
use AI::jNeural::nets::backprop;
my $alpha = 0.92;
my ($inputs, $outputs) = (1, 1);
my $hidden = 2;
my $the_net;
my $debug = 0;
my $epsilon = 0.0007;
my $max_epochs = 8000;
# args: learning rate, number of inputs, outputs, and hidden units
$the_net = AI::jNeural::nets::backprop->new($alpha, $inputs, $outputs, $hidden);
# There are other transfer functions as well (BIPOLAR, for one); see
# AI::jNeural::utils::transfer.
$the_net->set_transfer(SIGMOID);
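# For targets in [-1, 1] you could use the bipolar transfer instead
# (a sketch, assuming BIPOLAR is usable just like SIGMOID above):
# $the_net->set_transfer(BIPOLAR);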
# training and test data refs (declared up front so strict is happy)
my ($sin_i1, $sin_o1, $sin_i2, $sin_o2);

# training data
for my $n (0.1, 0.3, 0.5, 0.7, 0.9) {
    push @$sin_i1, [ $n ];
    push @$sin_o1, [ sin($n) ];
}
# test data
for my $n (0.0, 1.12, 0.44, 0.85) {
    push @$sin_i2, [ $n ];
    push @$sin_o2, [ sin($n) ];
}
my ($min_error, $epochs_left) =
    $the_net->train( $sin_i1, $sin_o1, $sin_i2,
                     $sin_o2, $epsilon, $max_epochs, $debug );
# The data layout may not be obvious, so suppose there were 3 inputs
# and 2 outputs; the refs would then look like this:
#
# $in  = [ [1, 2, 3], [4, 5, 6], [7, 8, 9] ];
# $out = [ [1, 2],    [3, 4],    [5, 6]    ];
#
# If you leave the test array refs as undef, only the training data
# will be used.
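# For instance, a training-only run per the note above (a sketch
# reusing the same positional signature):
#
# my ($err, $left) = $the_net->train( $sin_i1, $sin_o1, undef, undef,
#                                     $epsilon, $max_epochs, $debug );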
# The above could also have been written with named arguments:
$the_net->train({
    training_inputs  => $sin_i1,
    testing_inputs   => $sin_i2,
    training_targets => $sin_o1,
    testing_targets  => $sin_o2,
    epsilon          => $epsilon,
    alpha            => $alpha,
    max_epochs       => $max_epochs,
    debug            => 1,
});
# that may be a bit more readable. ;)
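# Since train() returns ($min_error, $epochs_left), you can report how
# the positional call above fared; this uses plain Perl only, no extra
# module API assumed:
printf "best error %g, %d epochs left\n", $min_error, $epochs_left;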
AUTHOR
Jettero Heller <jettero@voltar.org>
Jet's Neural Architecture is a C++ library. <http://www.voltar.org/jneural>
SEE ALSO
perl(1), AI::jNeural(3), AI::jNeural::arch::neuron(3), AI::jNeural::arch(3), AI::jNeural::nets::backprop(3), AI::jNeural::nets(3), AI::jNeural::nets::kohonen(3), AI::jNeural::utils::transfer(3), AI::jNeural::utils(3).