@misc{Huk_Maciej_Backpropagation,
  author        = {Huk, Maciej},
  title         = {Backpropagation Generalized Delta Rule for the Selective Attention {Sigma-if} Artificial Neural Network},
  howpublished  = {online},
  publisher     = {Uniwersytet Zielonogórski},
  address       = {Zielona Góra},
  language      = {eng},
  type          = {artykuł},
  keywords      = {artificial neural networks, selective attention, self consistency, error backpropagation, delta rule},
  abstract      = {In this paper the Sigma-if artificial neural network model is considered, which is a generalization of an MLP network with sigmoidal neurons. It was found to be a potentially universal tool for automatic creation of distributed classification and selective attention systems. To overcome the high nonlinearity of the aggregation function of Sigma-if neurons, the training process of the Sigma-if network combines an error backpropagation algorithm with the self-consistency paradigm widely used in physics. But for the same reason, the classical backpropagation delta rule for the MLP network cannot be used. The general equation for the backpropagation generalized delta rule for the Sigma-if neural network is derived and a selection of experimental results that confirm its usefulness are presented.},
  internal-note = {NOTE(review): year and url are missing -- confirm publication year and source URL with the publisher record},
}