@article{725,
  author   = {Nasim Latifi and Ali Amiri},
  title    = {Partial and Random Updating Weights in Error Back Propagation Algorithm},
  journal  = {Journal of Networking Technology},
  year     = {2012},
  volume   = {3},
  number   = {1},
  url      = {http://www.dline.info/jnt/fulltext/v3n1/4.pdf},
  abstract = {The Multi-Layered Perceptron (MLP) is a useful supervised neural network for data classification, and the Error Back Propagation (EBP) algorithm is the common technique for training it. The standard EBP algorithm faces challenges on large-scale and heterogeneous data, such as high memory demands and slow convergence, and its computational load is high. In this paper, to overcome these drawbacks, a modified version of EBP is proposed. It decreases the time and space complexity, and somewhat increases the convergence speed, of standard EBP by partially and randomly updating some of the weights instead of all of them. Results of experiments on two standard datasets confirm the effectiveness of the proposed algorithm.},
}
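
The abstract describes the core idea at a high level: on each training step, apply the back-propagation update to only a random subset of the weights rather than all of them. Below is a minimal Python/NumPy sketch of that general idea for a one-hidden-layer MLP; it is not the authors' exact method, and all names, the squared-error loss, and the update fraction are illustrative assumptions.

import numpy as np

rng = np.random.default_rng(0)

def sigmoid(x):
    return 1.0 / (1.0 + np.exp(-x))

def train_step(W1, W2, x, y, lr=0.1, update_frac=0.5):
    """One EBP step that updates only a random fraction of the weights.

    W1: (hidden, input) weights; W2: (output, hidden) weights.
    update_frac is an assumed hyperparameter controlling the share of
    weights touched per step (the paper's choice may differ).
    """
    # Forward pass
    h = sigmoid(W1 @ x)          # hidden activations
    o = sigmoid(W2 @ h)          # output activations

    # Backward pass: standard EBP gradients for squared error
    delta_o = (o - y) * o * (1 - o)
    delta_h = (W2.T @ delta_o) * h * (1 - h)
    g2 = np.outer(delta_o, h)
    g1 = np.outer(delta_h, x)

    # Partial/random updating: mask the gradient so only a random
    # subset of weights changes; the rest keep their previous values.
    m1 = rng.random(W1.shape) < update_frac
    m2 = rng.random(W2.shape) < update_frac
    W1 -= lr * g1 * m1
    W2 -= lr * g2 * m2
    return W1, W2

# Illustrative usage on random data (shapes are arbitrary choices):
W1 = 0.1 * rng.standard_normal((4, 3))
W2 = 0.1 * rng.standard_normal((2, 4))
x, y = rng.random(3), np.array([1.0, 0.0])
W1, W2 = train_step(W1, W2, x, y)

Since only a fraction of the gradient entries is computed into the weight matrices on each step, the per-step update cost drops roughly in proportion to update_frac, which is one plausible reading of the time-complexity reduction the abstract claims.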