@article{cang2005,
  title = {Novel probability neural network},
  abstract = {This paper presents a novel probability neural network (PNN) that can classify data with both continuous and categorical input data types. A mixture model of continuous and categorical variables is proposed to construct a probability density function (PDF), which is the key part of the PNN. The proposed PNN has two advantages over conventional algorithms such as the multilayer perceptron (MLP) neural network. One is that the PNN can produce better results than the MLP neural network when the input data set includes both continuous and categorical data types, even when normalised input variables are used; normalised input variables normally give better results than non-normalised input variables for the MLP neural network. The second advantage is that the PNN does not need a cross-validation data set and does not suffer from over-training as the MLP neural network does. These advantages have been demonstrated in our experimental study. The proposed PNN can also be used to perform unsupervised cluster analysis. The superiority of the PNN over the MLP neural network, the Radial Basis Function (RBF) neural network, and the C4.5 and Random Forest decision trees is demonstrated by applying them to two real-life data sets, the Heart Disease and Trauma data sets, which include both continuous and categorical variables.},
  doi = {10.1049/ip-vis:20059021},
  issn = {1350-245X},
  issue = {5},
  journal = {IEE Proceedings - Vision, Image, and Signal Processing},
  pages = {535--544},
  publicationstatus = {Published},
  url = {http://researchrepository.napier.ac.uk/Output/2880458},
  volume = {152},
  year = {2005},
  author = {Cang, S. and Yu, H.}
}