The fast marching method is widely employed in several fields of image processing. Some years ago a multi-stencil version (MSFM) was introduced to improve its accuracy by solving the equation for a set of stencils and choosing the best solution at each considered node. The following work proposes a modified numerical scheme for MSFM to take into account the variation of the local cost, which has proven to be second order. The influence of the stencil set choice on the algorithm outcome with respect to stencil orthogonality and axis swapping is also explored, where stencils are taken from neighborhoods of varying radius. The experimental results show that the proposed schemes improve the accuracy of their original counterparts, and that the use of permutation-invariant stencil sets provides robustness against shifted vector coordinates in the stencil set.

}, keywords = {Approximation algorithms, Differential equations, Eikonal equation, Frequency modulation, MSFM, Mathematical model, Silicon, Three-dimensional displays, Unmanned aerial vehicles, Vectors, axis swapping, difference equations, fast marching methods, finite difference methods, finite differences, image processing, iterative methods, least squares approximations, multi-stencil schemes, multistencil version, nonconstant local cost model, permutation-invariant stencil sets, second order multistencil fast marching method, stencil orthogonality, stencil set}, issn = {1057-7149}, doi = {10.1109/TIP.2018.2880507}, url = {https://ieeexplore.ieee.org/document/8531783/}, author = {Merino-Caviedes, S. and Cordero-Grande, Lucilio and P{\'e}rez, M. T. and Casaseca-de-la-Higuera, Pablo and Mart{\'\i}n-Fern{\'a}ndez, M. and Deriche, R. and Alberola-L{\'o}pez, C.} } @inproceedings{413, title = {Fusing Output Information in Neural Networks: Ensemble Performs Better}, booktitle = {Annual International Conference of the {IEEE} Engineering in Medicine and Biology - Proceedings}, year = {2003}, address = {Cancun}, abstract = {A neural network ensemble is a learning paradigm where a finite number of component neural networks are trained for the same task. Previous research suggests that an ensemble as a whole is often more accurate than any of the single component networks. This paper focuses on the advantages of fusing different nature network architectures, and to determine the appropriate information fusion algorithm in component neural networks by several approaches within hard decision classifiers, when solving a binary pattern recognition problem. We numerically simulated and compared the different fusion approaches in terms of the mean-square error rate in testing data set, over synthetically generated binary Gaussian noisy data, and stated the advantages of fusing the hard outputs of different component networks to make a final hard decision classification. 
The results of the experiments indicate that neural network ensembles can indeed improve the overall accuracy for classification problems; in all fusion architectures tested, the ensemble correct classification rates are better than those achieved by the individual component networks. Finally we are nowadays comparing the above mentioned hard decision classifiers with new soft decision classifier architectures that make use of the additional continuous type intermediate network soft outputs, fulfilling probability fundamental laws (positive, and add to unity), which can be understood as the a posteriori probabilities of a given pattern to belong to a certain class.

}, keywords = {Algorithms, Backpropagation, Classification (of information), Computer simulation, Decision making, Estimation, Gaussian noise (electronic), Information fusions, Mathematical models, Medical imaging, Model selection, Multilayer neural networks, Neural network ensembles, Pattern recognition, Probability, Probability estimation, Problem solving, Regularization, Statistical methods, Statistical pattern recognition, Vectors}, doi = {10.1109/IEMBS.2003.1280254}, url = {http://www.scopus.com/inward/record.url?eid=2-s2.0-1542301061\&partnerID=40\&md5=32dbadb3b6ac3c6ae1ea33d89b52c75f}, author = {Wu, Y. and Arribas, J. I.} }