Cleaned bibliography (text outside entries is ignored by BibTeX).
NOTE(review): citation keys "ben hamza_krim_2003" (used twice — duplicate) and
"ben hamza_krim_2001" contained spaces, which classic BibTeX does not accept;
renamed to ben_hamza_krim_2003a / ben_hamza_krim_2003b / ben_hamza_krim_2001.
Update any \cite commands accordingly.
@comment{The editor field of ben_hamza_krim_2003a duplicates the authors and
looks like an export artifact -- verify against the actual DGCI 2003 front matter.}

@article{he_hamza_krim_2003,
  author       = {He, Y. and Hamza, A. B. and Krim, H.},
  title        = {A Generalized Divergence Measure for Robust Image Registration},
  journal      = {IEEE Transactions on Signal Processing},
  volume       = {51},
  number       = {5},
  pages        = {1211--1220},
  year         = {2003},
  doi          = {10.1109/TSP.2003.810305},
  abstractNote = {Entropy-based divergence measures have shown promising results in many areas of engineering and image processing. We define a new generalized divergence measure, namely, the Jensen-R{\'e}nyi (1996, 1976) divergence. Some properties such as convexity and its upper bound are derived. Based on the Jensen-R{\'e}nyi divergence, we propose a new approach to the problem of image registration. Some appealing advantages of registration by Jensen-R{\'e}nyi divergence are illustrated, and its connections to mutual information-based registration techniques are analyzed. As the key focus of this paper, we apply Jensen-R{\'e}nyi divergence for inverse synthetic aperture radar (ISAR) image registration. The goal is to estimate the target motion during the imaging time. Our approach applies Jensen-R{\'e}nyi divergence to measure the statistical dependence between consecutive ISAR image frames, which would be maximal if the images are geometrically aligned. Simulation results demonstrate that the proposed method is efficient and effective.},
}

@inproceedings{ben_hamza_krim_2003a,
  author       = {Ben Hamza, A. and Krim, H.},
  title        = {Geodesic Object Representation and Recognition},
  booktitle    = {Discrete Geometry for Computer Imagery: 11th International Conference, {DGCI} 2003, Naples, Italy, November 19-21, 2003},
  series       = {Lecture Notes in Computer Science},
  volume       = {2886},
  pages        = {378--387},
  editor       = {Ben Hamza, A. and Krim, H.},
  publisher    = {Springer},
  address      = {Berlin; New York},
  year         = {2003},
  isbn         = {3540204997},
  doi          = {10.1007/978-3-540-39966-7_36},
  abstractNote = {This paper describes a shape signature that captures the intrinsic geometric structure of 3D objects. The primary motivation of the proposed approach is to encode a 3D shape into a one-dimensional geodesic distribution function. This compact and computationally simple representation is based on a global geodesic distance defined on the object surface, and takes the form of a kernel density estimate. To gain further insight into the geodesic shape distribution and its practicality in 3D computer imagery, some numerical experiments are provided to demonstrate the potential and the much improved performance of the proposed methodology in 3D object matching. This is carried out using an information-theoretic measure of dissimilarity between probabilistic shape distributions.},
}

@inproceedings{ben_hamza_krim_2003b,
  author       = {Ben Hamza, A. and Krim, H.},
  title        = {Image Registration and Segmentation by Maximizing the {Jensen-R{\'e}nyi} Divergence},
  booktitle    = {Energy Minimization Methods in Computer Vision and Pattern Recognition},
  series       = {Lecture Notes in Computer Science},
  volume       = {2683},
  pages        = {147--163},
  editor       = {Rangarajan, A. and Figueiredo, M. and Zerubia, J.},
  publisher    = {Springer},
  address      = {Berlin; New York},
  year         = {2003},
  isbn         = {3540404988},
  doi          = {10.1007/978-3-540-45063-4_10},
  abstractNote = {Information theoretic measures provide quantitative entropic divergences between two probability distributions or data sets. In this paper, we analyze the theoretical properties of the Jensen-R{\'e}nyi divergence which is defined between any arbitrary number of probability distributions. Using the theory of majorization, we derive its maximum value, and also some performance upper bounds in terms of the Bayes risk and the asymptotic error of the nearest neighbor classifier. To gain further insight into the robustness and the application of the Jensen-R{\'e}nyi divergence measure in imaging, we provide substantial numerical experiments to show the power of this entropic measure in image registration and segmentation.},
}

@article{hamza_krim_unal_2002,
  author       = {Hamza, A. B. and Krim, H. and Unal, G. B.},
  title        = {Unifying Probabilistic and Variational Estimation},
  journal      = {IEEE Signal Processing Magazine},
  volume       = {19},
  number       = {5},
  pages        = {37--47},
  month        = sep,
  year         = {2002},
  issn         = {1053-5888},
  doi          = {10.1109/MSP.2002.1028351},
  abstractNote = {A maximum a posteriori (MAP) estimator using a Markov or a maximum entropy random field model for a prior distribution may be viewed as a minimizer of a variational problem. Using notions from robust statistics, a variational filter referred to as a Huber gradient descent flow is proposed. It is a result of optimizing a Huber functional subject to some noise constraints and takes a hybrid form of a total variation diffusion for large gradient magnitudes and of a linear diffusion for small gradient magnitudes. Using the gained insight, and as a further extension, we propose an information-theoretic gradient descent flow which is a result of minimizing a functional that is a hybrid between a negentropy variational integral and a total variation. Illustrating examples demonstrate a much improved performance of the approach in the presence of Gaussian and heavy tailed noise. In this article, we present a variational approach to MAP estimation with a more qualitative and tutorial emphasis. The key idea behind this approach is to use geometric insight in helping construct regularizing functionals and avoiding a subjective choice of a prior in MAP estimation. Using tools from robust statistics and information theory, we show that we can extend this strategy and develop two gradient descent flows for image denoising with a demonstrated performance.},
}

@inproceedings{ben_hamza_krim_2001,
  author       = {Ben Hamza, A. and Krim, H.},
  title        = {A Variational Approach to Maximum a Posteriori Estimation for Image Denoising},
  booktitle    = {Energy Minimization Methods in Computer Vision and Pattern Recognition: Third International Workshop, {EMMCVPR} 2001, Sophia Antipolis, France, September 3-5, 2001: Proceedings},
  series       = {Lecture Notes in Computer Science},
  volume       = {2134},
  pages        = {19--33},
  editor       = {Figueiredo, M. and Zerubia, J. and Jain, A. K.},
  publisher    = {Springer},
  address      = {Berlin; New York},
  year         = {2001},
  doi          = {10.1007/3-540-44745-8_2},
  abstractNote = {Using first principles, we establish in this paper a connection between the maximum a posteriori (MAP) estimator and the variational formulation of optimizing a given functional subject to some noise constraints. A MAP estimator which uses a Markov or a maximum entropy random field model for a prior distribution can be viewed as a minimizer of a variational problem. Using notions from robust statistics, a variational filter called Huber gradient descent flow is proposed. It yields the solution to a Huber type functional subject to some noise constraints, and the resulting filter behaves like a total variation anisotropic diffusion for large gradient magnitudes and like an isotropic diffusion for small gradient magnitudes. Using some of the gained insight, we are also able to propose an information-theoretic gradient descent flow whose functional turns out to be a compromise between a neg-entropy variational integral and a total variation. Illustrating examples demonstrate a much improved performance of the proposed filters in the presence of Gaussian and heavy tailed noise.},
}