Title: A New Information Theoretic Relation between Minimum Error Entropy and Maximum Correntropy
Authors: Ahmad Reza Heravi, Ghosheh Abed Hodtani. (Access to full-text not allowed by the authors.)
Abstract
The past decade has seen the rapid development of information theoretic learning (ITL) and its applications in signal processing and machine learning. Specifically, minimum error entropy (MEE) and maximum correntropy criterion (MCC) have been widely studied in the literature. Although MEE and MCC are applied in many branches of knowledge and can outperform statistical criteria (such as mean square error), they have not been compared with each other from a theoretical point of view. In some cases MEE and MCC perform similarly to each other; however, under some conditions (e.g., in non-Gaussian environments) they act differently. This letter derives a new information theoretic relation between MEE and MCC, leading to better understanding of the theoretical differences, and illustrates the findings in a common example.
Keywords
Entropy, Correntropy, Minimum error entropy, Information theoretic learning, Mean square error, Machine learning.

@comment{Scrape artifacts fixed: volume 125 -> 25 (IEEE SPL vol. 1 = 1994, so 2018 = vol. 25,
consistent with no. 7, July 2018, pp. 921-925); month macro used instead of quoted name;
numpages corrected to 5 (921--925 inclusive); "Terms-" prefix removed from keywords.}

@article{paperid:1067295,
  author   = {Heravi, Ahmad Reza and Abed Hodtani, Ghosheh},
  title    = {A New Information Theoretic Relation between Minimum Error Entropy and Maximum Correntropy},
  journal  = {IEEE Signal Processing Letters},
  year     = {2018},
  volume   = {25},
  number   = {7},
  month    = jul,
  issn     = {1070-9908},
  pages    = {921--925},
  numpages = {5},
  keywords = {Entropy; Correntropy; Minimum error entropy; Information theoretic learning; Mean square error; Machine learning},
}
%0 Journal Article
%T A New Information Theoretic Relation between Minimum Error Entropy and Maximum Correntropy
%A Heravi, Ahmad Reza
%A Abed Hodtani, Ghosheh
%J IEEE Signal Processing Letters
%@ 1070-9908
%D 2018