{
  "dc.contributor.author": [
    "Sbert, Mateu",
    "Chen, Min",
    "Poch Garcia, Jordi",
    "Bardera i Reig, Antoni"
  ],
  "dc.date.accessioned": "2019-01-10T10:41:08Z",
  "dc.date.available": "2019-01-10T10:41:08Z",
  "dc.date.issued": "2018-12-12",
  "dc.identifier.issn": "1099-4300",
  "dc.identifier.uri": "http://hdl.handle.net/10256/16186",
  "dc.description.abstract": "Cross entropy and Kullback–Leibler (K-L) divergence are fundamental quantities of information theory, and they are widely used in many fields. Since cross entropy is the negated logarithm of likelihood, minimizing cross entropy is equivalent to maximizing likelihood, and thus, cross entropy is applied for optimization in machine learning. K-L divergence also stands independently as a commonly used metric for measuring the difference between two distributions. In this paper, we introduce new inequalities regarding cross entropy and K-L divergence by using the fact that cross entropy is the negated logarithm of the weighted geometric mean. We first apply the well-known rearrangement inequality, followed by a recent theorem on weighted Kolmogorov means, and, finally, we introduce a new theorem that directly applies to inequalities between K-L divergences. \nTo illustrate our results, we show numerical examples of distributions",
  "dc.description.sponsorship": "Mateu Sbert acknowledges the funding of National Natural Science Foundation of China under grants No.61471261 and No.61771335, and by grant TIN2016-75866-C3-3-R from Spanish Government, Jordi Poch and Anton Bardera acknowledge the funding of TIN2016-75866-C3-3-R from Spanish Government",
  "dc.format.mimetype": "application/pdf",
  "dc.language.iso": "eng",
  "dc.publisher": "MDPI (Multidisciplinary Digital Publishing Institute)",
  "dc.relation": "MINECO/PE 2016-2019/TIN2016- 75866-C3-3-R",
  "dc.relation.isformatof": "Reproducció digital del document publicat a: https://doi.org/10.3390/e20120959",
  "dc.relation.ispartof": "Entropy, 2018, vol. 20, núm. 12, p. 959",
  "dc.relation.ispartofseries": "Articles publicats (D-B)",
  "dc.rights": "Attribution 4.0 International",
  "dc.rights.uri": "http://creativecommons.org/licenses/by/4.0/",
  "dc.subject": [
    "Entropia (Teoria de la informació)",
    "Entropy (Information theory)",
    "Kullback-Leibler, Divergència de",
    "Kullback–Leibler divergence"
  ],
  "dc.title": "Some Order Preserving Inequalities for Cross Entropy and Kullback–Leibler Divergence",
  "dc.type": "info:eu-repo/semantics/article",
  "dc.rights.accessRights": "info:eu-repo/semantics/openAccess",
  "dc.type.version": "info:eu-repo/semantics/publishedVersion",
  "dc.identifier.doi": "https://doi.org/10.3390/e20120959",
  "dc.identifier.idgrec": "029375",
  "dc.contributor.funder": "Ministerio de Economía y Competitividad (Espanya)",
  "dc.type.peerreviewed": "peer-reviewed"
}