Estimating the Kullback-Leibler divergence from samples is an open problem in statistics and machine learning, and several approximation methods have been proposed. One option is the k-nearest-neighbour estimator implemented in the FNN R package:
https://cran.r-project.org/web/packages/FNN/FNN.pdf
It sometimes fails miserably, but it works in simple cases and with large samples (for samples smaller than about 100 it can behave erratically). As an illustration, consider the divergence between a Student-t distribution with $\nu = 1, 2, 3, 100$ degrees of freedom and a standard normal distribution (R code adapted from this link).
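To make clear what FNN::KL.divergence approximates, here is a minimal hand-rolled sketch of the classical k-nearest-neighbour KL estimator (in the spirit of Pérez-Cruz, 2008). The function name kl_knn and its arguments are my own illustrative choices, not part of FNN:

library(FNN)

# Sketch of a kNN estimate of KL(p || q) from samples X ~ p and Y ~ q.
# Assumes continuous densities and i.i.d. samples; kl_knn is an
# illustrative name, not an FNN function.
kl_knn <- function(X, Y, k = 1) {
  X <- as.matrix(X); Y <- as.matrix(Y)
  n <- nrow(X); m <- nrow(Y); d <- ncol(X)
  # distance from each x_i to its k-th nearest neighbour in X \ {x_i}
  # (column k + 1 skips the zero distance of the point to itself)
  rho <- knnx.dist(X, X, k = k + 1)[, k + 1]
  # distance from each x_i to its k-th nearest neighbour in Y
  nu  <- knnx.dist(Y, X, k = k)[, k]
  d * mean(log(nu / rho)) + log(m / (n - 1))
}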
With $n=10,000$ samples
library(knitr)
library(FNN)

# Normalising constant of the t density
K <- function(d, nu) gamma(0.5*(nu + d)) / (gamma(0.5*nu) * sqrt((pi*nu)^d))

# Kullback-Leibler divergence KL(N(0, I_d) || t_nu), semi-analytic
DKLn <- function(nu){
  val1 <- -0.5*d*log(2*pi) - 0.5*d
  tempf <- Vectorize(function(t) exp(-0.5*t) * t^(0.5*d - 1) * log(1 + t/nu))
  int <- integrate(tempf, 0, Inf, rel.tol = 1e-9)$value
  val2 <- log(K(d, nu)) - 0.5*(nu + d) * (1/(gamma(0.5*d) * 2^(0.5*d))) * int
  return(val1 - val2)
}
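For reference, DKLn evaluates the following expression, obtained by writing $D_{\mathrm{KL}} = \mathbb{E}_N[\log\varphi(X)] - \mathbb{E}_N[\log f_\nu(X)]$ and noting that $\|X\|^2 \sim \chi^2_d$ under the normal:

$$
D_{\mathrm{KL}}\big(\mathcal{N}(0, I_d)\,\big\|\,t_\nu\big)
= -\frac{d}{2}\log(2\pi) - \frac{d}{2}
- \log K(d,\nu)
+ \frac{\nu+d}{2}\,\mathbb{E}\!\left[\log\!\left(1+\frac{\chi^2_d}{\nu}\right)\right],
$$

where $K(d,\nu)$ is the normalising constant above and the expectation is the one-dimensional integral computed inside DKLn.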
# Kullback-Leibler divergence via direct numerical integration (d = 1 only)
DKLn2 <- function(nu){
  tempf <- Vectorize(function(t) dnorm(t) * (dnorm(t, log = TRUE) - dt(t, df = nu, log = TRUE)))
  int <- integrate(tempf, -Inf, Inf, rel.tol = 1e-9)$value
  return(int)
}
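As a quick sanity check (my addition, not in the original answer), the two ways of computing the exact divergence should agree to integration tolerance:

d <- 1
DKLn(3)   # semi-analytic
DKLn2(3)  # direct 1-d integration; should match to ~1e-9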
# Kullback-Leibler divergence in one dimension
d <- 1  # dimension
DKLn(1)                 # exact KL(N || t_1)
X <- rt(10000, df = 1)  # sample from t with nu = 1
Y <- rnorm(10000)       # sample from N(0, 1)
plot(KL.divergence(Y, X, 100))  # kNN estimates for k = 1, ..., 100
DKLn(2)
X <- rt(10000, df = 2)
Y <- rnorm(10000)
plot(KL.divergence(Y, X, 100))
DKLn(3)
X <- rt(10000, df = 3)
Y <- rnorm(10000)
plot(KL.divergence(Y, X, 100))
DKLn(100)
X <- rt(10000, df = 100)
Y <- rnorm(10000)
plot(KL.divergence(Y, X, 100))
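Rather than eyeballing the plots, one can compare a point estimate directly with the exact value. Here I simply read off the k = 100 entry of the returned vector; the choice of k is mine, not something the original code prescribes:

est <- KL.divergence(Y, X, k = 100)  # estimates for k = 1, ..., 100
c(exact = DKLn(100), knn = est[100])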
With $n=250$ samples
(re-using K, DKLn, DKLn2 and d = 1 exactly as defined above; only the sample size changes)
DKLn(1)
X <- rt(250, df = 1)
Y <- rnorm(250)
plot(KL.divergence(Y, X, 100))
DKLn(2)
X <- rt(250, df = 2)
Y <- rnorm(250)
plot(KL.divergence(Y, X, 100))
DKLn(3)
X <- rt(250, df = 3)
Y <- rnorm(250)
plot(KL.divergence(Y, X, 100))
DKLn(100)
X <- rt(250, df = 100)
Y <- rnorm(250)
plot(KL.divergence(Y, X, 100))
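To quantify how erratic the estimator is at this sample size, one can repeat the experiment. This replication loop is my addition; the choice of k = 5 and 50 replications is arbitrary:

set.seed(1)
# Spread of the k = 5 estimate over 50 replications at n = 250, nu = 100
reps <- replicate(50, {
  X <- rt(250, df = 100)
  Y <- rnorm(250)
  KL.divergence(Y, X, k = 5)[5]
})
summary(reps)  # compare the spread with the exact value DKLn(100)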