```python
import numpy as np
from scipy.stats import norm

def kl_divergence(p, q):
    # Elementwise p * log(p / q), treating terms where p == 0 as 0
    return np.sum(np.where(p != 0, p * np.log(p / q), 0))

if __name__ == '__main__':
    # Two Gaussian densities with the same scale (2) but shifted means (0 vs. 2)
    x = np.arange(-10, 10, 0.001)
    p = norm.pdf(x, 0, 2)
    q = norm.pdf(x, 2, 2)
    print(kl_divergence(p, q))
```

We can think of the KL divergence as a distance measure (although it isn't symmetric, so it is not a true metric) that quantifies the difference between two probability distributions: for discrete distributions, D_KL(P ∥ Q) = Σ_x P(x) log(P(x) / Q(x)).

Source: "KL Divergence Python Example" (towardsdatascience.com)
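As a sanity check on this code, the KL divergence between two Gaussians with equal scale σ and means μ1, μ2 has the closed form (μ1 − μ2)² / (2σ²), which is 0.5 for the parameters above. Note that the raw sum over grid points only approximates the continuous integral ∫ p(x) log(p(x)/q(x)) dx once each term is weighted by the grid spacing. A minimal sketch of that check, assuming the same NumPy/SciPy setup as the snippet above:

```python
import numpy as np
from scipy.stats import norm

dx = 0.001
x = np.arange(-10, 10, dx)
p = norm.pdf(x, 0, 2)   # density of N(0, 2^2)
q = norm.pdf(x, 2, 2)   # density of N(2, 2^2)

# Riemann-sum approximation of  integral p(x) * log(p(x) / q(x)) dx:
# weight each grid term by the spacing dx so the sum approximates the integral.
kl_approx = np.sum(np.where(p != 0, p * np.log(p / q), 0)) * dx

# Closed form for equal-scale Gaussians: (mu1 - mu2)^2 / (2 * sigma^2)
#   = (0 - 2)^2 / (2 * 2^2) = 0.5
print(kl_approx)  # ~0.5
```

With equal scales the two directions happen to agree for Gaussians; with unequal scales, kl_divergence(p, q) and kl_divergence(q, p) generally differ, which is why KL divergence is not symmetric.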