import numpy as np
from sklearn.mixture import GaussianMixture


class GaussianMixture1D(object):
    """
    Simple class to work with 1D mixtures of Gaussians

    Parameters
    ----------
    means : array_like
        means of component distributions (default = 0)
    sigmas : array_like
        standard deviations of component distributions (default = 1)
    weights : array_like
        weight of component distributions (default = 1)
    """
    def __init__(self, means=0, sigmas=1, weights=1):
        # broadcast means, sigmas, and weights to a common length;
        # each row of `data` is one component: (mean, sigma, weight)
        data = np.array([t for t in np.broadcast(means, sigmas, weights)])
        components = data.shape[0]

        # build a GaussianMixture and set its parameters directly,
        # bypassing the usual fit() step
        self._gmm = GaussianMixture(n_components=components,
                                    covariance_type='spherical')
        self._gmm.means_ = data[:, :1]
        self._gmm.weights_ = data[:, 2] / data[:, 2].sum()
        self._gmm.covariances_ = data[:, 1] ** 2
        self._gmm.precisions_cholesky_ = 1 / np.sqrt(self._gmm.covariances_)
        self._gmm.fit = None  # disable the fit method for safety
    def sample(self, size):
        """Draw ``size`` random samples; returns (samples, component labels)"""
        return self._gmm.sample(size)
    def pdf(self, x):
        """Evaluate the total probability density at x"""
        if x.ndim == 1:
            x = x[:, np.newaxis]
        # score_samples returns the log density, so exponentiate it
        logprob = self._gmm.score_samples(x)
        return np.exp(logprob)
    def pdf_individual(self, x):
        """Evaluate the probability density of each component at x"""
        if x.ndim == 1:
            x = x[:, np.newaxis]
        logprob = self._gmm.score_samples(x)
        # weight the total density by each component's responsibility;
        # the resulting columns sum to pdf(x)
        responsibilities = self._gmm.predict_proba(x)
        return responsibilities * np.exp(logprob[:, np.newaxis])
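

# --- Usage sketch (not part of the original class) -------------------------
# A minimal example: build a two-component mixture with illustrative
# parameter values, draw samples, and evaluate the density on a grid.
if __name__ == "__main__":
    gm = GaussianMixture1D(means=[-1.0, 2.0], sigmas=[0.5, 1.0],
                           weights=[0.3, 0.7])

    # sample() follows sklearn's convention and returns (samples, labels)
    samples, labels = gm.sample(1000)

    # evaluate the total density on a 1D grid
    x = np.linspace(-4, 6, 500)
    density = gm.pdf(x)
    print(density.shape)  # (500,)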
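    # pdf_individual() returns an array of shape (len(x), n_components);
    # since predict_proba responsibilities sum to 1 at each point, the
    # columns should sum back to the total density from pdf().
    individual = gm.pdf_individual(x)
    print(individual.shape)                               # (500, 2)
    print(np.allclose(individual.sum(axis=1), density))   # True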