
gmmbvl_em_step

PURPOSE

gmmbvl_em_step - EM learning step for multivariate Gaussian mixtures

SYNOPSIS

function [W,M,R] = gmmbvl_em_step(X,W,M,R,P,plo)

DESCRIPTION

gmmbvl_em_step - EM learning step for multivariate Gaussian mixtures

[W,M,R] = gmmbvl_em_step(X,W,M,R,P,plo)
  X - (n x d) matrix of input data
  W - (k x 1) vector of mixing weights
  M - (k x d) matrix of component means
  R - (k x d^2) matrix of Cholesky factors of the component covariances,
      stored as reshaped row vectors. To recover the covariance of component j:
      Rj = reshape(R(j,:),d,d); S = Rj'*Rj;
  P - (n x k) matrix of posterior probabilities of all components (from the previous E-step)
  plo - if 1, plot ellipses for 2-d data (or the mixture density for 1-d data)
returns
  W - (k x 1) vector of updated mixing weights (component priors)
  M - (k x d) matrix of updated component means
  R - (k x d^2) matrix of Cholesky factors of the updated component covariances

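As a small illustration of the packed covariance format described above, the
following sketch uses made-up values (k = 3 components in d = 2 dimensions and
an arbitrary component index j) to recover one component's covariance matrix
from its row of R:

  d = 2;
  R = [1 0   0 2;       % component 1: factor [1 0; 0 2], covariance diag([1 4])
       1 0.5 0 1;       % component 2
       2 0   0 0.5];    % component 3
  j = 2;                          % pick a component
  Rj = reshape(R(j,:), d, d);     % unpack the (d x d) Cholesky factor
  Sj = Rj' * Rj;                  % covariance matrix of component j
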
CROSS-REFERENCE INFORMATION

This function calls: gmmbvl_em_gauss, gmmbvl_ellipse
This function is called by:

SOURCE CODE

0001 function [W,M,R] = gmmbvl_em_step(X,W,M,R,P,plo)
0002 %gmmbvl_em_step - EM learning step for multivariate Gaussian mixtures
0003 %
0004 %[W,M,R] = gmmbvl_em_step(X,W,M,R,P,plo)
0005 %  X - (n x d) matrix of input data
0006 %  W - (k x 1) vector of mixing weights
0007 %  M - (k x d) matrix of component means
0008 %  R - (k x d^2) matrix of Cholesky factors of the component covariances,
0009 %      stored as reshaped row vectors. To recover the covariance of component j:
0010 %      Rj = reshape(R(j,:),d,d); S = Rj'*Rj;
0011 %  P - (n x k) matrix of posterior probabilities of all components (from the previous E-step)
0012 %  plo - if 1, plot ellipses for 2-d data (or the mixture density for 1-d data)
0013 %returns
0014 %  W - (k x 1) vector of updated mixing weights (component priors)
0015 %  M - (k x d) matrix of updated component means
0016 %  R - (k x d^2) matrix of Cholesky factors of the updated component covariances
0017 
0018 % Nikos Vlassis, 2000
0019 
0020 %
0021 % $Name:  $
0022 
0023 [n,d] = size(X);
0024 
0025 
0026 if plo 
0027     figure(1);
0028     if d == 1
0029         plot(X,zeros(n,1),'k+');
0030     else
0031         plot(X(:,1),X(:,2),'g+');
0032     end
0033     hold on;
0034 end
0035 
0036 Psum = sum(P,1);
0037 
0038 for j = 1:length(W)
0039     if Psum(j) > eps
0040         % update mixing weight
0041         W(j) = Psum(j) / n;
0042 
0043         % update mean
0044         M(j,:) = P(:,j)' * X ./ Psum(j);
0045     
0046         % update covariance matrix
0047         Mj = repmat(M(j,:),n,1);
0048         Sj = ((X - Mj) .* repmat(P(:,j),1,d))' * (X - Mj) ./ ...
0049            repmat(Psum(j),d,d);
0050 
0051         % check for singularities: if Sj is near-singular or badly conditioned, keep the previous R(j,:)
0052         [U,L,V] = svd(Sj); 
0053         l = diag(L);
0054         if (min(l) > eps) & (max(l)/min(l) < 1e4)
0055             [Rj,p] = chol(Sj);
0056             if p == 0
0057                 R(j,:) = Rj(:)';
0058             end
0059         end
0060 
0061         % plot ellipses
0062         if plo
0063             if d == 1
0064                 x = linspace(min(X) - 3*max(R), ...
0065                    max(X) + 3*max(R), 500 )';
0066                 Lx = gmmbvl_em_gauss(x,M,R);
0067                 Fx = Lx*W;
0068                 plot(x,Fx,'k-');
0069             else
0070                 Rk = reshape(R(j,:),d,d); S = Rk'*Rk; l = svd(S);
0071                 phi = acos(V(1,1));
0072                 if V(2,1) < 0
0073                     phi = 2*pi - phi;
0074                 end
0075                 plot(M(j,1),M(j,2),'k.',M(j,1),M(j,2),'k+');
0076                 gmmbvl_ellipse( 2*sqrt(l(1)), 2*sqrt(l(2)), ...
0077                    phi, M(j,1), M(j,2),'k' );
0078             end
0079         end
0080     end
0081 end
0082 
0083 if plo
0084     if  d==2
0085         a = (max(X(:,1)) - min(X(:,1))) / 10;
0086         b = (max(X(:,2)) - min(X(:,2))) / 10;
0087         axis([min(X(:,1))-a max(X(:,1))+a min(X(:,2))-b max(X(:,2))+b]);
0088     end
0089     drawnow;
0090     hold off;
0091 end
0092 
0093

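For context, one full EM iteration pairs an E-step (computing the posteriors P
from the current parameters) with the M-step implemented above. The sketch below
is illustrative only: it assumes that gmmbvl_em_gauss(X,M,R) returns the (n x k)
matrix of per-component Gaussian densities, as it is used in the plotting branch
of the source, and the E-step normalization shown is the standard EM formula
rather than a verbatim excerpt of the toolbox's own driver.

  % One EM iteration (illustrative sketch).
  % X is the (n x d) data matrix; W, M, R hold the current parameter estimates.
  n = size(X, 1);                      % number of data points
  k = length(W);                       % number of mixture components

  % E-step: component densities -> posterior probabilities (responsibilities)
  L = gmmbvl_em_gauss(X, M, R);        % (n x k) Gaussian densities per component
  P = L .* repmat(W', n, 1);           % weight the densities by the mixing proportions
  P = P ./ repmat(sum(P, 2), 1, k);    % normalize each row so the posteriors sum to one

  % M-step: re-estimate weights, means and covariance Cholesky factors
  [W, M, R] = gmmbvl_em_step(X, W, M, R, P, 0);   % plo = 0: no plotting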