mpinv <- function(X)
{
	# Moore-Penrose (pseudo-) inverse of X, computed via the singular
	# value decomposition X = U D V'. Singular values that are
	# numerically zero are dropped, so X may be rank deficient.
	#
	# X: numeric matrix (a vector is treated as a column matrix by svd()).
	# Returns the pseudoinverse as a matrix; for the single-singular-value
	# case the transposed form is returned so that mpinv(z) %*% z yields a
	# projection matrix when z is given as a plain vector (see rmpinv()).

	# threshold below which singular values are treated as zero
	Eps <- 100 * .Machine$double.eps;

	s <- svd(X);
	d <- s$d;
	m <- length(d);
	if (m == 1)
		return(t(s$v%*%(1/d)%*%t(s$u)));

	# remove singular values that are numerically zero

	d <- d[d > Eps];
	notnull <- length(d);

	# invert the retained singular values; diag(1/d) is the exact
	# inverse of diag(d), no call to solve() needed
	if (notnull == 1)
	{
		inv <- 1/d;
	} else {
		inv <- diag(1/d);
	}

	# pad with zero rows/columns back to the full m x m shape

	if (notnull != m)
	{
		inv <- cbind(inv, matrix(0, nrow=notnull, ncol=(m - notnull)));
		inv <- rbind(inv, matrix(0, nrow=(m-notnull), ncol=m));
	}
	mp <- s$v%*%inv%*%t(s$u);
	# zero out numerical noise in the result
	mp[abs(mp) < Eps] <- 0;
	return(mp);
};
multifit <- function(Y,X,K,Z)
{
	# Fit the multivariate linear model Y = X Phi + E and test the
	# linear hypothesis H0: K %*% Phi = 0 under the linear restriction
	# given by Z (Kern(Z)-restricted estimation).  The Hotelling-Lawley
	# trace is computed and its null distribution approximated by an
	# F distribution following Laeuter (1974).
	#
	# Y: response, n x p matrix (or a vector for the univariate case)
	# X: design matrix, n x m
	# K: hypothesis matrix; K = 0 means "test all coefficients equal 0"
	# Z: restriction matrix; Z = 0 means "no restriction"
	# Returns an object of class "multilm" with coefficients, covariance
	# estimate, the F-approximated statistic, degrees of freedom and
	# the p-value.

	if (is.vector(Y)) 
	{ 
		# multivariate T^2 reduces to F-Test in the univariate case
		Y <- matrix(Y, ncol=1)
	}

	z <- list(response = Y, dmatrix = X, testK = K, restrZ = Z, coefficients = 0, covar = 0, df1 = 0, df2 = 0, hotelstat = 0 , hotelp = 1); 
	class(z) <- "multilm"

	# multivariate linear model Y = X Phi + E

	n <- nrow(Y);
	p <- ncol(Y);
	m <- ncol(X);
	
	# if no test matrix specified, test: all coefficients equal 0

	if (all(K == 0)) K <- cbind(diag(m-1), -1)
	
	# Kern(Z)-restricted Moore-Penrose inverse of X

	MPXZ <- rmpinv(X,Z);
	
	if (all(Z == 0))
	{
		ZK <- K;
	} else {
		ZK <- rbind(Z,K);
	}
	MPXZK <- rmpinv(X,ZK)
	
	# parameter estimation

	hatPHI <- MPXZ%*%Y;
	
	z$coefficients <- hatPHI;

	# projection matrices: Ph projects onto the hypothesis space,
	# Pe onto the residual (error) space

	Ph <- X%*%(MPXZ - MPXZK);
	Pe <- diag(n) - X%*%MPXZ;
	Sh <- t(Y)%*%Ph%*%Y;
	Se <- t(Y)%*%Pe%*%Y;
	
	# degrees of freedom (traces of the projection matrices)

	nh <- sum(diag(Ph));
	ne <- sum(diag(Pe));
	
	# covariance estimation 

	hatSigma <- 1/ne*Se;

	z$covar <- hatSigma

	# Hotelling-Lawley Test: H0: K%*%Phi = 0
	# trace statistic = sum of the real eigenvalues of Sh %*% Se^{-1}

	eigenvalues <- eigen(Sh%*%solve(Se))$values

	realeigenw <- Re(eigenvalues[Im(eigenvalues) == 0]);

	HL <- sum(realeigenw);

	z$hotelstat <- HL

	# approximation according to Laeuter (1974): Approximation des
	# Hotellingschen T^2 durch die F-Verteilung, Biometrische 
	# Zeitschrift 16, Heft 3 
	
	f1 <- nrow(K);
	if (is.null(f1)) f1 <- 1;		# K given as a vector: one restriction
	f2 <- nrow(Y) - (ncol(X) - 1);		# rank(X) = m - 1
	p <- ncol(Y);
	if (f1 + f2 - f1*p - 1 > 0)
	{
		g1 <- (f1*p*(f2 - p))/(f1 + f2 - f1*p - 1)
		g2 <- f2 - p + 1
	} else {
		# z$hotelp is already 1 from initialisation; nothing to salvage
		stop("cannot approximate null distribution: too few observations");
	}
	tildeF <- (f2 - p -1)/(f1*p)*g2/(g2 -2)*HL;
	z$hotelstat <- tildeF
	z$df1 <- round(g1)
	z$df2 <- round(g2)
	pvalue <- 1 - pf(tildeF, round(g1), round(g2));

	z$hotelp <- pvalue

	return(z)
}

	
		 
	
	multilm <- function(formula, K=0 , Z=0, data=list())
{
	# Formula interface to multifit(): build the response and design
	# matrix from a model formula, then fit and test the multivariate
	# linear model.
	frame <- model.frame(formula, data = data)
	resp <- model.response(frame)
	design <- model.matrix(formula, data = data)
	return(multifit(resp, design, K, Z))
}
		 
	
	print.summary.multilm <- function (x, ...) 
{
    # Print method for "summary.multilm" objects: reports the test
    # procedure, statistic, degrees of freedom and p-value.
    # The ... argument is required for consistency with the print()
    # generic (extra arguments are ignored).
    cat("test procedure: ", x$method, "\n")
    cat("test statistic: ", x$stat, "\n")
    cat("degrees of freedom DF1: ", x$df1, " DF2: ", x$df2, "\n")
    cat("p-value: ", x$pvalue, "\n")
    # return the object invisibly, as print methods should
    invisible(x)
}
rmpinv <- function(X,Z = 0)
{
	# Kern(Z)-restricted Moore-Penrose inverse of X: the Moore-Penrose
	# inverse of X restricted to the null space (kernel) of Z.
	# Z = 0 (the default) means "no restriction", giving the plain
	# Moore-Penrose inverse of X.

	# threshold for zeroing numerical noise
	Eps <- 100* .Machine$double.eps;

	if (all(Z == 0))
	{
		# unrestricted case
		return(mpinv(X));
	}

	# projection onto the row space of Z
	ZZ <- mpinv(Z)%*%Z;
	ZZ[abs(ZZ) < Eps] <- 0;
	n <- nrow(ZZ);
	if (all(diag(n) == ZZ))
	{
		# Kern(Z) is {0}: no restricted inverse exists
		stop("Error: ZZ equals diag(n)");
	}
	# project X onto Kern(Z), then invert the restricted design
	DZ <- diag(n) - ZZ;
	DZ[abs(DZ) < Eps] <- 0;
	return(mpinv(X%*%DZ));
}
summary.multilm <- function(object, test="Hotelling", ...)
{
	# Summary method for "multilm" objects.  Depending on `test`, either
	# report the stored Hotelling-Lawley result ("Hotelling") or refit the
	# model on a data-driven score reduction of the response:
	#   "PC-q": first q principal components (Kaiser criterion, ev > 1)
	#   "PC-1": first principal component only
	#   "SS":   standardized sum score
	#   "CS":   covariance sum score
	# All principal components are taken from the eigenvalue problem
	# W D = diag(W) D L (see Numerical Recipes in C, p. 462), where W is
	# the centered crossproduct of the response.
	# The ... argument is required for consistency with the summary()
	# generic (extra arguments are ignored).
	if (!(inherits(object,"multilm"))) stop("no multilm object given");
	z <- object		# NOTE: .Alias() is defunct in modern R; plain assignment
	Y <- z$response
	n <- nrow(Y)
	X <- z$dmatrix
	K <- z$testK
	Z <- z$restrZ
	ans <- list(method=test, pvalue = 1, stat=0, df1=0, df2=0);
	names(ans$pvalue) <- "P-value";

	# copy the test results of a (re)fitted multilm object into ans
	store <- function(res, fit)
	{
		res$pvalue <- fit$hotelp;
		res$stat <- fit$hotelstat;
		res$df1 <- fit$df1
		res$df2 <- fit$df2
		res
	}

	if(test=="Hotelling") 
	{ 
		ans <- store(ans, z)
	}

	# centered crossproduct of the response: W = (Y - mean)'(Y - mean)
	qY <- 1/n*t(Y)%*%rep(1,n)%*%t(rep(1,n))
        W <- (t(Y) - qY)%*%t(t(Y) - qY);

	if(test=="PC-q")
	{
	       	# eigenvalue problem: WD = diag(W)DL 
		# diag(W) is non-singular 
		# <=> solve(diag(W)) W D = D L 
		# <=> eigen(solve(diag(W))%*%W)
		# e.g. Numerical Recipes in C, p. 462
        	ewp <- eigen(solve(diag(diag(W)))%*%W);
                # Kaiser criterion: keep components with eigenvalue > 1
                q <- length(ewp$values[ewp$values > 1]);
                D <- ewp$vectors[,1:q];
		nY <- Y%*%D;
		ans <- store(ans, multifit(nY, X, K, Z))
	}
	if(test=="PC-1")
	{
		# eigenvalue problem Wd = lambda diag(W) d, see PC-q
                ewp <- eigen(solve(diag(diag(W)))%*%W);
                d <- ewp$vectors[,1];
		nY <- Y%*%d
		ans <- store(ans, multifit(nY, X, K, Z))
	}
	if (test=="SS")
	{
		# standardized sum score: weight by inverse standard deviations
		d <- 1/sqrt(diag(W))
		nY <- Y%*%d
		ans <- store(ans, multifit(nY, X, K, Z))
	}
	if (test=="CS")
	{
		# covariance sum score
		p <- ncol(W)
		A <- diag(diag(W))
		d <- solve(A)%*%W%*%sqrt(solve(A))%*%rep(1,p) 		
		nY <- Y%*%d
		ans <- store(ans, multifit(nY, X, K, Z))
	}
	class(ans) <- "summary.multilm" 
	ans
}




