## Theorem 6.4 (monotone missing data & constant p): calls the functions of Theorem 5.8 (a), (b), (c)
## LRT and modified LRT for testing the order of antedependence, H0: order p0 vs H1: order p1 (p0 < p1)
test.r.order <- function(data, covariate, p0, p1){
  data <- as.matrix(data)
  Z <- as.matrix(covariate)
  n <- ncol(data)                 # number of measurement occasions
  m <- ncol(Z)                    # number of covariates
  p <- p0
  q <- p1 - p0
  ## Theorem 5.8 (c): intervenor-adjusted partial correlations of the fitted AD(p1) model
  partial <- partial.inter(cov.cor(REMLE.cov(data, Z, p + q, REMLE = TRUE)))
  sum <- 0
  sum.M <- 0
  for(j in 1:q){
    sum1 <- 0
    sum2 <- 0
    for(i in (p+j+1):n){
      N <- nrow(data[!is.na(data[, i]), ])   # N_i: number of subjects observed at time i
      r <- partial[i, i-p-j]                 # intervenor-adjusted partial correlation at lag p+j
      sum1 <- sum1 + N*log(1 - r^2)
      sum2 <- sum2 + (N-p-j)*log(1 - r^2)
    }
    sum <- sum + sum1
    sum.M <- sum.M + sum2
  }
  LRT <- -sum                                # likelihood ratio statistic
  MLRT <- -sum.M                             # modified (small-sample) likelihood ratio statistic
  df <- (2*n - 2*p - q - 1)*(q/2)            # chi-square degrees of freedom
  pvalue.LRT <- 1 - pchisq(LRT, df)
  pvalue.MLRT <- 1 - pchisq(MLRT, df)
  list(LRT = LRT, pvalue.LRT = pvalue.LRT, MLRT = MLRT, pvalue.MLRT = pvalue.MLRT)
}

####################################################################################
## The following functions implement Theorem 5.8 (a), (b), (c).

############### Theorem 5.8 (a) ###############
## (RE)ML estimation of the autoregressive coefficients and innovation variances of a
## variable-order antedependence model under monotone missingness; returns a matrix V with
## the innovation variances (2.18) on the diagonal and the autoregressive coefficients (2.19)
## of each variable stored in the corresponding row below the diagonal.
REMLE.phi.delta <- function(data, covariate, p, REMLE){
  data <- as.matrix(data)
  Z <- as.matrix(covariate)
  n <- ncol(data)
  m <- ncol(Z)
  V <- matrix(rep(0, n*n), nrow = n)
  ## a scalar p is expanded to the variable-order vector (0, 1, ..., p-1, p, ..., p) of length n
  if(length(p) == 1) p <- rep(0:p, c(rep(1, p), n-p))
  for(i in 1:n){
    ## monotone missingness: keep only the subjects still observed at time i
    Z <- as.matrix(Z[!is.na(data[, i]), ])
    data <- data[!is.na(data[, i]), ]
    N <- nrow(data)
    if(m == 1){
      A <- cov(matrix(data[, 1:i], ncol = i))*(N-m)/N
    } else{
      Y <- NULL
      for(j in 1:N) Y <- cbind(Y, t(data[j, 1:i]))
      Y <- t(Y)
      ## least-squares estimate of the regression coefficients (p. 147)
      beta.bar <- kronecker(solve(t(Z) %*% Z) %*% t(Z), diag(i)) %*% Y
      A <- matrix(rep(0, i*i), nrow = i)
      for(s in 1:N){
        M <- matrix(data[s, 1:i], nrow = i) -
          kronecker(matrix(Z[s, ], nrow = 1), diag(i)) %*% beta.bar
        A <- A + M %*% t(M)
      }
      A <- A/N
    }
    cov <- if(REMLE == TRUE) (N/(N-m))*A else A   # REML-type degrees-of-freedom correction
    if(i == 1){
      ## innovation variance (2.18) for i = 1
      V[i, i] <- cov
    } else{
      ## innovation variances (2.18)
      V[i, i] <- cov[i, i] - t(cov[(i-min(p[i], i-1)):(i-1), i]) %*%
        solve(cov[(i-min(p[i], i-1)):(i-1), (i-min(p[i], i-1)):(i-1)]) %*%
        cov[(i-min(p[i], i-1)):(i-1), i]
      ## autoregressive coefficients (2.19)
      phi <- as.vector(t(cov[(i-min(p[i], i-1)):(i-1), i]) %*%
        solve(cov[(i-min(p[i], i-1)):(i-1), (i-min(p[i], i-1)):(i-1)]))
      for(j in (i-min(p[i], i-1)):(i-1)) V[i, j] <- phi[j+1-(i-min(p[i], i-1))]
    }
  }
  V
}

############### Theorem 5.8 (b) ###############
## convert the covariances below and above the main diagonal to correlations;
## the diagonal (the variances) is left unchanged
cov.cor <- function(V){
  for(i in 2:nrow(V)){
    for(j in 1:(i-1)) V[j, i] <- V[i, j] <- V[i, j]/sqrt(V[i, i]*V[j, j])
  }
  V
}

## (RE)ML covariance matrix recovered from the modified Cholesky parameterisation,
## Sigma = T^{-1} D T^{-T}, with T unit lower triangular holding the negated
## autoregressive coefficients and D diagonal holding the innovation variances
REMLE.cov <- function(data, covariate, p, REMLE){
  n <- ncol(data)
  T <- -REMLE.phi.delta(data, covariate, p, REMLE = REMLE)
  D <- matrix(rep(0, n*n), nrow = n)
  diag(D) <- -diag(T)        # innovation variances
  diag(T) <- rep(1, n)       # unit diagonal
  solve(T) %*% D %*% solve(t(T))
}

############### Theorem 5.8 (c) ###############
## matrix inverse
inv <- function(cov){
  solve(cov)
}

## intervenor-adjusted partial correlation:
## recursion formula for partial correlations in (2.6)
recur.pcor <- function(index1, index2, index3, V){
  if(length(index3) == 1){
    ## one intervenor: ordinary first-order partial correlation
    rxy.z <- (V[index1, index2] - V[index1, index3]*V[index2, index3])/
      (sqrt(1 - V[index1, index3]^2)*sqrt(1 - V[index2, index3]^2))
    return(rxy.z)
  } else{
    ## several intervenors: peel off one (index30) and condition on the rest (index3c)
    index30 <- index3[1]
    index3c <- index3[-1]
    rxy.zc  <- recur.pcor(index1, index2,  index3c, V)
    rxz0.zc <- recur.pcor(index1, index30, index3c, V)
    ryz0.zc <- recur.pcor(index2, index30, index3c, V)
    rxy.z <- (rxy.zc - rxz0.zc*ryz0.zc)/(sqrt(1 - rxz0.zc^2)*sqrt(1 - ryz0.zc^2))
    return(rxy.z)
  }
}

## fill the correlation matrix with the intervenor-adjusted partial correlations:
## the (i, j) entry with |i - j| >= 2 is adjusted for the intermediate variables only;
## entries next to the diagonal keep the ordinary correlations
partial.inter <- function(cor){
  V <- cor
  n <- ncol(cor)
  m <- n - 2
  for(k in 1:m){
    if(k == m){
      ## the (1, n) pair, adjusted for all intervenors 2, ..., n-1
      V[1, n] <- V[n, 1] <- recur.pcor(1, n, c(2:(n-1)), cor)
    } else{
      ## pairs (i, i+k+1), adjusted for the intervenors i+1, ..., i+k
      diag(V[1:(n-k-1), (k+2):n]) <- diag(V[(k+2):n, 1:(n-k-1)]) <-
        sapply(1:(m+1-k), function(i){
          if(k == 1) recur.pcor(i, i+k+1, i+1, cor)
          else recur.pcor(i, i+k+1, c((i+1):(i+k)), cor)
        })
    }
  }
  V
}
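####################################################################################
## Example call -- a minimal illustrative sketch, not part of the original functions.
## The simulated responses, the monotone dropout scheme, the intercept-only covariate
## and the orders p0 = 1, p1 = 2 are all assumptions made for the demonstration;
## the functions above must be sourced first.
set.seed(1)
n.subj <- 50                                        # number of subjects (assumed)
n.time <- 5                                         # number of measurement occasions (assumed)
Y <- matrix(rnorm(n.subj * n.time), nrow = n.subj)  # longitudinal responses
## impose a monotone missing-data pattern: once a subject drops out, it stays missing
drop.time <- sample(3:(n.time + 1), n.subj, replace = TRUE)
for(s in 1:n.subj) if(drop.time[s] <= n.time) Y[s, drop.time[s]:n.time] <- NA
Z <- matrix(1, nrow = n.subj, ncol = 1)             # intercept-only covariate matrix
## test H0: antedependence of order p0 = 1 against H1: order p1 = 2
test.r.order(Y, Z, p0 = 1, p1 = 2)                  # returns LRT, MLRT and their p-values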