.packageName <- "MiscPsycho"
`alpha` <-
function(columns){
# Cronbach's coefficient alpha for a set of item scores.
#
# columns : a numeric matrix or data.frame, one column per item,
#           one row per respondent.
# Returns the scalar alpha = k/(k-1) * (1 - sum(item variances)/total variance).
k <- ncol(columns)
colVars <- apply(columns, 2, var)   # per-item variances
total   <- var(rowSums(columns))    # variance of the total score
a <- (total - sum(colVars)) / total * (k/(k-1))
a
 }

`alpha.Summary` <-
function(columns){
# Alpha-if-item-deleted summary: for each item, recompute Cronbach's
# alpha on the remaining items.
#
# columns : a numeric matrix or data.frame, one column per item.
# Returns (and prints a banner for) a data.frame with columns
# Item (index) and alpha (alpha with that item removed).
n      <- ncol(columns)
result <- numeric(n)
for(i in seq_len(n)){
# drop = FALSE keeps a matrix/data.frame even with two items;
# column-wise [, -i] also works for matrix input (the old [-i] did not)
result[i] <- alpha(columns[, -i, drop = FALSE])
}
# build the result once, after the loop (it was rebuilt every iteration)
dat <- data.frame(Item = 1:n, alpha = result)
cat("Below is what alpha *would be* if the item were removed", '\n', '\n')
dat
}

`class.acc` <-
function(x,b,prof_cut, mu=0, sigma=1, aboveQ=TRUE){
   # Classification accuracy: posterior probability that an examinee with
   # response pattern x (item difficulties b) lies above (aboveQ = TRUE)
   # or below the proficiency cut, under a N(mu, sigma) prior.
   numer <- class.numer(x, b, prof_cut, mu, sigma, aboveQ)
   denom <- class.denom(x, b, mu, sigma)
   return(numer / denom)
   }

`class.denom` <-
function(x,b, mu=0, sigma=1){
   # Marginal likelihood of response pattern x: integrates the Rasch
   # likelihood over a N(mu, sigma) ability prior using 49-point
   # Gauss quadrature (gauss.quad.prob from statmod).
   quad <- gauss.quad.prob(49, dist='normal', mu=mu, sigma=sigma)
   # likelihood of the pattern at each quadrature node
   lik <- apply(like.mat(x, b, quad$nodes), 2, prod)
   sum(lik * quad$weights)
   }

`class.numer` <-
function(x,b, prof_cut, mu=0, sigma=1, aboveQ){
   # Numerator of the classification-accuracy ratio: the likelihood of
   # pattern x integrated over abilities above (or below) prof_cut,
   # via 49-point Gauss-Laguerre quadrature on the half-line.
   quad <- gauss.quad(49, kind="laguerre")
   # shift the Laguerre nodes onto the half-line at the cut score
   if (aboveQ) {
      eval_pts <- quad$nodes + prof_cut
   } else {
      eval_pts <- prof_cut - quad$nodes
   }
   lik <- apply(like.mat(x, b, eval_pts), 2, prod)
   # exp(nodes) undoes the e^{-t} weight built into Laguerre quadrature
   sum(lik * dnorm(eval_pts, mean=mu, sd=sigma) * exp(quad$nodes) * quad$weights)
   }

`classical` <-
function(data){
# Classical test theory item summary: per-item proportion correct
# (p-value) and point-biserial correlation with the rest-score.
data.frame(p_values = colMeans(data), Point_Biserial = pb(data))
 }

`fit.Stats` <-
function(dat, params){
# Rasch infit (IF) and outfit (OF) mean-square item fit statistics.
#
# dat    : persons x items 0/1 response matrix.
# params : vector of item difficulty parameters.
# Returns a data.frame with one row per item, columns IF and OF.
raschO <- function(b,theta){
  # P(correct) written as the complement of P(incorrect)
  1 - (1 / (1 + exp(outer(theta,b,'-'))))
   }
theta <- apply(dat, 1, theta.max, params) # ML ability estimate per person
p <- raschO(as.vector(params), theta)     # expected P(correct), persons x items
q <- p * (1 - p)       # Bernoulli variance (was the redundant sqrt(p*(1-p))^2)
z2 <- (dat - p)^2 / q  # squared standardized residuals
IF <- colSums(z2*q) / colSums(q)  # information-weighted mean square (infit)
OF  <- colMeans(z2)               # unweighted mean square (outfit)
data.frame(IF,OF)
}

`jml` <-
function(dat, con = .001, bias=FALSE){
# Joint maximum likelihood (JML) estimation of Rasch item difficulties
# via Newton-Raphson, alternating with ML ability estimation.
#
# dat  : persons x items 0/1 response matrix (data.frame or matrix).
# con  : convergence criterion on the largest parameter change.
# bias : if TRUE, apply the (k-1)/k JML bias correction to the items.
# Prints iteration count and a table of estimates, SEs, N, infit, outfit.
rasch <- function(theta,b) 1/ (1 + exp(b-theta)) # Rasch function

############ Analytic Derivatives ############
# items x persons matrix of P(correct); computed once per call
prob <- function(theta, b_vector){
apply(as.matrix(theta), 1, rasch, b_vector)
}
# Vector of first derivatives
gradient <- function(dat, b_vector, theta){
-1 * (colSums(dat) - rowSums(prob(theta, b_vector)))
}
# Hessian Matrix (Second derivatives); cache p instead of recomputing it
hessian <- function(theta, b_vector){
p <- prob(theta, b_vector)
-1 * diag(rowSums(p * (1 - p)))
}
##############################################
dat <- as.matrix(dat)
# drop = FALSE so a single surviving row stays a matrix
dat <- dat[rowSums(dat)!=0, , drop = FALSE]         # get rid of all incorrect
dat <- dat[rowSums(dat)!=ncol(dat), , drop = FALSE] # get rid of perfect scores
b_vector <- rep(0, ncol(dat))        # starting values
change <- rep(1, ncol(dat))
iter <- 0
while(any(abs(change) > con)) {
theta <- apply(dat, 1, theta.max, b_vector)
change <- solve(hessian(theta, b_vector)) %*% gradient(dat, b_vector, theta)
b_vector <- b_vector - change # updated items params
iter <- iter + 1
}
b_vector <- b_vector - mean(b_vector) # center on zero
if(bias){ # apply the (k-1)/k correction for JML bias when requested
b_vector <- b_vector * ((length(b_vector)-1)/length(b_vector))
}
cat("Convergence was reached in", iter, "iterations", '\n')
fit <- fit.Stats(dat, b_vector) # Get Infit and Outfit
print(data.frame(params = b_vector, SE = 1/sqrt(-1 * diag(hessian(theta, b_vector))), N = nrow(dat), Infit = fit$IF, Outfit = fit$OF))
}

`like.mat` <-
function(x,b,theta){
   # Bernoulli likelihood matrix: element [i, j] is the likelihood of
   # response x[i] to item b[i] at ability theta[j].
   p <- rasch(b, theta)
   p^x * (1 - p)^(1-x)
   }

`pb` <-
function(data){
# Point-biserial correlation of each item with the rest-score
# (the total score computed without that item).
#
# data : persons x items 0/1 matrix or data.frame.
# Returns a numeric vector, one correlation per item.
n_items <- ncol(data)
result <- numeric(n_items)
for(i in seq_len(n_items)) {
# [, -i, drop = FALSE] works for both matrices and data.frames
# (the old data[-i] failed on matrix input)
result[i] <- cor(data[, i], rowSums(data[, -i, drop = FALSE]))
}
result
}

`piPlot` <-
function(dat, params){
# Person-item plot: overlays the density of estimated person abilities
# (solid line) with the density of the item difficulties (dashed line).
resp <- as.matrix(dat)
resp <- resp[rowSums(resp)!=0,]         # all-incorrect rows have no ML estimate
resp <- resp[rowSums(resp)!=ncol(resp),] # neither do perfect scores
ability <- apply(resp, 1, theta.max, params)
plot(density(ability), ylim=c(0,1), lty=1, main = "Person-Item Plot", xlab = "Theta")
lines(density(params), lty=2)
}

`rasch` <-
function(b,theta){
   # Rasch item response function: element [i, j] is
   # 1 / (1 + exp(b[i] - theta[j])), the probability of a correct
   # response to item i at ability theta[j].
   logit <- outer(b, theta, '-')  # b_i - theta_j
   1 / (1 + exp(logit))
   }

`scoreCon` <-
function(b_vector){ # b_vector is the vector of item parameters
# Raw-score to theta conversion table: for each possible raw score
# 1 .. q-1, the ML ability estimate given the item difficulties.
# (Scores 0 and q have no finite ML estimate and are excluded.)
q <- length(b_vector)
max.score <- q-1
# one column per raw score: column i is i ones followed by zeros
mat <- matrix(0, nrow = q, ncol = q)
for(i in seq_len(q)){
mat[,i] <- c(rep(1, i), rep(0, q-i))
}
# drop = FALSE keeps a matrix when q == 2 (a single score column)
scores <- apply(mat[, seq_len(max.score), drop = FALSE], 2, theta.max, b_vector)
Raw.Score <- colSums(mat)[seq_len(max.score)]
data.frame(Raw.Score = Raw.Score, Theta = scores)
}

`simRasch` <-
function(Nt, Nb, mu=0, sigma=1) {
# Simulate a dichotomous Rasch data set.
#
# Nt : number of persons; abilities drawn from N(mu, sigma).
# Nb : number of items; difficulties drawn from Uniform(-3, 3).
# Returns a list: data (Nt x Nb 0/1 data.frame), generating.values
# (item difficulties), theta (person abilities).
# NOTE: RNG draw order is rnorm(Nt), runif(Nb), runif(Nt*Nb).
theta <- rnorm(Nt, mu, sigma)
b <- runif(Nb,-3,3)
# P(correct) for person n on item i: 1 / (1 + exp(b_i - theta_n))
p <- outer(theta, b, function(th, diff) 1/ (1 + exp(diff-th)))
u <- matrix(runif(Nt*Nb), ncol=Nb)
# score 1 when the probability meets or exceeds the uniform draw
response.matrix <- as.data.frame(ifelse(p < u, 0, 1))
list(data = response.matrix, generating.values = b, theta = theta)
}

`theta.max` <-
function(x, betas){
  # ML ability estimate under the Rasch model for response pattern x
  # (0/1 vector) given item difficulties betas, via BFGS on the
  # negative Bernoulli log-likelihood.
  opt <- function(theta) -sum(dbinom(x, 1, plogis(theta - betas), log = TRUE))
  # start at the logit of the proportion correct, log(r / (k - r));
  # the original divided by (length(x)/sum(x)), i.e. log(r^2/k) -- a typo
  start <- log(sum(x) / (length(x) - sum(x)))
  out <- optim(start, opt, method = "BFGS", hessian = TRUE)
  out$par
  }

