## Package name marker (used by R's package installation tooling).
.packageName <- "ada"
## Namespace unload hook: release the package's compiled code when the
## package is unloaded.
".onUnload" <-
function(libpath)
    library.dynam.unload("ada", libpath)

## Signal to R that this package defines no S4 generics, so the
## generic/method search can be skipped when the package is loaded.
".noGenerics" <-
TRUE
ada <- function(x, ...) {
  ## S3 generic entry point for fitting a boosted model; dispatches on
  ## the class of `x` (see ada.default and ada.formula).
  UseMethod("ada")
}

"ada.default" <-
function(x,y,test.x,test.y=NULL,loss=c("exponential","logistic"),
         type=c("discrete","real","gentle"),iter=50, nu=0.1, bag.frac=0.5,
         model.coef=TRUE,bag.shift=FALSE,max.iter=20,delta=10^(-10),verbose=FALSE,
         na.action=na.rpart,...){
  ## Fit a two-class boosted classifier (discrete/real/gentle AdaBoost)
  ## with rpart trees as base learners.
  ##
  ## x, y           : predictors and a two-level response
  ## test.x, test.y : optional test set tracked alongside training
  ## loss           : "exponential" (AdaBoost) or "logistic" (LogitBoost-style)
  ## type           : boosting flavour ("discrete", "real", "gentle")
  ## iter, nu       : number of boosting iterations and shrinkage factor
  ## bag.frac       : fraction of data left OUT of each bag
  ## model.coef     : fit a step size per tree instead of a constant 1
  ## bag.shift      : shift the final ensemble toward the last tree
  ## max.iter,delta : Newton-iteration controls for the step-size search
  ## ...            : extra arguments forwarded to rpart()
  cl<-match.call(expand.dots=TRUE)
  cl[[1]]<-as.name("ada")

  ## match.arg() selects "discrete" when `type` is missing, so the old
  ## separate missing() check was dead code and has been removed.
  type = match.arg(type)
  if(missing(loss))
    loss="exponential"
  loss=tolower(loss)
  ## Accept common shorthands for the loss name.
  if(loss=="e"||loss=="ada"){
    loss="exponential"
  }
  if(loss=="l"||loss=="l2"){
    loss="logistic"
  }
  if(missing(y) || missing(x)){
    stop("This procedure requires a response and a set of variables")
  }
  lev=levels(as.factor(y))
  if(length(lev)!=2)
    stop("Currently this procedure can not directly handle > 2 class response\n")
  x=as.data.frame(x)
  test.dat=FALSE
  if(!is.null(test.y)){
    tlev=NULL
    if(!missing(test.x)){
      tlev=levels(as.factor(test.y))
      test.x=as.data.frame(test.x)
      test.dat=TRUE
    }
    if(length(tlev)<1)
      warning("test.x must be the testing data and the response must have 2 levels")
  }else{
    test.x=NULL
  }
  ## Anything passed through ... must be a valid rpart() argument.
  extraArgs=list(...)
  if (length(extraArgs)) {
    arg <- names(formals(rpart))
    indx <- match(names(extraArgs), arg, nomatch = 0)
    if (any(indx == 0)) 
      stop(paste("Error:  Argument", names(extraArgs)[indx == 0], 
                 "not matched"))
  }
  ## Internally the two-level response is coded -1/+1.
  ny<-c(-1,1)[as.numeric(as.factor(y))]
  if(test.dat)
    test.y<-c(-1,1)[as.numeric(as.factor(test.y))]

  ### Set up the per-tree prediction rule for each boosting type
  method="class"
  if(type=="discrete"){
    ## Discrete AdaBoost: each tree votes -1/+1.
    predict.type<-function(f,dat){
      a=c(-1,1)[apply(predict(f,newdata=dat,type="prob"),1,which.max)]
      a
    }
  }
  if(type=="real"){
    ## Real AdaBoost: half log-odds of the class probability, with the
    ## probabilities pulled slightly off 0/1 to keep the log finite.
    predict.type<-function(f,dat){
      a=predict(f,newdata=dat,type="prob")[,2]
      ind<-a==1|(1-a)==1
      if(length(ind)>1){
        a[ind]=(1-a[ind])*0.0001+a[ind]*.9999
      }
      f=0.5*log(a/(1-a))
      if(sum(is.nan(f))>0)
        f[is.nan(f)]=0.51
      f
    }
  }
  if(type=="gentle"){
    ## Gentle AdaBoost: regression trees fitted to the working response.
    predict.type<-function(f,dat){
      predict(f,newdata=dat)
    }
    method="anova"
  }

  ### Set up the weight-update and step-size (coefficient) functions
  wfun<-function(yf,f){
    exp(-yf*f)
  }
  if(loss=="logistic"){
    wfun<-function(yf,f){
      a=exp(-yf*f)
      a/(1+a)
    }
  }
  ## Default step size: a constant 1 (used when model.coef=FALSE).
  coefs=function(wts,eta,yf,alp){
    1
  }
  if(model.coef){
    if(loss=="exponential"){
      if(type=="discrete"){ ### closed form: use the AdaBoost alpha directly
        coefs=function(wts,eta,yf,alp){
          alp
        }
      }else{
        ## Newton search for the step size under exponential loss.
        coefs=function(wts,eta,yf,alp){
          alpst=alp
          for(i in 1:max.iter){
            alp0=alp
            pval=wts*exp(-yf*eta*alp)
            m11<-sum(eta*pval*eta)
            m12<-sum(yf*pval*eta)
            if(m11==0){
              ## Degenerate Hessian: fall back to the starting value.
              alp=alpst
              break
            }
            alp=alp+m12/m11
            a1=(alp-alp0)^2
            if(a1<delta){
              break
            }
          }
          if(verbose)
            cat("FINAL: iter=",i," rate=",a1,"\n")
          alp
        }
      }
    }else{
      ## Newton search for the step size under logistic loss.
      ## (fixed: the original applied the Newton update a second time
      ## after the convergence test, double-stepping on every
      ## non-converged pass; the exponential branch steps only once)
      coefs=function(wts,eta,yf,alp){
        alpst=alp
        for(i in 1:max.iter){
          alp0=alp
          pval=wts*exp(-yf*eta*alp)
          pval=1-1/(1+pval)
          m11<-sum(eta*pval*eta)
          m12<-sum(yf*pval*eta)
          if(m11==0){
            alp=alpst
            break
          }
          alp=alp+m12/m11
          a1=(alp-alp0)^2
          if(a1<delta){
            break
          }
        }
        if(verbose)
          cat("FINAL: iter=",i," rate=",a1,"\n")
        2*alp
      }
    }
  }
  lossObj=list(coefs=coefs,wfun=wfun,predict.type=predict.type,method=method,type=type,loss=loss,shift=bag.shift)
  result =ada.machine(x,ny,test.x,test.y,iter,nu,bag.frac,lossObj,oldObj=NULL,...)
  ## Map the -1/+1 ensemble sign back onto the original response labels.
  g=as.factor(lev[as.numeric(as.factor(sign(result$F[[1]])))])
  tab=table(as.factor(y),g,dnn=c("True value","Final Prediction"))
  nm<-1:(dim(x)[2])
  if(is.data.frame(x)){
    nm=names(x)
  } 
  obj=structure(list(model=result,fit=g,call=cl,confusion=tab,iter=iter,
    actual=as.vector(y),nu=nu,dim=dim(x),names=nm,bag.frac=bag.frac),class="ada")
  obj
}

"ada.formula" <-
function(formula, data,...,subset,na.action=na.rpart){
  ## Formula interface to ada(): construct the model frame, extract the
  ## response and the predictor columns, then delegate to ada.default().
  mf <- match.call(expand.dots = FALSE)
  mf[[1]] <- as.name("model.frame")
  mf$... <- NULL
  mf <- eval(mf, parent.frame())

  Terms <- attr(mf, "terms")
  y <- as.vector(model.extract(mf, "response"))
  ## Keep only the columns named by the formula's term labels.
  preds <- attr(attributes(mf)$terms, "term.labels")
  x <- as.data.frame(mf[, !is.na(match(names(mf), preds))])
  res <- ada.default(x, y, ...)
  res$terms <- Terms
  ## Record the user's call with the generic name for printing.
  cl <- match.call()
  cl[[1]] <- as.name("ada")
  res$call <- cl
  res
}


"ada.machine" <-
function(x,y,test.x,test.y,iter=50,nu=0.1,bag.frac=0.5,lossObj,oldObj=NULL,...){
  ## Core boosting engine: grows `iter` rpart trees on (optionally bagged)
  ## reweighted samples, accumulates the ensemble scores in `fits`, and
  ## tracks training, test and out-of-bag error at every iteration.
  ## `oldObj` (a previous 'ada' fit) lets update.ada() resume boosting.

  ## Cohen's kappa computed from a confusion table; returns 0 for a
  ## degenerate 1-row table and 0.5 when the table is not square.
  kapstat<-function(tab=diag(2) ){
    if(dim(tab)[1]==1){
      return(0)
    }
    if(dim(tab)[1]==dim(tab)[2]){
      rs<-apply(tab,2,sum)
      cs<-apply(tab,1,sum)
      N<-sum(rs)
      E<-sum(rs*cs)/N^2
      O<-sum(diag(tab))/N
      return( (O-E)/(1-E) )
    }else{
      return(0.5)
    }
  }
  ## Bagging sampler: draws one bag of in-bag indices per iteration,
  ## forced to contain at least one observation from each class.
  tmp<-function(i){
    a1<-sample(which(y==1),1)
    a2<-sample(which(y==-1),1)
    ind<-c(a1,a2)
    return(c(sample(setdiff(1:n,ind),n-val-2,FALSE),ind))
  }

  n=dim(x)[1]
  fit=list()
  y<-as.numeric(y)
  dat<-data.frame(y=y,x)
  
  ## Uniform starting weights; oobm.mat accumulates out-of-bag votes
  ## per observation (column 1 = "-1" votes, column 2 = "+1" votes).
  w=rep(1,n)/n
  oobm.mat<-matrix(0,nrow=n,ncol=2)
  fits=rep(0,n)
  
  atmp=alpha=vector(length=iter)
  oobm.err<-rep(0,iter)
  train.err<-rep(0,iter)
  train.kap<-rep(0,iter)
  start=0
  if(!is.null(test.y)){
    ## NOTE(review): this duplicates the assignment in the oldObj branch
    ## below; when oldObj is NULL it silently resets `fit` to NULL (which
    ## `fit[[m]]<-` later re-creates as a list) — confirm it is intended.
    fit=oldObj$model$trees
    test.err<-rep(0,iter)
    test.kap<-rep(0,iter)
    test.n<-dim(test.x)[1]
    fits.test<-rep(0,test.n)
  }
  ## Resume state from a previous fit when called via update.ada().
  if(!is.null(oldObj)){
    fit=oldObj$model$trees
    w=oldObj$model$lw
    oobm.mat=oldObj$model$oob.str$oobm.mat
    fits=oldObj$model$F[[1]]
    start=oldObj$iter
    alpha[1:start]<-oldObj$model$alpha
    ## NOTE(review): `$err` relies on R's partial matching to reach the
    ## stored `$errs` component — confirm, and consider spelling it out.
    train.err[1:(start)]<-oldObj$model$err[,1]
    train.kap[1:(start)]<-oldObj$model$err[,2]
    oobm.err[1:(start)]<-oldObj$model$oob.str$oobm.err
    nu=oldObj$nu
    bag.frac=oldObj$bag.frac
    if(!is.null(test.y)){
      test.err[1:start]<-oldObj$model$err[,3]
      test.kap[1:start]<-oldObj$model$err[,4]
      fits.test<-oldObj$model$F[[2]]
    }
  }  
  ## val = number of observations left OUT of each bag. Draw all bags
  ## up front: one column of `a` per boosting iteration.
  val<-floor(bag.frac*n)
  a<-NULL
  if(val<n){
    a<-sapply(1:iter,tmp)
  } 
  if(is.vector(a)){
    a<-t(as.matrix(a))
  }
  start<-start +1
  wfun=lossObj$wfun
  coefs=lossObj$coefs
  method=lossObj$method
  predict.type=lossObj$predict.type
  shift=lossObj$shift
  f1<-f2<-0
  for (m in start:iter){
    ## xval = in-bag indices used to grow this iteration's tree.
    xval=1:n
    if(!is.null(a)){
      xval=a[,m]
    }
    fit[[m]] =rpart(y~.,data=dat[xval,],weights=w[xval],method=method,x=FALSE,y=FALSE,...)
    f<-predict.type(fit[[m]],dat)
    ## Weighted misclassification of the new tree, clipped away from
    ## 0 and 1 so the log in alpha stays finite.
    errm=sum(w*(sign(f)!=y))
    if( (1-errm)==1 | errm==1 ){
      errm=(1-errm)*0.0001+errm*.9999
    }
    alp=0.5*log( (1-errm)/errm)
    alpha[m]=nu*coefs(w,f,y,alp)
    fits<-fits+alpha[m]*f
    if(shift){
      f1=(1-nu)*f+fits
      atmp[m]=1-nu+alpha[m]
    }
    ## Reweight and renormalise the training data for the next tree.
    w=wfun(y,fits)
    w=w/sum(w)
    tab<-table(sign(fits),y)
    train.err[m]<-1-sum(diag(tab))/n
    train.kap[m]<-1-kapstat(tab)
    
    ## Out-of-bag bookkeeping: vote with the current ensemble on the
    ## observations left out of this bag.
    indx<- setdiff(1:n,xval)
    btmp<-as.numeric(as.factor(sign(fits)[indx]))
    if(length(btmp)==1){
      oobm.mat[indx,btmp]<-oobm.mat[indx,btmp]+1
    }else{
      oobm.mat[indx,][btmp==1,1]<- oobm.mat[indx,][btmp==1,1]+1
      oobm.mat[indx,][btmp==2,2]<- oobm.mat[indx,][btmp==2,2]+1
      ## Majority vote over all observations that have been out of bag
      ## at least once.
      denom<-apply(oobm.mat,1,sum)
      vals<-denom>0
      if(sum(vals)==1){
	ytr<-c(-1,1)[which.max(oobm.mat[vals,])]
      }else{
        ytr<-c(-1,1)[apply(oobm.mat[vals,],1,which.max)]
      }
      ## NOTE(review): length(vals) is always n (vals is a logical mask);
      ## dividing by sum(vals) may have been intended — confirm.
      oobm.err[m]<-sum(ytr!=y[vals])/length(vals)
    }
    if(is.null(test.y)){
      next
    }
    ## Track test-set error with the same growing ensemble.
    fit1<-predict.type(fit[[m]],test.x)
    fits.test<-fits.test +alpha[m]*fit1
    if(shift){
      f2=(1-nu)*fit1+fits.test
    }
    tab<-table(sign(fits.test),test.y)
    test.err[m]<- 1-sum(diag(tab))/test.n
    test.kap[m]<-1-kapstat(tab)
  }
  if(shift){
    alpha=atmp
    fits<-f1
    fits.test<-f2
  }
  ## Break exact zero scores with a fair coin so sign() is never 0.
  a1=(fits==0)
  if(sum(a1)>0)
    fits[a1]<-sample(c(-1,1),sum(a1),TRUE,c(.5,.5))
  errs<-cbind(train.err,train.kap)
  ans<-list()
  ans[[1]]=fits
  if(!is.null(test.y)){
    errs<-cbind(errs,test.err,test.kap)
    ans[[2]]=fits.test
  }
  obj=list(trees=fit,alpha=alpha,F=ans,errs=errs,oob.str=list(oobm.err=oobm.err,oobm.mat=oobm.mat),lw=w,shift=shift,lossObj=lossObj)
  return(obj)
}
"addtest" <-
function(x,test.x,test.y,...){
  ## Retro-fit test-set performance onto an existing ada object: re-score
  ## test.x with every stored tree, append per-iteration test error and
  ## 1-kappa columns to x$model$errs, and store the final test ensemble
  ## scores as a new element of x$model$F.
  if(!inherits(x,"ada")){
    stop("Error:  Object is not of class ada")
  }
  ## (fixed: typos "funciton"/"tesing" in the two messages below)
  if(missing(test.y)){
    stop("This function needs a testing response")
  }
  if(missing(test.x)){
    stop("This function needs testing data")
  }
  ## Cohen's kappa from a confusion table (0 for 1-row, NA if non-square).
  kapstat<-function(tab=diag(2) ){
    if(dim(tab)[1]==1){
      return(0)
    }
    if(dim(tab)[1]==dim(tab)[2]){
      rs<-apply(tab,2,sum)
      cs<-apply(tab,1,sum)
      N<-sum(rs)
      E<-sum(rs*cs)/N^2
      O<-sum(diag(tab))/N
      return( (O-E)/(1-E) )
    }else{
      return(NA)
    }
  }
  iter=x$iter
  lev=levels(as.factor(test.y))
  if(length(lev)>2){
    stop("Error The response must have 2 levels")
  }
  ## Recode the test response as -1/+1 to match the internal coding.
  y=c(-1,1)[as.numeric(as.factor(test.y))]
  test.errs<-test.kaps<-rep(0,iter)
  test.x=as.data.frame(test.x)

  ## Score every stored tree on the test data, scaled by its alpha.
  f<-x$model$lossObj$predict.type
  tmp=sapply(1:iter,function(i)f(f=x$model$trees[[i]],dat=test.x))
  tmp=t(t(tmp)*x$model$alpha)
  Fm=rep(0,length(y))
  for(m in 1:iter){
    Fm<-Fm+tmp[,m]
    tab<-matrix(table(sign(Fm),y),nrow=2,ncol=2)
    test.errs[m]<-1-sum(diag(tab))/sum(tab)
    test.kaps[m]<-1-kapstat(tab)
  }
  x$model$errs=cbind(x$model$errs,test.errs,test.kaps)
  n1<-length(x$model$F)
  x$model$F[[n1+1]]<-Fm
  x
}

"pairs.ada" <-
function(x,train.data=NULL,vars=NULL,maxvar=10,test.x=NULL,test.y=NULL,test.only=FALSE,col=c(2,4),pch=c(1,2),...){
  ## Pairs plot of the most important predictors: upper panels coloured by
  ## the true class, lower panels coloured by the predicted class with
  ## point size proportional to the predicted class probability.
  if(!inherits(x,"ada")){
    stop("Error:  Object is not of class ada")
  }
  if(is.null(train.data) && is.null(test.x)){
    stop("Note:  The train.data must correspond to the predictors used in adaboost")
  }
  if(maxvar<2){
    warning("Note: maxvar must be greater than 2.  So this variable will be ignored...")
    maxvar=10
  }
  iter<-x$iter
  y<-as.numeric(as.factor(as.vector(x$actual)))+1
  lev<-levels(as.factor(y))
  ## (fixed: initialise ptrain so a NULL train.data no longer leaves it
  ## undefined when the class/probs vectors are combined below)
  ptrain<-list(class=NULL,probs=NULL)
  if(!is.null(train.data))
    ptrain<-predict(x,newdata=train.data,type="both")
  ptest<-list(class=NULL,probs=NULL)
  if(!is.null(test.x)){
    if(!is.null(train.data)){
      if(dim(train.data)[2]!= dim(test.x)[2]){
        stop("Error:  The test data set must have the same number of variables as the train.data")
      }
    }
    ptest<-predict(x,test.x,type="both")
    if(is.null(test.y)){
      ## No test response supplied: treat all test rows as class 1.
      test.y<-rep(1,dim(test.x)[1])
      y<-c(y,as.numeric(test.y))
    }else{
      test.y=as.numeric(lev[as.numeric(as.factor(test.y))])
    }
  }
  ## Combine training and test predictions for the panels.
  ptrain$class<-c(ptrain$class,ptest$class)
  ptrain$probs<-rbind(ptrain$probs,ptest$probs)
  if(test.only && !is.null(test.x)){
    ptrain$class<-c(ptest$class)
    ptrain$probs<-rbind(ptest$probs)
    y<-as.numeric(test.y)
    train.data=NULL
  }
  tr<-y
  mat<-ptrain
  train.data<-as.data.frame(train.data)
  ## Choose the variables to display: the top `maxvar` by importance
  ## score when there are too many to show at once.
  if(is.null(vars) && (!test.only)){
    if(dim(train.data)[2]>maxvar){
      if(!is.data.frame(train.data)){
        stop(" train.data must be of type data.frame")
      }
      vars<-names(varplot(x,plot.it=FALSE,type="scores",max.var.show=maxvar))
    }else{
      vars<-1:(x$dim[2])
    }
  }
  if(test.only && is.null(vars)){
    if(dim(test.x)[2]>maxvar){
      if(!is.data.frame(test.x)){
        stop(" test.x must be of type data.frame")
      }
      ## (fixed: dropped a stray `match(vars, val$names)` call here that
      ## referenced an undefined object and discarded its result)
      vars<-names(varplot(x,plot.it=FALSE,type="scores",max.var.show=maxvar))
    }else{
      vars<-1:(x$dim[2])
    }
  }
  ## Upper panels: colour/symbol by the true class.
  panel.up<-function(x,y,...){
    val<-as.numeric(as.factor(tr))
    points(x,y,col=col[val],pch=pch[val])
  }
  ## Lower panels: colour/symbol by predicted class, sized by probability.
  panel.low<-function(x,y,...){
    val<-as.numeric(as.factor(mat$class))
    points(x,y,col=col[val],cex=apply(mat$probs,1,max),pch=pch[val])
  }
  pairs(as.matrix(rbind(train.data,test.x))[,vars],lower.panel=panel.low,upper.panel=panel.up,...)
}

"plot.ada" <-
function(x,kappa=FALSE,test=FALSE,cols= rainbow(dim(x$model$errs)[2]+1),tflag=TRUE,...){
  ## Plot training (and optionally testing) error against the boosting
  ## iteration; with kappa=TRUE a second panel shows kappa accuracy.
  ## (fixed: removed an unused local kapstat() helper, and the graphics
  ## state is now restored via on.exit even if plotting fails part-way)
  if(!inherits(x,"ada")){
    stop("Error:  Object is not of class ada")
  }
  mat<-x$model$errs
  iter<-x$iter
  k<-dim(mat)[2]/2            # number of (error, kappa) column pairs
  if(length(cols)<(k*2))
    cols=rep(cols,k*2)
  old.par <- par(no.readonly = TRUE)
  on.exit(par(old.par))
  if(kappa)
    par(mfrow = c(1, 2),...)
  odds<-1:k*2-1               # columns holding the error curves
  vals<-range(mat[,odds])
  vals[2]<-vals[2]*1.1        # leave headroom for the legend
  plot(mat[,1],xlab=paste("Iteration",1,"to",iter),ylab="Error",
       ylim=vals,cex.lab=1,cex.main=1.2,type="l",col=cols[1],lwd=1)
  axis(1,at=iter,font=2)
  ## Pick a handful of iterations at which to mark the curves.
  indx<-1:5 * floor((iter-5)/5)
  if(iter<=5){
    indx<-3
  }else{
    if(iter<=10){
      indx<-c(3,7)
    }else{
      if(iter<=15)
        indx<-c(3,7,12)
    }
  }
  points(indx,mat[indx,1],pch="1",cex=1.5)
  leg<-"Train"
  if(test && k>1){
     if(tflag)
       title("Training And Testing Error")
     matlines(1:iter,mat[,odds[-1]],col=cols[odds[-1]],lty=1)
     points(rep(indx,length(odds[-1])),as.vector(mat[indx,odds[-1]]),
            pch=paste(sort(rep(2:k,length(indx)))),cex=1.5)
     leg=c("Train",paste("Test",1:(k-1),sep=""))
  }else{
    if(tflag)
      title("Training Error")
  }
  legend(par()$usr[1],par()$usr[4],leg,pch=paste(1:k))  
  if(kappa){
    ## Second panel: errs stores 1-kappa, so flip it to kappa accuracy.
    odds<-odds+1
    mat[,odds]=1-mat[,odds]
    vals<-range(mat[,odds])
    
    plot(mat[,2],xlab=paste("Iteration",1,"to",iter),ylab="Kappa Accuracy",
         ylim=vals,cex.lab=1,cex.main=1.2,type="l",col=cols[2],lwd=1)
    axis(1,at=iter,font=2)
    points(indx,mat[indx,2],pch="1",cex=1.5)
    leg="Train"
    if(test && k>1){
      if(tflag)
        title("Training And Testing Kappas")
      matlines(1:iter,mat[,odds[-1]],col=cols[odds[-1]],lty=1)
      points(rep(indx,length(odds[-1])),as.vector(mat[indx,odds[-1]]),
             pch=paste(sort(rep(2:k,length(indx)))),cex=1.5)
      leg=c("Train",paste("Test",1:(k-1),sep=""))
    }else{
      if(tflag)
        title("Training Kappa")
    }
      legend(par()$usr[1],par()$usr[4],leg,pch=paste(1:k))  
  }
}
"predict.ada" <-
function(object,newdata=NULL,type=c("vector","prob","both","F"),n.iter=NULL,...){
  ## Predict from an ada fit: class labels ("vector"), class probabilities
  ## ("prob"), both ("both"), or the raw ensemble scores ("F"), optionally
  ## using only the first n.iter trees.
  if(!inherits(object,"ada")){
    stop("Error:  Object is not of class ada")
  }
  if(missing(type)){
    type="vector"
  }
  if(type!="vector" & type!="prob" & type!="both" & type!="F"){
    warning(paste("type=",type," is undefined:  default is 'vector'..  "))
    type="vector"
  }
  ## NOTE(review): newdata has a NULL default yet is required here, so the
  ## default is never usable — confirm whether that is intended.
  if(missing(newdata)){
    stop("Error: Arguement newdata is missing and must be supplied\n")
  }
  iter=object$iter
  if(!is.null(n.iter)){
    if(n.iter<iter)
      iter=n.iter
  }
  lev<-levels(as.factor(object$actual))
  const<-2   # scale factor for the score-to-probability mapping below
 
  ## Score each tree on newdata, weight by its alpha, and sum into the
  ## ensemble score F(x).
  f<-object$model$lossObj$predict.type
  tmp=sapply(1:iter,function(i)f(f=object$model$trees[[i]],dat=newdata))
  tmp=t(t(tmp)*object$model$alpha)
  tmp<-apply(tmp,1,sum)
  fit<-as.vector(sign(tmp))
  fit<-as.factor(fit)
  ## Break exact zero scores with a fair coin.
  a1=(fit==0)
  if(sum(a1)>0)
    fit[a1]<-sample(c(-1,1),sum(a1),TRUE,c(.5,.5))
  ## Relabel the -1/1 factor with the original response levels.
  ## NOTE(review): if every prediction has the same sign the factor has a
  ## single level and this relabelling could attach the wrong label —
  ## confirm against the callers.
  attr(fit,"levels")<-lev
  if(type=="vector")
    return(fit)
   ## Map a score to a pair of unnormalised class weights; the larger the
   ## |score|, the closer the normalised pair below gets to (0,1).
   cal<-function(x,const){
     if(x>0)
       return(c(exp(-const*x),1))
     return(c(1,exp(const*x)))
   }
  probs<-t(sapply(tmp,cal,const=const))
  probs<-probs/apply(probs,1,sum)
  if(type=="prob")
    return(probs)
  if(type=="F")
    return(tmp)
  return(list(class=fit,probs=probs))
}

"print.ada" <-
function(x,...){
  ## Compact printout of an ada fit: the call, loss/method/iterations,
  ## final confusion matrix, training error, out-of-bag error, and the
  ## error-minimising iteration for each tracked error column.
  ## (fixed: removed an unused kapstat() helper and unused local `g`;
  ## the "\  Iteration" string contained the invalid escape "\ ")
  if(!is.null(cl <- x$call)) {
    names(cl)[2] <- ""
    cat("Call:\n")
    dput(cl)
  }
  
  tab=x$confusion
  errm=1-sum(diag(tab))/length(x$actual)
  
  cat("\nLoss:", x$model$lossObj$loss,"Method:", x$model$lossObj$type,"  Iteration:", x$iter,"\n")
  cat("\nFinal Confusion Matrix for Data:\n")
  print(tab)
  cat("\nTrain Error:", round(errm, digits=3),"\n")
  ## Skip the first 5 iterations when locating the best out-of-bag error
  ## so the early, noisy estimates do not dominate.
  if(x$iter<=5){
    j<-which.min(x$model$oob.str$oobm.err)
  }else{
    j<-which.min(x$model$oob.str$oobm.err[-c(1:5)])+5
  }
  cat("\nOut-Of-Bag Error: ",round(x$model$oob.str$oobm.err[j],3)," iteration=",j,"\n")
  cat("\nAdditional Estimates of number of iterations:\n\n")
  ## Report the iteration minimising each (error, kappa) column.
  errs<-as.data.frame(x$model$errs)
  k<-dim(errs)[2]/2
  names(errs)<-paste(names(errs),sort(c(1:k,1:k)),sep="")
  est.m<-apply(errs,2,which.min)
  print(est.m)
  cat("\n")
  invisible(x)
}

"summary.ada" <-
function(object,n.iter=NULL,...){
  ## Print accuracy and kappa for the training data (and any stored test
  ## sets) at iteration n.iter (defaults to the final iteration).
  ## (fixed: removed an unused kapstat() helper and unused locals g/tab;
  ## the "\  Iteration" string contained the invalid escape "\ ")
  if(!inherits(object,"ada")){
    stop("Error:  Object is not of class ada")
  }
  if(!is.null(cl <- object$call)) {
    names(cl)[2] <- ""
    cat("Call:\n")
    dput(cl)
  }
  iter=object$iter
  if(!is.null(n.iter)){
    if(n.iter<iter)
      iter=n.iter
  }
  ## Row `iter` of errs holds (train err, train 1-kappa[, test err, ...]).
  errm=object$model$errs[iter,]
  cat("\nLoss:",object$model$lossObj$loss,"Method:", object$model$lossObj$type,"  Iteration:", iter,"\n")

  cat("\nTraining Results\n")

  cat("\nAccuracy:", round(1-errm[1],digits=3),
      "Kappa:",round(1-errm[2], digits=3) ,"\n\n")
  k<-length(errm)/2
  if(k>1){
    l<-3
    cat("Testing Results\n")
    for(i in 2:k){
      cat("\nAccuracy:", round(1-errm[l], digits=3))
      l<-l+1
      cat(" Kappa:", round(1-errm[l], digits=3),"\n")
      l<-l+1
    }
    cat("\n\n")
  }
}

"update.ada" <-
function(object,x,y,test.x,test.y=NULL, n.iter,...){
  ## Grow an existing ada fit out to n.iter boosting iterations, reusing
  ## the stored trees, weights and rpart control settings.
  if(!inherits(object,"ada")){
    stop("Update routine is for an ada object only")
  }
  if(missing(y) || missing(x)){
    stop("This procedure requires a response and a set of variables")
  }
  if(missing(n.iter)){
	stop("The new number of iterations must be specified")
  } 
  ## Nothing to do if the model already has at least n.iter iterations.
  if(n.iter<=object$iter){
    return(object)
  }
  lev=levels(as.factor(y))
  x=as.data.frame(x)
  test.dat=FALSE
  if(!is.null(test.y)){
    tlev=NULL
    if(!missing(test.x)){
      tlev=levels(as.factor(test.y))
      test.x=as.data.frame(test.x)
      test.dat=TRUE
    }
    if(length(tlev)<1)
      warning("test.x must be the testing data and the response must have 2 levels")
  }else{
    test.x=NULL
  }
  ## Internally the two-level response is coded -1/+1.
  ny<-c(-1,1)[as.numeric(as.factor(y))]
  if(test.dat)
    test.y<-c(-1,1)[as.numeric(as.factor(test.y))]
  ## Reuse the rpart control settings from the original fit.
  control=as.list(object$model$trees[[1]]$control)
  lossObj=object$model$lossObj
  ## (fixed: the original had a duplicated `result= result =` assignment)
  result=ada.machine(x,ny,test.x,test.y,n.iter,lossObj=lossObj,oldObj=object,control=control)
  g=as.factor(lev[as.numeric(as.factor(sign(result$F[[1]])))])
  tab=table(as.factor(y),g,dnn=c("True value","Final Prediction"))
  obj=structure(list(model=result,fit=g,call=object$call,confusion=tab,iter=n.iter,
    actual=as.vector(y),nu=object$nu,dim=object$dim,bag.frac=object$bag.frac,names=object$names),class="ada")
  obj
}

"varplot" <-
function(x,plot.it=TRUE,type=c("none","scores"),max.var.show=30,...){
  ## Variable importance for an ada fit: for each predictor, the
  ## 1/iter-scaled root of the summed squared split improvements across
  ## all trees, optionally drawn as a dotchart and/or returned as a
  ## named score vector (type="scores").
  ## (fixed: inherits() instead of class()== comparison; removed unused
  ## locals `vec` pre-initialisation and `n`)
  if(!inherits(x,"ada")){
    stop("Object must be of type 'ada'")
  }
  if(missing(type)){
    type="none"
  }
  iter<-x$iter
  nm<-x$names
  p=x$dim[2]
  ## Per-tree importance: squared split improvements mapped onto the full
  ## predictor vector (all zeros for a tree that never split).
  g1<-function(i,obj){
    if(dim(obj[[i]]$frame)[1]<2){
      return(rep(0,p))
    }
    imp<-obj[[i]]$splits[,3]^2
    vals<-match(row.names(obj[[i]]$splits),nm)
    vec=rep(0,p)
    vec[vals]<-imp
    vec
  }
  vec<-1/iter*sqrt(apply(sapply(1:iter,function(i)g1(i,x$model$trees)),1,sum))
 
  vars<- order(vec,decreasing=TRUE)
  max.v=max.var.show
  if(p<max.v)
    max.v=p
  if(plot.it==TRUE){
    dotchart(vec[vars[max.v:1]],labels=nm[vars[max.v:1]],xlab="Score",main="Variable Importance Plot")
  }
  if(type=="scores"){
    vars=vars[1:max.v]
    t1<-vec[vars]
    attr(t1,"names")<-nm[vars]
    return(t1)
 }
}

