[r-cran-eco] 07/30: Import Upstream version 3.0-2

Andreas Tille tille at debian.org
Thu Sep 7 07:20:58 UTC 2017


This is an automated email from the git hooks/post-receive script.

tille pushed a commit to branch master
in repository r-cran-eco.

commit 427238c2d8e913b323caa43034151806fee42aa3
Author: Andreas Tille <tille at debian.org>
Date:   Thu Sep 7 08:59:21 2017 +0200

    Import Upstream version 3.0-2
---
 DESCRIPTION             |   31 +-
 NAMESPACE               |    9 +-
 R/checkdata.R           |   23 +-
 R/cov.eco.R             |   24 -
 R/ecoBD.R               |   10 +-
 R/ecoCV.R               |  110 +++
 R/ecoRC.R               |   76 ++
 R/emeco.R               |  183 +++++
 R/eminfo.R              |  583 ++++++++++++++
 R/print.ecoML.R         |   38 +
 R/print.summary.eco.R   |   17 +-
 R/print.summary.ecoML.R |   37 +
 R/print.summary.ecoNP.R |   25 +-
 R/summary.eco.R         |   29 +-
 R/summary.ecoML.R       |  128 +++
 R/summary.ecoNP.R       |   30 +-
 data/forgnlit30.txt     |   49 ++
 data/forgnlit30c.txt    | 1977 +++++++++++++++++++++++++++++++++++++++++++++++
 data/housep88.txt       |  425 ++++++++++
 data/reg.txt            |  552 ++++++-------
 data/wallace.txt        | 1010 ++++++++++++++++++++++++
 eco.pdf                 |  Bin 192714 -> 0 bytes
 man/census.Rd           |    2 +-
 man/eco.Rd              |   55 +-
 man/ecoBD.Rd            |    4 +-
 man/ecoML.Rd            |  238 ++++++
 man/ecoNP.Rd            |   65 +-
 man/forgnlit30.Rd       |   37 +
 man/forgnlit30c.Rd      |   39 +
 man/housep88.Rd         |   44 ++
 man/predict.eco.Rd      |   15 +-
 man/predict.ecoNP.Rd    |   13 +-
 man/summary.eco.Rd      |   11 +-
 man/summary.ecoML.Rd    |   87 +++
 man/summary.ecoNP.Rd    |    8 +-
 man/wallace.Rd          |   37 +
 src/fintegrate.c        |  352 +++++++++
 src/fintegrate.h        |   23 +
 src/gibbsBase.c         |    7 +-
 src/gibbsBase2C.c       |  204 +++++
 src/gibbsBaseRC.c       |  285 +++++++
 src/gibbsDP.c           |   14 +-
 src/gibbsEM.c           | 1434 ++++++++++++++++++++++++++++++++++
 src/gibbsXBase.c        |    8 +-
 src/gibbsXDP.c          |    8 +-
 src/gibbsZBase.c        |  408 ++++++++++
 src/macros.h            |   82 ++
 src/preBaseX.c          |    8 +-
 src/preDP.c             |    8 +-
 src/preDPX.c            |    8 +-
 src/rand.c              |   98 ++-
 src/rand.h              |    9 +-
 src/subroutines.c       |  277 ++++++-
 src/subroutines.h       |    7 +-
 src/vector.c            |   57 +-
 55 files changed, 8763 insertions(+), 555 deletions(-)

diff --git a/DESCRIPTION b/DESCRIPTION
index 8b886d3..61abba2 100644
--- a/DESCRIPTION
+++ b/DESCRIPTION
@@ -1,18 +1,27 @@
 Package: eco
-Version: 2.2-2
-Date: 2006-09-23
-Title: R Package for Fitting Bayesian Models of Ecological Inference in 2x2 Tables
+Version: 3.0-2
+Date: 2007-1-11
+Title: R Package for Ecological Inference in 2x2 Tables
 Author: Kosuke Imai <kimai at princeton.edu>,
-        Ying Lu <ylu at iq.harvard.edu>.
+        Ying Lu <ying.lu at colorado.edu>,
+	Aaron Strauss <abstraus at Princeton.EDU>.
 Maintainer: Kosuke Imai <kimai at princeton.edu>
-Depends: R (>= 2.0), MASS, utils
-Description: eco is a publicly available R package that fits the
-             parametric and nonparametric Bayesian models for
-             ecological inference in 2x2 tables. The models are fit
-             using the Markov chain Monte Carlo algorithms that are
-             described in Imai and Lu (2004, 2005).
+Depends: R (>= 2.0), MASS
+Description: eco is a publicly available R package that implements the
+  Bayesian and likelihood methods proposed in Imai, Lu, and Strauss (2006)
+  for ecological inference in $2 \times 2$ tables as well as the
+  method of bounds introduced by Duncan and Davis (1953). The package
+  fits both parametric and nonparametric models using either the
+  Expectation-Maximization algorithms (for likelihood models) or the
+  Markov chain Monte Carlo algorithms (for Bayesian models).  For all
+  models, the individual-level data can be directly incorporated into
+  the estimation whenever such data are available. Along with
+  in-sample and out-of-sample predictions, the package also provides a
+  functionality which allows one to quantify the effect of data
+  aggregation on parameter estimation and hypothesis testing under the
+  parametric likelihood models.
 LazyLoad: yes
 LazyData: yes
 License: GPL (version 2 or later)
 URL: http://imai.princeton.edu/research/eco.html
-Packaged: Sat Sep 23 14:31:01 2006; kimai
+Packaged: Thu Jan 11 08:41:29 2007; kimai
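
The DESCRIPTION above advertises two fitting routes: MCMC for the Bayesian
models (eco, ecoNP) and EM/SEM for the likelihood models (ecoML, new in this
upstream version). A minimal sketch of how the two might be called on the
forgnlit30 data shipped in this version (its columns Y and X appear in
data/forgnlit30.txt further down); exact arguments and run lengths are
illustrative only:

    library(eco)
    data(forgnlit30)                              # columns Y, X, W1, W2, ICPSR
    fit.bayes <- eco(Y ~ X, data = forgnlit30)    # parametric Bayesian fit via MCMC
    fit.ml    <- ecoML(Y ~ X, data = forgnlit30)  # new in 3.0-2: ML fit via EM + SEM
    summary(fit.ml)                               # EM estimates, SEM std. errors, frac. missing info
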
diff --git a/NAMESPACE b/NAMESPACE
index 63640e8..501ff0f 100644
--- a/NAMESPACE
+++ b/NAMESPACE
@@ -2,14 +2,16 @@ useDynLib(eco)
 
 importFrom(MASS, mvrnorm)
 
-export(
-       eco, 	
+export(eco, 	
        ecoBD,
        ecoNP,
+       ecoML,
        summary.eco,
        summary.ecoNP,
+       summary.ecoML,
        print.summary.eco,
        print.summary.ecoNP,
+       print.summary.ecoML,
        predict.eco,
        predict.ecoX,
        predict.ecoNP,
@@ -25,10 +27,13 @@ S3method(predict, ecoNP)
 S3method(predict, ecoNPX)
 S3method(summary, eco)
 S3method(summary, ecoNP)
+S3method(summary, ecoML)
 S3method(summary, predict.eco)
 S3method(print, eco)
+S3method(print, ecoML)
 S3method(print, ecoBD)
 S3method(print, summary.eco)
 S3method(print, summary.ecoNP)
+S3method(print, summary.ecoML)
 S3method(print, summary.predict.eco)
 
diff --git a/R/checkdata.R b/R/checkdata.R
index 683f070..973eedd 100644
--- a/R/checkdata.R
+++ b/R/checkdata.R
@@ -32,19 +32,18 @@ checkdata <- function(X,Y, supplement, ndim) {
    res$d <- cbind(res$X.use, res$Y.use)
 
    ## check survey data
-  
    if (any(supplement <0) || any(supplement >1)) 
-      stop("survey data have to be between 0 and 1.")  
-   if ((dim(supplement)[2] != ndim) && (length(supplement)>0))
-      stop("when context=TRUE, use n by 3 otherwise use n by 2 matrix\ 
-           for survey data, when context=TRUE")
-   if (is.null(supplement)) 
-      res$survey.samp <- res$survey.data <- res$survey.yes <- 0
-   else {
-      res$survey.samp <- length(supplement[,1])
-      res$survey.data <- as.matrix(supplement)
-      res$survey.yes <- 1
-   }
+      stop("survey data have to be between 0 and 1.")
+   if(is.null(supplement))
+     res$survey.samp <- res$survey.data <- res$survey.yes <- 0
+   else
+     if (dim(supplement)[2] != ndim)
+       stop("when context=TRUE, use n by 3. Otherwise use n by 2 matrix for survey data")
+     else {
+       res$survey.samp <- length(supplement[,1])
+       res$survey.data <- as.matrix(supplement)
+       res$survey.yes <- 1
+     }
 
    return(res)
 
diff --git a/R/cov.eco.R b/R/cov.eco.R
deleted file mode 100644
index d59bab6..0000000
--- a/R/cov.eco.R
+++ /dev/null
@@ -1,24 +0,0 @@
-cov.eco <- function(object, subset = NULL, ...) {
-  if (is.null(subset))
-    subset <- 1:nrow(object$mu)
-  else if (max(subset) > nrow(object$mu))
-    stop(paste("invalid input for `subset.' only", nrow(mu), "draws are stored."))
-
-  p <- ncol(object$mu)
-  n <- length(subset)
-  Sigma <- array(0, c(p, p, n))
-  cov <- object$Sigma
-  for (i in 1:n) {
-    count <- 1
-    for (j in 1:p) {
-      Sigma[j,j:p,i] <- cov[subset[i],count:(count+p-j)]
-      count <- count + p - j + 1
-    }
-    diag(Sigma[,,i]) <- diag(Sigma[,,i]/2)
-    Sigma[,,i] <- Sigma[,,i] + t(Sigma[,,i])
-  }
-  if (n > 1)
-    return(Sigma)
-  else
-    return(Sigma[,,1])  
-}
diff --git a/R/ecoBD.R b/R/ecoBD.R
index e9dbdcf..ce2835d 100644
--- a/R/ecoBD.R
+++ b/R/ecoBD.R
@@ -70,6 +70,8 @@ ecoBD <- function(formula, data = parent.frame(), N=NULL){
     dimnames(Wmin) <- dimnames(Wmax) <-
       list(if (is.null(rownames(X))) 1:n.obs else rownames(X),
            rlab, clab)
+    colnames(X) <- clab
+    colnames(Y) <- rlab
     if (!is.null(N)) {
       Nmin <- Nmax <- array(NA, c(n.obs, R, C), dimnames =
                             dimnames(Wmin))
@@ -88,13 +90,13 @@ ecoBD <- function(formula, data = parent.frame(), N=NULL){
                                list(dimnames(Wmin)[[2]], dimnames(Wmin)[[3]]))
   if (is.null(N))
     for (j in 1:C) {
-      aggWmin[,j] <- apply(Wmin[,,j]*X[,j], 2, mean)
-      aggWmax[,j] <- apply(Wmax[,,j]*X[,j], 2, mean)
+      aggWmin[,j] <- apply(Wmin[,,j], 2, weighted.mean, X[,j])
+      aggWmax[,j] <- apply(Wmax[,,j], 2, weighted.mean, X[,j])
     }
   else
     for (j in 1:C) {
-      aggWmin[,j] <- apply(Wmin[,,j]*X[,j], 2, weighted.mean, N)
-      aggWmax[,j] <- apply(Wmax[,,j]*X[,j], 2, weighted.mean, N)
+      aggWmin[,j] <- apply(Wmin[,,j], 2, weighted.mean, X[,j]*N)
+      aggWmax[,j] <- apply(Wmax[,,j], 2, weighted.mean, X[,j]*N)
     }
 
   if (!is.null(Nmin) & !is.null(Nmax)) {
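
The ecoBD hunk above switches the aggregate bounds from a plain mean of
X-scaled columns to a weighted mean of the columns themselves, with X[,j]
(or X[,j]*N) as the weights. A self-contained illustration of the difference,
using made-up numbers:

    ## weighted.mean(values, weights) = sum(values * weights) / sum(weights)
    w <- c(0.2, 0.8, 0.5)        # hypothetical column of Wmin[,,j] for three units
    x <- c(0.1, 0.6, 0.3)        # the corresponding weights X[,j]
    mean(w * x)                  # old aggregation: 0.65 / 3   = 0.2167
    weighted.mean(w, x)          # new aggregation: 0.65 / 1.0 = 0.65
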
diff --git a/R/ecoCV.R b/R/ecoCV.R
new file mode 100644
index 0000000..1b04c9e
--- /dev/null
+++ b/R/ecoCV.R
@@ -0,0 +1,110 @@
+ecoX <- function(formula, Z, supplement = NULL, data = parent.frame(), 
+                 nu0 = 4, S0 = 10, beta0 = 0, A0 = 100,		
+                 grid = FALSE, parameter = FALSE,
+                 n.draws = 5000, burnin = 0, thin = 5, verbose = TRUE){ 
+
+  ## checking inputs
+  if (burnin >= n.draws)
+    stop("Error: n.draws should be larger than burnin")
+  
+  call <- match.call()
+
+  ff <- as.formula(paste(call$Y, "~ -1 +", call$X))
+  if (is.matrix(eval.parent(call$data)))
+    data <- as.data.frame(data)
+  X <- model.matrix(ff, data)
+  Y <- model.response(model.frame(ff, data=data))
+  
+  ##survey data
+  if (length(supplement) == 0) {
+    survey.samp <- 0
+    survey.data <- 0
+    survey.yes<-0
+  }
+  else {
+    survey.samp <- length(supplement[,1])
+    survey.data <- as.matrix(supplement)
+    survey.yes<-1
+  }
+  
+  ind<-c(1:length(X))
+  X1type<-0
+  X0type<-0
+  samp.X1<-0
+  samp.X0<-0
+  X1.W1<-0
+  X0.W2<-0
+  
+  ##Xtype x=1
+  X1.ind<-ind[along=(X==1)]
+  if (length(X[X!=1])<length(X)){
+    X1type<-1
+    samp.X1<-length(X1.ind)
+    X1.W1<-Y[X1.ind]
+  }
+  
+  ##Xtype x=0
+  X0.ind<-ind[along=(X==0)]
+  if (length(X[X!=0])<length(X)){
+    X0type<-1
+    samp.X0<-length(X0.ind)
+    X0.W2<-Y[X0.ind]
+  }
+  
+  XX.ind<-setdiff(ind, union(X0.ind, X1.ind))
+  X.use<-X[XX.ind]
+  Y.use<-Y[XX.ind]
+
+  order.old<-order(c(XX.ind, X0.ind, X1.ind))
+  
+  ## fitting the model
+  n.samp <- length(Y.use)	 
+  d <- cbind(X.use, Y.use)
+
+  n.a <- floor((n.draws-burnin)/thin)
+  n.par <- n.a
+  n.w <- n.a * (n.samp+samp.X1+samp.X0) 
+  unit.a <- 1
+  unit.par <- 1
+  unit.w <- (n.samp+samp.X1+samp.X0) 	
+  Zmat<-Z%x%diag(1, 2)
+  print(Zmat)
+  Zp<-dim(Zmat)[2]
+  cat("Zp", Zp)
+  if (is.null(beta0)) beta0<-rep(0, Zp)
+  if (is.null(A0)) A0<-diag(0.01, Zp)
+  print(beta0)
+  print(A0)
+  n.a.b<-n.a*Zp
+  n.a.V<-n.a*3
+  res <- .C("cBaseecoZ", as.double(d), as.double(Zmat), as.integer(Zp),  
+            as.integer(n.samp), as.integer(n.draws), as.integer(burnin), as.integer(thin),
+            as.integer(verbose),
+            as.integer(nu0), as.double(S0),
+            as.double(beta0), as.double(A0),
+            as.integer(survey.yes), as.integer(survey.samp), as.double(survey.data),
+            as.integer(X1type), as.integer(samp.X1), as.double(X1.W1),
+            as.integer(X0type), as.integer(samp.X0), as.double(X0.W2),
+            as.integer(predict), as.integer(parameter), 
+            pdSBeta=double(n.a.b),
+            pdSSigma=double(n.a.V),
+            pdSW1=double(n.w), pdSW2=double(n.w), 
+            pdSWt1=double(n.w), pdSWt2=double(n.w), PACKAGE="eco")
+  
+  if (parameter) {
+    beta.post <- matrix(res$pdSBeta, n.a, Zp, byrow=TRUE) 
+    Sigma.post <- matrix(res$pdSSigma, n.a, 3, byrow=TRUE)
+    colnames(Sigma.post) <- c("Sigma11", "Sigma12", "Sigma22")
+  }
+  W1.post <- matrix(res$pdSW1, n.a, unit.w, byrow=TRUE)[,order.old]
+  W2.post <- matrix(res$pdSW2, n.a, unit.w, byrow=TRUE)[,order.old]
+  
+  res.out <- list(model="Normal prior", burnin=burnin, thin = thin, X=X, Y=Y,
+                  nu0=nu0, A0=A0, beta0=beta0, S0=S0, call=call, beta.post=beta.post,
+                  Sigma.post=Sigma.post, W1.post=W1.post, W2.post=W2.post)
+
+  class(res.out) <- c("ecoCV", "eco")
+  return(res.out)
+}
+
+
diff --git a/R/ecoRC.R b/R/ecoRC.R
new file mode 100644
index 0000000..d51da88
--- /dev/null
+++ b/R/ecoRC.R
@@ -0,0 +1,76 @@
+ecoRC <- function(formula, data = parent.frame(),
+                  mu0 = 0, tau0 = 2, nu0 = 4, S0 = 10, mu.start = 0,
+                  Sigma.start = 1, reject = TRUE, maxit = 10e5,
+                  parameter = TRUE,
+                  n.draws = 5000, burnin = 0, thin = 0, verbose = FALSE){ 
+  
+  ## checking inputs
+  if (burnin >= n.draws)
+    stop("n.draws should be larger than burnin")
+  mf <- match.call()
+
+  ## getting X, Y, and N
+  tt <- terms(formula)
+  attr(tt, "intercept") <- 0
+  if (is.matrix(eval.parent(mf$data)))
+    data <- as.data.frame(data)
+  X <- model.matrix(tt, data)
+  n.samp <- nrow(X)
+  C <- ncol(X)
+  Y <- matrix(model.response(model.frame(tt, data = data)),
+              nrow = n.samp)
+  R <- ncol(Y)
+
+  ## fitting the model
+  n.store <- floor((n.draws-burnin)/(thin+1))
+  tmp <- ecoBD(formula, data=data)
+
+  res.out <- list(call = mf, X = X, Y = Y, Wmin = tmp$Wmin, Wmax = tmp$Wmax)
+  if (R == 1) {
+    mu0 <- rep(mu0, C)
+    S0 <- diag(S0, C)
+    mu.start <- rep(mu.start, C)
+    Sigma.start <- diag(Sigma.start, C)
+    res <- .C("cBase2C", as.double(X), as.double(Y),
+              as.double(tmp$Wmin[,1,]), as.double(tmp$Wmax[,1,]),
+              as.integer(n.samp), as.integer(C), as.integer(reject),
+              as.integer(maxit), as.integer(n.draws), as.integer(burnin),
+              as.integer(thin+1), as.integer(verbose),
+              as.integer(nu0), as.double(tau0),
+              as.double(mu0), as.double(S0), as.double(mu.start),
+              as.double(Sigma.start),
+              as.integer(parameter), pdSmu = double(n.store*C),
+              pdSSigma = double(n.store*C*(C+1)/2),
+              pdSW = double(n.store*n.samp*C), PACKAGE="eco")
+    res.out$mu <- matrix(res$pdSmu, n.store, C, byrow=TRUE)
+    res.out$Sigma <- matrix(res$pdSSigma, n.store, C*(C+1)/2, byrow=TRUE)
+    res.out$W <- array(res$pdSW, c(C, n.samp, n.store))
+  }
+  else {
+    mu0 <- rep(mu0, R-1)
+    S0 <- diag(S0, R-1)
+    mu.start <- matrix(rep(rep(mu.start, R-1), C), nrow = R-1, ncol = C,
+                       byrow = FALSE)
+    Sigma.start <- array(rep(diag(Sigma.start, R-1), C), c(R-1, R-1, C))
+    res <- .C("cBaseRC", as.double(X), as.double(Y[,1:(R-1)]),
+              as.double(tmp$Wmin[,1:(R-1),]), as.double(tmp$Wmax[,1:(R-1),]),
+              as.integer(n.samp), as.integer(C), as.integer(R),
+              as.integer(reject), as.integer(maxit),
+              as.integer(n.draws), as.integer(burnin),
+              as.integer(thin+1), as.integer(verbose),
+              as.integer(nu0), as.double(tau0),
+              as.double(mu0), as.double(S0),
+              as.double(mu.start), as.double(Sigma.start),
+              as.integer(parameter), pdSmu = double(n.store*C*(R-1)),
+              pdSSigma = double(n.store*C*(R-1)*R/2),
+              pdSW = double(n.store*n.samp*(R-1)*C), PACKAGE="eco")
+    res.out$mu <- array(res$pdSmu, c(R-1, C, n.store))
+    res.out$Sigma <- array(res$pdSSigma, c(R*(R-1)/2, C, n.store))
+    res.out$W <- array(res$pdSW, c(R-1, C, n.samp, n.store))
+  }
+  
+  class(res.out) <- c("ecoRC", "eco")
+  return(res.out)
+}
+
+
diff --git a/R/emeco.R b/R/emeco.R
new file mode 100644
index 0000000..9eabaeb
--- /dev/null
+++ b/R/emeco.R
@@ -0,0 +1,183 @@
+
+###
+### main function
+###
+ecoML <- function(formula, data = parent.frame(), N=NULL, supplement = NULL, 
+                  theta.start = c(0,0,1,1,0), fix.rho = FALSE,
+                  context = FALSE, sem = TRUE, epsilon=10^(-10),
+                  maxit = 1000, loglik = TRUE, hyptest=FALSE, verbose= TRUE) { 
+
+  
+  ## getting X and Y
+  mf <- match.call()
+  tt <- terms(formula)
+  attr(tt, "intercept") <- 0
+  if (is.matrix(eval.parent(mf$data)))
+    data <- as.data.frame(data)
+  X <- model.matrix(tt, data)
+  Y <- model.response(model.frame(tt, data=data))
+  
+  #n.var: total number of parameters involved in the estimation
+  #n.par: number of nonstatic parameters to be estimated through EM
+  #       (these parameters also require the SEM step)
+  #ndim: dimension of the multivariate normal distribution
+
+  ndim<-2
+  if (context) ndim<-3
+
+  n.var<-2*ndim+ ndim*(ndim-1)/2
+ 
+  n.par<-n.S<-n.var 
+  if (context) {
+      n.par<-n.var-2
+   }
+
+  r12<-NULL
+  if (fix.rho) 
+     r12<-theta.start[n.par]
+
+  if (!context & fix.rho) n.par<-n.par-1
+
+  flag<-as.integer(context)+2*as.integer(fix.rho)+2^2*as.integer(sem)
+
+  ##checking data
+  tmp <- checkdata(X, Y, supplement, ndim)
+  bdd <- ecoBD(formula=formula, data=data)
+  n <- tmp$n.samp+tmp$survey.samp+tmp$samp.X1+tmp$samp.X0
+  wcol<-ndim
+  if (context) {
+    wcol<-wcol-1
+  }
+  inSample.length <- wcol*tmp$n.samp
+
+  #if NCAR and the user did not provide a theta.start
+  if (context && (length(theta.start)==5) ) 
+    theta.start<-c(0,0,1,1,0,0,0)
+
+  ## Fitting the model via EM  
+  res <- .C("cEMeco", as.double(tmp$d), as.double(theta.start),
+            as.integer(tmp$n.samp),  as.integer(maxit), as.double(epsilon),
+            as.integer(tmp$survey.yes), as.integer(tmp$survey.samp), 
+            as.double(tmp$survey.data),
+            as.integer(tmp$X1type), as.integer(tmp$samp.X1), as.double(tmp$X1.W1),
+            as.integer(tmp$X0type), as.integer(tmp$samp.X0), as.double(tmp$X0.W2),
+            as.double(bdd$Wmin[,1,1]), as.double(bdd$Wmax[,1,1]),
+            as.integer(flag),as.integer(verbose),as.integer(loglik),as.integer(hyptest),
+            optTheta=rep(-1.1,n.var), pdTheta=double(n.var),
+            S=double(n.S+1),inSample=double(inSample.length),DMmatrix=double(n.par*n.par),
+            itersUsed=as.integer(0),history=double((maxit+1)*(n.var+1)),
+            PACKAGE="eco")
+
+  ##record results from EM
+  theta.em<-res$pdTheta
+  theta.fisher<-param.trans(theta.em, transformation="Fisher")
+  iters.em<-res$itersUsed
+  mu.log.em <- matrix(rep(NA,iters.em*ndim),ncol=ndim)
+  sigma.log.em <- matrix(rep(NA,iters.em*ndim),ncol=ndim)
+  loglike.log.em <- as.double(rep(NA,iters.em))
+  nrho<-length(theta.em)-2*ndim
+  rho.fisher.em <- matrix(rep(NA,iters.em*nrho),ncol=nrho)
+  for(i in 1:iters.em) {
+    mu.log.em[i,1:ndim]=res$history[(i-1)*(n.var+1)+(1:ndim)]
+    sigma.log.em[i,1:ndim]=res$history[(i-1)*(n.var+1)+ndim+(1:ndim)]
+     if (nrho!=0)
+    rho.fisher.em[i, 1:nrho]=res$history[(i-1)*(n.var+1)+2*ndim+(1:nrho)]
+    loglike.log.em[i]=res$history[(i-1)*(n.var+1)+2*ndim+nrho+1]
+  }
+
+  ## In sample prediction of W
+  W <- matrix(rep(NA,inSample.length),ncol=wcol)
+  for (i in 1:tmp$n.samp)
+    for (j in 1:wcol)
+      W[i,j]=res$inSample[(i-1)*2+j]
+
+  ## SEM step
+  iters.sem<-0
+
+   suff.stat<-res$S
+  if (context)
+      {
+     suff.stat<-rep(0,(n.var+1))
+         suff.stat[1]<-mean(logit(c(X,supplement[,3])))
+         suff.stat[2:3]<-res$S[1:2]
+         suff.stat[4]<-mean((logit(c(X, supplement[,3])))^2)
+         suff.stat[5:6]<-res$S[3:4]
+         suff.stat[7:8]<-res$S[6:7]
+         suff.stat[9]<-res$S[5]
+         suff.stat[10]<-res$S[8]
+      }
+
+
+  if (sem) 
+  {
+
+    DM <- matrix(rep(NA,n.par*n.par),ncol=n.par)
+
+    res <- .C("cEMeco", as.double(tmp$d), as.double(theta.start),
+              as.integer(tmp$n.samp),  as.integer(maxit), as.double(epsilon),
+              as.integer(tmp$survey.yes), as.integer(tmp$survey.samp), 
+              as.double(tmp$survey.data),
+              as.integer(tmp$X1type), as.integer(tmp$samp.X1), as.double(tmp$X1.W1),
+              as.integer(tmp$X0type), as.integer(tmp$samp.X0), as.double(tmp$X0.W2),
+              as.double(bdd$Wmin[,1,1]), as.double(bdd$Wmax[,1,1]),
+              as.integer(flag),as.integer(verbose),as.integer(loglik),as.integer(hyptest),
+              res$pdTheta, pdTheta=double(n.var), S=double(n.S+1),
+              inSample=double(inSample.length),DMmatrix=double(n.par*n.par),
+              itersUsed=as.integer(0),history=double((maxit+1)*(n.var+1)),
+              PACKAGE="eco")     
+  
+    iters.sem<-res$itersUsed
+    for(i in 1:n.par)
+      for(j in 1:n.par)
+        DM[i,j]=res$DMmatrix[(i-1)*n.par+j]
+
+
+} 
+
+ 
+   
+  if (!context) names(theta.em)<-c("u1","u2","s1","s2","r12")
+  if (context) names(theta.em)<-c("ux","u1","u2","sx","s1","s2","r1x","r2x","r12")
+
+
+
+  ## output
+  res.out<-list(call = mf, Y = Y, X = X, N = N, 
+                fix.rho = fix.rho, context = context, sem=sem, epsilon=epsilon,
+        theta.em=theta.em, r12=r12, 
+               sigma.log = theta.fisher[(ndim+1):(2*ndim)], suff.stat = suff.stat[1:n.S],
+                loglik = res$S[n.S+1], iters.em = iters.em, 
+                iters.sem = iters.sem, mu.log.em = mu.log.em, 
+                sigma.log.em = sigma.log.em,
+                rho.fisher.em = rho.fisher.em, loglike.log.em = loglike.log.em,
+                W = W)
+  
+  if (sem) {
+    res.out$DM<-DM
+#print(dim(data))
+# n<-dim(data)[1]
+if (!is.null(supplement)) n<-n+dim(supplement)[1]
+#cat("n2=", n,"\n")
+
+ res.info<- ecoINFO(theta.em=res.out$theta.em, suff.stat=res.out$suff.stat, DM=res.out$DM, context=context, fix.rho=fix.rho, sem=sem, r12=res.out$r12, n=n)
+
+    res.out$DM<-res.info$DM
+    res.out$Icom<-res.info$Icom
+    res.out$Iobs<-res.info$Iobs
+    res.out$Fmis<-res.info$Fmis
+    res.out$Vobs.original<-res.info$Vobs.original
+    res.out$Vobs<-res.info$Vobs
+    res.out$Iobs<-res.info$Iobs
+    res.out$VFmis<-res.info$VFmis
+    res.out$Icom.trans<-res.info$Icom.trans
+    res.out$Iobs.trans<-res.info$Iobs.trans
+    res.out$Fmis.trans<-res.info$Fmis.trans
+    res.out$Imiss<-res.info$Imiss
+    res.out$Ieigen<-res.info$Ieigen
+
+ res.out$Iobs<-res.info$Iobs
+}
+
+  class(res.out) <- "ecoML"
+  return(res.out)
+}
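
Two implementation details of the new ecoML above: the three model switches are
bit-packed into the single flag argument of the C routine cEMeco, and when
sem = TRUE the second EM pass feeds ecoINFO, whose output (observed information,
fractions of missing information) is attached to the returned object. A short
sketch; the fit call is commented out because it is illustrative only:

    ## flag encoding used above: bit 1 = context, bit 2 = fix.rho, bit 3 = sem
    as.integer(FALSE) + 2 * as.integer(FALSE) + 4 * as.integer(TRUE)   # 4
    ## after a fit with sem = TRUE:
    # fit <- ecoML(Y ~ X, data = forgnlit30)
    # fit$Fmis   # fraction of missing information, 1 - diag(Iobs)/diag(Icom)
    # fit$Vobs   # symmetrized observed-data variance-covariance matrix
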
diff --git a/R/eminfo.R b/R/eminfo.R
new file mode 100644
index 0000000..d04045e
--- /dev/null
+++ b/R/eminfo.R
@@ -0,0 +1,583 @@
+##logit and invlogit transformation
+logit <- function(X) 
+  { 
+    Y<-log(X/(1-X))
+    Y
+   }
+
+invlogit <-function(Y) 
+   {
+     X<-exp(Y)/(1+exp(Y))
+     X
+   }
+
+####assuming theta.em
+##2 d: mu1, mu2, sig1, sig2, r12
+##3 d: mu3, mu1, mu2, sig3, sig1, sig2, r13, r23, r12
+param.pack<-function(theta.em, fix.rho=FALSE,r12=0, dim=ndim) 
+  {
+    mu<-rep(0, dim)
+    Sig<-matrix(0,dim, dim)
+
+    mu<-theta.em[1:dim]
+        
+    for (i in 1:dim) 
+      Sig[i,i]<-theta.em[dim+i]
+
+   if (!fix.rho) {
+      Sig[1,2]<-Sig[2,1]<-theta.em[2*dim+1]*sqrt(Sig[1,1]*Sig[2,2])
+      if (dim==3) {
+        Sig[1,3]<-Sig[3,1]<-theta.em[2*dim+2]*sqrt(Sig[1,1]*Sig[3,3])
+        Sig[2,3]<-Sig[3,2]<-theta.em[2*dim+3]*sqrt(Sig[2,2]*Sig[3,3])
+      }
+   }
+  if (fix.rho) {
+    if (dim==2)
+       Sig[1,2]<-Sig[2,1]<-r12*sqrt(Sig[1,1]*Sig[2,2])
+    if (dim==3) {
+      Sig[1,2]<-Sig[2,1]<-theta.em[2*dim+1]*sqrt(Sig[1,1]*Sig[2,2])
+      Sig[1,3]<-Sig[3,1]<-theta.em[2*dim+2]*sqrt(Sig[1,1]*Sig[3,3])
+      Sig[2,3]<-Sig[3,2]<-r12*sqrt(Sig[2,2]*Sig[3,3])
+   }
+  }
+  return(list(mu=mu, Sigma=Sig))
+}
+
+## transformation of BVN parameter into
+## Fisher scale or unit scale 
+## in 2 D, mu1, mu2, sigma1, sigma2, r12
+## in 3 D, mu3, mu1, mu2, sigma3, sigma1, sigma2, sigma31, sigma32, sigma12
+param.trans <-function(X, transformation="Fisher") {
+  p<-length(X) 
+  Y<-rep(0,p)
+
+  if (transformation=="Fisher") {
+    if (p<=5) {
+      Y[1:2]<-X[1:2]
+      Y[3:4]<-log(X[3:4])
+      if (p==5) 
+        Y[5]<-0.5*log((1+X[5])/(1-X[5]))
+     }
+
+     if (p>5) {
+       Y[1:3]<-X[1:3]
+       Y[4:6]<-log(X[4:6])
+       Y[7:8]<-0.5*log((1+X[7:8])/(1-X[7:8]))
+       if (p==9)
+         Y[9]<-0.5*log((1+X[9])/(1-X[9]))
+      }
+   }
+
+   if (transformation=="unitscale") {
+     if (p<=5) {
+    Y[1:2] <- invlogit(X[1:2])
+        Y[3:4] <- X[3:4]*exp(2*X[1:2])/(1+exp(X[1:2]))^4
+        if (p==5) 
+          Y[5] <- X[5]
+    }
+
+    if (p>5) {
+    Y[1:3]<-invlogit(X[1:3])
+        Y[4:6]<-X[4:6]*exp(2*X[4:6])/(1+exp(X[4:6]))^4
+        Y[7:8]<-X[7:8]
+        if (p==9)
+           Y[9]<-X[9]
+      }
+  }
+  return(Y)
+}
+
+vec<-function(mat) {
+  v<-as.vector(mat, mode="any")
+  v
+}
+
+tr<-function(mat) {
+  trace<-sum(diag(mat))
+  trace
+}
+## I_{com} 
+## the gradient function for the multivariate normal
+#du.theta and dSig.theta are the first derivatives of mu and Sigma
+#with respect to theta
+#du.theta[n.u, n.theta]
+#dSig.theta[n.u, n.u, n.theta]
+
+d1st.mvn<-function(mu,Sigma, fix.rho=FALSE) {
+   #r12, r13,r23 are internal here, 
+   # r12 doesn't correspond to cor(w1, w2) in the 3d case (instead, r12 => cor(W1, x))
+   d<-length(mu)
+   p<-d+d+d*(d-1)/2
+   u1<-mu[1]
+   u2<-mu[2]
+   s1<-Sigma[1,1]
+   s2<-Sigma[2,2]
+   r12<-Sigma[1,2]/sqrt(s1*s2)
+
+   if (d==3) {
+   u3<-mu[3]
+   s3<-Sigma[3,3]
+   r13<-Sigma[1,3]/sqrt(s1*s3)
+   r23<-Sigma[2,3]/sqrt(s2*s3)
+   }
+ 
+   if (fix.rho) p<-p-1
+ 
+    du.theta<-matrix(0,d,p)
+    for (j in 1:d) 
+      du.theta[j,j]<-1
+
+    dSig.theta<-array(0,c(d,d,p))
+ 
+      for (i in 1:d) 
+        dSig.theta[i,i,d+i]<-1
+
+      dSig.theta[1,2,d+1]<-dSig.theta[2,1,d+1]<-1/2*s1^(-1/2)*s2^(1/2)*r12
+      dSig.theta[1,2,d+2]<-dSig.theta[2,1,d+2]<-1/2*s2^(-1/2)*s1^(1/2)*r12
+      if (d==3) {
+        dSig.theta[1,3,d+1]<-dSig.theta[3,1,d+1]<-1/2*s1^(-1/2)*s3^(1/2)*r13
+        dSig.theta[1,3,d+3]<-dSig.theta[3,1,d+3]<-1/2*s3^(-1/2)*s1^(1/2)*r13
+        dSig.theta[2,3,d+2]<-dSig.theta[3,2,d+2]<-1/2*s2^(-1/2)*s3^(1/2)*r23
+        dSig.theta[2,3,d+3]<-dSig.theta[3,2,d+3]<-1/2*s3^(-1/2)*s2^(1/2)*r23
+      }
+    
+    if (!fix.rho) {
+        dSig.theta[1,2,2*d+1]<-dSig.theta[2,1,2*d+1]<-sqrt(s1*s2)
+        if (d==3) {
+          dSig.theta[1,3,2*d+2]<-dSig.theta[3,1,2*d+2]<-sqrt(s1*s3)
+          dSig.theta[2,3,2*d+3]<-dSig.theta[3,2,2*d+3]<-sqrt(s2*s3)
+        }
+     }
+     if (fix.rho) {
+        if (d==3) {
+          dSig.theta[1,3,2*d+1]<-dSig.theta[3,1,2*d+1]<-sqrt(s1*s3)
+          dSig.theta[2,3,2*d+2]<-dSig.theta[3,2,2*d+2]<-sqrt(s2*s3)
+        }
+     }
+    
+   return(list(du.theta=du.theta, dSig.theta=dSig.theta))
+}
+
+d2nd.mvn<-function(mu,Sigma,  fix.rho=FALSE) {
+   #r12, r13,r23 are internal here, 
+   # r12 doesn't correspond to cor(w1, w2) in the 3d case (instead, r12 => cor(W1, x))
+   d<-length(mu)
+   p<-d+d+d*(d-1)/2
+   u1<-mu[1]
+   u2<-mu[2]
+   s1<-Sigma[1,1]
+   s2<-Sigma[2,2]
+   r12<-Sigma[1,2]/sqrt(s1*s2)
+   if (d==3) {
+   u3<-mu[3]
+   s3<-Sigma[3,3]
+   r13<-Sigma[1,3]/sqrt(s1*s3)
+   r23<-Sigma[2,3]/sqrt(s2*s3)
+   }
+
+   if (fix.rho) p<-p-1
+
+   ddu.theta<-array(0,c(d,p,p))
+
+   ddSig.theta<-array(0,c(d,d,p,p))
+
+     ddSig.theta[1,2,d+1,d+1]<-ddSig.theta[2,1,d+1,d+1]<- -1/4*s1^(-3/2)*s2^(1/2)*r12
+     ddSig.theta[1,2,d+1,d+2]<-ddSig.theta[2,1,d+1,d+2]<- 1/4*s1^(-1/2)*s2^(-1/2)*r12
+     ddSig.theta[1,2,d+2,d+2]<-ddSig.theta[2,1,d+2,d+2]<- -1/4*s1^(1/2)*s2^(-3/2)*r12
+ 
+     if (d==3) {
+     ddSig.theta[1,3,d+1,d+1]<-ddSig.theta[3,1,d+1,d+1]<- -1/4*s1^(-3/2)*s3^(1/2)*r13
+     ddSig.theta[1,3,d+1,d+3]<-ddSig.theta[3,1,d+1,d+3]<- 1/4*s1^(-1/2)*s3^(-1/2)*r13
+
+     ddSig.theta[2,3,d+2,d+2]<-ddSig.theta[3,2,d+2,d+2]<- -1/4*s2^(-3/2)*s3^(1/2)*r23
+     ddSig.theta[2,3,d+2,d+3]<-ddSig.theta[3,2,d+2,d+3]<- 1/4*s2^(-1/2)*s3^(-1/2)*r23
+
+     ddSig.theta[1,3,d+3,d+3]<-ddSig.theta[3,1,d+3,d+3]<- -1/4*s1^(1/2)*s3^(-3/2)*r13
+     ddSig.theta[2,3,d+3,d+3]<-ddSig.theta[3,2,d+3,d+3]<- -1/4*s2^(1/2)*s3^(-3/2)*r23
+
+     }
+
+     if (!fix.rho) {
+       ddSig.theta[1,2,d+1,2*d+1]<-ddSig.theta[2,1,d+1,2*d+1]<- 1/2*s1^(-1/2)*s2^(1/2)
+       ddSig.theta[1,2,d+2,2*d+1]<-ddSig.theta[2,1,d+2,2*d+1]<- 1/2*s1^(1/2)*s2^(-1/2)
+
+       if (d==3) {
+         ddSig.theta[1,3,d+1,2*d+2]<-ddSig.theta[3,1,d+1,2*d+2]<- 1/2*s1^(-1/2)*s3^(1/2)
+         ddSig.theta[2,3,d+2,2*d+3]<-ddSig.theta[3,2,d+2,2*d+3]<- 1/2*s2^(-1/2)*s3^(1/2)
+         ddSig.theta[1,3,d+3,2*d+2]<-ddSig.theta[3,1,d+3,2*d+2]<- 1/2*s1^(1/2)*s3^(-1/2)
+         ddSig.theta[2,3,d+3,2*d+3]<-ddSig.theta[3,2,d+3,2*d+3]<- 1/2*s2^(1/2)*s3^(-1/2)
+     }
+   }
+    if (fix.rho) {
+       if (d==3) {
+
+       ddSig.theta[1,2,d+1,2*d+1]<-ddSig.theta[2,1,d+1,2*d+1]<- 1/2*s1^(-1/2)*s3^(1/2)
+       ddSig.theta[2,3,d+2,2*d+2]<-ddSig.theta[3,2,d+2,2*d+2]<- 1/2*s2^(-1/2)*s3^(1/2)
+         ddSig.theta[1,3,d+3,2*d+1]<-ddSig.theta[3,1,d+3,2*d+1]<- 1/2*s1^(1/2)*s3^(-1/2)
+         ddSig.theta[2,3,d+3,2*d+2]<-ddSig.theta[3,2,d+3,2*d+2]<- 1/2*s2^(1/2)*s3^(-1/2)
+     }
+   }
+
+      for (i in 1:(p-1)) 
+        for (j in (i+1):p) {
+         ddSig.theta[,,j,i]<-ddSig.theta[,,i,j]
+         ddu.theta[,j,i]<-ddu.theta[,i,j]
+      }
+
+      return(list(ddu.theta=ddu.theta, ddSig.theta=ddSig.theta))
+}
+
+##assuming the order of sufficient statistics
+## 2d, mean(W1), mean(W2), mean(W1^2) mean(W2^2), mean(W1W2)
+## 3d, mean(X), mean(W1), mean(W2), mean(X^2),mean(W1^2) mean(W2^2),
+##     mean(XW1), mean(XW2), mean(W1W2)
+
+suff<-function(mu, suff.stat,n) {
+
+   d<-length(mu)
+   p<-d+d+d*(d-1)/2 
+   u1<-mu[1]
+   u2<-mu[2]
+   if (d==3)  u3<-mu[3]
+
+   S1<-n*suff.stat[1]
+   S2<-n*suff.stat[2]
+   S11<-n*suff.stat[d+1]
+   S22<-n*suff.stat[d+2]
+   S12<-n*suff.stat[2*d+1]
+   if (d==3) {
+      S3<-n*suff.stat[d]
+      S33<-n*suff.stat[2*d]
+      S13<-n*suff.stat[2*d+2]
+      S23<-n*suff.stat[2*d+3]
+   }
+
+   Vv<-rep(0,d)
+   Vv[1]<-S1-n*u1
+   Vv[2]<-S2-n*u2
+   if (d==3) Vv[3]<-S3-n*u3
+
+   Ss<-matrix(0,d,d)
+   Ss[1,1]<-S11-2*S1*u1+n*u1^2 
+   Ss[2,2]<-S22-2*S2*u2+n*u2^2
+   Ss[1,2]<-Ss[2,1]<-S12-S1*u2-S2*u1+n*u1*u2
+   if (d==3) {
+      Ss[3,3]<-S33-2*S3*u3+n*u3^2
+      Ss[1,3]<-Ss[3,1]<-S13-S1*u3-S3*u1+n*u1*u3
+      Ss[2,3]<-Ss[3,2]<-S23-S3*u2-S2*u3+n*u2*u3
+  }
+  return(list(Ss=Ss, Vv=Vv))
+}
+
+
+
+#ddu.theta and ddSig.theta are the second derivatives of mu and Sigma
+#with respect to theta
+#ddu.theta[n.u, n.theta, n.theta]
+#ddSig.theta[n.u, n.u, n.theta, n.theta]
+
+##compute the gradient vector (expected first derivatives) for MVN
+##not actually used here. 
+
+Dcom.mvn<-function(mu, Sigma, suff.stat,n, fix.rho=FALSE) {
+  d<-dim(Sigma)[1]
+  p<-d*2+0.5*d*(d-1)
+
+  if (fix.rho) { 
+    p<-p-1 
+  }
+
+  Dcom<-rep(0,p)
+  invSigma<-solve(Sigma)
+
+  temp<-suff(mu, suff.stat, n)
+  Ss<-temp$Ss
+  Vv<-temp$Vv
+
+  temp<-d1st.mvn(mu=mu, Sigma=Sigma, fix.rho=fix.rho)
+  du.theta<-temp$du.theta
+  dSig.theta<-temp$dSig.theta
+
+  for (i in 1:p)  
+   Dcom[i]<- -n/2*t(vec(invSigma))%*%vec(dSig.theta[,,i])+ 0.5*tr(invSigma%*%dSig.theta[,,i]%*%invSigma%*%Ss)+ t(du.theta[,i])%*%invSigma%*%Vv
+
+   Dcom
+}
+
+
+#compute the information matrix of MVN
+# -1*second derivatives
+
+Icom.mvn<-function(mu, Sigma, suff.stat,n, fix.rho=FALSE) {
+   d<-dim(Sigma)[1]
+   p<-d*2+1/2*d*(d-1)
+
+   if (fix.rho) 
+     { 
+       p<-p-1 
+     }
+
+   Icom<-matrix(0,p,p)
+
+   invSigma<-solve(Sigma)
+
+   temp<-suff(mu, suff.stat, n)
+   Ss<-temp$Ss
+   Vv<-temp$Vv
+
+   temp<-d1st.mvn(mu, Sigma, fix.rho)
+   du.theta<-temp$du.theta
+   dSig.theta<-temp$dSig.theta
+
+   temp<-d2nd.mvn(mu, Sigma, fix.rho)
+   ddu.theta<-temp$ddu.theta
+   ddSig.theta<-temp$ddSig.theta
+
+   for (i in 1:p) {
+     dinvSig.theta.i<- -invSigma%*%dSig.theta[,,i]%*%invSigma
+     for (j in 1:i) {
+      dinvSig.theta.j<- -invSigma%*%dSig.theta[,,j]%*%invSigma
+      ddinvSig.theta.ij<- -dinvSig.theta.j%*%dSig.theta[,,i]%*%invSigma -invSigma%*%ddSig.theta[,,i,j]%*%invSigma-invSigma%*%dSig.theta[,,i]%*%dinvSig.theta.j
+ 
+       a1<- -n/2*(t(vec(dinvSig.theta.j))%*%vec(dSig.theta[,,i]) + t(vec(invSigma))%*%vec(ddSig.theta[,,i,j]))
+                    
+       a2<- t(du.theta[,j])%*%dinvSig.theta.i%*%Vv - 0.5*tr(ddinvSig.theta.ij%*%Ss)
+
+     a3<- t(ddu.theta[,i,j])%*%invSigma%*%Vv + t(du.theta[,i])%*%dinvSig.theta.j%*%Vv - n*t(du.theta[,i])%*%invSigma%*%du.theta[,j]
+
+       Icom[i,j]<-a1+a2+a3
+    
+       if (i!=j) Icom[j,i]<-Icom[i,j]
+     }
+   }
+   -Icom
+}
+
+ 
+###compute the information matrix for various parameter transformations
+### "Fisher" transformation (variance stabilization)
+### unit scale transformation: first order approximation of mean and var, rho
+
+##express T1 and T2 in more general form
+
+Icom.transform<-function(Icom, Dvec, theta, transformation="Fisher", context, fix.rho)
+  {  
+
+      if (!context) {
+
+        mu<-theta[1:2]
+        sigma<-theta[3:4]
+        rho<-theta[5]
+      }
+      if (context) {
+
+        mu<-theta[1:3]   # x,w1,w2
+        sigma<-theta[4:6] #x, w1, w2
+        rho<-theta[7:9]   #r_xw1, r_xw2, r_w1w2
+      }
+    
+    ##T1: d(theta)/d(f(theta)), theta is the MVN parameterization
+    ##T2: d2(theta)/d(f(theta))(d(f(theta))')
+
+    ### transformation=Fisher, Icom_normal==>Icom_fisher
+
+    Imat<- -Icom
+    n.par<-dim(Imat)[1]
+
+    if (transformation=="Fisher") {
+     if (!context) {
+         T1<-c(1,1,sigma[1], sigma[2])
+
+         T2<-matrix(0, n.par^2, n.par)
+         T2[(2*n.par+3), 3]<-sigma[1]    
+         T2[(3*n.par+4), 4]<-sigma[2]     
+
+         if (!fix.rho) {
+           T1<-c(T1, (1-(rho[1]^2)))
+           T2[(4*n.par+5),5]<- -2*rho[1]*(1-rho[1]^2)
+         }
+    
+        T1<-diag(T1)
+     }
+
+     if (context) {
+         T1<-c(1,1,1,sigma[1:3],(1-(rho[1:2]^2)))
+
+         T2<-matrix(0, n.par^2, n.par)
+         T2[(3*n.par+4), 4]<-sigma[1]    
+         T2[(4*n.par+5), 5]<-sigma[2]     
+         T2[(5*n.par+6), 6]<-sigma[3]     
+         T2[(6*n.par+7),7]<- -2*rho[1]*(1-rho[1]^2)
+         T2[(7*n.par+8),8]<- -2*rho[2]*(1-rho[2]^2)
+
+         if (!fix.rho) {
+           T1<-c(T1, (1-(rho[3]^2)))
+           T2[(8*n.par+9),9]<- -2*rho[3]*(1-rho[3]^2)
+         }
+    
+        T1<-diag(T1)
+    }
+}
+    ### transformation=unitscale, Icom_normal==>Icom_unitscale
+   if (transformation=="unitscale") {
+
+      T1<-matrix(0,n.par,n.par)
+      T1[1,1]<-exp(-mu[1])*(1+exp(mu[1]))^2
+      T1[1,3]<-1/(sigma[1]*2*exp(2*mu[1])*(1+exp(mu[1]))^(-4)*(1-2*(1+exp(mu[1]))^(-1)))
+ 
+      T1[2,2]<-exp(-mu[2])*(1+exp(mu[2]))^2
+      T1[2,4]<-1/(sigma[2]*2*exp(2*mu[2])*(1+exp(mu[2]))^(-4)*(1-2*(1+exp(mu[2]))^(-1)))
+      
+      T1[3,3]<-2*sigma[1]^0.5*(1+exp(mu[1]))^4*exp(-2*mu[1])
+      T1[4,4]<-2*sigma[2]^0.5*(1+exp(mu[2]))^4*exp(-2*mu[2])
+
+
+   #   T2<-matrix(0, n.par^2, n.par)
+   #   T2[1,1]<-
+   #   T2[(1*n.par+2), (1*n.par+2)]<-
+
+
+   ##compute T1 and T2 
+
+   }   
+ 
+    Icom.tran<-matrix(NA, n.par, n.par)
+    Icom.tran<-T1%*%Imat%*%t(T1)
+    
+    temp1<-matrix(0,n.par,n.par)
+    for (i in 1:n.par)
+      for (j in 1:n.par) 
+       temp1[i,j]<- Dvec%*%T2[((i-1)*n.par+(1:n.par)),j] 
+
+      Icom.tran<-Icom.tran+temp1     
+    return(-Icom.tran)
+}
+
+
+ecoINFO<-function(theta.em, suff.stat, DM, context=TRUE, fix.rho=FALSE, sem=TRUE, r12=0, n)
+  {
+
+    if (context) fix.rho<-FALSE
+    ndim<-2
+    if (context) ndim<-3
+
+    n.var<-2*ndim+ ndim*(ndim-1)/2
+ 
+    n.par<-n.var 
+    if (context) {
+      n.par<-n.var-2
+    }
+
+   if (!context & fix.rho) n.par<-n.par-1
+
+   mu<-param.pack(theta.em, fix.rho=fix.rho, r12=r12,  dim=ndim)$mu
+   Sigma<-param.pack(theta.em, fix.rho=fix.rho, r12=r12, dim=ndim)$Sigma
+ 
+  theta.fisher<-param.trans(theta.em)
+
+    Icom<-Icom.mvn(mu=mu, Sigma=Sigma, fix.rho=fix.rho, suff.stat=suff.stat, n=n)
+   Dvec<-Dcom.mvn(mu=mu, Sigma=Sigma, fix.rho=fix.rho, suff.stat=suff.stat, n=n)
+
+
+    theta.icom<-theta.em
+    if (fix.rho) theta.icom<-c(theta.em[-n.var], r12)
+
+    Icom.fisher<-Icom.transform(Icom=Icom, Dvec=Dvec, theta=theta.icom, transformation="Fisher", context=context, fix.rho=fix.rho)   
+
+
+    Vcom.fisher <- solve(Icom.fisher)
+
+      if (!context)  {
+      dV <- Vcom.fisher%*%DM%*%solve(diag(1,n.par)-DM)
+      Vobs.fisher <- Vcom.fisher+dV }
+
+      ###verify with the parameters.
+      ###repartition Icom 
+      if (context & !fix.rho) {
+       index<-c(1,4,2,3,5,6,7,8,9)
+       Itemp<-Icom.fisher[index,index]
+       invItemp<-solve(Itemp)
+       A1<-invItemp[1:2,1:2]
+       A2<-invItemp[1:2,3:9]
+       A3<-invItemp[3:9, 1:2]
+       A4<-invItemp[3:9, 3:9]
+       dV1<-(A4-t(A2)%*%solve(A1)%*%A2)%*%DM%*%solve(diag(rep(1,7))-DM)
+       dV<-matrix(0,9,9)
+       dV[3:9,3:9]<-dV1
+       Vobs.fisher<-invItemp+dV
+
+       index2<-c(1,3,4,2,5,6,7,8,9)
+       Vobs.fisher<-Vobs.fisher[index2,index2]
+     }
+
+ 
+ 
+    Iobs.fisher <- solve(Vobs.fisher)
+
+
+    ##transform Iobs.fisher to Iobs via delta method
+    ##V(theta)=d(fisher^(-1))V(bvn.trans(theta))d(fisher^(-1))'
+
+     if (!context) {
+        grad.invfisher <- c(1,1, exp(theta.fisher[3:4]))
+        if (! fix.rho)
+      grad.invfisher <- c(grad.invfisher,4*exp(2*theta.fisher[5])/(exp(2*theta.fisher[5])+1)^2)
+    }
+
+    if (context) {
+         grad.invfisher <- c(1,1, 1, exp(theta.fisher[4:6]))
+         grad.invfisher <- c(grad.invfisher,4*exp(2*theta.fisher[7:8])/(exp(2*theta.fisher[7:8])+1)^2)
+         if (!fix.rho) 
+           grad.invfisher <- c(grad.invfisher,4*exp(2*theta.fisher[9])/(exp(2*theta.fisher[9])+1)^2)
+    }
+
+
+    Vobs<-diag(grad.invfisher)%*%Vobs.fisher%*%diag(grad.invfisher)
+    Iobs<-solve(Vobs)
+    ## obtain a symmetric Cov matrix
+    Vobs.sym <- 0.5*(Vobs+t(Vobs))
+
+###unitscale transformation
+
+#theta.unit<-param.trans(theta.em, transformation="unitscale")
+#Icom.unit<-Icom.transform(Icom, Dvec,theta.em, transformation="unitscale")
+#Vobs.unit<-delta method
+
+if (!context) {
+   names(mu)<-c("W1","W2")
+   colnames(Sigma)<-rownames(Sigma)<-c("W1","W2")
+   names(suff.stat)<-c("S1","S2","S11","S22","S12")
+   if (!fix.rho) colnames(DM)<-rownames(DM)<-c("u1","u2","s1","s2","r12")   
+   if (fix.rho) colnames(DM)<-rownames(DM)<-c("u1","u2","s1","s2")   
+}   
+if (context) {
+   names(mu)<-c("X","W1","W2")
+   colnames(Sigma)<-rownames(Sigma)<-c("X","W1","W2")
+   names(suff.stat)<-c("Sx","S1","S2","Sxx","S11","S22","Sx1","Sx2","S12")
+   if (!fix.rho) {
+    colnames(DM)<-rownames(DM)<-c("u1","u2","s1","s2","r1x","r2x","r12")
+    colnames(Icom)<-rownames(Icom)<-c("ux","u1","u2","sx","s1","s2","r1x","r2x","r12")   }  
+   if (fix.rho) {
+    colnames(DM)<-rownames(DM)<-c("u1","u2","s1","s2","r1x","r2x")   
+colnames(Icom)<-rownames(Icom)<-c("ux","u1","u2","sx","s1","s2","r1x","r2x")   }  
+}   
+
+colnames(Iobs)<-colnames(Iobs.fisher)<-colnames(Icom.fisher)<-colnames(Vobs)<-colnames(Vobs.sym)<-colnames(Icom)
+rownames(Iobs)<-rownames(Iobs.fisher)<-rownames(Icom.fisher)<-rownames(Vobs)<-rownames(Vobs.sym)<-rownames(Icom)
+
+  res.out<-list(mu=mu, Sigma=Sigma, suff.stat=suff.stat, context=context, fix.rho=fix.rho)
+  res.out$DM<-DM
+    res.out$Icom<-Icom
+    res.out$Iobs<-Iobs
+    res.out$Fmis<-1-diag(Iobs)/diag(Icom)
+    res.out$Vcom<-Vcom<-solve(Icom)
+    res.out$Vobs.original<-Vobs
+    res.out$VFmis<-1-diag(Vcom)/diag(Vobs)
+    res.out$Vobs<-Vobs.sym
+    res.out$Icom.trans<-Icom.fisher
+    res.out$Iobs.trans<-Iobs.fisher
+    res.out$Fmis.trans<-1-diag(Iobs.fisher)/diag(Icom.fisher)
+    res.out$Imiss<-res.out$Icom-res.out$Iobs
+    res.out$Ieigen<-eigen(res.out$Imiss)[[1]][1]
+res.out
+}
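
The Fisher scale used throughout eminfo.R maps variances through log() and
correlations through z = 0.5*log((1+r)/(1-r)); ecoINFO then returns to the
original scale via the delta method, using the gradient 4*exp(2z)/(exp(2z)+1)^2
for the correlations. A small numeric round trip (the value 0.3 is arbitrary):

    r <- 0.3
    z <- 0.5 * log((1 + r) / (1 - r))       # forward map, as in param.trans(..., "Fisher")
    (exp(2 * z) - 1) / (exp(2 * z) + 1)     # inverse map recovers 0.3
    4 * exp(2 * z) / (exp(2 * z) + 1)^2     # dr/dz, the grad.invfisher factor in ecoINFO
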
diff --git a/R/print.ecoML.R b/R/print.ecoML.R
new file mode 100644
index 0000000..49708b1
--- /dev/null
+++ b/R/print.ecoML.R
@@ -0,0 +1,38 @@
+print.ecoML <- function(x, digits = max(3, getOption("digits") -3),
+                      ...){ 
+
+ cat("\nCall:\n", deparse(x$call), "\n\n", sep="")
+  
+   n.col<-5
+  if (x$fix.rho) n.col<-4
+  n.row<-1
+  if (x$sem) n.row<-3
+  param.table<-matrix(NA, n.row, n.col)
+  if (!x$context) 
+    param.table[1,]<-x$theta.em 
+  else if (x$context && !x$fix.rho) 
+     param.table[1,]<-x$theta.em[c(2,3,5,6,9)]
+  else if (x$context && x$fix.rho) 
+     param.table[1,]<-x$theta.em[c(2,3,5,6)]
+ 
+  
+  if (n.row>1) {
+    if (!x$context) {
+    param.table[2,]<-sqrt(diag(x$Vobs))
+    param.table[3,]<-Fmis<-1-diag(x$Iobs)/diag(x$Icom) }
+   else if (x$context && !x$fix.rho) {
+    param.table[2,]<-sqrt(diag(x$Vobs))[c(2,3,5,6,9)]
+    param.table[3,]<-Fmis<-(1-diag(x$Iobs)/diag(x$Icom))[c(2,3,5,6,9)] }
+   else if (x$context && x$fix.rho) {
+    param.table[2,]<-sqrt(diag(x$Vobs))[c(2,3,5,6)]
+    param.table[3,]<-Fmis<-(1-diag(x$Iobs)/diag(x$Icom))[c(2,3,5,6)] }
+
+  }
+  cname<-c("mu1", "mu2", "sigma1", "sigma2", "rho")
+  rname<-c("EM est.", "std. err.", "frac. missing")
+  rownames(param.table)<-rname[1:n.row]
+  colnames(param.table)<-cname[1:n.col]
+  print(param.table)
+  cat("\n")
+  invisible(x)
+}
diff --git a/R/print.summary.eco.R b/R/print.summary.eco.R
index 8c8df40..a51a4e4 100644
--- a/R/print.summary.eco.R
+++ b/R/print.summary.eco.R
@@ -1,16 +1,23 @@
 print.summary.eco <- function(x, digits=max(3, getOption("digits")-3), ...) {
-	cat("\nCall: ") 
-  	cat(paste(deparse(x$call), sep="\n", collapse="\n"))
+    cat("\nCall: ") 
+    cat(paste(deparse(x$call), sep="\n", collapse="\n"))
 
         cat("\n")
-	if (!is.null(x$param.table)) {
+    if (!is.null(x$param.table)) {
            cat("\nParameter Estimates:\n")
            printCoefmat(x$param.table, digits=digits, na.print="NA",...)
         }
  
-        cat("\nAggregate Estimates:\n")
-        printCoefmat(x$agg.table, digits=digits, na.print="NA",...)
 
+   cat("\n*** Insample Predictions ***\n")
+   cat("\nUnweighted:\n")
+   printCoefmat(x$agg.table, digits=digits, na.print="NA",...)
+  
+   if (!is.null(x$agg.wtable)) {
+   cat("\nWeighted:\n")
+   printCoefmat(x$agg.wtable, digits=digits, na.print="NA",...)
+  }
+   
         cat("\nNumber of Units:", x$n.obs)
         cat("\nNumber of Monte Carlo Draws:", x$n.draws)
    
diff --git a/R/print.summary.ecoML.R b/R/print.summary.ecoML.R
new file mode 100644
index 0000000..916b930
--- /dev/null
+++ b/R/print.summary.ecoML.R
@@ -0,0 +1,37 @@
+print.summary.ecoML <- function(x, digits=max(3,
+                                     getOption("digits")-3), ...) {
+
+  cat("\nCall: ", paste(deparse(x$call), sep="\n", collapse="\n"))
+  cat("\n")
+  if (!is.null(x$param.table)) {
+    cat("\n*** Parameter Estimates ***\n")
+    if (x$fix.rho)
+      cat("\nOriginal Model Parameters (rho is fixed at ", x$rho, "):\n", sep="")   
+    else
+      cat("\nOriginal Model Parameters:\n")
+    printCoefmat(x$param.table, digits=digits, na.print="NA",...)
+  }
+
+  cat("\n*** Insample Predictions ***\n")
+  cat("\nUnweighted:\n")
+  printCoefmat(x$agg.table, digits=digits, na.print="NA",...)
+  
+  if (!is.null(x$agg.wtable)) {
+  cat("\nWeighted:\n")
+  printCoefmat(x$agg.wtable, digits=digits, na.print="NA",...)
+  }
+  if (!is.null(x$W.table)) {
+    cat("\n\nUnit-level Estimates of W:\n")
+    printCoefmat(x$W.table, digits=digits, na.print="NA",...)
+  }
+
+  cat("\n\nLog-likelihood:", x$loglik)
+  cat("\nNumber of Observations:", x$n.obs)
+  cat("\nNumber of EM iterations:", x$iters.em)
+  if (x$sem)
+    cat("\nNumber of SEM iterations:", x$iters.sem)
+  cat("\nConvergence threshold for EM:", x$epsilon)
+  
+  cat("\n\n")
+  invisible(x)
+}
diff --git a/R/print.summary.ecoNP.R b/R/print.summary.ecoNP.R
index c9c3b2b..6a3cc67 100644
--- a/R/print.summary.ecoNP.R
+++ b/R/print.summary.ecoNP.R
@@ -1,13 +1,20 @@
 print.summary.ecoNP <- function(x, digits=max(3, getOption("digits")-3), ...) 
      {
-  	cat("\nCall: ") 
-  	cat(paste(deparse(x$call), sep="\n", collapse="\n"))
+    cat("\nCall: ") 
+    cat(paste(deparse(x$call), sep="\n", collapse="\n"))
 
-	cat("\n\nAggregate Estimates:\n")
+    cat("\n\nIn-sample Predictions:\n")
+    cat("\nUnweighted:\n")
         printCoefmat(x$agg.table, digits=digits, na.print="NA",...)
-       	cat("\nNumber of Units:", x$n.obs)
-       	cat("\nNumber of Monte Carlo Draws:", x$n.draws)
-      	if (!is.null(x$param.table)) {
+        
+    if (!is.null(x$agg.wtable)) {
+    cat("\nWeighted:\n")
+        printCoefmat(x$agg.wtable, digits=digits, na.print="NA",...)
+    
+    }
+        cat("\nNumber of Units:", x$n.obs)
+        cat("\nNumber of Monte Carlo Draws:", x$n.draws)
+        if (!is.null(x$param.table)) {
           tt <- x$param.table
           cat("\nParameter Estimates of mu1:\n")
           printCoefmat(tt$mu1.table, digits=digits, na.print="NA",...)
@@ -19,10 +26,10 @@ print.summary.ecoNP <- function(x, digits=max(3, getOption("digits")-3), ...)
           printCoefmat(tt$Sigma12.table, digits=digits, na.print="NA",...)
           cat("\nParameter Estimates of Sigma22:\n")
           printCoefmat(tt$Sigma22.table, digits=digits, na.print="NA",...)
-	}
+    }
 
-	if (!is.null(x$W1.table)) {
-     	  cat("\n\nUnit-level Estimates of W1:\n")
+    if (!is.null(x$W1.table)) {
+          cat("\n\nUnit-level Estimates of W1:\n")
           printCoefmat(x$W1.table, digits=digits, na.print="NA",...)
           cat("\n\nUnit-level Estimates of W2:\n")
           printCoefmat(x$W2.table, digits=digits, na.print="NA",...)
diff --git a/R/summary.eco.R b/R/summary.eco.R
index 29785ff..1ebeb7d 100644
--- a/R/summary.eco.R
+++ b/R/summary.eco.R
@@ -13,12 +13,12 @@ summary.eco <- function(object, CI = c(2.5, 97.5), param = TRUE,
   table.names<-c("mean", "std.dev", paste(min(CI), "%", sep=" "),
                  paste(max(CI), "%", sep=" ")) 
 
-  if (is.null(object$N))
-    N <- rep(1, nrow(object$X))
-  else N <- object$N
+  
+  agg.table <-agg.wtable <-NULL
+  N<-rep(1, length(object$X))
+  W1.agg.mean <- object$W[,1,]%*% (object$X*N/sum(object$X*N))
+  W2.agg.mean <- object$W[,2,]%*% ((1-object$X)*N/sum((1-object$X)*N))
 
-  W1.agg.mean <- object$W[,1,] %*% (object$X*N/sum(object$X*N))
-  W2.agg.mean <- object$W[,2,] %*% ((1-object$X)*N/sum((1-object$X)*N))
   agg.table <- rbind(cbind(mean(W1.agg.mean), sd(W1.agg.mean), 
                            quantile(W1.agg.mean, min(CI)/100), 
                            quantile(W1.agg.mean, max(CI)/100)),
@@ -27,6 +27,23 @@ summary.eco <- function(object, CI = c(2.5, 97.5), param = TRUE,
                            quantile(W2.agg.mean, max(CI)/100)))
   colnames(agg.table) <- table.names
   rownames(agg.table) <- c("W1", "W2")
+
+    
+  if (!is.null(object$N)) {
+    N <- object$N
+
+    W1.agg.wmean <- object$W[,1,] %*% (object$X*N/sum(object$X*N))
+    W2.agg.wmean <- object$W[,2,] %*% ((1-object$X)*N/sum((1-object$X)*N))
+    agg.wtable <- rbind(cbind(mean(W1.agg.wmean), sd(W1.agg.wmean), 
+                           quantile(W1.agg.wmean, min(CI)/100), 
+                           quantile(W1.agg.wmean, max(CI)/100)),
+                     cbind(mean(W2.agg.wmean), sd(W2.agg.wmean), 
+                           quantile(W2.agg.wmean, min(CI)/100), 
+                           quantile(W2.agg.wmean, max(CI)/100)))
+    colnames(agg.wtable) <- table.names
+    rownames(agg.wtable) <- c("W1", "W2")
+  }
+
   
   if (units) {
      W1.table <- cbind(apply(object$W[,1,subset], 2, mean), 
@@ -58,7 +75,7 @@ summary.eco <- function(object, CI = c(2.5, 97.5), param = TRUE,
     param.table <- NULL
   
   ans <- list(call = object$call, W1.table = W1.table, W2.table = W2.table,
-              agg.table = agg.table, param.table = param.table,
+              agg.table = agg.table, agg.wtable=agg.wtable, param.table = param.table,
               n.draws = n.draws, n.obs = n.obs) 
   
   class(ans) <-"summary.eco"
diff --git a/R/summary.ecoML.R b/R/summary.ecoML.R
new file mode 100644
index 0000000..c5d2ace
--- /dev/null
+++ b/R/summary.ecoML.R
@@ -0,0 +1,128 @@
+##for simplicity, this summary function only reports parameters related to W_1 and W_2
+summary.ecoML <- function(object, CI = c(2.5, 97.5),  param = TRUE, units = FALSE, subset = NULL, ...) { 
+
+
+      n.col<-5
+      if(object$context) n.col<-7
+      if (object$fix.rho) n.col<-n.col-1
+
+
+  n.row<-1
+  if (object$sem) n.row<-3
+
+
+  param.table<-matrix(NA, n.row, n.col)
+   
+  if (!object$context) {
+   param.table[1,]<-object$theta.em 
+   cname<-c("mu1", "mu2", "sigma1", "sigma2", "rho")
+  }
+  else if (object$context && !object$fix.rho) {
+   cname<-c("mu1", "mu2", "sigma1", "sigma2", "rho1X","rho2X","rho12")
+   param.table[1,]<-object$theta.em[c(2,3,5,6,7,8,9)]   
+  }
+  else if (object$context && object$fix.rho) {
+   cname<-c("mu1", "mu2", "sigma1", "sigma2", "rho1X","rho2X")
+   param.table[1,]<-object$theta.em[c(2,3,5,6,7,8)] 
+  }
+
+  if (n.row>1) {
+    if (!object$context) {
+    param.table[2,]<-sqrt(diag(object$Vobs))
+    param.table[3,]<-Fmis<-1-diag(object$Iobs)/diag(object$Icom)
+   }
+   else if (object$context && !object$fix.rho) {
+    param.table[2,]<-sqrt(diag(object$Vobs)[c(2,3,5,6,7,8,9)])
+    param.table[3,]<-Fmis<-(1-diag(object$Iobs)/diag(object$Icom))[c(2,3,5,6,7,8,9)]
+  }
+   else if (object$context && object$fix.rho) {
+    param.table[2,]<-sqrt(diag(object$Vobs)[c(2,3,5,6,7,8)])
+    param.table[3,]<-Fmis<-(1-diag(object$Iobs)/diag(object$Icom))[c(2,3,5,6,7,8)]
+  }
+
+  }
+  rname<-c("ML est.", "std. err.", "frac. missing")
+  
+
+
+  rownames(param.table)<-rname[1:n.row]
+  colnames(param.table)<-cname[1:n.col]
+  
+  n.obs <- nrow(object$W)
+
+   if (is.null(subset)) subset <- 1:n.obs 
+  else if (!is.numeric(subset))
+    stop("Subset should be a numeric vector.")
+  else if (!all(subset %in% c(1:n.obs)))
+    stop("Subset should be any numbers in 1:obs.")
+    
+  table.names<-c("mean", "std.dev", paste(min(CI), "%", sep=" "),
+                 paste(max(CI), "%", sep=" ")) 
+
+  W1.mean <- mean(object$W[,1])
+  W2.mean <- mean(object$W[,2])
+  W1.sd <- sd(object$W[,1])
+  W2.sd <- sd(object$W[,2])
+#  W1.q1 <-  W1.mean-1.96*W1.sd
+#  W1.q2 <-  W1.mean+1.96*W1.sd
+#  W2.q1 <-  W2.mean-1.96*W2.sd
+#  W2.q2 <-  W2.mean+1.96*W2.sd
+  W1.q1 <-  quantile(object$W[,1],min(CI)/100)
+  W1.q2 <-  quantile(object$W[,1],max(CI)/100)
+  W2.q1 <-  quantile(object$W[,2],min(CI)/100)
+  W2.q2 <-  quantile(object$W[,2],max(CI)/100)
+  
+  agg.table <- rbind(cbind(W1.mean, W1.sd, W1.q1, W1.q2),
+                     cbind(W2.mean, W2.sd, W2.q1, W2.q2)) 
+  colnames(agg.table) <- table.names
+  rownames(agg.table) <- c("W1", "W2")
+
+ # if (is.null(object$N))
+ #   N <- rep(1, nrow(object$X))
+ # else
+ 
+ agg.wtable<-NULL
+ if (!is.null(object$N)) {
+    N <- object$N
+}
+else {
+    N <- rep(1, n.obs)
+}
+  weighted.var <- function(x, w) {
+    return(sum(w * (x - weighted.mean(x,w))^2)/((length(x)-1)*mean(w)))
+    }
+
+  W1.mean <- weighted.mean(object$W[,1], object$X*N)
+  W2.mean <- weighted.mean(object$W[,2], (1-object$X)*N)
+  W1.sd <- weighted.var(object$W[,1], object$X*N)^0.5
+  W2.sd <- weighted.var(object$W[,2], (1-object$X)*N)^0.5
+  W1.q1 <-  W1.mean-1.96*W1.sd
+  W1.q2 <-  W1.mean+1.96*W1.sd
+  W2.q1 <-  W2.mean-1.96*W2.sd
+  W2.q2 <-  W2.mean+1.96*W2.sd
+#  W1.q1 <-  quantile(object$W[,1] * object$X*N/mean(object$X*N),min(CI)/100)
+#  W1.q2 <-  quantile(object$W[,1] * object$X*N/mean(object$X*N),max(CI)/100)
+#  W2.q1 <-  quantile(object$W[,2]*(1-object$X)*N/(mean((1-object$X)*N)),min(CI)/100)
+#  W2.q2 <-  quantile(object$W[,2]*(1-object$X)*N/(mean((1-object$X)*N)),max(CI)/100)
+  agg.wtable <- rbind(cbind(W1.mean, W1.sd, W1.q1, W1.q2),
+                      cbind(W2.mean, W2.sd, W2.q1, W2.q2))
+  colnames(agg.wtable) <- table.names
+  rownames(agg.wtable) <- c("W1", "W2")
+  
+  
+  if (units) 
+    W.table <- object$W[subset,] 
+  else
+    W.table <-  NULL
+  
+  ans <- list(call = object$call, iters.sem = object$iters.sem,
+              iters.em = object$iters.em, epsilon = object$epsilon,
+              sem = object$sem, fix.rho = object$fix.rho, loglik = object$loglik,
+              rho=object$rho, param.table = param.table, W.table = W.table, 
+              agg.wtable = agg.wtable, agg.table=agg.table, n.obs = n.obs) 
+ # if (object$fix.rho)
+ #   ans$rho<-object$rho
+  
+  class(ans) <-"summary.ecoML"
+  return(ans)
+}
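
The weighted.var helper defined inside summary.ecoML reduces to the ordinary
unbiased variance when all weights are equal, which gives a quick sanity check
(helper copied verbatim from the code above; the data vector is arbitrary):

    weighted.var <- function(x, w)
      sum(w * (x - weighted.mean(x, w))^2) / ((length(x) - 1) * mean(w))
    x <- c(0.1, 0.4, 0.7, 0.2)
    all.equal(weighted.var(x, rep(1, 4)), var(x))   # TRUE: equal weights give var()
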
diff --git a/R/summary.ecoNP.R b/R/summary.ecoNP.R
index 2134a88..5c77d06 100644
--- a/R/summary.ecoNP.R
+++ b/R/summary.ecoNP.R
@@ -10,13 +10,12 @@ summary.ecoNP <- function(object, CI=c(2.5, 97.5), param=FALSE, units=FALSE, sub
 
   table.names<-c("mean", "std.dev", paste(min(CI), "%", sep=" "), paste(max(CI), "%", sep=" "))
 
+  agg.table <-agg.wtable <-NULL
+  
+  N<-rep(1, length(object$X))
+  W1.agg.mean <- object$W[,1,]%*% (object$X*N/sum(object$X*N))
+  W2.agg.mean <- object$W[,2,]%*% ((1-object$X)*N/sum((1-object$X)*N))
 
-  if (is.null(object$N))
-    N <- rep(1, nrow(object$X))
-  else N <- object$N
-
-  W1.agg.mean <- object$W[,1,] %*% (object$X*N/sum(object$X*N))
-  W2.agg.mean <- object$W[,2,] %*% ((1-object$X)*N/sum((1-object$X)*N))
   agg.table <- rbind(cbind(mean(W1.agg.mean), sd(W1.agg.mean), 
                            quantile(W1.agg.mean, min(CI)/100), 
                            quantile(W1.agg.mean, max(CI)/100)),
@@ -26,6 +25,22 @@ summary.ecoNP <- function(object, CI=c(2.5, 97.5), param=FALSE, units=FALSE, sub
   colnames(agg.table) <- table.names
   rownames(agg.table) <- c("W1", "W2")
 
+    
+  if (!is.null(object$N)) {
+    N <- object$N
+
+    W1.agg.wmean <- object$W[,1,] %*% (object$X*N/sum(object$X*N))
+    W2.agg.wmean <- object$W[,2,] %*% ((1-object$X)*N/sum((1-object$X)*N))
+    agg.wtable <- rbind(cbind(mean(W1.agg.wmean), sd(W1.agg.wmean), 
+                           quantile(W1.agg.wmean, min(CI)/100), 
+                           quantile(W1.agg.wmean, max(CI)/100)),
+                     cbind(mean(W2.agg.wmean), sd(W2.agg.wmean), 
+                           quantile(W2.agg.wmean, min(CI)/100), 
+                           quantile(W2.agg.wmean, max(CI)/100)))
+    colnames(agg.wtable) <- table.names
+    rownames(agg.wtable) <- c("W1", "W2")
+  }
+  
   if (units) {
      W1.table <- cbind(apply(object$W[,1,subset], 2, mean), 
                        apply(object$W[,1,subset], 2, sd),
@@ -78,7 +93,8 @@ summary.ecoNP <- function(object, CI=c(2.5, 97.5), param=FALSE, units=FALSE, sub
       param.table <- NULL
 
   ans <- list(call = object$call, W1.table = W1.table, W2.table = W2.table,
-              agg.table = agg.table, param.table = param.table,
+              agg.table = agg.table, agg.wtable=agg.wtable, 
+		param.table = param.table,
               n.draws = n.draws, n.obs = n.obs) 
 
   class(ans) <-c("summary.eco", "summary.ecoNP") 
diff --git a/data/forgnlit30.txt b/data/forgnlit30.txt
new file mode 100644
index 0000000..cbd36b2
--- /dev/null
+++ b/data/forgnlit30.txt
@@ -0,0 +1,49 @@
+Y X W1  W2  ICPSR
+0.045202466 0.291846797 0.145636659 0.003811143 1
+0.026541216 0.153396059 0.0853728 0.015881529 2
+0.03500137 0.301126414 0.106979746 0.003987764 3
+0.026694481 0.213576814 0.09595563 0.007884538 4
+0.048657286 0.306146298 0.142766681 0.007133774 5
+0.021524805 0.143229614 0.072070992 0.013074804 6
+0.025203959 0.098517622 0.142712249 0.012362181 11
+0.037599262 0.263947999 0.128569664 0.004977307 12
+0.037062205 0.310553816 0.108343096 0.004954474 13
+0.030440922 0.165689757 0.153832806 0.005935923 14
+0.022770055 0.200200251 0.09030107 0.005866166 21
+0.014205567 0.052765053 0.101098671 0.009365248 22
+0.01869335 0.220147504 0.06692675 0.005077362 23
+0.0205759 0.123151548 0.116277538 0.007134804 24
+0.017936563 0.162656154 0.078153328 0.006239305 25
+0.007090591 0.082944688 0.035941713 0.0044811 31
+0.007667657 0.047928719 0.05920456 0.00507321 32
+0.011668725 0.187966478 0.043343661 0.00433673 33
+0.018071769 0.053195892 0.075326687 0.014854919 34
+0.009827933 0.105670053 0.060263195 0.003868727 35
+0.012326313 0.201279153 0.044401784 0.004243234 36
+0.007713083 0.124256422 0.037021186 0.003554651 37
+0.04880163 0.01714591 0.073591057 0.048369178 40
+0.048633258 0.012190714 0.085494717 0.048178345 41
+0.034953755 0.009674529 0.065823285 0.034652189 42
+0.021317305 0.070420714 0.054219659 0.018824772 43
+0.033493063 0.009755987 0.04008103 0.033428157 44
+0.077076847 0.034047996 0.192354229 0.073013536 45
+0.027616241 0.009233093 0.125910064 0.026700227 46
+0.056284814 0.005197026 0.051945054 0.056307485 47
+0.051388674 0.007367355 0.056690208 0.051349326 48
+0.015927794 0.028828912 0.072992103 0.014233858 49
+0.056581326 0.011937021 0.058327962 0.056560224 51
+0.022418672 0.085531842 0.122245529 0.013081688 52
+0.017854061 0.016248263 0.05551385 0.017232048 53
+0.053595931 0.007869775 0.058246427 0.053559042 54
+0.043138699 0.042101171 0.19189523 0.036600613 56
+0.007085015 0.072256842 0.035756003 0.004851989 61
+0.016783985 0.108706261 0.08615381 0.008323324 62
+0.006840419 0.087909144 0.039684643 0.003674827 63
+0.009578677 0.172652247 0.042607555 0.002686158 64
+0.015188504 0.17835894 0.07450209 0.00231292 65
+0.076931986 0.030971684 0.068343005 0.077206504 66
+0.006617641 0.114790521 0.035687921 0.00284792 67
+0.006991899 0.114592071 0.041513104 0.002524065 68
+0.012793782 0.184915152 0.056907312 0.002785916 71
+0.007322475 0.131966539 0.035895124 0.002978594 72
+0.007917342 0.188164613 0.029491874 0.002916866 73
diff --git a/data/forgnlit30c.txt b/data/forgnlit30c.txt
new file mode 100644
index 0000000..071c165
--- /dev/null
+++ b/data/forgnlit30c.txt
@@ -0,0 +1,1977 @@
+Y X W1 W2 state county
+0.009808835 0.007159734 0.09 0.009230547 22 590
+0.048181999 0.009934433 0.43 0.044350793 56 1010
+0.123895797 0.005626512 0.18 0.12357834 40 1670
+0.015302869 0.010733262 0.079207921 0.014609518 34 570
+0.040637534 0.010660756 0.04950495 0.040541982 22 790
+0.027718766 0.010217501 0.04950495 0.027493868 21 1270
+0.031776987 0.011586555 0.089108911 0.031104921 49 1990
+0.030893918 0.008184882 0.029411765 0.030906149 52 350
+0.007731092 0.017142857 0.107843137 0.005984952 53 590
+0.036932569 0.008990745 0.049019608 0.036822912 48 850
+0.001702852 0.043422733 0.009803922 0.001335113 62 1030
+0.074908329 0.017984983 0.145631068 0.073613087 48 430
+0.008439716 0.007304965 0.009708738 0.008430378 34 810
+0.046119514 0.025539301 0.048543689 0.04605598 40 810
+0.042474189 0.00932802 0 0.04287412 51 1790
+0.115406754 0.009881044 0.077669903 0.115783354 34 2210
+0.003951268 0.033915048 0.029126214 0.003067485 40 6500
+0.013417093 0.008065767 0.076923077 0.012900704 21 250
+0.001316656 0.068466096 0 0.001413428 68 390
+0.087698326 0.005650945 0.009615385 0.088142077 42 1110
+0.019419469 0.00426982 0.019230769 0.019420278 53 1370
+0.026722925 0.012189405 0.028846154 0.026696725 34 1410
+0.005972764 0.008282233 0.048076923 0.005621135 49 2790
+0.094412844 0.00558184 0.238095238 0.09360633 54 130
+0.01132457 0.021233569 0.028571429 0.010950413 43 270
+0.04868414 0.005605082 0.095238095 0.048421731 51 470
+0.005633803 0.042253521 0.047619048 0.003781513 63 810
+0.005875632 0.008568631 0 0.005926414 49 4290
+0.271094403 0.01475912 0.471698113 0.268089316 45 70
+0.001666667 0.035333333 0.028301887 0.000691085 32 1090
+0.012449187 0.026930894 0.037735849 0.011749347 49 2970
+0.003717472 0.039776952 0.028037383 0.002710027 35 850
+0.002326483 0.041488949 0.028037383 0.001213592 32 1010
+0.011324112 0.008654857 0.037383178 0.011096606 22 1590
+0.095260223 0.004143432 0.112149533 0.095189952 51 1770
+0.033874292 0.013781556 0.336448598 0.029646075 49 4710
+0.004171011 0.009384776 0.009259259 0.004122807 66 90
+0.014474643 0.011246485 0.009259259 0.014533965 34 890
+0.008927232 0.008102026 0.037037037 0.008697625 49 3530
+0.186683769 0.012709888 0.064220183 0.188260305 66 610
+0.068487336 0.015423801 0.100917431 0.067979304 34 930
+0.005557099 0.010095397 0.018348624 0.005426647 32 1070
+0.004964011 0.009017953 0.018348624 0.004842211 24 1170
+0.045465451 0.013075816 0.036697248 0.045581621 34 1690
+0.011167897 0.010316108 0.04587156 0.010806159 34 1850
+0.011250827 0.009017207 0.04587156 0.010935804 49 2070
+0.049572967 0.02042332 0 0.05060652 40 170
+0.01790027 0.00781361 0.081818182 0.017396907 14 1190
+0.140257649 0.017874396 0.126126126 0.140514838 66 530
+0.02214349 0.004274656 0.009009009 0.022199876 54 1130
+0.00360036 0.100810081 0.017857143 0.002002002 62 570
+0.004062038 0.041358936 0.044642857 0.002311248 49 570
+0.008987846 0.011439077 0.035714286 0.008678582 34 2050
+0.004460303 0.099910794 0.017857143 0.002973241 49 3230
+0.030797101 0.204710145 0.097345133 0.013667426 65 290
+0.045512224 0.004779629 0.044247788 0.045518297 48 910
+0.00545809 0.044054581 0.008849558 0.005301794 35 1490
+0.024162679 0.009011164 0.044247788 0.023980043 34 2250
+0.017762938 0.00754591 0.044247788 0.017561566 49 3370
+0.064667248 0.002617921 0.087719298 0.064606742 48 70
+0.002512563 0.07160804 0.026315789 0.00067659 63 290
+0.007177562 0.012588339 0.01754386 0.007045404 31 510
+0.004428341 0.04589372 0.052631579 0.002109705 35 1030
+0.008324873 0.011573604 0.043859649 0.007908792 34 1110
+0.017455462 0.020694619 0.026086957 0.017273061 49 230
+0.020463752 0.008031848 0.034782609 0.020347814 34 1670
+0.02899792 0.014315429 0.025641026 0.029046673 66 370
+0.005828072 0.056823701 0.017094017 0.005149331 32 810
+0.055033809 0.004432757 0.008474576 0.055241114 40 150
+0.021135371 0.020611354 0.033898305 0.020866774 40 530
+0.005236083 0.010931472 0.025210084 0.005015325 22 870
+0.004836759 0.071946796 0.025210084 0.003257329 62 930
+0.012134936 0.006909365 0.193277311 0.010874649 14 990
+0.042519029 0.007076594 0.117647059 0.04198359 56 1030
+0.015804099 0.010420285 0.05 0.015444015 21 1590
+0.005428882 0.021715527 0.1 0.003329634 49 3810
+0.015744096 0.007559665 0.049586777 0.015486308 24 150
+0.008237964 0.005968824 0 0.00828743 49 490
+0.006876927 0.014346692 0.024793388 0.006616143 34 630
+0.026488139 0.008805123 0.074380165 0.026062697 56 970
+0.003574468 0.020595745 0.016528926 0.003302051 49 1110
+0.064241197 0.006445427 0.090909091 0.064068196 47 1950
+0.010268562 0.096366509 0.040983607 0.006993007 43 350
+0.009068924 0.01229343 0.057377049 0.00846766 53 430
+0.034562212 0.00969331 0.040983607 0.034499358 44 590
+0.067037114 0.003625234 0.057377049 0.067072261 41 890
+0.003665445 0.040986338 0.016260163 0.003127172 73 230
+0.362409906 0.005682868 0.211382114 0.363273082 45 1130
+0.022279278 0.010187179 0.06504065 0.021839177 53 1310
+0.009626955 0.14921781 0.040322581 0.004243281 62 1170
+0.007199424 0.029757619 0.032258065 0.006430868 49 4650
+0.022321429 0.019929847 0.056 0.021636571 21 130
+0.050342131 0.01018247 0.056 0.050283927 52 450
+0.223241115 0.006476013 0.192 0.223444752 45 570
+0.008236505 0.006128352 0.016 0.008188635 22 1350
+0.005633291 0.022896602 0 0.005765297 44 530
+0.05267868 0.005125493 0.063492063 0.052622971 41 770
+0.012989458 0.01185994 0.015873016 0.012954849 21 1390
+0.068623157 0.018542853 0.039370079 0.069175841 43 50
+0.004635683 0.015095685 0 0.004706734 31 390
+0.010523096 0.00776996 0.086614173 0.009927241 22 630
+0.001764446 0.056462285 0 0.001870033 72 690
+0.073175485 0.012569424 0.279069767 0.070554569 41 70
+0.007376492 0.012520625 0.007751938 0.007371732 34 250
+0.083941606 0.024779101 0.379844961 0.076423084 45 950
+0.034926471 0.014935662 0.230769231 0.03195708 45 110
+0.033798604 0.012961117 0.076923077 0.033232323 56 950
+0.032416448 0.00725325 0.061538462 0.032203676 34 2010
+0.031447434 0.009228367 0.038461538 0.031382102 34 2150
+0.073038616 0.023064826 0.303030303 0.067608657 45 930
+0.01240709 0.007604345 0.060150376 0.012041251 24 270
+0.009674945 0.007846145 0.060150376 0.009275776 22 750
+0.01572641 0.0166001 0.007518797 0.015864957 40 8400
+0.148243913 0.007218272 0.298507463 0.147151384 45 90
+0.016173515 0.006263731 0.044776119 0.015993226 49 2930
+0.02884338 0.03865013 0.037313433 0.02850285 49 4890
+0.00874176 0.019346518 0.02962963 0.00832968 49 90
+0.010928576 0.009518438 0.051851852 0.010535308 22 310
+0.024053545 0.028236771 0.014814815 0.024321997 66 450
+0.050120982 0.011666091 0 0.050712599 40 610
+0.00971251 0.017482517 0 0.00988533 43 730
+0.008090494 0.010113117 0.074074074 0.007416377 34 790
+0.021375114 0.007268616 0.066666667 0.021043497 24 1050
+0.005291954 0.006054355 0.022222222 0.005188828 22 1070
+0.005869797 0.024190679 0.036764706 0.005103901 61 90
+0.017888055 0.019619158 0.007352941 0.018098882 21 1710
+0.036409374 0.01800609 0.169117647 0.033976001 34 1930
+0.035264039 0.012077934 0.058394161 0.03498126 42 1250
+0.007058462 0.010510971 0.01459854 0.006978367 22 1830
+0.003175467 0.012429686 0.00729927 0.003123565 49 2330
+0.010715777 0.006401633 0.043478261 0.010504692 53 90
+0.045051289 0.01912947 0.072463768 0.044516676 52 370
+0.030957 0.011154219 0.028985507 0.030979238 40 1070
+0.042709232 0.019844694 0.065217391 0.042253521 40 1510
+0.005575648 0.02079566 0.057971014 0.004462912 49 2090
+0.007905138 0.091567852 0.050359712 0.003625816 63 150
+0.000478354 0.033245635 0 0.000494805 35 570
+0.024079367 0.00456621 0.050359712 0.023958815 49 2770
+0.026221398 0.017316017 0.442857143 0.018879799 45 150
+0.048079048 0.00441961 0.05 0.04807052 42 930
+0.019207224 0.010713193 0.057142857 0.018796411 44 2750
+0.007426662 0.013089491 0.028368794 0.007148904 34 490
+0.00236421 0.008547527 0 0.002384592 49 1790
+0.055921202 0.006400073 0.14893617 0.055322065 47 1910
+0.058599168 0.049583911 0.356643357 0.043049982 43 370
+0.07197272 0.0103751 0.195804196 0.070674487 45 690
+0.119586272 0.005226417 0.167832168 0.119332794 51 710
+0.07004749 0.012126866 0.034965035 0.070478153 51 1370
+0.004212027 0.009266459 0.06993007 0.003597358 49 2270
+0.008467059 0.011611967 0.027777778 0.008240189 22 490
+0.005555556 0.16 0.020833333 0.002645503 71 510
+0.003840983 0.02765508 0.013888889 0.003555204 67 530
+0.038467652 0.011444921 0.006944444 0.03883261 34 1050
+0.033320076 0.006407707 0.124137931 0.032734389 46 670
+0.039879019 0.016242859 0.062068966 0.039512639 40 7900
+0.000740192 0.053663953 0 0.000782167 40 8300
+0.089669168 0.006890368 0.006849315 0.090243787 54 190
+0.011855565 0.009890929 0.04109589 0.011563462 34 270
+0.003144654 0.091823899 0.020547945 0.001385042 72 310
+0.138720305 0.025316456 0.068493151 0.140544387 66 430
+0.220729887 0.009903677 0.267123288 0.220265826 45 1090
+0.013125265 0.008830823 0.198630137 0.011472509 22 1330
+0.003773585 0.039892183 0.013513514 0.003368894 67 10
+0.029166442 0.007964268 0.040540541 0.029075129 42 450
+0.0261772 0.011462206 0.047297297 0.02593231 34 1310
+0.024882924 0.010344587 0.108108108 0.024012995 22 1730
+0.002156722 0.035705727 0 0.002236581 35 290
+0.027335072 0.014391964 0.046979866 0.027048216 34 430
+0.016908213 0.012853692 0.040268456 0.016604037 22 470
+0.012685247 0.009087028 0.033557047 0.012493845 53 1410
+0.019951632 0.018137848 0.08 0.018842365 52 330
+0.067484663 0.004018539 0.02 0.067676251 54 1630
+0.041948363 0.008038328 0.033112583 0.042019964 42 1150
+0.022061432 0.010711499 0.046357616 0.021798365 22 1290
+0.017898337 0.012011773 0.059602649 0.017391304 44 1850
+0.007354896 0.013380594 0.079470199 0.006376864 31 1850
+0.011743895 0.007518058 0.026143791 0.011634815 22 270
+0.024312699 0.009538059 0.039215686 0.024169184 42 910
+0.030434783 0.133043478 0.150326797 0.012036108 64 1030
+0.017265044 0.011052518 0.045751634 0.016946676 53 1170
+0.038166097 0.006193445 0.006493506 0.038363482 48 410
+0.031397355 0.008855664 0.038961039 0.031329775 51 1010
+0.012371421 0.007135576 0.038961039 0.012180325 22 1450
+0.037963582 0.006431674 0.051948052 0.037873056 40 1610
+0.223493379 0.013962706 0.038709677 0.226109995 45 530
+0.006587146 0.006943208 0.012820513 0.006543564 22 230
+0.038376384 0.012792128 0.051282051 0.038209154 53 350
+0.022631377 0.011967779 0.038461538 0.02243963 22 1470
+0.009100101 0.017526121 0.019230769 0.008919383 49 3070
+0.039124947 0.011008274 0.038216561 0.039135058 42 710
+0.007794797 0.014230037 0.038216561 0.007355645 49 770
+0.015951359 0.011230329 0.178343949 0.014106923 46 1130
+0.02681956 0.014878696 0.133757962 0.025204425 34 1570
+0.005984043 0.026097074 0.025477707 0.005461683 49 3250
+0.044395305 0.013437659 0.120253165 0.043362069 42 290
+0.02563788 0.019381747 0.063291139 0.02489367 66 590
+0.034555625 0.009819764 0.012658228 0.034772784 47 890
+0.005815765 0.016408765 0.037974684 0.005279274 31 1590
+0.003642384 0.052649007 0.012578616 0.003145753 66 290
+0.019873409 0.01082148 0.144654088 0.018508325 24 1110
+0.005020818 0.019715895 0.01863354 0.004747033 66 150
+0.121402327 0.024647887 0.391304348 0.114581698 45 770
+0.006428801 0.051751848 0.049689441 0.004067797 32 830
+0.003243965 0.01536113 0.01863354 0.003003876 31 1770
+0.030379747 0.031353457 0.043478261 0.02995577 49 2730
+0.015822893 0.005560323 0.024691358 0.015773306 14 10
+0.027647835 0.009715725 0.030864198 0.027616279 34 230
+0.00863061 0.031261987 0.024539877 0.008117205 48 130
+0.019168534 0.020288773 0.073619632 0.01804091 52 270
+0.014751211 0.035887274 0.190184049 0.008221055 32 690
+0.00869329 0.044281445 0.098159509 0.004548039 49 1750
+0.023341232 0.019312796 0.159509202 0.020659659 34 1810
+0.009024006 0.01436832 0.030487805 0.008711111 41 470
+0.007937224 0.014792099 0.06097561 0.007140895 22 1510
+0.004449388 0.061179088 0.012121212 0.003949447 72 130
+0.007996668 0.013744273 0.012121212 0.007939189 31 530
+0.021980279 0.011366749 0.054216867 0.021609641 46 1330
+0.011843228 0.008330406 0.036144578 0.011639087 21 1490
+0.008637789 0.010621281 0.078313253 0.007889801 24 1590
+0.010012516 0.104505632 0.035928144 0.00698812 62 270
+0.034103599 0.014345847 0.047904192 0.033902737 42 470
+0.111685988 0.006535439 0.083333333 0.111872504 51 130
+0.063643351 0.025980015 0.124260355 0.062026515 46 450
+0.01092233 0.041019417 0.029585799 0.010124019 14 530
+0.066477916 0.019139298 0.071005917 0.066389562 43 1310
+0.083212048 0.010757451 0 0.084116932 40 30
+0.001996008 0.169660679 0.005882353 0.001201923 62 470
+0.093881579 0.011184211 0.082352941 0.094011976 51 730
+0.02310419 0.010760856 0.129411765 0.021947786 22 770
+0.004031677 0.024478042 0.005882353 0.00398524 32 2070
+0.025254105 0.01336982 0.087719298 0.024407639 22 1230
+0.003441946 0.039467646 0.01744186 0.002866699 73 130
+0.007665505 0.008561473 0.01744186 0.007581082 21 450
+0.012178517 0.01301059 0.01744186 0.012109135 34 1630
+0.035778321 0.01409943 0.12716763 0.034471357 47 490
+0.050546939 0.00564898 0.057803468 0.050505714 44 1150
+0.005083023 0.019654354 0.063218391 0.003917502 53 70
+0.047399329 0.00810962 0.051724138 0.04736397 47 510
+0.00399844 0.016968988 0.022988506 0.003670635 32 1510
+0.025666017 0.019059996 0.159090909 0.023073526 34 150
+0.003584229 0.105137395 0.017045455 0.00200267 62 490
+0.03757743 0.005166897 0.039772727 0.037566028 47 1590
+0.002905552 0.006161171 0.011363636 0.002853117 49 3030
+0.047638148 0.005461109 0.033898305 0.047713594 41 150
+0.009146048 0.017040531 0.02259887 0.008912831 34 870
+0.046477237 0.006232175 0.062146893 0.046378968 41 1030
+0.016706444 0.016247476 0.101694915 0.01530279 34 1130
+0.01166049 0.015999277 0.033898305 0.011298916 34 1650
+0.005820643 0.011979695 0.016949153 0.00568571 32 2050
+0.007528231 0.074445839 0.056179775 0.003615002 72 550
+0.013315927 0.023368146 0.055865922 0.012297821 44 950
+0.00726979 0.010327718 0.011173184 0.007229056 49 2530
+0.007335105 0.010338454 0.033519553 0.00706157 49 4870
+0.004433186 0.1139962 0.016666667 0.002859185 68 350
+0.003804348 0.097826087 0.027777778 0.001204819 37 1170
+0.002149382 0.032240731 0.033333333 0.001110494 32 1930
+0.039783978 0.01620162 0.044444444 0.039707228 49 2910
+0.006091757 0.034456501 0.016574586 0.005717666 32 170
+0.009888476 0.013457249 0.082872928 0.008892908 22 1210
+0.006807796 0.016879605 0.022099448 0.006545248 49 1930
+0.015709014 0.009594096 0.06043956 0.015275708 22 710
+0.008234218 0.083257091 0.027472527 0.006487026 71 1050
+0.000402253 0.073612228 0 0.000434216 72 150
+0.015591253 0.012351512 0.109289617 0.014419463 53 550
+0.002204262 0.134459956 0.010928962 0.000848896 64 610
+0.004754358 0.048335975 0.027322404 0.003608104 35 690
+0.034190683 0.009451503 0.103825137 0.033526253 24 790
+0.026894064 0.012003936 0.010928962 0.027088036 34 910
+0.00449827 0.06366782 0.043478261 0.001847746 62 170
+0.009527299 0.067423965 0.005434783 0.009823183 43 530
+0.080652616 0.008504345 0.190217391 0.079712847 24 1290
+0.01426025 0.054961378 0.005405405 0.014775228 66 270
+0.010388762 0.012087553 0.032432432 0.010119048 22 410
+0.019016122 0.025492628 0.064864865 0.017816742 43 490
+0.017258815 0.01247219 0.097297297 0.016247952 49 990
+0.040313292 0.042616909 0.037837838 0.040423484 62 1090
+0.087336449 0.00864486 0.362162162 0.084939901 40 1850
+0.036160916 0.02802471 0.048387097 0.035808402 52 150
+0.017988269 0.008081686 0.021505376 0.017959613 34 190
+0.004243009 0.036258438 0.037234043 0.003001801 67 210
+0.004692244 0.051890698 0.026595745 0.00349345 68 270
+0.002875767 0.036042945 0.026595745 0.001988862 32 1190
+0.001359619 0.042601405 0.005319149 0.001183432 32 1790
+0.027414562 0.014041605 0.063492063 0.026900761 40 5900
+0.002115283 0.03349198 0.021052632 0.001459055 67 130
+0.008367645 0.012684287 0.068062827 0.007600726 22 1790
+0.002072665 0.023287003 0.010471204 0.001872425 32 1850
+0.03469775 0.051775549 0.22513089 0.0242996 49 4730
+0.014220904 0.008375502 0.067708333 0.013769136 22 510
+0.009761388 0.018931177 0.072916667 0.008542714 34 1390
+0.010049437 0.015560418 0.015625 0.009961307 21 1470
+0.012968394 0.012512219 0.109375 0.011746849 49 2030
+0.01019665 0.017571012 0.067357513 0.009174312 22 90
+0.001308901 0.084205934 0 0.001429252 37 1210
+0.063416189 0.020501381 0.165803109 0.061273181 49 3610
+0.019027484 0.031549846 0.072164948 0.01729639 49 130
+0.014478706 0.01175259 0.030927835 0.014283087 21 610
+0.058104511 0.00679462 0.128865979 0.057620425 53 790
+0.011084547 0.010595523 0.066666667 0.010489318 24 1350
+0.006266067 0.031491003 0.015306122 0.00597213 72 630
+0.016243323 0.021367055 0.086734694 0.014704244 40 5400
+0.034226332 0.018781581 0.076142132 0.033424019 56 750
+0.003604325 0.039447337 0.010152284 0.003335418 62 830
+0.00916358 0.007681809 0.030456853 0.008998743 49 1330
+0.018899172 0.006397142 0.050761421 0.018694032 49 4410
+0.011712153 0.010786076 0.186868687 0.009802302 22 110
+0.018756623 0.006993995 0.03030303 0.018675299 53 190
+0.054035482 0.006184408 0.03030303 0.054183167 42 310
+0.007861635 0.077830189 0.075757576 0.002131287 35 710
+0.064566331 0.007515658 0.141414141 0.063984396 51 1930
+0.004407875 0.019492605 0.055276382 0.003396603 32 770
+0.010031035 0.011028597 0.015075377 0.009974783 34 1010
+0.058690745 0.064172847 0.547738693 0.025155065 40 1010
+0.012139909 0.007973076 0.005025126 0.012197092 49 2510
+0.019206785 0.049887753 0 0.02021528 43 190
+0.039437315 0.009921027 0.054726368 0.039284112 52 430
+0.008962264 0.02370283 0.024875622 0.008575915 49 4090
+0.012012012 0.121921922 0.078817734 0.002735978 65 250
+0.004988613 0.022014966 0.034482759 0.004324684 31 1150
+0.006429652 0.025718608 0.058823529 0.005046584 32 70
+0.023235031 0.18230563 0.117647059 0.002185792 65 150
+0.017481727 0.015447216 0.043902439 0.017067197 42 10
+0.004692294 0.012332311 0.019512195 0.004507248 34 70
+0.008567604 0.055153949 0.024271845 0.007650893 43 610
+0.009057706 0.03009496 0.04368932 0.00798313 49 2650
+0.086555891 0.010422961 0.053140097 0.086907851 47 230
+0.009524366 0.012095361 0.033816425 0.009226947 34 370
+0.011620958 0.013745933 0.028985507 0.011378939 21 390
+0.012074643 0.228320527 0.038461538 0.004267425 65 90
+0.01926858 0.010273299 0.052631579 0.018922275 22 50
+0.042440318 0.032610392 0.28708134 0.034193548 42 170
+0.049163595 0.011851432 0.196172249 0.047400436 24 530
+0.012607028 0.011083679 0.066350711 0.012004674 24 210
+0.002229378 0.052266535 0 0.002352326 63 610
+0.071277082 0.004387242 0.085308057 0.071215254 47 710
+0.027341192 0.034708579 0.037735849 0.026967436 62 30
+0.003667482 0.086389568 0.033018868 0.000892061 37 70
+0.063050154 0.043398158 0.103773585 0.061202654 45 370
+0.007585247 0.014752958 0.018867925 0.007416302 21 410
+0.008773938 0.016174563 0.023584906 0.008530438 24 750
+0.007998488 0.013477768 0.018691589 0.0078524 34 330
+0.003453039 0.073895028 0.028037383 0.001491424 37 1190
+0.016338336 0.01680738 0.004651163 0.016538125 21 30
+0.005132592 0.183917879 0.027906977 0 65 210
+0.005481121 0.130937881 0.027906977 0.002102313 64 370
+0.035998085 0.00689325 0.069444444 0.035765931 51 590
+0.028010534 0.051951161 0.036866359 0.027525253 43 890
+0.004399472 0.047954245 0.032110092 0.003003697 35 170
+0.018678939 0.014753655 0.091743119 0.017584833 22 450
+0.016085409 0.010344009 0.077981651 0.015438462 34 1450
+0.01223184 0.020605947 0.0456621 0.011528485 34 50
+0.003976862 0.079537238 0.013636364 0.003142184 62 350
+0.040073277 0.025188917 0.036363636 0.040169133 34 550
+0.023926639 0.007729604 0.059090909 0.023652716 22 650
+0.049276848 0.003417688 0.045454545 0.049289957 48 830
+0.020874555 0.021764939 0.090909091 0.019316343 21 830
+0.016044121 0.013850589 0.162895928 0.01398157 46 350
+0.017536223 0.013742926 0.049773756 0.017087011 22 370
+0.008181246 0.13971051 0.031531532 0.004389173 64 690
+0.010111309 0.009431557 0.085585586 0.009392692 24 910
+0.007216309 0.040230922 0.130044843 0.002067669 32 650
+0.052493037 0.006825625 0.098654709 0.052175789 41 1250
+0.016548463 0.13179669 0.044843049 0.012253233 23 1430
+0.022582102 0.014452545 0.0625 0.021996727 34 1770
+0.003206841 0.039907358 0.017857143 0.002597885 32 1810
+0.006735897 0.062868369 0.03125 0.005091345 49 2590
+0.043289574 0.024411414 0.062222222 0.042815836 40 1530
+0.005992809 0.029964043 0.013333333 0.005766063 21 1750
+0.001903855 0.107567825 0.008849558 0.001066667 37 710
+0.000978474 0.074037834 0.004405286 0.000704473 35 490
+0.012618297 0.044755521 0.013215859 0.012590299 43 550
+0.018072289 0.075970549 0.052863436 0.01521188 23 790
+0.004694836 0.059467919 0.052631579 0.001663894 62 810
+0.010190217 0.154891304 0.052631579 0.002411576 62 910
+0.005028981 0.019434027 0.026315789 0.004607093 31 1210
+0.005385392 0.038707506 0.056521739 0.003326331 67 470
+0.00359928 0.068986203 0.013043478 0.002899485 35 730
+0.008712871 0.045544554 0.034782609 0.00746888 40 1875
+0.001624507 0.053608726 0.012987013 0.000980873 63 710
+0.041786942 0.031752577 0.03030303 0.042163543 62 1050
+0.056437921 0.006353031 0.090517241 0.05622003 41 1270
+0.010970983 0.007929184 0.043103448 0.010714163 42 1390
+0.056534462 0.014507191 0.231759657 0.053955016 21 1810
+0.008782435 0.093413174 0.068376068 0.002642008 63 370
+0.023164279 0.015098722 0.106837607 0.021881551 24 970
+0.008199922 0.018352206 0.025531915 0.007875895 66 50
+0.001155624 0.090523883 0.008510638 0.000423549 65 170
+0.004176904 0.057739558 0.004255319 0.004172099 67 510
+0.064491711 0.017788207 0.259574468 0.060958693 56 1090
+0.039528024 0.138643068 0.272340426 0.002054795 62 1130
+0.001158749 0.068366165 0.008474576 0.000621891 35 1650
+0.017337079 0.016240663 0.063291139 0.016578434 34 410
+0.001258653 0.074889868 0 0.001360544 64 110
+0.023231018 0.007689822 0.079831933 0.022792394 51 1450
+0.013851724 0.037620022 0.012552301 0.013902519 62 110
+0.004535147 0.019355361 0.020920502 0.004211743 31 710
+0.001570133 0.041870202 0.016666667 0.000910415 32 470
+0.011030589 0.006985069 0.054166667 0.010727161 49 3490
+0.007108331 0.034262155 0.004149378 0.007213308 21 710
+0.004630395 0.019927237 0.004149378 0.004640175 31 1730
+0.006213753 0.10024855 0.041322314 0.002302026 37 750
+0.007616487 0.108870968 0.04526749 0.003016591 64 70
+0.008241577 0.016021626 0.037037037 0.007772715 34 1170
+0.006481511 0.007939852 0.073469388 0.005945381 24 370
+0.004483826 0.026155653 0.020408163 0.004056128 32 1370
+0.007749207 0.087002466 0.08097166 0.000771605 37 690
+0.163854806 0.063139059 0.441295547 0.145156889 45 750
+0.003312102 0.020976645 0.024291498 0.002862595 32 890
+0.003612554 0.055768797 0.028340081 0.00215208 35 1350
+0.015358173 0.013750277 0.056451613 0.014785248 49 10
+0.004689755 0.089466089 0.024193548 0.002773376 72 210
+0.022407849 0.016441262 0.161290323 0.020086277 45 1170
+0.017258271 0.01869018 0.245967742 0.012902235 49 3630
+0.019776119 0.009253731 0.056451613 0.019433564 49 4230
+0.024838013 0.268898488 0.080321285 0.004431315 65 110
+0.010583658 0.038754864 0.040160643 0.009391192 49 250
+0.055314971 0.006269198 0.0562249 0.05530923 41 550
+0.001120762 0.070047632 0.004 0.000903887 35 1050
+0.034200312 0.007671384 0.103585657 0.033663915 24 870
+0.018496899 0.009157242 0.047808765 0.018226002 53 1150
+0.00345411 0.017693501 0.019920319 0.003157517 31 1810
+0.010310363 0.026512362 0.05952381 0.008970064 22 70
+0.009134234 0.100079428 0.011904762 0.008826125 43 850
+0.017555373 0.010336341 0.027777778 0.017448607 49 1210
+0.044425459 0.053823152 0.015873016 0.046049661 49 2490
+0.005046584 0.098602484 0.043307087 0.000861326 71 430
+0.011714805 0.025752373 0.070588235 0.010158599 46 830
+0.007243461 0.034205231 0.070588235 0.005 32 1630
+0.017909526 0.039524471 0.15625 0.012216685 46 10
+0.005100671 0.022908277 0.0234375 0.004670757 32 870
+0.070093458 0.046010065 0.26171875 0.060851545 45 890
+0.036684467 0.029719062 0.0546875 0.036133046 42 1170
+0.004082274 0.020097347 0.0234375 0.003685307 22 1810
+0.004095892 0.030960125 0.023346304 0.003480855 31 30
+0.00748363 0.1202058 0.023346304 0.005316321 64 1090
+0.020097671 0.016153268 0.104651163 0.018709431 24 730
+0.030682571 0.017163685 0.003861004 0.031150968 43 10
+0.017563156 0.009189611 0.015444015 0.017582811 42 70
+0.012666415 0.013960006 0.030888031 0.01240844 34 830
+0.083785058 0.03075654 0.402298851 0.073677812 45 50
+0.002573175 0.041974912 0.01532567 0.002014437 61 170
+0.002638522 0.05760774 0.022900763 0.001399907 62 950
+0.00263095 0.062903612 0.022813688 0.001276161 68 110
+0.044343826 0.018901825 0.011406844 0.04497839 47 1250
+0.006847027 0.014696059 0.034090909 0.006440678 34 130
+0.020490531 0.082272586 0.154716981 0.008457375 13 410
+0.028523302 0.01493543 0.056390977 0.028100775 42 830
+0.021274377 0.027948691 0.074626866 0.019740371 34 1510
+0.00495322 0.14804623 0.011152416 0.003875969 62 190
+0.006189003 0.064032373 0.022304833 0.00508647 37 330
+0.003832886 0.052127252 0.029411765 0.002426203 68 50
+0.008965028 0.01207172 0.036764706 0.008625337 22 850
+0.005624121 0.04249336 0.022058824 0.004894763 63 870
+0.024050024 0.010942761 0.036630037 0.023910841 49 370
+0.21899599 0.020273281 0.432234432 0.214583491 45 450
+0.005827506 0.079545455 0.010989011 0.00538145 49 2950
+0.040222565 0.018368305 0.434306569 0.03284846 52 210
+0.003652665 0.04549228 0.018248175 0.002957036 62 730
+0.003668042 0.025126089 0.040145985 0.002727871 53 1390
+0.015936022 0.008055572 0.039855072 0.015741776 53 510
+0.006408094 0.093086003 0.036231884 0.003346969 64 750
+0.021139706 0.042279412 0.043478261 0.020153551 34 2190
+0.013740734 0.02504068 0.014440433 0.013722763 34 30
+0.017722004 0.026535109 0.0433213 0.017024208 52 390
+0.0090009 0.023075035 0.074468085 0.007454561 49 550
+0.005852231 0.068763716 0.017730496 0.004975124 43 1110
+0.003833209 0.026458489 0.021201413 0.003361183 32 30
+0.005867449 0.018869183 0.017667845 0.005640503 32 730
+0.006151909 0.013708584 0.024734982 0.00589362 22 1690
+0.047025386 0.018345068 0.073943662 0.04652234 52 170
+0.014218933 0.027658746 0.066901408 0.012720353 52 250
+0.037311458 0.037576078 0.102112676 0.034781413 53 770
+0.008592201 0.018770654 0.077464789 0.007274687 49 3990
+0.005035247 0.095669688 0.031578947 0.002227171 63 130
+0.018126445 0.010457564 0.049122807 0.017798873 22 1050
+0.001172726 0.048081756 0.006968641 0.000879972 63 750
+0.025411653 0.011360038 0.080139373 0.0247828 46 750
+0.016442308 0.027596154 0.090592334 0.014337981 42 1070
+0.09191127 0.012861304 0.163763066 0.090975123 51 1330
+0.002343292 0.084358524 0.003472222 0.002239283 61 150
+0.007651642 0.020033389 0.027777778 0.007240204 34 530
+0.017036593 0.034553089 0.152777778 0.012178452 34 1730
+0.05101458 0.00868781 0.020761246 0.051279719 54 1790
+0.033573834 0.027898028 0.020689655 0.033943592 43 390
+0.052691408 0.019391508 0.389655172 0.046027958 56 670
+0.00867679 0.070137383 0.065292096 0.004406428 32 630
+0.011025358 0.080485116 0.078767123 0.005095923 62 250
+0.013733631 0.046630469 0.034246575 0.012730318 43 970
+0.007071943 0.019855841 0.04109589 0.006382684 22 1370
+0.006361616 0.022380624 0.078767123 0.004704038 53 1510
+0.005969101 0.102879213 0.017064846 0.004696673 62 370
+0.00871731 0.036488169 0.105802048 0.005040713 32 550
+0.006247397 0.122032486 0.037542662 0.001897533 73 590
+0.004949951 0.032339677 0.030612245 0.004092304 32 950
+0.008246289 0.08108851 0.071186441 0.002692193 68 450
+0.003292324 0.051290937 0.02027027 0.002374429 63 450
+0.040495632 0.014099886 0.239057239 0.037655896 56 30
+0.005769759 0.027200293 0.067340067 0.004048202 32 1590
+0.244848538 0.013120817 0.083892617 0.246988489 45 10
+0.006890254 0.024738502 0.087248322 0.00485189 53 30
+0.006970509 0.079892761 0.036912752 0.004370629 63 590
+0.033995714 0.029027859 0.033557047 0.034008828 45 630
+0.032730889 0.012884815 0.046979866 0.032544897 34 1950
+0.005849282 0.016979953 0.023411371 0.005545927 32 110
+0.006444683 0.107053348 0.036789298 0.002806736 63 850
+0.005206349 0.037968254 0.02006689 0.004619852 49 1710
+0.01252091 0.015207584 0.063333333 0.011736243 24 1490
+0.001858205 0.043024585 0.013289037 0.001344287 67 270
+0.007630227 0.044167278 0.07641196 0.00445195 32 390
+0.010252029 0.02580094 0.079470199 0.008418837 34 110
+0.007139612 0.01960148 0.056291391 0.006156902 24 1750
+0.036525612 0.033853007 0.042763158 0.036307054 66 130
+0.010058676 0.127409891 0.049342105 0.004322767 37 1370
+0.002471524 0.032774554 0.013114754 0.002110877 32 1410
+0.037221381 0.033109717 0.049019608 0.036817368 46 590
+0.018583506 0.024405806 0.088235294 0.016841073 53 1530
+0.01851993 0.023321393 0.075163399 0.017167382 40 7300
+0.018339894 0.013279121 0.048859935 0.01792916 22 190
+0.002188868 0.095997498 0.009771987 0.001383604 36 330
+0.012022565 0.014242116 0.032467532 0.011727179 24 1650
+0.009687836 0.110871905 0.061488673 0.00322841 37 950
+0.0179941 0.018348083 0.080385852 0.016827934 22 290
+0.023208616 0.0102131 0.048076923 0.022952012 42 1430
+0.006266319 0.163446475 0.019169329 0.003745318 64 590
+0.005808462 0.014828107 0.050955414 0.005128943 24 410
+0.00294861 0.044229149 0.00952381 0.002644337 66 170
+0.009330191 0.030936948 0.06031746 0.007702442 21 1330
+0.009056877 0.038038884 0.044444444 0.007657545 32 1450
+0.017090909 0.057272727 0.13015873 0.010221794 49 2390
+0.008396641 0.1267493 0.047318612 0.002747253 63 630
+0.018489131 0.008010277 0.08490566 0.01795282 52 190
+0.018259156 0.032990974 0.06918239 0.016521832 34 1350
+0.023443182 0.017848151 0.087774295 0.022274126 34 90
+0.010557165 0.010623772 0.015673981 0.010502222 21 290
+0.003018911 0.018227387 0.009375 0.002900905 32 590
+0.032714261 0.010938831 0.021806854 0.032834895 44 2150
+0.008009582 0.024103601 0.031055901 0.007440362 21 170
+0.02232027 0.016714249 0.049689441 0.021855039 53 270
+0.027253248 0.021628499 0.154798762 0.024433646 56 570
+0.12966805 0.084024896 0.194444444 0.123725934 45 870
+0.016006098 0.027523713 0.095384615 0.013759471 24 1250
+0.003529412 0.029502262 0.033742331 0.002610966 32 310
+0.005964527 0.051169361 0.058282209 0.003143093 63 530
+0.250728118 0.016657299 0.17791411 0.251961548 45 550
+0.009118887 0.037273453 0.04587156 0.007695951 49 3210
+0.170164657 0.019132653 0.103030303 0.17147417 66 470
+0.013930571 0.014781635 0.036363636 0.013593999 24 650
+0.002191381 0.060628196 0.006024096 0.001944012 68 90
+0.012907797 0.010227658 0.039156627 0.012636559 53 1130
+0.009443395 0.012331963 0.048048048 0.00896138 53 1190
+0.031607879 0.153000458 0.113772455 0.016765819 23 1190
+0.006314581 0.192307692 0.032835821 0 64 510
+0.031505673 0.015187814 0.053571429 0.031165374 49 2570
+0.018414349 0.017883677 0.091988131 0.01707462 21 510
+0.003382492 0.045595995 0.029673591 0.002126453 62 630
+0.20962795 0.014979775 0.258160237 0.208889892 45 970
+0.01732937 0.018955751 0.026627219 0.017149717 49 970
+0.067139738 0.020621058 0.161764706 0.065147387 56 50
+0.002297442 0.052228519 0.011730205 0.001777634 63 510
+0.025906736 0.013526876 0.035087719 0.025780843 34 310
+0.02164831 0.04329662 0.114035088 0.017467249 49 4930
+0.007039775 0.120732137 0.043731778 0.002001601 68 430
+0.01200998 0.014505011 0.049562682 0.011457261 24 570
+0.007330044 0.016432712 0.023323615 0.007062835 53 750
+0.001751654 0.066757493 0.008746356 0.001251303 35 1010
+0.004665826 0.043253468 0.032069971 0.003426914 32 1430
+0.016795712 0.013619218 0.032069971 0.016584816 40 6800
+0.004713471 0.085338626 0.01744186 0.003525902 72 490
+0.016394068 0.006557627 0.026162791 0.016329585 53 1330
+0.003817522 0.021950754 0.060869565 0.00253708 32 570
+0.005974735 0.059064527 0.052023121 0.00308418 63 470
+0.002769231 0.106769231 0.014409222 0.001377885 63 770
+0.011347875 0.043878452 0.083333333 0.00804431 22 1110
+0.01089531 0.082662245 0.103151862 0.002581978 14 230
+0.034474199 0.010860459 0.074285714 0.03403708 51 190
+0.007441039 0.014755959 0.048433048 0.006827104 22 690
+0.012731131 0.017732646 0.045584046 0.012138045 53 1470
+0.052806297 0.012046543 0.0625 0.052688098 41 430
+0.007308161 0.018641106 0.011363636 0.007231126 34 1150
+0.023470243 0.147946354 0.059490085 0.017215937 23 390
+0.005842259 0.11489776 0.028248588 0.002933627 63 730
+0.009835605 0.01658002 0.039548023 0.009334667 24 1610
+0.010415667 0.016991456 0.079096045 0.009228516 21 1730
+0.006625097 0.069368667 0.061797753 0.002512563 62 390
+0.039902808 0.019222462 0.238764045 0.036005285 56 830
+0.00701107 0.131734317 0.014005602 0.005949851 73 190
+0.00502956 0.031500926 0.016806723 0.004646501 53 530
+0.006970652 0.015850464 0.025210084 0.006676893 21 1090
+0.09536167 0.01064877 0.308123249 0.093071635 40 1950
+0.009090909 0.271212121 0.033519553 0 65 50
+0.011474469 0.014670929 0.061452514 0.010730328 21 810
+0.002972973 0.097027027 0.016713092 0.001496558 37 550
+0.047385961 0.037388044 0.211699164 0.041004003 49 1850
+0.057605178 0.116504854 0.125 0.048717949 6 130
+0.027901658 0.02058319 0.022222222 0.028021016 56 410
+0.005553352 0.020400068 0.052777778 0.004569908 32 1330
+0.009723261 0.01696896 0.03030303 0.009368016 34 470
+0.008709016 0.185963115 0.033057851 0.003146633 71 910
+0.034173669 0.010732714 0.076923077 0.033709875 47 630
+0.011160086 0.034230517 0.01369863 0.011070111 32 430
+0.013909965 0.092564492 0.139344262 0.001114827 62 1150
+0.008344654 0.035804386 0.073170732 0.005937406 62 990
+0.001115005 0.058935967 0.008108108 0.000677048 68 290
+0.006849315 0.181017613 0.035135135 0.000597372 64 350
+0.003294591 0.034135627 0.018766756 0.002747773 32 1830
+0.029566334 0.015003811 0.058823529 0.029120678 56 250
+0.022590128 0.029011295 0.074666667 0.021034181 40 1290
+0.008935825 0.076360682 0.061170213 0.004617414 72 230
+0.021041721 0.019642394 0.031662269 0.020828928 34 1090
+0.01651385 0.018242043 0.042105263 0.016038336 34 2170
+0.003638453 0.036480276 0.026246719 0.00278247 62 1250
+0.018932767 0.023869033 0.094240838 0.017091282 34 10
+0.012137693 0.025402932 0.054830287 0.011024908 21 490
+0.01633459 0.011458146 0.062663185 0.015797597 24 1670
+0.013017751 0.075739645 0.119791667 0.004268032 67 150
+0.020258863 0.108047271 0.166666667 0.002523659 63 390
+0.006934507 0.042267474 0.033854167 0.005746466 43 710
+0.006815084 0.043843707 0.012953368 0.006533618 61 270
+0.018235542 0.03258759 0.049222798 0.017191727 49 350
+0.005654124 0.075453305 0.041343669 0.002741459 32 230
+0.025080906 0.026294498 0.030769231 0.024927295 40 410
+0.037580438 0.05032175 0.140664962 0.032118173 53 290
+0.123862636 0.02869093 0.033248082 0.126539246 66 490
+0.006761325 0.264367816 0.020460358 0.001838235 62 970
+0.011239563 0.031390495 0.046035806 0.010111894 21 1250
+0.007339724 0.029358898 0.094387755 0.00470679 31 1010
+0.004709891 0.039282493 0.022959184 0.003963701 35 1270
+0.005342152 0.099974561 0.038167939 0.001695873 68 190
+0.011097271 0.044502321 0.06870229 0.008414316 22 1310
+0.005658411 0.019506626 0.055979644 0.004657285 24 1710
+0.006099627 0.133514063 0.035532995 0.001564333 64 1070
+0.008434751 0.047596096 0.083544304 0.004681174 53 450
+0.00536789 0.02141734 0.032911392 0.004765071 22 1130
+0.004981539 0.023208111 0.01010101 0.004859903 32 10
+0.026926713 0.013901047 0.214105793 0.024288048 14 90
+0.004760571 0.05558667 0.017632242 0.004002965 35 830
+0.007059229 0.068526171 0.042713568 0.004436229 63 650
+0.015909091 0.043073593 0.155778894 0.00961321 53 930
+0.005705833 0.084530854 0.0375 0.002770083 37 610
+0.00592718 0.08488569 0.052369077 0.001619246 72 250
+0.001803969 0.060282622 0.004987531 0.001599744 63 410
+0.015087806 0.049839228 0.037220844 0.013926851 49 2550
+0.015199804 0.049644521 0.051851852 0.01328518 43 1070
+0.027902686 0.046811945 0.091133005 0.024797387 44 1270
+0.004707686 0.044558791 0.029484029 0.003552194 32 1270
+0.016043408 0.012025224 0.051219512 0.015615259 53 150
+0.009764132 0.024116228 0.02195122 0.009462962 53 830
+0.0115625 0.128125 0.065853659 0.003584229 36 870
+0.028240166 0.033954451 0.297560976 0.018774111 24 1210
+0.01167681 0.020335461 0.111922141 0.00959596 53 390
+0.004037267 0.042546584 0.02919708 0.002919235 35 650
+0.021613833 0.014444366 0.094890511 0.020539885 49 2170
+0.013823705 0.035241915 0.099273608 0.010702282 21 50
+0.010406504 0.13495935 0.062650602 0.002255639 61 230
+0.075175409 0.046218955 0.315662651 0.063521719 45 470
+0.018930142 0.025403029 0.007211538 0.019235589 40 590
+0.012036192 0.034614427 0.026378897 0.011521926 43 830
+0.014624543 0.03937377 0.026190476 0.014150483 45 1030
+0.151451187 0.037027265 0.406175772 0.141656772 45 1010
+0.007099025 0.05077608 0.035545024 0.005577386 43 1170
+0.002414992 0.040861669 0.004728132 0.002316447 32 1230
+0.023860247 0.09011504 0.087470449 0.01756029 40 1490
+0.031194068 0.013551521 0.058962264 0.030812597 11 50
+0.008008324 0.013368225 0.025943396 0.007765315 46 490
+0.003659849 0.091280947 0.018867925 0.002132196 37 850
+0.0210766 0.012406586 0.025882353 0.021016228 24 1410
+0.006186406 0.017297517 0.032941176 0.005715469 49 4510
+0.004963504 0.124379562 0.023474178 0.002334111 62 1190
+0.006305671 0.018828821 0.032786885 0.005797492 32 1910
+0.023110326 0.01575035 0.091121495 0.022021985 22 930
+0.007015903 0.066884939 0.034965035 0.005012531 35 630
+0.040267895 0.035914609 0.095571096 0.038207711 34 1610
+0.005146262 0.05823402 0.053488372 0.002157032 63 430
+0.003699284 0.051431981 0.041763341 0.001635426 35 310
+0.009514075 0.008491901 0.074074074 0.008961142 14 550
+0.012382415 0.020733346 0.050925926 0.01156636 34 1750
+0.001522456 0.110124334 0.002304147 0.00142572 72 370
+0.070715734 0.033915484 0.073563218 0.07061577 31 1290
+0.005270033 0.019638755 0.055045872 0.004272915 24 50
+0.012502469 0.012333136 0.054919908 0.011972797 24 450
+0.004270897 0.13361806 0.011415525 0.003169014 64 330
+0.011563779 0.077922078 0.116438356 0.002701138 35 870
+0.042312702 0.009244086 0.072727273 0.042028924 47 1830
+0.013551216 0.176165803 0.063348416 0.002902758 64 390
+0.006387694 0.057619606 0.045248869 0.00401162 32 1050
+0.045250561 0.033208676 0.335585586 0.035277735 56 10
+0.005366452 0.068077277 0.02027027 0.004277723 73 30
+0.017276913 0.019469415 0.094594595 0.015741693 53 810
+0.008091842 0.022454863 0.02027027 0.007812096 24 1070
+0.034039878 0.040057482 0.062780269 0.032840569 52 90
+0.022848665 0.066172107 0.116591928 0.01620591 49 910
+0.003665988 0.182077393 0.011185682 0.001992032 73 550
+0.020481167 0.03135302 0.178970917 0.015351195 56 910
+0.009050196 0.020899422 0.020089286 0.008814561 34 1470
+0.011020062 0.127154563 0.051111111 0.00517967 37 410
+0.002768933 0.06243943 0.019955654 0.001624335 62 1210
+0.004375631 0.076236957 0.024282561 0.002732738 65 30
+0.01160444 0.229061554 0.04845815 0.00065445 65 270
+0.017158202 0.01078923 0.09030837 0.01636036 53 370
+0.012645237 0.016640399 0.063876652 0.0117783 22 430
+0.010554224 0.023147912 0.033039648 0.0100214 24 1370
+0.011103745 0.037620152 0.105726872 0.007404856 46 1490
+0.004365224 0.082757366 0.028571429 0.002181241 37 650
+0.00547179 0.055447471 0.026315789 0.004248198 32 1970
+0.006924409 0.131852279 0.026258206 0.003988036 63 210
+0.021799975 0.014211967 0.054704595 0.021325594 32 350
+0.007754302 0.01995208 0.032751092 0.00724541 24 110
+0.014771997 0.294797688 0.043572985 0.00273224 62 1110
+0.038849227 0.008166664 0.071583514 0.038579696 47 670
+0.022837066 0.202898551 0.084415584 0.007162534 36 850
+0.012780295 0.053793424 0.041036717 0.01117387 21 1290
+0.010790005 0.029467441 0.017130621 0.01059749 22 10
+0.004752206 0.063408011 0.010706638 0.004349087 43 90
+0.008477464 0.019894351 0.057815846 0.007475986 24 310
+0.005399286 0.040668815 0.025695931 0.004538853 32 850
+0.021884858 0.046135647 0.147435897 0.015812319 49 210
+0.006637872 0.021323028 0.019189765 0.006364397 21 670
+0.003344482 0.087328131 0.021276596 0.001628664 61 50
+0.005106826 0.048983846 0.05106383 0.002739726 63 310
+0.016147541 0.019262295 0.036170213 0.015754283 32 990
+0.010377054 0.030293265 0.078723404 0.008241941 40 5100
+0.019840715 0.013161939 0.033970276 0.019652263 53 1010
+0.014255911 0.164116829 0.078389831 0.001663894 37 630
+0.007096513 0.096107056 0.033755274 0.004262001 73 210
+0.006518905 0.102998696 0.044303797 0.002180233 37 730
+0.008957655 0.129478827 0.056603774 0.001870907 65 10
+0.008141113 0.020267978 0.071129707 0.006838051 22 1030
+0.030778568 0.041789492 0.195020747 0.023615635 46 1510
+0.082474477 0.011716091 0.10373444 0.082222441 51 1950
+0.002596653 0.139353722 0.01242236 0.001005699 64 250
+0.007161583 0.030884328 0.031055901 0.006400106 31 1070
+0.00478687 0.110098017 0.031055901 0.001536885 37 1070
+0.011536613 0.038775837 0.024793388 0.011001834 43 810
+0.011231102 0.069690425 0.035123967 0.009441263 49 2870
+0.00533558 0.136759337 0.028747433 0.001626545 71 150
+0.026904177 0.05982801 0.316221766 0.008493401 46 270
+0.012788867 0.010384304 0.071721311 0.012170473 49 1810
+0.006186317 0.0444869 0.028629857 0.005141388 53 730
+0.02071601 0.049175382 0.053169734 0.019037546 34 730
+0.025878473 0.062032221 0.096114519 0.021233433 40 6700
+0.004419776 0.050570459 0.028455285 0.003139547 32 1470
+0.006963494 0.104030386 0.044624746 0.002590674 64 30
+0.012875806 0.010363454 0.038383838 0.012608686 53 1250
+0.005687831 0.065740741 0.042253521 0.003114824 35 1330
+0.002862869 0.035714286 0.02004008 0.002226676 31 770
+0.002393162 0.085470085 0.014 0.001308411 63 70
+0.002749771 0.114573786 0.018 0.000776398 71 270
+0.020023175 0.023174971 0.152 0.016892052 42 690
+0.047227531 0.007466335 0.113095238 0.046732041 48 450
+0.015241321 0.072114028 0.033268102 0.013840304 43 1010
+0.013004409 0.014288106 0.048828125 0.012485137 44 210
+0.047401339 0.018726455 0.162109375 0.04521227 34 1870
+0.007288116 0.030210415 0.027237354 0.006666667 32 1210
+0.020010262 0.132119035 0.099029126 0.007981082 61 110
+0.003279485 0.06255314 0.011650485 0.002720912 67 410
+0.006034525 0.025572538 0.032882012 0.005329949 22 330
+0.004933586 0.098481973 0.032755299 0.001894338 37 490
+0.008953946 0.020204773 0.052023121 0.008065798 34 1270
+0.012960329 0.082029398 0.123314066 0.003099174 32 1350
+0.006231084 0.092398077 0.034682081 0.003334641 32 1530
+0.002920096 0.046013627 0.001923077 0.002968185 31 10
+0.025803241 0.018481661 0.107692308 0.024261298 52 110
+0.003798343 0.089951657 0.021113244 0.002086891 71 330
+0.014832199 0.024501291 0.076628352 0.013280085 22 210
+0.013364208 0.034667549 0.066793893 0.011445412 25 1030
+0.010401952 0.067548478 0.020912548 0.009640545 43 1150
+0.034858711 0.039086257 0.20113852 0.028095091 46 110
+0.013099907 0.021347997 0.024621212 0.012848585 53 310
+0.045441445 0.050725334 0.414772727 0.0257059 49 3950
+0.032898084 0.08970663 0.270321361 0.009500745 14 1130
+0.005744317 0.064654119 0.037807183 0.003528028 35 1810
+0.003461705 0.076445983 0.026415094 0.001561768 37 470
+0.006293587 0.090151386 0.032075472 0.003739017 71 490
+0.006857143 0.075857143 0.062146893 0.002318751 68 310
+0.006088974 0.022077707 0.071294559 0.004616883 24 830
+0.009397457 0.049290584 0.035514019 0.008043415 53 1030
+0.00350186 0.058656161 0.027985075 0.001976285 35 450
+0.021390374 0.191800357 0.081784387 0.007057786 3 190
+0.01002004 0.154594904 0.02037037 0.008127328 33 870
+0.013665702 0.024037391 0.062962963 0.012451539 22 1530
+0.007608179 0.052116025 0.109489051 0.002006622 62 290
+0.01280812 0.066457226 0.112727273 0.005695056 62 850
+0.00331379 0.070609228 0.005415162 0.003154142 63 490
+0.015928892 0.048842735 0.100900901 0.011565507 49 390
+0.012166272 0.020953025 0.041218638 0.01154451 34 1590
+0.018491896 0.011008488 0.135957066 0.017184389 52 410
+0.036773156 0.026062974 0.183421517 0.032848782 56 770
+0.024067488 0.046894384 0.08994709 0.020826102 14 930
+0.003682171 0.110077519 0.022887324 0.00130662 64 570
+0.00742194 0.01054989 0.056140351 0.006902486 14 410
+0.004584661 0.044447898 0.036713287 0.003090185 2 150
+0.010151312 0.10974909 0.022687609 0.008605852 23 190
+0.020930233 0.190365449 0.085514834 0.005744768 23 850
+0.00494801 0.020724274 0.055363322 0.003881078 32 150
+0.029535865 0.06968053 0.183391003 0.018012181 49 510
+0.007272727 0.162237762 0.024137931 0.004006678 64 550
+0.003703261 0.069406284 0.036144578 0.001283697 35 1610
+0.004125674 0.092668994 0.017123288 0.002798181 67 230
+0.007266482 0.025718941 0.04109589 0.006373457 23 590
+0.007398486 0.10065382 0.035897436 0.004208915 37 150
+0.008155301 0.029319057 0.020477816 0.007783104 22 990
+0.002934661 0.016859913 0.008532423 0.002838664 49 3750
+0.005174644 0.084375449 0.042589438 0.001726845 72 270
+0.006449583 0.092339154 0.044293015 0.002599653 63 670
+0.01126549 0.221179121 0.03565365 0.004339441 65 130
+0.006420546 0.072725028 0.054329372 0.002663116 61 210
+0.00283852 0.061921783 0.006791171 0.002577608 72 330
+0.003589744 0.100683761 0.008488964 0.003041247 37 1110
+0.008186593 0.091133766 0.033898305 0.00560843 35 770
+0.005328056 0.089815801 0.018644068 0.004014049 35 1730
+0.007177364 0.081573499 0.055837563 0.002855425 37 590
+0.005661564 0.041378346 0.030405405 0.004593511 31 1750
+0.013728814 0.100508475 0.092748735 0.00489919 25 910
+0.012504701 0.055848063 0.074074074 0.008862776 22 730
+0.006754512 0.065994906 0.055369128 0.003319502 35 130
+0.005586081 0.05467033 0.036850921 0.003777972 32 610
+0.039201638 0.061207779 0.244147157 0.025839512 56 930
+0.063592493 0.064128686 0.043478261 0.06497078 49 4790
+0.049074473 0.258286698 0.18 0.003482298 36 70
+0.008003961 0.04959155 0.051580699 0.005730161 31 1170
+0.004154765 0.078161516 0.03654485 0.001408451 73 390
+0.009477073 0.013551539 0.031561462 0.009173684 56 1070
+0.016055902 0.032111805 0.016611296 0.016037476 34 1210
+0.010386613 0.174264282 0.036423841 0.004891684 64 430
+0.010808819 0.037155315 0.163636364 0.004911341 31 1830
+0.00553275 0.108156345 0.02640264 0.003001801 37 30
+0.015121285 0.063635409 0.047854785 0.012896714 49 4690
+0.016951608 0.02936347 0.046052632 0.016071251 47 1290
+0.012926748 0.201856148 0.0591133 0.001245847 64 790
+0.009576918 0.074069584 0.099836334 0.002356638 68 150
+0.026402972 0.015334806 0.063829787 0.0258201 48 790
+0.005171475 0.08328797 0.019607843 0.003859857 35 970
+0.005081967 0.100655738 0.03257329 0.002005104 73 170
+0.008284714 0.07176196 0.02601626 0.006913891 35 1210
+0.013990133 0.045946543 0.059294872 0.011808289 53 110
+0.006966041 0.077745988 0.0608 0.002427839 68 30
+0.007151371 0.148986889 0.032 0.00280112 64 450
+0.006802721 0.158226253 0.02866242 0.002693804 36 110
+0.00468506 0.081728267 0.031847134 0.002267574 35 330
+0.089705755 0.027297978 0.222575517 0.085976888 45 190
+0.010211186 0.145973544 0.052464229 0.00298913 64 890
+0.002722137 0.032408834 0.006339144 0.002600987 32 450
+0.019793814 0.130103093 0.109350238 0.006399621 32 1950
+0.002529244 0.200126462 0.001579779 0.002766798 64 970
+0.017957021 0.093170445 0.145339652 0.004869339 35 1250
+0.005602032 0.047804004 0.0515625 0.003294634 31 730
+0.003064978 0.130976706 0.009360374 0.002116153 64 950
+0.014534884 0.233284884 0.056074766 0.001895735 65 190
+0.019935562 0.129681836 0.097826087 0.008329477 68 410
+0.015481172 0.135355649 0.071097372 0.00677474 71 90
+0.011709775 0.047950789 0.00927357 0.011832477 43 690
+0.005533269 0.089915618 0.030769231 0.003039976 35 430
+0.019512552 0.023824314 0.154907975 0.016208123 45 730
+0.012148943 0.051751341 0.170731707 0.003494176 24 190
+0.045580878 0.36520289 0.109589041 0.008756567 33 310
+0.009629045 0.10418311 0.037878788 0.006343612 25 10
+0.008219178 0.090547945 0.054462935 0.003615002 35 610
+0.049106654 0.07256385 0.31570997 0.028247252 49 410
+0.013528749 0.093573844 0.114457831 0.003109453 35 1230
+0.013835829 0.069695405 0.035555556 0.012208657 43 1090
+0.01369863 0.1222062 0.051622419 0.008418891 23 350
+0.012252531 0.021861095 0.184365782 0.008405854 14 870
+0.038675369 0.164370317 0.130882353 0.020538039 23 1370
+0.009055204 0.037148156 0.051395007 0.007421676 24 390
+0.011703203 0.014693544 0.007320644 0.011768559 44 890
+0.002725724 0.077683135 0.010233918 0.002093338 31 590
+0.013565891 0.110465116 0.033625731 0.0110748 14 1030
+0.011107481 0.223783077 0.040875912 0.002525253 65 230
+0.005272811 0.157267309 0.033527697 0 68 170
+0.015461664 0.027385826 0.126269956 0.012341643 22 550
+0.005529732 0.018405727 0.029027576 0.005089128 53 710
+0.011483559 0.018315213 0.04934688 0.010777146 41 1010
+0.024351597 0.057459761 0.143686502 0.017076624 49 1770
+0.023170268 0.029258805 0.088150289 0.021211725 34 510
+0.010652174 0.150652174 0.054834055 0.002815459 73 250
+0.014567383 0.046954401 0.073593074 0.01165932 31 870
+0.008320057 0.06133829 0.023088023 0.007355021 49 1870
+0.009627014 0.033621982 0.017266187 0.009361233 53 170
+0.005365012 0.04451683 0.014347202 0.004946524 32 130
+0.00810767 0.113020918 0.037302726 0.004387569 64 470
+0.007531646 0.044240506 0.030042918 0.006489636 35 1470
+0.008827238 0.126103405 0.037142857 0.00474129 35 150
+0.012837668 0.029355189 0.055555556 0.011545752 24 250
+0.008401125 0.013342964 0.05982906 0.007705644 22 350
+0.009743423 0.227996103 0.028490028 0.004206984 36 650
+0.015264599 0.02640363 0.143465909 0.011787819 14 310
+0.008204363 0.065728137 0.080851064 0.003093504 35 1450
+0.00567616 0.07030472 0.04674221 0.002570694 67 110
+0.008895325 0.146255689 0.042432815 0.003149988 63 90
+0.012119772 0.168963878 0.052039381 0.004003432 62 510
+0.011842374 0.048526509 0.081346424 0.008297568 32 290
+0.005610311 0.108263836 0.019607843 0.0039109 37 970
+0.029305686 0.009022934 0.027855153 0.029318894 47 810
+0.006763973 0.128337487 0.044382802 0.00122524 64 10
+0.004769475 0.143282989 0.019417476 0.002319647 64 870
+0.020569174 0.068000376 0.233425414 0.005038799 13 970
+0.014394672 0.02596054 0.093793103 0.012278509 21 1210
+0.016941883 0.07784688 0.096418733 0.010232558 49 890
+0.017238627 0.038786911 0.063100137 0.015388022 40 870
+0.015292663 0.030214345 0.038199181 0.014578994 34 710
+0.005413579 0.0221987 0.039509537 0.00463951 24 630
+0.006663549 0.043079261 0.023066486 0.005925111 23 150
+0.015863091 0.017090454 0.04200542 0.015408538 51 670
+0.022316555 0.02995755 0.025641026 0.022213887 42 510
+0.005145092 0.152706318 0.021563342 0.002186058 36 10
+0.042419818 0.0188057 0.255689424 0.038332264 56 550
+0.009837741 0.095822154 0.077333333 0.002684753 62 450
+0.013410756 0.034563805 0.082666667 0.010931309 53 1490
+0.090759144 0.031009031 0.155585106 0.088684625 56 590
+0.019992384 0.143183549 0.065159574 0.012444444 23 1130
+0.005949924 0.018286908 0.015936255 0.005763903 24 1090
+0.006205113 0.04326343 0.027888446 0.005224598 21 1870
+0.012845195 0.023796749 0.045092838 0.012059099 49 1390
+0.007987668 0.106081839 0.042272127 0.00391911 35 1630
+0.012701639 0.048202718 0.105401845 0.008006939 62 890
+0.002260958 0.055429947 0.010526316 0.001775925 72 30
+0.022342065 0.195428865 0.080157687 0.008298755 23 10
+0.009312321 0.182187202 0.0327654 0.004087591 64 190
+0.005677391 0.084938217 0.011795544 0.005109489 37 930
+0.00962326 0.156838657 0.046997389 0.0026712 36 310
+0.023926693 0.130324113 0.161458333 0.003317073 62 150
+0.01657197 0.181818182 0.076822917 0.00318287 21 1550
+0.019639697 0.022337617 0.046753247 0.019020207 44 2450
+0.026929778 0.029202333 0.094682231 0.024891733 21 770
+0.047443332 0.2042699 0.209032258 0.005962239 3 70
+0.008512679 0.046579941 0.030888031 0.007419517 2 270
+0.010651996 0.090951656 0.034749035 0.008241051 62 670
+0.007033382 0.040888096 0.05648267 0.0049253 24 510
+0.004518664 0.076522593 0.024390244 0.002872035 72 650
+0.010843692 0.103015075 0.070603338 0.00398054 35 1750
+0.007183908 0.101880878 0.024358974 0.005235602 25 770
+0.013670021 0.148281754 0.042253521 0.008693714 63 350
+0.003947888 0.154362416 0.017902813 0.00140056 64 730
+0.009351181 0.014538018 0.017902813 0.009225023 49 4850
+0.006462372 0.163227017 0.019157088 0.003986049 64 990
+0.019665415 0.053533629 0.225765306 0.00800808 24 670
+0.015606875 0.041495307 0.025412961 0.015182353 34 1830
+0.014445795 0.106655866 0.098734177 0.004382651 71 110
+0.00915678 0.062982722 0.067003793 0.005268525 32 1490
+0.010933679 0.114228169 0.078085642 0.002273835 67 450
+0.011240531 0.06467378 0.025188917 0.01027606 40 550
+0.031346435 0.129283742 0.100502513 0.021078157 23 890
+0.006413384 0.055489718 0.042713568 0.004280759 32 1310
+0.009942363 0.114841499 0.058971142 0.003581312 71 170
+0.015437262 0.060684411 0.115288221 0.008986399 25 230
+0.022146119 0.060958904 0.149812734 0.013858497 21 1890
+0.017859047 0.042755091 0.143391521 0.012252172 52 130
+0.020744894 0.064317181 0.194271482 0.008816983 14 1310
+0.003720171 0.073042374 0.019875776 0.002447142 43 110
+0.007201592 0.021107212 0.047146402 0.006340289 32 1250
+0.024427554 0.03857737 0.086741016 0.021927208 11 10
+0.003235827 0.105099663 0.00862069 0.002603413 72 410
+0.103966027 0.020550542 0.196801968 0.102018169 51 950
+0.013562827 0.049507359 0.063882064 0.010941899 13 950
+0.010042487 0.157589803 0.039215686 0.004585053 23 1290
+0.043493034 0.277607883 0.134638923 0.008466604 25 370
+0.004726101 0.043931257 0.023227384 0.003875969 40 130
+0.006139621 0.020131109 0.048602673 0.005267231 22 530
+0.015671104 0.018009748 0.078883495 0.014511785 40 7700
+0.004244326 0.076121056 0.023030303 0.002696495 35 1690
+0.00373413 0.154406273 0.013301088 0.001987194 64 1010
+0.027211975 0.044212777 0.116082225 0.023101018 49 3310
+0.010220522 0.041737992 0.091676719 0.006672621 23 230
+0.006601981 0.082924877 0.024125452 0.005017452 35 1290
+0.006842498 0.103632744 0.027611044 0.00444136 37 870
+0.017712692 0.116457462 0.111377246 0.005367009 62 1070
+0.011945884 0.120322395 0.077751196 0.002945026 32 1650
+0.050820405 0.026719019 0.151732378 0.048050116 45 790
+0.015617978 0.094494382 0.064209275 0.010547214 6 150
+0.007094994 0.166141112 0.024911032 0.003545261 64 770
+0.018621974 0.120899585 0.03436019 0.016457553 68 130
+0.008538899 0.133617963 0.049704142 0.002190181 37 190
+0.019375673 0.151596699 0.093491124 0.006132375 37 310
+0.026609052 0.048044121 0.202366864 0.017738756 49 1450
+0.028001522 0.013992491 0.072104019 0.02737566 24 1450
+0.008821454 0.149435427 0.041322314 0.003111388 64 530
+0.013161072 0.102632214 0.061176471 0.007669537 64 810
+0.009395973 0.126771066 0.055294118 0.002732707 37 1050
+0.00507872 0.072033181 0.011750881 0.004560795 32 270
+0.003791869 0.059967699 0.030444965 0.002091581 72 610
+0.00420199 0.063251014 0.024475524 0.002833084 72 10
+0.00373599 0.152641879 0.018648019 0.001049759 37 430
+0.006731879 0.023602792 0.065192084 0.0053187 24 1010
+0.005878648 0.060256141 0.016260163 0.005212988 31 550
+0.010638298 0.262006079 0.025522042 0.005354201 33 770
+0.007911191 0.110246268 0.047453704 0.003011616 32 530
+0.003119993 0.073193355 0.013824885 0.002274588 35 30
+0.010397689 0.038569207 0.066820276 0.008134215 40 7400
+0.01557271 0.038864843 0.099885189 0.012163417 52 230
+0.016053748 0.061739745 0.136311569 0.008140499 14 1050
+0.005932328 0.063937308 0.033218786 0.004068539 21 1310
+0.003839646 0.057269296 0.027272727 0.002416126 31 1570
+0.00653957 0.097871869 0.038505096 0.00307163 72 450
+0.005712366 0.099014337 0.028280543 0.003232223 73 50
+0.005300353 0.141985223 0.014705882 0.003743916 35 1670
+0.017539586 0.216077954 0.048478016 0.009011809 37 210
+0.008565052 0.053682369 0.039325843 0.006820065 21 270
+0.006706671 0.157783269 0.025727069 0.003143336 73 510
+0.004485603 0.043167897 0.027932961 0.003427765 49 3550
+0.008154944 0.183893986 0.021064302 0.005246065 36 270
+0.011658359 0.076539664 0.046357616 0.008782362 4 30
+0.004949985 0.093843457 0.037362637 0.001593263 35 810
+0.029406586 0.160591653 0.13377193 0.009439899 23 510
+0.013224181 0.288098237 0.042622951 0.001326847 73 690
+0.003532945 0.040408055 0.020765027 0.002807308 32 1110
+0.007006369 0.146178344 0.037037037 0.001864976 33 1550
+0.005249344 0.077808822 0.026115343 0.003488799 31 370
+0.008250058 0.107599349 0.055075594 0.002604167 21 930
+0.004703511 0.15605577 0.019375673 0.001990446 36 750
+0.031603571 0.044397766 0.12688172 0.0271769 34 990
+0.013862514 0.088587163 0.122186495 0.003333681 35 590
+0.005170486 0.130519843 0.01391863 0.003857281 67 430
+0.012321859 0.040999781 0.043850267 0.010973937 21 1570
+0.040658094 0.177004539 0.13034188 0.021369485 33 1250
+0.012987448 0.015732218 0.027659574 0.012752933 34 970
+0.004782312 0.058381467 0.023404255 0.003627729 32 1610
+0.019291875 0.0533931 0.123273114 0.013426842 21 1450
+0.010948094 0.052406358 0.050901379 0.00873849 24 690
+0.006810358 0.070722946 0.044444444 0.003946203 25 150
+0.006914894 0.100531915 0.033862434 0.003903016 37 1230
+0.006722198 0.13824346 0.023255814 0.004069866 64 150
+0.021699302 0.013626474 0.051470588 0.02128802 47 1190
+0.011278768 0.145252248 0.039874082 0.006419401 35 990
+0.038924629 0.008856212 0.044025157 0.038879054 54 930
+0.006368483 0.141735782 0.016718913 0.004659189 37 390
+0.002428571 0.068357143 0.01985371 0.001150042 35 1850
+0.012199735 0.090883299 0.073881374 0.006033496 31 890
+0.00211249 0.127145498 0.004153686 0.001815157 37 1010
+0.007573762 0.063553741 0.05388601 0.004430691 21 150
+0.011186714 0.111405836 0.072463768 0.003504218 35 1390
+0.00600551 0.053443526 0.022680412 0.005064028 32 50
+0.010781991 0.115047393 0.07415036 0.002543848 71 1150
+0.006541078 0.127158556 0.034979424 0.002398082 33 1670
+0.005639877 0.080700008 0.020554985 0.004330567 67 390
+0.003651053 0.075584557 0.022610483 0.00210084 35 890
+0.006524103 0.070750272 0.035860656 0.004290506 32 2010
+0.004357832 0.047887186 0.039877301 0.002571355 31 490
+0.017268041 0.253865979 0.062944162 0.001727116 62 650
+0.004504505 0.10832784 0.02535497 0.001971414 35 510
+0.017566843 0.029446793 0.082912032 0.015584256 49 270
+0.01970897 0.182354025 0.065656566 0.00946159 66 310
+0.007984211 0.088813134 0.053535354 0.003544354 71 1030
+0.009254975 0.114645997 0.056508577 0.003136025 32 1670
+0.018287363 0.033286357 0.203629032 0.011905588 14 610
+0.023003195 0.127028754 0.070422535 0.01610306 23 90
+0.015969118 0.040471353 0.103413655 0.01228085 24 1270
+0.006837819 0.061975508 0.043129388 0.004440027 31 190
+0.016336213 0.04471199 0.103103103 0.012275112 34 1070
+0.017827666 0.03304721 0.25974026 0.009559903 45 330
+0.005668934 0.103277675 0.027944112 0.003103448 33 390
+0.0232922 0.025411967 0.089374379 0.021569135 42 1310
+0.005303425 0.087636933 0.033730159 0.002572899 35 110
+0.020013803 0.077448048 0.092079208 0.013963927 63 110
+0.015441911 0.030611318 0.09495549 0.012931034 53 1110
+0.013016412 0.095359366 0.084075173 0.005526014 21 1230
+0.007240795 0.15606224 0.023692004 0.004198613 37 570
+0.004151943 0.089487633 0.031589339 0.001455322 31 1310
+0.014604738 0.086100025 0.082840237 0.008176159 31 1350
+0.004770559 0.076859003 0.024630542 0.003117054 35 950
+0.009049774 0.170437406 0.021632252 0.006464646 64 50
+0.009089532 0.077109529 0.060903733 0.004760341 31 50
+0.006210067 0.060787007 0.038310413 0.004132494 21 850
+0.006361008 0.08409748 0.033398821 0.003878416 32 1570
+0.010636544 0.028116755 0.058651026 0.009247476 22 670
+0.005242709 0.048073772 0.042843233 0.003343824 32 910
+0.028506108 0.036757877 0.124392614 0.024847024 21 1650
+0.005053405 0.118410474 0.025218235 0.002344971 37 250
+0.002140182 0.110433387 0.009689922 0.001202935 72 570
+0.006548835 0.072882191 0.023188406 0.005240772 32 1390
+0.006492559 0.075576306 0.056949807 0.002367424 63 690
+0.007012821 0.073386697 0.054054054 0.00328721 13 1230
+0.005667182 0.133565173 0.021215043 0.003270403 31 1430
+0.004619803 0.129853914 0.017307692 0.00272636 33 1490
+0.014845606 0.123752969 0.101727447 0.002575224 37 530
+0.003041671 0.052874379 0.012464046 0.002515656 35 1110
+0.007259953 0.048899297 0.019157088 0.006648281 31 1450
+0.007689557 0.093705293 0.04389313 0.00394633 33 550
+0.02007874 0.206299213 0.086832061 0.002728175 25 1250
+0.004541326 0.105863357 0.025738799 0.002031603 37 790
+0.008554377 0.069761273 0.02756654 0.0071286 31 970
+0.04869238 0.039661336 0.147058824 0.044629913 46 470
+0.003424322 0.103512376 0.013232514 0.002291826 31 630
+0.016932411 0.149569635 0.096226415 0.00298656 65 70
+0.020168684 0.043230249 0.156456173 0.014010732 32 210
+0.004847794 0.075711129 0.024482109 0.003239491 31 250
+0.019912985 0.177710843 0.065913371 0.00997151 23 690
+0.003622332 0.083943618 0.016885553 0.002406946 67 30
+0.004635157 0.074935038 0.025304592 0.002960826 63 190
+0.006726825 0.175717801 0.028011204 0.00218949 64 710
+0.009648127 0.152099886 0.055970149 0.001338688 37 910
+0.008094645 0.133499377 0.037313433 0.003592986 35 1430
+0.003722909 0.143730887 0.009250694 0.002795031 33 570
+0.006642417 0.085639728 0.019390582 0.005448413 21 530
+0.008765462 0.088883428 0.0359447 0.006114008 25 110
+0.004709815 0.054947838 0.024884793 0.003536788 31 850
+0.003837 0.052795182 0.022079117 0.002820224 35 410
+0.009887967 0.016449706 0.018382353 0.009745899 34 770
+0.004674047 0.134071341 0.017431193 0.002698864 37 270
+0.00535523 0.077829347 0.020183486 0.004103755 31 950
+0.005490345 0.069102613 0.026484018 0.003931937 31 290
+0.010561193 0.025218012 0.097627737 0.008308745 22 1770
+0.006245017 0.145894233 0.029143898 0.002333541 35 1370
+0.009286776 0.081649331 0.046405823 0.005986571 6 170
+0.037345057 0.017505991 0.01631913 0.037719695 47 210
+0.031262977 0.01637302 0.042572464 0.031074724 56 110
+0.014587892 0.201495259 0.057013575 0.003882165 36 910
+0.004287971 0.124802528 0.023508137 0.001547189 31 1950
+0.005077247 0.080510626 0.018918919 0.003865268 31 1370
+0.002800407 0.141675153 0.007187781 0.002076227 33 110
+0.010773605 0.167290139 0.047406082 0.003414196 64 850
+0.031453744 0.098502203 0.104651163 0.023455825 49 1570
+0.003712871 0.08175597 0.025823687 0.001744232 37 350
+0.00738676 0.156515679 0.030276046 0.003139458 36 810
+0.004994649 0.134260911 0.030115146 0.001098901 35 1790
+0.043981844 0.176866489 0.125663717 0.02643088 23 970
+0.008997785 0.156699889 0.040636042 0.003118844 64 830
+0.00405803 0.115045146 0.014109347 0.002751347 35 1770
+0.00862754 0.128959019 0.054577465 0.00182458 71 210
+0.00613859 0.134104592 0.031690141 0.002181322 35 1530
+0.024939551 0.078549223 0.098504837 0.018668466 6 10
+0.013618437 0.070187331 0.115013169 0.00596461 23 270
+0.006665542 0.096101924 0.038630378 0.003267059 31 1470
+0.004017488 0.134704006 0.021052632 0.001365561 68 70
+0.013800425 0.202052371 0.039404553 0.007317073 36 790
+0.015933938 0.132937893 0.083114611 0.005633803 25 1290
+0.003813208 0.063332412 0.022687609 0.002537023 72 190
+0.019246321 0.185508653 0.095030514 0.001985703 71 630
+0.00305499 0.146003055 0.009590235 0.001937696 33 1410
+0.00383574 0.259702166 0.010425717 0.001523926 73 290
+0.029661017 0.103948792 0.150043365 0.015695744 49 150
+0.022257794 0.088116472 0.089965398 0.015715122 49 1230
+0.046726075 0.089732662 0.188255613 0.032774325 25 610
+0.011176338 0.130729284 0.06044905 0.003766234 64 670
+0.006883827 0.082251082 0.047454702 0.003247758 35 470
+0.003528115 0.12800441 0.010335917 0.002528765 37 1270
+0.00996264 0.160785942 0.049913941 0.002308326 71 1090
+0.023082672 0.03354605 0.1302485 0.019362898 22 830
+0.028456618 0.095784812 0.060787671 0.025031743 37 1150
+0.006390442 0.040634877 0.052991453 0.004416609 22 170
+0.00431241 0.112314327 0.018771331 0.002482997 35 210
+0.022938019 0.191150155 0.104680851 0.003620274 25 1130
+0.004525207 0.083504207 0.013547841 0.003703132 31 1330
+0.008060823 0.108637904 0.055649241 0.002260816 71 890
+0.025960023 0.098366094 0.10539629 0.017293717 49 4770
+0.010839425 0.149609277 0.035383319 0.006521417 35 930
+0.015319892 0.047150341 0.156565657 0.008330556 14 350
+0.013919884 0.033195715 0.03697479 0.013128282 53 470
+0.006647199 0.04576193 0.026868178 0.005677471 23 450
+0.005377067 0.080051082 0.020990764 0.004018412 25 650
+0.00696527 0.03847022 0.0620285 0.004762224 13 30
+0.006181534 0.093505477 0.031799163 0.003539059 35 1590
+0.007222761 0.105258522 0.039330544 0.00344556 33 1610
+0.003293242 0.085624284 0.014214047 0.002270592 73 470
+0.002564302 0.09301422 0.010025063 0.001799178 31 410
+0.007630626 0.154811174 0.024227235 0.004590666 33 510
+0.004553638 0.126866462 0.022537563 0.00194057 68 10
+0.008846603 0.200133534 0.038365304 0.001460768 36 430
+0.00641101 0.070521115 0.04587156 0.003417073 32 790
+0.00608828 0.087120388 0.019134775 0.004843192 31 170
+0.004976077 0.076810207 0.028239203 0.003040564 37 1030
+0.007259927 0.132548675 0.037344398 0.002662947 64 170
+0.004007112 0.032083433 0.028949545 0.003180348 32 1550
+0.023163449 0.035107555 0.128819158 0.019319172 24 90
+0.028815743 0.14185311 0.188274154 0.002457002 71 350
+0.015142035 0.075960456 0.068369028 0.010766522 25 490
+0.005528455 0.098780488 0.027983539 0.003067208 31 1510
+0.025257954 0.130911436 0.113300493 0.011996043 23 290
+0.007006617 0.059264305 0.039408867 0.004965346 32 410
+0.041321724 0.04349843 0.268252666 0.031001679 21 1370
+0.00431815 0.085248642 0.009803922 0.003806913 35 250
+0.009269383 0.058373342 0.052117264 0.006613156 31 1230
+0.010261422 0.037564134 0.012195122 0.010185949 52 290
+0.015077605 0.109090909 0.081300813 0.006968641 33 1390
+0.028923661 0.146040778 0.101461039 0.016518601 23 1410
+0.003125 0.094131098 0.012145749 0.002187631 64 310
+0.002980414 0.087709339 0.007281553 0.002566895 31 1610
+0.008701341 0.080863592 0.051779935 0.004911382 21 2030
+0.004271893 0.077710768 0.020210186 0.002928956 31 670
+0.006813255 0.042703279 0.064464142 0.004241553 24 330
+0.008342181 0.188988321 0.032905297 0.002618291 36 950
+0.008869999 0.11549478 0.0544 0.002924893 71 1010
+0.023559424 0.18772509 0.113509193 0.002771107 37 1290
+0.019564199 0.146555764 0.072741807 0.010432395 22 1490
+0.021008403 0.092510688 0.093227092 0.013646333 49 4810
+0.006850168 0.078216465 0.027070064 0.005134441 35 1310
+0.019058296 0.201473414 0.086645469 0.002005616 73 10
+0.012586501 0.063133086 0.059571088 0.009420329 62 770
+0.0060296 0.231500091 0.021310182 0.001426534 36 390
+0.003749677 0.054607361 0.01578532 0.003054479 32 1690
+0.005090161 0.068771322 0.025984252 0.00354713 32 1170
+0.004828073 0.149788036 0.018867925 0.002354571 37 510
+0.058561347 0.157855639 0.278431373 0.017347839 25 410
+0.004705487 0.107301907 0.029757244 0.001694277 72 170
+0.025341481 0.229870597 0.064894449 0.013535589 6 90
+0.003392305 0.055135692 0.031931464 0.001726959 63 830
+0.009281382 0.117993016 0.039719626 0.005209419 23 1270
+0.010749354 0.066408269 0.086381323 0.005369499 23 370
+0.006009375 0.051480309 0.054474708 0.003378949 23 1490
+0.0096285 0.103265666 0.02952603 0.007337151 23 1070
+0.04246848 0.047518985 0.043444531 0.042419786 49 2150
+0.004707379 0.082379135 0.017760618 0.003535529 31 1050
+0.005644546 0.198016781 0.021571649 0.001712003 73 310
+0.011809754 0.052559452 0.036042945 0.010465413 63 270
+0.031890661 0.114245663 0.202453988 0.009891197 32 510
+0.018340015 0.111234326 0.09202454 0.009117958 25 1370
+0.004051805 0.094493886 0.016079632 0.002796644 31 310
+0.031190103 0.227740024 0.117061974 0.005866426 64 650
+0.011351223 0.062218 0.117557252 0.004304887 13 1070
+0.008102592 0.039903744 0.035877863 0.00694819 31 1110
+0.035906697 0.039428141 0.158778626 0.03086323 53 1210
+0.008585371 0.034211007 0.09610984 0.005485004 24 1470
+0.116907001 0.05492163 0.443683409 0.097916943 45 510
+0.005252765 0.051935229 0.02661597 0.004082483 2 90
+0.010764369 0.129863717 0.028136882 0.008171604 23 1330
+0.018825698 0.057793576 0.130599848 0.011969633 14 890
+0.010450939 0.051054771 0.072024261 0.007138195 14 1170
+0.004718604 0.064880806 0.031060606 0.002890933 72 430
+0.003576751 0.13184302 0.012810852 0.002174411 37 770
+0.012384386 0.20849663 0.036090226 0.00613983 37 890
+0.007313344 0.088491457 0.053343351 0.002844639 32 90
+0.029558183 0.207373989 0.099774944 0.011187439 37 450
+0.008242837 0.175062148 0.035874439 0.002379064 64 210
+0.048336167 0.325236823 0.131441374 0.008279338 23 830
+0.001989112 0.141017588 0.008166295 0.000975015 31 930
+0.0692132 0.044619509 0.057863501 0.06974327 49 610
+0.011436914 0.041032972 0.043736101 0.010054874 31 1790
+0.012752103 0.122185041 0.091043671 0.001854523 35 350
+0.013906057 0.139163576 0.077720207 0.003589805 73 430
+0.011545526 0.118429109 0.061299852 0.004861593 23 470
+0.004689893 0.129881317 0.017686072 0.002749973 31 910
+0.004456264 0.097822181 0.027920647 0.001912046 37 290
+0.008669488 0.025669958 0.063736264 0.007218684 24 1190
+0.010174881 0.086868045 0.049048316 0.006476774 23 930
+0.005560354 0.073796156 0.024871982 0.004021682 31 110
+0.008409333 0.162146157 0.024105186 0.005371784 33 1590
+0.022138635 0.148737238 0.086705202 0.010857215 37 90
+0.011356246 0.069288108 0.080144404 0.006235218 31 430
+0.01224432 0.09937009 0.06980057 0.00589391 2 230
+0.018243715 0.122555866 0.112535613 0.005073617 35 230
+0.00920669 0.215896885 0.031982942 0.002935421 36 410
+0.007454201 0.178648136 0.02758133 0.00307645 73 450
+0.005369551 0.111891977 0.024700071 0.002934116 35 270
+0.006858032 0.100325226 0.04298802 0.002829077 25 890
+0.011507907 0.105501522 0.045038705 0.007553121 23 1230
+0.010918788 0.035876019 0.105890603 0.007384792 14 150
+0.055438483 0.232938888 0.200417537 0.011411665 23 110
+0.005232296 0.129815065 0.024322446 0.002384408 33 210
+0.010309278 0.122859334 0.053398058 0.004273919 37 230
+0.006510114 0.167751686 0.028413028 0.002095265 36 730
+0.015085687 0.174149167 0.063063063 0.004968581 64 1050
+0.030087949 0.2240395 0.092975207 0.011930801 23 1530
+0.008372093 0.061606765 0.040494166 0.006263236 21 1070
+0.004830211 0.106850117 0.02739726 0.002130449 72 530
+0.026243932 0.221783981 0.107387141 0.003118908 71 50
+0.002498255 0.053749219 0.017771702 0.001630688 72 290
+0.008298755 0.196091554 0.036177474 0.001498501 36 530
+0.081420581 0.044866897 0.049046322 0.082941346 66 10
+0.027960419 0.047880997 0.050985724 0.026802502 43 330
+0.006506507 0.184434434 0.02578019 0.002147898 68 230
+0.004804759 0.112492373 0.028474576 0.001804589 31 690
+0.017960763 0.101892788 0.084745763 0.010383817 23 1110
+0.005936791 0.128950585 0.032498307 0.002004611 31 750
+0.011830563 0.085295476 0.079161028 0.00555205 40 7000
+0.009242464 0.128012438 0.053306343 0.00277365 31 810
+0.006015615 0.190451811 0.024193548 0.00173913 36 130
+0.006631029 0.030359912 0.063172043 0.004860702 24 890
+0.015126251 0.177346355 0.069845534 0.003329955 36 210
+0.008500249 0.022501545 0.05961152 0.007323692 22 950
+0.00334588 0.089263309 0.015394913 0.002164928 31 790
+0.038003148 0.168315718 0.105544422 0.02433419 23 310
+0.00915705 0.188283994 0.031978681 0.003863391 25 130
+0.020677452 0.075985844 0.146373919 0.010340866 31 70
+0.011307468 0.033922405 0.028609448 0.010699935 43 1050
+0.011769964 0.112336114 0.069628647 0.004447801 33 190
+0.017149984 0.094044278 0.105437666 0.007985131 23 730
+0.005392973 0.119676422 0.021206097 0.003243243 21 70
+0.017177818 0.118754412 0.091809775 0.007120605 33 1430
+0.029080794 0.193071185 0.115913556 0.008304607 71 690
+0.008018549 0.148101633 0.035877365 0.003175323 31 1890
+0.006112781 0.078192654 0.039087948 0.00331565 35 190
+0.008357869 0.146072751 0.035760728 0.003670337 63 170
+0.003195217 0.159142445 0.011010363 0.001716107 31 90
+0.004721635 0.10880902 0.025906736 0.002135062 33 1570
+0.007528681 0.061543977 0.070550162 0.00339573 31 1030
+0.00828877 0.137878788 0.018099548 0.006719735 35 390
+0.006805527 0.106413694 0.0374677 0.003154089 61 70
+0.004617968 0.162468514 0.01744186 0.002130326 33 1170
+0.007413911 0.151594966 0.028314028 0.00367943 33 1650
+0.063793636 0.120481928 0.145512821 0.052599227 66 70
+0.046977354 0.295526858 0.110829639 0.020191286 23 950
+0.011617429 0.050878548 0.055028463 0.00929034 25 430
+0.017878278 0.098860026 0.111531191 0.007604037 2 70
+0.004576321 0.234278122 0.012602394 0.002120686 33 650
+0.00711257 0.217480509 0.016981132 0.004369865 64 910
+0.026498638 0.108378747 0.149591452 0.011536405 33 790
+0.01504748 0.232578524 0.054648241 0.003045879 36 690
+0.011746609 0.222766047 0.040175769 0.003598417 33 290
+0.020048184 0.138100155 0.093457944 0.008285914 25 470
+0.008570975 0.042458071 0.097819315 0.004613642 24 1690
+0.00722104 0.1222256 0.032960199 0.003636993 64 270
+0.043395901 0.278629241 0.12855377 0.0105037 36 470
+0.004887027 0.102754506 0.029647931 0.002051355 61 250
+0.012071567 0.174498814 0.046324892 0.004830918 35 370
+0.025604297 0.289883617 0.075972823 0.005042864 36 830
+0.005180558 0.082482604 0.032635468 0.002712427 72 590
+0.005675878 0.122031367 0.021419829 0.003487581 37 110
+0.008859674 0.093455273 0.048929664 0.004728878 31 1910
+0.004055556 0.091222222 0.021924482 0.00226189 37 50
+0.010673328 0.186442602 0.045066991 0.002791347 33 810
+0.003086593 0.09231719 0.019452888 0.001422035 62 50
+0.00327718 0.094635773 0.015795869 0.001968629 31 270
+0.011013376 0.059828438 0.121506683 0.003982061 24 770
+0.025923178 0.016969585 0.035822708 0.025752288 54 650
+0.039479287 0.100249407 0.029126214 0.040632817 41 30
+0.012886959 0.076994911 0.055791389 0.009307972 25 1310
+0.061189205 0.062053672 0.113264688 0.057743939 21 1030
+0.004058442 0.103084416 0.023016354 0.001879568 31 1970
+0.00724471 0.063554125 0.045235223 0.004666394 31 990
+0.006860475 0.071434697 0.039015606 0.004386775 31 650
+0.007038949 0.156733928 0.026946108 0.003338898 37 670
+0.003732982 0.091677646 0.021556886 0.001934002 72 710
+0.006846745 0.156818608 0.023325359 0.00378198 33 1010
+0.00995658 0.125393023 0.063880597 0.002225456 68 330
+0.018138918 0.247308657 0.061419201 0.003918495 36 250
+0.002318718 0.088585582 0.015466984 0.001040763 68 250
+0.032012195 0.116962306 0.096563981 0.023462021 25 290
+0.022792023 0.150373932 0.133806986 0.003143666 71 390
+0.007074865 0.043503988 0.029568303 0.006051803 22 1570
+0.014622415 0.081481481 0.054899646 0.011049434 31 1250
+0.010029299 0.063744272 0.040070713 0.007983952 21 750
+0.008522727 0.160700758 0.032999411 0.003836173 33 1210
+0.020288271 0.115991529 0.150176678 0.003245499 62 750
+0.013625932 0.244406196 0.03286385 0.007403189 36 510
+0.01600674 0.131270583 0.102683781 0.002909283 62 870
+0.009009844 0.031793997 0.124198251 0.005227281 45 170
+0.010618793 0.131245225 0.051804424 0.004396764 25 530
+0.008763943 0.196562713 0.034163289 0.002549936 36 450
+0.004843305 0.123504274 0.017877739 0.003006663 31 230
+0.003746254 0.216533467 0.007497116 0.002709595 36 630
+0.009055501 0.169230769 0.041426928 0.002461322 64 410
+0.006801628 0.093080548 0.037974684 0.00360222 31 830
+0.011563495 0.060206343 0.126509488 0.004199668 13 170
+0.019004155 0.118929228 0.122565865 0.005025126 62 430
+0.003272105 0.129917454 0.009730967 0.002307692 31 1650
+0.015350684 0.154300838 0.064608348 0.006363447 33 90
+0.00438293 0.201730104 0.017724414 0.001011415 33 1330
+0.007746716 0.19714831 0.0261959 0.003216333 71 570
+0.021506575 0.049058391 0.180011357 0.013329427 14 270
+0.008819248 0.151125953 0.039093484 0.003429494 33 1510
+0.00868739 0.240396362 0.027103331 0.002859185 36 370
+0.072286912 0.165440149 0.103040541 0.066190423 62 550
+0.022426529 0.094542169 0.117481731 0.012501467 13 490
+0.007829878 0.15837708 0.043258427 0.001162914 37 810
+0.008374207 0.213781553 0.027979855 0.003043214 33 1130
+0.010964254 0.268549114 0.032438479 0.003080082 36 570
+0.010741019 0.050004174 0.084028937 0.006883421 21 570
+0.00560656 0.126287757 0.023862375 0.002967835 73 650
+0.005626283 0.074045175 0.024958403 0.004080365 31 1390
+0.004635375 0.065880721 0.038227147 0.002266245 31 1270
+0.006467422 0.072587772 0.018815717 0.005500931 43 1270
+0.005965796 0.239957577 0.01878453 0.001918716 36 230
+0.014717968 0.135674262 0.060022026 0.007606535 23 1650
+0.008084728 0.147465438 0.04002193 0.002560455 33 630
+0.00451957 0.164964295 0.017534247 0.001948474 37 830
+0.006646471 0.082877425 0.044189853 0.003253796 35 790
+0.006105556 0.035948416 0.034295046 0.005054401 21 190
+0.017480546 0.103755498 0.073913043 0.010947527 6 30
+0.007739938 0.158410733 0.03257329 0.003065604 33 330
+0.005021584 0.081182275 0.034183397 0.002444988 63 50
+0.00316168 0.072955776 0.023293608 0.001577354 31 1690
+0.006931642 0.136273136 0.035714286 0.002390506 35 1510
+0.010846181 0.166009322 0.044816415 0.004084265 37 370
+0.016444771 0.050532277 0.052801724 0.014509793 48 190
+0.009611061 0.104372752 0.054927302 0.004330091 31 1710
+0.034622994 0.204440536 0.089784946 0.020447637 43 870
+0.024018986 0.076271533 0.0472103 0.022104097 35 670
+0.032057911 0.214753533 0.144997325 0.001170618 65 330
+0.018094349 0.134271559 0.098395722 0.005639877 25 570
+0.003430532 0.123565114 0.016017085 0.001656003 31 350
+0.011450382 0.07876856 0.075612354 0.005964305 23 570
+0.005138532 0.130754809 0.018587361 0.003115514 63 570
+0.007930607 0.058457249 0.067302597 0.004244398 24 1430
+0.022933205 0.151711972 0.135306554 0.002835807 24 550
+0.004507986 0.08140134 0.017405063 0.003365115 21 1410
+0.008168302 0.112005641 0.037775446 0.004433856 33 430
+0.062298604 0.081933405 0.398531725 0.032291277 45 1050
+0.010189569 0.037270832 0.048092002 0.008722224 21 10
+0.001781322 0.163711935 0.005181347 0.001115732 37 1250
+0.008623489 0.014581398 0.100983946 0.007256816 14 1330
+0.008364953 0.166695412 0.031557165 0.003725551 37 1090
+0.007767398 0.203841713 0.027806385 0.002636783 36 50
+0.038433275 0.14271017 0.138516993 0.021772673 33 1030
+0.011305638 0.057863501 0.094871795 0.006173228 13 250
+0.009211654 0.104648672 0.061412487 0.00311042 22 1270
+0.008342531 0.063688892 0.02038736 0.007523228 43 950
+0.00405225 0.094059878 0.028890015 0.001473452 68 210
+0.01305947 0.081869653 0.110550227 0.004366223 24 1030
+0.002725232 0.158945175 0.008068583 0.001715429 33 230
+0.021802491 0.075059616 0.227433182 0.005115404 13 230
+0.007794027 0.086872756 0.052419355 0.00354848 73 750
+0.01221539 0.055010803 0.029707956 0.011197092 52 310
+0.028399058 0.293849323 0.064096144 0.013544488 23 130
+0.006419507 0.099477482 0.027013507 0.004144562 33 450
+0.023748339 0.177137794 0.108554277 0.005492139 13 790
+0.009465577 0.117584808 0.0495 0.004130855 25 930
+0.013186222 0.134755113 0.083874189 0.002177125 33 1470
+0.005913866 0.139428094 0.024451098 0.002910502 35 1070
+0.007789606 0.074891502 0.027736503 0.00617482 61 190
+0.002830623 0.133105128 0.011374876 0.001518718 31 210
+0.004116921 0.166982297 0.012820513 0.002372245 33 730
+0.052910663 0.234466859 0.177974435 0.014606234 36 290
+0.004692588 0.196131009 0.019042969 0.001191327 36 610
+0.010688506 0.071141033 0.065853659 0.006463424 23 670
+0.006075013 0.108293714 0.031219512 0.003021327 32 1130
+0.004491474 0.156744823 0.01457018 0.002618037 33 1730
+0.037567831 0.286767775 0.100436681 0.012290285 23 30
+0.004951468 0.081109758 0.028585271 0.00286533 73 70
+0.043805476 0.051606451 0.20494186 0.035037305 56 810
+0.007062308 0.117724114 0.038703435 0.002840359 33 910
+0.013424191 0.141260647 0.044380125 0.008332011 2 210
+0.012675176 0.092787646 0.062049062 0.007625326 25 1230
+0.016764634 0.014982538 0.041726619 0.016384952 54 370
+0.017481867 0.097312628 0.085045389 0.0101983 35 10
+0.020644065 0.02615498 0.060678452 0.019568844 42 1190
+0.015696692 0.126479111 0.044391408 0.011541917 23 550
+0.009296029 0.189440433 0.033825631 0.003563078 33 1070
+0.003630778 0.146767211 0.015223597 0.001636661 33 1050
+0.005425133 0.207634642 0.014726841 0.002987676 36 970
+0.00562887 0.131715554 0.014719848 0.004249802 33 1270
+0.009600357 0.094128154 0.028937381 0.007591068 23 1170
+0.008311485 0.117699559 0.028436019 0.005626857 62 590
+0.015786877 0.095214603 0.092793217 0.007683157 25 810
+0.029600428 0.142389416 0.169873299 0.006310869 62 10
+0.004418763 0.10371953 0.019194757 0.002708853 67 50
+0.018742679 0.209195627 0.075128325 0.003826688 36 1030
+0.0056111 0.109569476 0.027467412 0.002921632 31 1090
+0.006356059 0.129039995 0.026022305 0.003442341 31 470
+0.01683423 0.08432702 0.075785582 0.011405226 49 4910
+0.008455101 0.093873304 0.041570439 0.005024404 2 130
+0.019263285 0.130857488 0.098754038 0.007295213 25 670
+0.010163609 0.134853743 0.045036765 0.004727794 32 1150
+0.006547388 0.141190198 0.036271809 0.001660628 64 290
+0.012482663 0.100924642 0.05542831 0.00766185 6 250
+0.025115612 0.174294371 0.107959744 0.007628428 25 850
+0.003871757 0.148689037 0.015989036 0.001755366 31 1410
+0.051198784 0.050438748 0.218721461 0.042300323 56 190
+0.014659205 0.091797528 0.018198362 0.014301481 31 150
+0.013129458 0.039640516 0.147660154 0.007576468 24 30
+0.012718601 0.291865395 0.028597367 0.006173994 33 690
+0.014836795 0.092155306 0.110657596 0.005110027 13 1210
+0.028651116 0.080194147 0.159891599 0.017208789 13 310
+0.023109405 0.084990403 0.176603433 0.008852156 12 330
+0.02266521 0.120972146 0.085327314 0.014041628 49 1490
+0.028494565 0.122048973 0.10211426 0.018260271 49 2850
+0.013082677 0.025477999 0.045413669 0.012237413 22 1630
+0.014610983 0.136003896 0.081468218 0.004086809 35 1410
+0.029429013 0.119155515 0.134675615 0.015191865 22 1650
+0.013672165 0.166815277 0.044097996 0.007580487 37 1350
+0.003321399 0.096881335 0.014247551 0.002149305 72 110
+0.025623849 0.377491208 0.062555457 0.00322841 33 750
+0.018618278 0.106842453 0.078726227 0.011427967 35 1190
+0.009000834 0.055550105 0.054304636 0.006336181 23 910
+0.004272836 0.118128289 0.016321129 0.002658946 31 1490
+0.010491406 0.072419401 0.076177895 0.005363036 21 1050
+0.016883432 0.1845868 0.070459519 0.0047553 25 1070
+0.016988094 0.079857968 0.106364429 0.009231235 13 1130
+0.018449036 0.037684531 0.075217391 0.016225972 52 10
+0.007748018 0.041738157 0.067935958 0.005126465 22 390
+0.017726318 0.119319637 0.089308716 0.008027923 24 1230
+0.017740373 0.070689029 0.14732334 0.007883507 24 590
+0.007314454 0.156690377 0.033404711 0.002466778 21 630
+0.005245839 0.111497926 0.028229256 0.002361655 61 30
+0.018725676 0.043956457 0.193755346 0.010678256 14 750
+0.006084605 0.07555856 0.051129101 0.002402925 63 10
+0.011956096 0.038367307 0.092379736 0.008747346 49 3090
+0.012843251 0.14396673 0.061597281 0.004643852 35 1550
+0.017577263 0.055030534 0.037974684 0.016389414 49 4530
+0.008577671 0.03501934 0.064080944 0.006563447 24 230
+0.012922724 0.124433328 0.055276382 0.006903529 25 1210
+0.011517125 0.144295707 0.045549519 0.00577831 33 850
+0.017498698 0.073365818 0.09273183 0.01154215 52 30
+0.007787054 0.089775748 0.043786489 0.004236417 71 250
+0.006669943 0.168503826 0.025 0.002955332 33 930
+0.00770937 0.065110366 0.064395513 0.003761465 21 1790
+0.012380453 0.158391196 0.073200993 0.000933998 36 150
+0.017309388 0.112255789 0.098801158 0.007004705 13 390
+0.005326232 0.107634265 0.026804124 0.002735638 33 990
+0.013353482 0.074909024 0.065870729 0.00910091 32 1030
+0.00985784 0.253605894 0.031914894 0.002363409 33 1350
+0.00421372 0.206050902 0.012678937 0.002016771 31 1190
+0.00454194 0.118477 0.02324633 0.002028064 35 530
+0.007392049 0.221400883 0.020765472 0.003589209 33 950
+0.005976226 0.161555132 0.026422764 0.0020365 33 830
+0.025912788 0.106852397 0.184210526 0.006974717 14 1270
+0.009299912 0.099066017 0.068896052 0.002746766 72 350
+0.025692695 0.250680101 0.092041801 0.003496033 64 90
+0.008982613 0.09622576 0.029647436 0.006782408 25 1110
+0.007866689 0.21147014 0.0264 0.002896374 36 90
+0.007835998 0.2010448 0.032283465 0.001684169 36 710
+0.009234251 0.092743996 0.072805982 0.002735648 71 70
+0.01578669 0.042235594 0.028962818 0.015205647 41 970
+0.027890296 0.066255579 0.321261682 0.007073582 14 370
+0.006472595 0.081943692 0.024513619 0.004862293 31 570
+0.012722132 0.260096931 0.020186335 0.010098253 33 590
+0.06713299 0.113501386 0.086821705 0.064612178 23 870
+0.006220156 0.16235235 0.026315789 0.002325233 63 550
+0.010386843 0.172581397 0.053626543 0.001367989 64 490
+0.025181418 0.160888172 0.144564379 0.002291374 71 1130
+0.007909216 0.224209078 0.018404908 0.004875887 33 10
+0.017038875 0.215715467 0.059432515 0.005378612 36 490
+0.007031656 0.035678131 0.026840491 0.006298766 51 1170
+0.015914564 0.091194639 0.070034443 0.010483871 21 1350
+0.00692955 0.216795908 0.025114155 0.001895934 73 490
+0.00920979 0.066936346 0.088179399 0.003544649 13 770
+0.010004634 0.07188616 0.045127038 0.007284262 67 490
+0.028950006 0.098207327 0.244142101 0.005515084 14 1150
+0.048951277 0.086242835 0.283232628 0.026839179 56 510
+0.008184319 0.182324622 0.022255753 0.005046682 33 70
+0.009254749 0.161836337 0.040255831 0.003268923 36 770
+0.005059433 0.081164279 0.02966579 0.002885859 21 1770
+0.00785468 0.044806997 0.051426426 0.00581078 51 370
+0.006734007 0.14258992 0.026236882 0.003490619 33 1290
+0.012279452 0.111981895 0.089446108 0.002548492 73 710
+0.010405497 0.106795345 0.049088881 0.005780347 25 210
+0.050190363 0.110083105 0.073261435 0.047336461 33 1310
+0.038377927 0.024683318 0.13563731 0.035916486 56 390
+0.009793168 0.105531181 0.07498144 0.002102128 62 130
+0.007216653 0.059334489 0.030683919 0.005736405 44 510
+0.033104361 0.225440702 0.137298091 0.002778075 67 70
+0.007884122 0.250183352 0.020520337 0.00366793 33 250
+0.020495532 0.146572484 0.10332238 0.006270379 33 150
+0.008479924 0.042261108 0.04806992 0.006732979 21 1150
+0.030546244 0.129786129 0.19570753 0.005913628 13 990
+0.029999489 0.140747176 0.193899782 0.003152323 35 1570
+0.010827393 0.249390959 0.030752533 0.004207236 36 190
+0.011780839 0.172411644 0.052783803 0.003238683 72 90
+0.005481058 0.095591023 0.032455824 0.002629974 33 1090
+0.028521301 0.125106842 0.127651924 0.014345948 12 370
+0.047966401 0.308134394 0.143113343 0.005591054 23 1310
+0.077384472 0.078539044 0.210469702 0.066041196 56 450
+0.006350267 0.15625 0.013547237 0.005017495 25 450
+0.020382977 0.11178789 0.154202279 0.003540854 62 690
+0.013748123 0.131803679 0.054467782 0.007566341 25 330
+0.010524763 0.138543255 0.041888534 0.005480703 33 1530
+0.006225114 0.158095452 0.027669386 0.002198241 64 630
+0.047606957 0.363463247 0.119455117 0.006581572 25 510
+0.005363887 0.091503475 0.032604925 0.002620179 21 1950
+0.007215007 0.189557917 0.027681661 0.00242797 36 1050
+0.011144618 0.189524059 0.042199931 0.003882553 33 410
+0.021880387 0.191685453 0.089242477 0.005905996 23 1050
+0.008785942 0.177992136 0.033828098 0.00336348 33 50
+0.044575156 0.057979713 0.119047619 0.039991505 14 1210
+0.005099644 0.195262699 0.017525773 0.002084549 36 30
+0.006962716 0.093435154 0.038804945 0.003680895 23 50
+0.003352923 0.027492089 0.034258308 0.002479251 32 1730
+0.018966665 0.088329603 0.148972603 0.006370695 13 530
+0.022093994 0.22261028 0.087653479 0.003320637 36 550
+0.020217067 0.157320707 0.110585052 0.003346171 71 930
+0.04593998 0.176540779 0.235890504 0.005216635 71 310
+0.035482327 0.203322623 0.108944183 0.016733888 33 30
+0.005423316 0.046574871 0.050671141 0.003212957 32 1770
+0.013265179 0.074404688 0.084115282 0.007569839 24 1730
+0.032808965 0.160812412 0.094137353 0.021056686 4 10
+0.04484554 0.207219715 0.141373534 0.019614711 23 70
+0.010247027 0.273193047 0.026456798 0.004154079 33 710
+0.0180927 0.098292707 0.109437751 0.00813542 21 210
+0.003816879 0.067610098 0.018818092 0.002729101 72 390
+0.025936346 0.267540004 0.085113375 0.00432121 36 890
+0.008178958 0.10695561 0.024183007 0.006262231 43 990
+0.023382715 0.073426992 0.119621904 0.015756167 21 1990
+0.004760605 0.093150186 0.020182292 0.003176514 71 1110
+0.006731477 0.02307935 0.021158854 0.006390636 53 1430
+0.016094228 0.276389139 0.035458686 0.008697813 36 670
+0.026126889 0.137233047 0.067251462 0.019585551 12 90
+0.018906942 0.101033973 0.109811566 0.008690255 6 270
+0.02994152 0.180409357 0.151377634 0.003210846 71 610
+0.030117331 0.230849712 0.114923924 0.004663817 25 1190
+0.013576951 0.142627025 0.064214263 0.005153256 71 790
+0.008932989 0.04150898 0.100515464 0.004966864 14 810
+0.005953033 0.113399803 0.025120773 0.003501401 33 130
+0.02753127 0.217175599 0.103281853 0.006516112 23 1010
+0.01264266 0.153138528 0.048185031 0.00621551 25 1090
+0.005825617 0.120524691 0.032010243 0.002237235 25 1270
+0.035016964 0.200115229 0.04190659 0.033293317 36 990
+0.024799565 0.214431309 0.108048162 0.002075765 73 370
+0.008662838 0.103789992 0.060069554 0.002709432 31 330
+0.007009227 0.023071738 0.058749607 0.005787295 49 4390
+0.048562373 0.180439212 0.218946048 0.011049724 56 90
+0.006059938 0.17562803 0.026662484 0.001670676 33 270
+0.009089081 0.128896039 0.056474259 0.002077562 37 130
+0.013451463 0.09436628 0.105115409 0.003900156 13 1090
+0.009384353 0.054277067 0.049844237 0.007062273 21 1130
+0.017230033 0.192701167 0.058987892 0.007262487 25 690
+0.005598856 0.079765193 0.038651824 0.002733852 62 410
+0.01588508 0.246104735 0.049413218 0.004940014 33 890
+0.015748903 0.120200642 0.066891685 0.008761633 25 1350
+0.00337622 0.074641847 0.018948655 0.002120106 49 1410
+0.005742432 0.0789284 0.045357686 0.002347724 21 950
+0.044085816 0.1048192 0.305800182 0.013440956 14 590
+0.004219739 0.128975541 0.021508634 0.001659714 73 150
+0.0134389 0.115255061 0.084664055 0.004160452 33 1690
+0.032255593 0.127413423 0.120264582 0.019404689 25 970
+0.007064708 0.208815255 0.028443114 0.001422363 73 90
+0.010144655 0.125568289 0.052363854 0.004081983 23 1590
+0.006559384 0.146361728 0.029279952 0.002663798 33 470
+0.043458311 0.110808862 0.238095238 0.019203126 13 510
+0.069390705 0.144819082 0.145625367 0.056480883 62 710
+0.042255595 0.168188064 0.177414648 0.014927134 25 830
+0.057302793 0.093106899 0.111821086 0.05170563 13 190
+0.028518193 0.178717458 0.099333913 0.013108142 33 970
+0.013116112 0.159878503 0.049510651 0.006190085 33 1710
+0.075711871 0.07444965 0.201724138 0.065575665 56 470
+0.019964349 0.11281802 0.044240161 0.016877329 25 550
+0.012937743 0.169455253 0.04391504 0.006617475 33 1630
+0.017695404 0.146511238 0.105609616 0.002603911 64 1110
+0.02625416 0.143596697 0.096709585 0.014440606 25 1150
+0.035870459 0.045869284 0.151678998 0.03030303 49 2450
+0.01437659 0.226463104 0.055617978 0.002302632 63 790
+0.029633674 0.088652834 0.255319149 0.007679739 14 650
+0.008933313 0.111603936 0.045899804 0.004289431 31 1870
+0.023583696 0.306588054 0.071348941 0.002464572 25 70
+0.005255023 0.138910355 0.023087622 0.002378281 33 490
+0.007591717 0.143566914 0.035160576 0.002970251 72 670
+0.011691816 0.180673529 0.043971239 0.004573729 33 350
+0.025721315 0.272273168 0.079956188 0.005429772 25 990
+0.009158524 0.107102231 0.059640523 0.003103257 24 430
+0.010274081 0.116365821 0.053518066 0.004579279 23 1550
+0.016573331 0.278946572 0.056158437 0.001259446 68 370
+0.008218532 0.1245916 0.022979184 0.006117738 2 290
+0.033045782 0.125832088 0.094466937 0.024204515 25 170
+0.015227736 0.252277362 0.049043385 0.003818529 36 590
+0.011852228 0.072480181 0.118265086 0.003536693 14 390
+0.035842097 0.102240819 0.185783522 0.018766098 56 610
+0.02134729 0.109353406 0.165367034 0.003664576 24 1330
+0.006026673 0.14487344 0.02979066 0.002000637 73 670
+0.018275735 0.098694216 0.106801275 0.008582068 13 1150
+0.012647638 0.186761811 0.05085639 0.00387292 36 930
+0.00665048 0.201150639 0.019155077 0.003501817 33 670
+0.007261262 0.182534754 0.027479717 0.002746611 25 950
+0.015261493 0.142304113 0.088673816 0.00308133 21 370
+0.006441191 0.077213529 0.038964435 0.003719831 31 610
+0.015018125 0.284012725 0.04688721 0.002376524 64 230
+0.013204275 0.071616405 0.108333333 0.005865927 24 1570
+0.022449704 0.195864795 0.082276843 0.007877489 4 190
+0.005142263 0.023504884 0.039565555 0.004313672 53 1090
+0.032043411 0.153800452 0.182333247 0.004727579 3 10
+0.015305705 0.045975534 0.079542567 0.012210057 24 170
+0.006910532 0.09064314 0.033545108 0.004255643 43 1030
+0.023496963 0.137664968 0.115901598 0.008745293 12 190
+0.024747328 0.165661523 0.127542054 0.004336989 13 730
+0.010939956 0.250613015 0.02734571 0.005453478 33 1150
+0.017875177 0.134686595 0.086978255 0.007119238 25 1410
+0.008934047 0.03681633 0.066882148 0.006719063 40 7600
+0.010672791 0.181527515 0.035971223 0.005061898 6 50
+0.01500377 0.051605812 0.086428925 0.01111725 22 1670
+0.007126191 0.073685543 0.032891507 0.005076638 31 130
+0.011268061 0.220347207 0.040127233 0.003111818 71 450
+0.04089898 0.085847988 0.263505255 0.019994032 56 490
+0.006945906 0.123372523 0.030709237 0.003601564 25 350
+0.031468798 0.157229833 0.097531462 0.019143941 23 1570
+0.010821867 0.154183711 0.03159672 0.007034822 12 290
+0.028654503 0.174792466 0.054966249 0.023081244 6 110
+0.004400273 0.157499431 0.019508671 0.001575867 36 1010
+0.006591143 0.142911088 0.032428537 0.002283014 21 1110
+0.010911425 0.127269393 0.048270893 0.005463333 73 410
+0.043506105 0.177824268 0.159183673 0.018486784 5 90
+0.018123584 0.163346614 0.088474414 0.004388422 14 470
+0.015084175 0.187968575 0.06973967 0.002432552 65 310
+0.021639617 0.102494431 0.118939575 0.010528038 23 1150
+0.020562942 0.133341795 0.084245597 0.010764893 2 250
+0.027668723 0.262665919 0.088967972 0.005831643 25 30
+0.026190357 0.212232201 0.062174941 0.016495765 71 550
+0.014901088 0.091033656 0.088193791 0.007560769 25 710
+0.024216576 0.154118625 0.111397749 0.008332265 4 50
+0.034490349 0.072739587 0.216713222 0.020195749 56 330
+0.006980725 0.027494986 0.082926829 0.004833551 14 710
+0.01160339 0.060278984 0.08593931 0.006835067 40 7100
+0.009730097 0.054320087 0.045128442 0.007696807 34 210
+0.009880307 0.081545468 0.084006462 0.003298978 24 1390
+0.051938357 0.231163014 0.16816609 0.016992648 23 1090
+0.005172826 0.129769166 0.023502304 0.002439527 73 110
+0.007628735 0.213702382 0.027231121 0.002301138 31 1670
+0.041438929 0.122866704 0.100317749 0.033191327 13 330
+0.013443198 0.06509338 0.137681159 0.004793063 13 1010
+0.016770504 0.062971181 0.112039756 0.010368121 21 1830
+0.004476716 0.121608342 0.018630752 0.00251717 31 450
+0.018545323 0.206214945 0.054178548 0.009288279 23 1510
+0.01111497 0.144147273 0.056736035 0.003431228 21 110
+0.022666512 0.133064047 0.130377812 0.006134147 13 210
+0.023137782 0.173085535 0.081716257 0.010876433 25 190
+0.003181471 0.118427081 0.013754567 0.001761122 67 570
+0.016014235 0.172709075 0.070186735 0.004704933 25 50
+0.017289919 0.134281246 0.079415502 0.007653648 4 90
+0.018965873 0.139604165 0.108176632 0.004490915 14 1230
+0.008016969 0.054180114 0.039570617 0.006209458 61 130
+0.042489286 0.175561335 0.155017734 0.018526746 33 370
+0.037891426 0.229969973 0.146735751 0.005384996 56 290
+0.023184435 0.122441215 0.120778952 0.00956758 6 210
+0.009590607 0.108454523 0.053305785 0.004272752 25 630
+0.012885188 0.297818027 0.037142857 0.002596728 33 170
+0.022889531 0.267208022 0.047521925 0.013907482 6 190
+0.019895049 0.112092174 0.140850804 0.004625228 14 830
+0.030972529 0.205094001 0.092276423 0.015155488 23 630
+0.02052683 0.123336424 0.120300752 0.006489794 32 370
+0.00825444 0.193998905 0.025811656 0.00402854 36 350
+0.005362513 0.090311243 0.034102307 0.00250931 31 1550
+0.01533332 0.103031675 0.095485417 0.006126523 25 390
+0.013787282 0.117543613 0.07839617 0.005181347 25 270
+0.003076625 0.122532038 0.017002768 0.00113194 37 990
+0.015546973 0.124818578 0.075285771 0.007027012 21 910
+0.009746297 0.109603095 0.04875195 0.00494491 33 1450
+0.005389812 0.097775753 0.027950311 0.002944889 71 650
+0.023544706 0.093167702 0.108914729 0.014773813 25 90
+0.012528588 0.028839181 0.043520309 0.011608271 44 1210
+0.00914579 0.064237429 0.050084412 0.006335471 43 310
+0.015903801 0.138169123 0.063634662 0.008251568 49 1670
+0.022644055 0.257660436 0.082273747 0.00194704 33 610
+0.031265177 0.324854298 0.087647169 0.004136319 23 710
+0.005340135 0.214043757 0.01284677 0.003295812 73 350
+0.015885379 0.110193289 0.079799107 0.00797033 25 870
+0.036198736 0.142944023 0.124121347 0.021534569 6 70
+0.009675464 0.126060804 0.059552432 0.002480992 25 1330
+0.030919692 0.203422983 0.115569527 0.009302545 25 750
+0.037337099 0.134978445 0.214023495 0.009766856 12 410
+0.015815796 0.089584957 0.152075403 0.002407833 13 150
+0.022253502 0.123265634 0.157150625 0.003287462 13 690
+0.016998422 0.153151906 0.081164136 0.005394096 21 730
+0.008072827 0.191721058 0.024189213 0.004250074 73 570
+0.006636792 0.316637359 0.017377284 0.001660164 72 70
+0.014250219 0.082680503 0.102881386 0.006261651 24 290
+0.034803371 0.159213483 0.200776288 0.003374315 13 370
+0.020963694 0.101177219 0.14884696 0.006568338 56 690
+0.026798749 0.299217935 0.06603938 0.010043896 23 330
+0.020096302 0.19226911 0.087826087 0.003974168 71 870
+0.007846161 0.038899373 0.075986785 0.005088253 54 1570
+0.008541994 0.099449516 0.052923824 0.003640823 71 290
+0.022297607 0.105675254 0.171612008 0.00465431 71 1070
+0.033207415 0.253360274 0.113730929 0.005883045 23 430
+0.014455543 0.203292065 0.056228374 0.003796574 33 1190
+0.017122342 0.114293605 0.036416983 0.014632517 72 470
+0.024321231 0.092659121 0.211871676 0.005168276 14 190
+0.020979389 0.102303001 0.148706527 0.006423398 13 750
+0.008376306 0.153574327 0.038009204 0.002999753 72 50
+0.005072203 0.111285652 0.026020408 0.002449049 23 1610
+0.019441131 0.150021573 0.099306378 0.005344879 13 350
+0.016288653 0.192381323 0.077247428 0.001767778 71 950
+0.007452037 0.086483993 0.0595 0.002524575 23 250
+0.031333312 0.188752627 0.138584247 0.006379277 4 170
+0.021473979 0.183076315 0.096361522 0.00469134 2 170
+0.047119089 0.256170357 0.120475954 0.021855435 1 130
+0.040514697 0.143761067 0.155362128 0.021232008 13 1170
+0.024980507 0.182870509 0.126598885 0.002238696 24 850
+0.017114534 0.106175739 0.125406636 0.004250715 13 90
+0.01576282 0.207130161 0.063147668 0.003383952 13 1050
+0.024582262 0.14187018 0.14382786 0.004868002 62 1230
+0.046430202 0.235462019 0.182802651 0.004430285 71 470
+0.009226849 0.090745324 0.064880568 0.003672503 31 1130
+0.028263906 0.129672993 0.160779344 0.008519978 62 1010
+0.020453932 0.131320473 0.124604681 0.004709201 22 910
+0.008623554 0.105476539 0.053717706 0.003306338 73 770
+0.007708165 0.190732276 0.036967419 0.000812198 64 130
+0.019970507 0.161338699 0.084740511 0.007510282 21 1170
+0.020984787 0.109130991 0.130454896 0.007574756 14 50
+0.006629928 0.055006359 0.059802713 0.003534837 22 30
+0.007687217 0.165173344 0.035598706 0.002164832 36 170
+0.031899698 0.259403269 0.08993576 0.011571842 23 410
+0.015936953 0.191564507 0.031235715 0.012311803 6 230
+0.008367923 0.111489291 0.057483246 0.002204989 25 1050
+0.102027469 0.341449917 0.284661854 0.007333843 5 10
+0.031233924 0.167315842 0.166910688 0.00397164 3 110
+0.019254045 0.075994626 0.083430742 0.013975846 52 50
+0.027852873 0.130621026 0.174610115 0.005803132 12 150
+0.013349728 0.095042495 0.108615297 0.003344532 23 750
+0.011494518 0.163042477 0.048460017 0.004293499 23 1390
+0.023398449 0.191321499 0.098291202 0.00567992 71 530
+0.008082711 0.031927157 0.049171115 0.006727611 49 1130
+0.015756539 0.16028902 0.076955484 0.004074521 4 150
+0.005224137 0.111380486 0.026681646 0.002534631 31 1630
+0.027504724 0.136454735 0.162400336 0.006188939 13 910
+0.017187252 0.035523526 0.128704832 0.013079845 41 730
+0.020103614 0.067649389 0.192193809 0.007617113 14 130
+0.052376849 0.136322804 0.22027972 0.025875061 12 110
+0.036114811 0.160293043 0.146763168 0.014992964 21 550
+0.018285078 0.168329621 0.094469436 0.002865406 71 990
+0.004918581 0.090969868 0.031233596 0.002285144 71 590
+0.038520417 0.135836092 0.240466518 0.006776938 14 630
+0.041796035 0.228654168 0.17064891 0.003599489 5 50
+0.035109861 0.116946704 0.242361201 0.007662637 14 330
+0.011047423 0.190990594 0.040656663 0.004057286 33 1110
+0.047513676 0.162435716 0.240539859 0.010078524 14 250
+0.04846572 0.134325892 0.2548899 0.016435061 14 1110
+0.022063056 0.173669289 0.086289432 0.008564648 4 130
+0.021055383 0.236807714 0.076404219 0.003881426 71 230
+0.027772648 0.151013776 0.165077036 0.003349574 13 110
+0.006465263 0.088881252 0.045311745 0.002675719 23 650
+0.014733609 0.08945027 0.10491415 0.005874484 32 2090
+0.02554563 0.155631274 0.096094675 0.012542262 25 730
+0.012855087 0.10240534 0.094000945 0.003597268 35 1090
+0.009583106 0.243537142 0.035697455 0.001175801 71 410
+0.011959276 0.188836527 0.045438493 0.004165411 71 830
+0.014272524 0.066612936 0.152707624 0.004392841 14 430
+0.0129131 0.136815088 0.06268935 0.005023548 25 1390
+0.034029444 0.154045922 0.197883967 0.004191984 24 70
+0.030061647 0.150204336 0.132695412 0.011920773 2 110
+0.027680618 0.132025691 0.139773895 0.010630352 13 1110
+0.040951253 0.288829682 0.113540599 0.01147033 4 70
+0.029664926 0.095626784 0.166742338 0.015170609 14 290
+0.033666787 0.357277731 0.090295738 0.002187773 23 530
+0.014010958 0.077274637 0.109853529 0.005984518 21 1630
+0.05678533 0.19033953 0.08924612 0.049154265 13 870
+0.00863015 0.122380133 0.036422008 0.004754701 23 770
+0.028144363 0.128018744 0.141345094 0.011524952 12 50
+0.005573646 0.080374218 0.03301179 0.003175585 21 1430
+0.02367381 0.121734253 0.160934319 0.004648469 14 170
+0.031318816 0.176093053 0.156690894 0.00452313 13 430
+0.019567685 0.105151266 0.135492749 0.005945643 21 1670
+0.021175636 0.12995814 0.092330291 0.010547267 2 190
+0.027756626 0.174117988 0.11578509 0.009197875 23 170
+0.014410648 0.039262232 0.048064221 0.013035335 51 1110
+0.008782549 0.121671031 0.055837563 0.002264225 31 1930
+0.019223268 0.154800356 0.103316198 0.003821444 23 210
+0.015547489 0.144425321 0.067093932 0.006846189 13 450
+0.028028423 0.282173472 0.083041871 0.006402954 23 1030
+0.012831858 0.111106195 0.060533652 0.006869431 71 710
+0.008555471 0.070280494 0.036918294 0.006411433 49 290
+0.046477387 0.252729994 0.1489465 0.011821884 5 30
+0.013295708 0.179249693 0.056982809 0.003754561 25 1170
+0.046014441 0.24453726 0.171282447 0.005466188 1 70
+0.010421775 0.161951324 0.04845273 0.003072365 23 1210
+0.020907942 0.279887297 0.068209615 0.002523129 25 310
+0.004582108 0.113176998 0.018326321 0.002828061 43 250
+0.006093405 0.221923863 0.018149665 0.002654703 73 730
+0.036994878 0.107569721 0.285342987 0.007060061 14 970
+0.005850484 0.078393606 0.04499862 0.002520469 31 1530
+0.016074802 0.206441254 0.05142282 0.006879151 23 1470
+0.020872372 0.150524545 0.058064516 0.014282034 13 890
+0.050861329 0.220053097 0.219430334 0.003301513 13 570
+0.023066325 0.100604944 0.147197533 0.009181199 21 1190
+0.006760423 0.121101634 0.039516129 0.00224708 25 250
+0.08578302 0.173909669 0.080053548 0.086989197 2 30
+0.013691061 0.232134086 0.050074936 0.002691826 73 270
+0.062180155 0.259000951 0.189980766 0.017510084 1 150
+0.034470156 0.221343797 0.056139741 0.028310277 71 970
+0.004737137 0.158004669 0.020736133 0.001734844 21 430
+0.043179292 0.159486131 0.230981067 0.007544212 24 130
+0.017984202 0.149156398 0.097271649 0.004084785 21 990
+0.01301556 0.207400869 0.043061003 0.005153514 23 990
+0.028259457 0.157324841 0.157068495 0.004211208 14 850
+0.025960452 0.204666138 0.094802735 0.008245019 2 310
+0.047746365 0.189466328 0.220093008 0.00745947 24 810
+0.024022295 0.293334562 0.06705402 0.006159963 23 610
+0.010920891 0.268882741 0.036206096 0.00162176 64 930
+0.010190811 0.059935244 0.077769347 0.005882239 49 2010
+0.019536623 0.070749231 0.201981536 0.005646037 14 110
+0.008694267 0.082469932 0.042124814 0.005689444 34 1890
+0.014004458 0.170282318 0.059991274 0.004566619 21 1610
+0.051865247 0.185051236 0.264205574 0.003648915 14 730
+0.058025622 0.268158368 0.185833694 0.011194719 12 350
+0.009051584 0.06542276 0.060364055 0.005459582 24 1130
+0.005083593 0.215226058 0.013999569 0.002638367 73 610
+0.035126257 0.278338479 0.118023462 0.003153536 25 590
+0.051933188 0.234636501 0.174678354 0.014303361 3 150
+0.033825253 0.25365492 0.104873241 0.009678686 2 10
+0.031355932 0.249864407 0.115655949 0.003276244 71 130
+0.018366017 0.146049332 0.106348455 0.003318571 13 830
+0.006128761 0.048011801 0.047836141 0.004025325 22 970
+0.025018345 0.174107962 0.121962463 0.004581361 13 270
+0.017153222 0.203880177 0.073947521 0.002608638 71 770
+0.023752385 0.131693873 0.147907276 0.004922115 11 30
+0.020041524 0.112065583 0.116062902 0.007922733 14 770
+0.018101555 0.171807883 0.079476038 0.005369461 23 1450
+0.013702068 0.186968774 0.062972907 0.002371497 21 1970
+0.024919702 0.16387737 0.110315534 0.008182391 43 570
+0.024378901 0.155360847 0.114171107 0.007862743 13 710
+0.017113686 0.276671258 0.054592315 0.002778213 71 810
+0.012532289 0.063501597 0.089537515 0.007310758 24 490
+0.030354257 0.255601524 0.111504322 0.00249004 1 50
+0.02534785 0.145991845 0.157443133 0.002766293 13 70
+0.024159794 0.240271849 0.080947453 0.006200113 25 1010
+0.029232836 0.196661362 0.131657484 0.004158764 12 10
+0.032725797 0.191080218 0.154392043 0.00398622 24 1550
+0.029763987 0.202568936 0.116509723 0.007728238 12 270
+0.010522451 0.173422357 0.049364832 0.00237302 21 890
+0.017161514 0.163996798 0.066637416 0.007455942 2 50
+0.019709772 0.177589365 0.098293368 0.002740621 71 670
+0.019944317 0.167898128 0.095562052 0.004686478 12 250
+0.017965749 0.153377081 0.099603464 0.003175984 22 1410
+0.014459647 0.232586232 0.054712809 0.002259799 21 970
+0.030741942 0.229125552 0.12458344 0.002849611 24 930
+0.022995739 0.072263333 0.14778956 0.01327529 45 710
+0.011919138 0.160037953 0.031897692 0.008112625 73 630
+0.029471658 0.210749202 0.11763292 0.005930452 3 30
+0.030079033 0.203151688 0.137849135 0.002603694 71 190
+0.017242824 0.222627278 0.067476002 0.002856835 21 2010
+0.003402401 0.135777652 0.017012276 0.001264159 71 730
+0.02275185 0.155293113 0.130886934 0.002872014 14 490
+0.024846743 0.214549808 0.103308183 0.003414593 13 130
+0.025522241 0.129174493 0.174439223 0.003432539 24 1510
+0.006148198 0.155340454 0.029823807 0.001794041 67 350
+0.044215252 0.175459006 0.218845631 0.00705461 14 950
+0.023876977 0.227437678 0.095817364 0.002698162 13 930
+0.043793807 0.251198377 0.15364959 0.006940796 1 110
+0.012702477 0.155341756 0.064358436 0.00320239 23 490
+0.043971141 0.217807923 0.182838781 0.005302286 71 850
+0.045424734 0.183060538 0.192569804 0.012452333 14 510
+0.048325022 0.238300448 0.188004701 0.004625731 14 70
+0.01009476 0.072544232 0.101095485 0.002976817 34 950
+0.05277881 0.154293332 0.306763456 0.006441068 14 1070
+0.047775325 0.189258176 0.220276046 0.007507053 14 210
+0.016823053 0.165982183 0.084065074 0.003440872 13 10
+0.012099829 0.220820025 0.044602913 0.002888438 73 530
+0.009833372 0.186978308 0.038336408 0.003278259 23 1250
+0.023191041 0.144257364 0.13411323 0.004492261 14 910
+0.01952221 0.145613443 0.103880422 0.005145006 14 450
+0.02939448 0.15965176 0.159977849 0.004585878 12 70
+0.02822889 0.230209786 0.110930172 0.00349664 3 230
+0.015397969 0.17061916 0.079590457 0.002192372 35 550
+0.010027242 0.133275366 0.059103326 0.002480862 62 310
+0.048776412 0.220399596 0.197045978 0.006859357 14 1250
+0.014100721 0.177709714 0.067446359 0.002571899 23 810
+0.034806515 0.284891907 0.114396 0.003098861 13 630
+0.021919659 0.262897754 0.072341916 0.00393586 13 1030
+0.056016066 0.209695534 0.211678407 0.014713379 13 650
+0.032515588 0.301649694 0.093608557 0.006126718 4 110
+0.038852369 0.24564478 0.143304289 0.004839124 12 210
+0.016166702 0.134081037 0.095911224 0.003818862 24 950
+0.0322372 0.306935691 0.099247038 0.002560717 13 850
+0.017192835 0.150131305 0.098389621 0.002849232 24 1530
+0.008401718 0.090647894 0.05723881 0.003533439 24 610
+0.043153652 0.251785539 0.161582985 0.003300381 24 990
+0.023903717 0.183021183 0.119159997 0.00256422 13 670
+0.049295965 0.198472938 0.225371304 0.005696452 14 1290
+0.011088576 0.196947119 0.047261848 0.002217153 33 1230
+0.041251253 0.266268604 0.14637981 0.003100461 22 890
+0.05147537 0.318390943 0.152330156 0.00436442 12 230
+0.008858592 0.1839955 0.041332453 0.001536275 72 510
+0.018999363 0.3296128 0.053939714 0.001820056 33 1370
+0.052526292 0.232097949 0.209670894 0.005029427 14 690
+0.022060544 0.260877741 0.078331491 0.002199368 13 590
+0.03458292 0.275334894 0.119738741 0.002228156 12 390
+0.019654333 0.269752281 0.063127948 0.003595252 3 210
+0.022892059 0.134409922 0.133594913 0.00570198 52 5100
+0.056502759 0.223422572 0.231035691 0.006289355 14 790
+0.030467397 0.283480373 0.094763415 0.00502963 3 130
+0.012902165 0.128642741 0.079551294 0.003062427 34 5100
+0.019801082 0.211140732 0.08186704 0.003188928 71 10
+0.039095637 0.283612107 0.129586006 0.00327124 12 30
+0.031667318 0.243613041 0.124424873 0.001792463 13 550
+0.007772984 0.202432312 0.029751516 0.002194567 33 530
+0.052942135 0.364226086 0.137159979 0.004694896 12 310
+0.005157315 0.233826978 0.018553021 0.001069104 73 330
+0.038243812 0.301655928 0.121144461 0.00243414 1 10
+0.080252156 0.337650064 0.222446455 0.007764922 3 50
+0.047258878 0.312532071 0.145575507 0.00256283 1 30
+0.049972065 0.293547792 0.164607105 0.002338464 1 90
+0.037563143 0.290459565 0.114771821 0.005956771 3 270
+0.029858118 0.287569609 0.098036574 0.002338164 13 1190
+0.032615971 0.297065072 0.103230389 0.002773839 3 90
+0.019675556 0.223434276 0.080150011 0.002275784 25 790
+0.047226494 0.32243621 0.133423768 0.006207297 5 70
+0.023397395 0.239576465 0.090370766 0.002296991 13 290
+0.011697809 0.285626983 0.036827102 0.001650391 71 750
+0.040107072 0.326229989 0.117058905 0.002848074 12 170
+0.035896639 0.28560171 0.117720369 0.003185198 12 130
+0.029969706 0.216514622 0.127197266 0.003101064 14 30
+0.025627785 0.29855302 0.079102153 0.002867783 3 170
+0.034354869 0.360082754 0.092527715 0.001620888 3 250
+0.022479478 0.298661944 0.071596468 0.001563208 13 810
+0.034973194 0.292875657 0.113496082 0.002450706 24 350
+0.005841411 0.164866861 0.02754077 0.001557657 71 370
+0.025706327 0.252071834 0.095513705 0.002179371 14 1010
+0.022423208 0.329358004 0.063435974 0.002281489 23 1630
+0.039776269 0.452586066 0.085407737 0.002049485 13 50
+0.057545637 0.445638267 0.119284642 0.007915112 13 610
+0.052674083 0.417550216 0.121019887 0.003677919 13 470
+0.030460848 0.30358287 0.095299373 0.002196372 21 310
\ No newline at end of file
diff --git a/data/housep88.txt b/data/housep88.txt
new file mode 100644
index 0000000..9ec3524
--- /dev/null
+++ b/data/housep88.txt
@@ -0,0 +1,425 @@
+Y X N HPCT DIST
+0.772442758 0.556025386 240757 0.948873761 101
+0.636181653 0.491232604 235760 0.955594673 102
+0.664928317 0.495993853 236761 0.936256394 103
+0.273834199 0.427683324 224879 0.905349099 104
+0.263613105 0.40780127 239217 0.929453174 105
+0.334192693 0.463986903 249451 0.945412125 106
+0.632175267 0.436388463 300370 1 201
+0.337903589 0.449370831 250330 1 202
+0.172718108 0.583035111 230264 0.978281451 301
+0.9999 0.526657283 216226 0.722679049 302
+0.9999 0.494754016 244378 0.781596543 303
+0.703131855 0.573527336 245861 0.981814928 304
+0.9999 0.481470197 233246 0.779691828 305
+0.697106659 0.510452449 257117 0.991101327 306
+0.9999 0.538641274 247365 0.762626079 307
+0.804349184 0.666264474 212177 0.97117501 308
+0.9999 0.544589341 212663 0.756121187 309
+0.666726053 0.513545215 280283 1 310
+0.808176458 0.521750152 217470 0.965507886 311
+0.396879852 0.355710506 226600 0.962338923 401
+0.428218067 0.380052418 218633 0.957856316 402
+0.443671674 0.423991144 196089 0.967147571 501
+0.278767496 0.542440593 206795 0.973655069 502
+0.476421297 0.48218286 240106 0.786998242 601
+0.675166428 0.437239408 248221 0.944791134 1101
+0.702823937 0.475355566 205442 0.983367568 1201
+0.66499722 0.412408859 216700 0.933382557 1202
+0.521258056 0.375388354 242682 0.925091272 1203
+0.406641662 0.441702247 250447 0.775768925 1204
+0.237979084 0.332924873 247275 0.931715701 1205
+0.61617732 0.467418194 216179 0.901808224 1206
+0.253912896 0.406810045 223200 0.920873656 1207
+0.9999 0.455304563 188017 0.510783599 1208
+0.675042212 0.463955849 228123 0.922199866 1209
+0.859452546 0.790391505 115086 0.856133674 1210
+0.295191556 0.35160476 241780 0.907548184 1211
+0.301330239 0.376588017 261174 0.909267385 1212
+0.305193096 0.381989956 258478 0.932501025 1213
+0.687409937 0.550989926 169308 0.893602192 1214
+0.508147299 0.395015508 216832 0.958626955 1301
+0.616465628 0.385930449 183318 0.952541485 1302
+0.584786177 0.401196212 236246 0.928938479 1303
+0.282537758 0.411637336 228626 0.920792036 1304
+0.338205487 0.438472688 221934 0.918340588 1305
+0.858731687 0.723699212 142334 0.773202467 1306
+0.9999 0.603277385 144567 0.644130403 1307
+0.9999 0.592715859 171551 0.584316034 1308
+0.9999 0.513522387 136958 0.531922195 1309
+0.814925909 0.565054357 158406 0.829318334 1310
+0.908614933 0.839631855 95393 0.850932458 1311
+0.930125415 0.904099107 112074 0.712796902 1312
+0.747007132 0.566795528 129567 0.840036429 1313
+0.367004514 0.390100539 172676 0.907375663 1314
+0.374511689 0.667998731 196186 0.876841365 1315
+0.9999 0.884207845 129197 0.832991478 1316
+0.843663394 0.790907502 214264 0.8703982 1317
+0.929034889 0.875646532 104983 0.773677643 1318
+0.673210502 0.54291749 158094 0.724961099 1319
+0.514519393 0.482724279 216952 0.915870792 1320
+0.239176273 0.381043732 219484 0.900917607 1321
+0.273558348 0.42963323 226975 0.87471748 1322
+0.617459476 0.568134487 247195 0.950253848 1323
+0.276171595 0.410414964 241134 0.933667587 1324
+0.0001 0.453036904 208930 0.623398649 1325
+0.2495877 0.442790985 196962 0.886607569 1326
+0.421045303 0.47071746 219397 0.98352302 1327
+0.932 0.477090567 226697 0.672135053 1328
+0.27872026 0.466397673 213631 0.860600756 1329
+0.590206444 0.454887569 233849 0.930044601 1330
+0.466136336 0.439089984 231998 0.950383193 1331
+0.727232754 0.515002906 202927 0.907449477 1332
+0.9999 0.666149616 195345 0.714653562 1333
+0.0001 0.400660932 196992 0.66539758 1334
+0.763152122 0.668560326 181128 0.926554702 1401
+0.93713361 0.916093051 204405 0.962241628 1402
+0.632124186 0.488729268 225540 0.951046378 1403
+0.70300889 0.546853662 178812 0.986751448 1404
+0.217918471 0.347172916 203956 0.962026123 1405
+0.636125445 0.387592018 182963 0.980509721 1406
+0.321845055 0.398654699 234002 0.979188212 1407
+0.577783704 0.388107508 226950 0.977312183 1408
+0.0001 0.365979105 167791 0.945831421 1409
+0.267567635 0.418183893 192566 0.993295805 1410
+0.9999 0.474384487 178954 0.674508533 1411
+0.9999 0.527373314 182349 0.729814806 1412
+0.334291279 0.435259581 239549 0.954355894 1413
+0.786393225 0.732332885 191981 0.895401107 1414
+0.425235659 0.449110538 188428 0.987528393 1415
+0.260214031 0.301736146 189616 0.976252004 1416
+0.0001 0.363580525 177416 0.937282996 1417
+0.628382862 0.462958127 230118 0.946901155 1418
+0.227518603 0.343426853 191179 0.984417745 1419
+0.9999 0.652104557 193081 0.71199134 1420
+0.212569401 0.507519901 185906 0.968876744 1421
+0.724058032 0.650121689 177053 0.962802099 1422
+0.375484318 0.430485487 171288 0.986940124 1423
+0.960480571 0.960606933 181961 0.939091344 2101
+0.827449203 0.853099823 176283 0.947833881 2102
+0.622307956 0.413570762 222714 0.953204558 2103
+0.502861857 0.443508506 186382 0.973940617 2104
+0.612770557 0.481107146 161675 0.944456471 2105
+0.263191015 0.313659847 214739 0.969684128 2106
+0.9999 0.775367081 171088 0.791002291 2107
+0.756582499 0.590533257 156020 0.912607358 2108
+0.667281866 0.615196705 223114 0.910686913 2109
+0.275195926 0.376272351 229302 0.953790198 2110
+0.645082772 0.462283731 219229 0.931637694 2111
+0.248180643 0.291601241 227962 0.968064853 2112
+0.298393041 0.313647479 261118 0.955185012 2113
+0.262921482 0.359358698 226292 0.966132254 2114
+0.282759637 0.373001665 199227 0.980946358 2115
+0.360719323 0.423149794 202179 0.993159527 2116
+0.648681819 0.52943027 203311 1 2117
+0.452932537 0.452225208 209148 1 2118
+0.642379284 0.455635995 209821 0.985935631 2119
+0.688727319 0.49194631 223748 0.995065878 2120
+0.525968969 0.543455601 203610 0.988266785 2121
+0.648775458 0.518779039 217956 0.98576777 2122
+0.770943582 0.588463426 190785 0.939942868 2201
+0.53200978 0.347395569 220970 0.994528669 2202
+0.457116455 0.404799193 215119 0.995927835 2203
+0.37853554 0.33154875 214617 0.995997521 2204
+0.563170135 0.349112242 209288 0.986215168 2205
+0.271134794 0.303530812 270816 0.973025966 2206
+0.382081121 0.367760837 215477 0.980703277 2207
+0.618145347 0.42303738 230547 0.991884518 2208
+0.707186043 0.421448082 212942 0.977444562 2209
+0.60544318 0.515149653 178057 0.981842893 2210
+0.920888603 0.893406272 141650 0.979731733 2301
+0.450128227 0.419475168 224943 0.970734808 2302
+0.573421121 0.457823843 199461 0.98452329 2303
+0.291529626 0.364450783 195903 0.95301246 2304
+0.274009854 0.355922163 235987 0.972244234 2305
+0.597942054 0.424369901 209890 0.960788985 2306
+0.762151182 0.556785643 201336 0.982948901 2307
+0.72078681 0.498529971 202718 0.957482809 2308
+0.302170187 0.372814894 229221 0.936175132 2309
+0.266280204 0.415841907 217234 0.957695388 2310
+0.401306063 0.47267589 222203 0.970333434 2311
+0.540957689 0.393832535 209259 0.955457113 2312
+0.883166432 0.862275422 117096 0.964567534 2313
+0.632857442 0.440740377 192281 0.917209709 2314
+0.647416711 0.457098126 175004 0.923173185 2315
+0.9999 0.462648958 197277 0.673038418 2316
+0.710540652 0.533973873 197446 0.96578305 2317
+0.218378082 0.294786304 272360 0.918717873 2318
+0.564638734 0.367840976 208408 1 2401
+0.276852101 0.375414282 222064 0.953774588 2402
+0.768905342 0.451730698 193622 0.953491855 2403
+0.0001 0.319134712 206499 0.775301575 2404
+0.393872052 0.383572042 208425 0.980256687 2405
+0.256919026 0.351467222 211115 0.970418966 2406
+0.261231989 0.355201632 196083 0.984379064 2407
+0.241498068 0.309010327 213022 0.954117415 2408
+0.813239396 0.538392127 200523 0.966173456 2409
+0.28366223 0.387751013 205307 0.976907753 2410
+0.61491996 0.447006434 209950 0.96512503 2411
+0.198620111 0.383050203 225289 0.869571972 2412
+0.69807142 0.460648984 202968 0.967448071 2413
+0.747344553 0.529066443 208333 0.956675131 2414
+0.248569459 0.372352898 219437 0.93815537 2415
+0.214442581 0.42770645 203310 0.994441985 2416
+0.772260129 0.620460749 210139 1 2417
+0.776190937 0.522946656 196957 0.989728723 2418
+0.704899669 0.451060086 255354 0.933699883 2419
+0.825823605 0.572684109 181484 0.978923762 2420
+0.856783211 0.809622884 177889 0.973595894 2421
+0.761639416 0.515134156 221453 0.940027907 2501
+0.585040808 0.559838772 259255 0.99885441 2502
+0.31649223 0.528271139 239184 0.963475818 2503
+0.9999 0.563709855 249820 0.709642943 2504
+0.642147839 0.642635286 225677 0.969638022 2505
+0.25753215 0.455559194 234773 0.951876919 2506
+0.62292248 0.543231189 237190 0.962409882 2507
+0.302971452 0.469711065 249596 0.961485761 2508
+0.250926763 0.397625744 257345 0.960174085 2509
+0.387336582 0.551767647 193480 0.951137068 3101
+0.432231069 0.567504883 200867 0.995589121 3102
+0.633707047 0.570292413 207967 0.980376694 3103
+0.716795743 0.591131866 222211 0.986094298 3104
+0.361244291 0.52178371 193057 0.954951128 3105
+0.356131017 0.499752939 198330 0.985594716 3106
+0.0001 0.383078426 195041 0.864946345 3201
+0.732720613 0.46293354 185100 1 3202
+0.264269143 0.450612575 221443 0.922052176 3203
+0.639656782 0.43723917 192652 0.996314598 3204
+0.298419654 0.428673208 182449 0.997807607 3205
+0.704103947 0.487000257 239043 0.957262919 3301
+0.421761006 0.487195969 231607 0.982936612 3302
+0.316637695 0.450752199 332959 0.946338738 3303
+0.729871094 0.615704596 260115 0.954577783 3304
+0.741999984 0.661872208 251680 0.93397171 3305
+0.655157626 0.513294399 288392 0.897025576 3306
+0.454171866 0.480273217 230296 0.965744086 3307
+0.745238125 0.608995616 237716 0.935090612 3308
+0.726044595 0.72488153 198380 0.977215445 3401
+0.329545647 0.390931755 284377 0.977909606 3402
+0.633861959 0.469878197 238266 0.994552307 3403
+0.717979252 0.405638695 234487 0.98885226 3404
+0.71160537 0.606644213 213359 0.982470859 3405
+0.406652957 0.499556541 231143 0.990776273 3406
+0.465099931 0.384815574 241326 0.991119896 3407
+0.418973118 0.450153947 202975 0.997176992 3408
+0.679128647 0.455850959 242259 0.977796491 3409
+0.330438018 0.437354416 219808 0.993585311 3501
+0.506733179 0.419465452 224264 0.987082189 3502
+0.29631111 0.323903561 213119 0.715492284 3503
+0.715627909 0.434046447 294298 1 3601
+0.717369437 0.468076199 310975 1 3701
+0.26738739 0.388064235 214632 0.864507622 4001
+0.630290806 0.400712311 178577 0.947675233 4002
+0.0001 0.364636064 237711 0.788158731 4003
+0.9999 0.447149038 199249 0.676470145 4004
+0.553650141 0.372938156 190667 0.921176711 4005
+0.63878274 0.381188393 195709 0.946834331 4006
+0.0001 0.337285936 225927 0.606337445 4007
+0.37677291 0.394680917 259744 0.956025933 4008
+0.634006441 0.456208438 181656 0.983832078 4009
+0.31890592 0.427508593 285089 0.97104413 4010
+0.40276286 0.376302481 194822 0.98984201 4101
+0.0001 0.375192642 190129 0.633296341 4102
+0.652907372 0.390425563 174005 0.67534841 4103
+0.9999 0.42671755 194651 0.67752028 4104
+0.618534148 0.408457935 188935 0.977230264 4105
+0.66882354 0.427546531 211591 0.981648558 4106
+0.682259858 0.40874809 210949 0.945470232 4107
+0.834580243 0.430873692 212835 0.950802265 4202
+0.253093958 0.336846054 220911 0.979534745 4203
+0.691941917 0.473954767 196869 0.950713419 4204
+0.668828011 0.267846107 220328 0.966663338 4301
+0.9999 0.403213799 206982 0.648698921 4302
+0.498420686 0.359434783 265311 0.943892262 4304
+0.465397716 0.389015585 261771 0.974787123 4306
+0.269909024 0.440078586 234140 0.989595968 4308
+0.0001 0.394695699 294931 0.759245383 4309
+0.26530391 0.335680693 207894 1 4310
+0.60842669 0.290483505 270215 1 4311
+0.288423061 0.320348203 332226 0.977858446 4313
+0.548519135 0.468752474 315017 1 4314
+0.339007974 0.467194796 192881 1 4315
+0.694044709 0.444173068 218631 1 4316
+0.723889351 0.410906702 189975 0.984250559 4319
+0.670038223 0.397830456 157175 0.897617306 4401
+0.616885304 0.408454597 140586 0.98043902 4402
+0.9999 0.424750388 146034 0.668768917 4403
+0.602886975 0.409611493 242210 1 4404
+0.781978965 0.684946716 165426 1 4405
+0.410838902 0.327985078 180246 1 4406
+0.647809625 0.293024749 219763 0.948662878 4407
+0.9999 0.462208748 159058 0.645651272 4408
+0.628791213 0.289605439 195718 0.989714794 4409
+0.639798105 0.350587398 189907 0.972460204 4410
+0.158161163 0.290481329 210733 0.392771896 4501
+0.894691765 0.685889661 164716 0.432665922 4502
+0.9999 0.442784965 205086 0.351608593 4503
+0.279557139 0.407933891 189239 0.529779802 4504
+0.888674438 0.365867853 192411 0.298922619 4505
+0.432430863 0.537280023 214458 0.954988856 4508
+0.781710327 0.403176755 183143 0.960047613 4601
+0.652629077 0.514674067 169789 1 4602
+0.888181031 0.338933945 188848 0.981636025 4603
+0.554756165 0.427170128 199204 0.997053272 4604
+0.450194538 0.296999246 180455 1 4605
+0.651938796 0.460926831 180725 1 4701
+0.9999 0.502595127 186504 0.690773388 4702
+0.9999 0.422920108 148638 0.641309759 4703
+0.580073714 0.442951918 237852 0.955964213 4704
+0.526126385 0.396654725 205065 1 4705
+0.375293493 0.39126572 194773 0.957740549 4706
+0.672538698 0.486038923 150669 1 4707
+0.51492393 0.381118923 192722 0.999766503 4708
+0.340590835 0.388189435 230099 0.916196941 4709
+0.389683276 0.345419675 184092 1 4710
+0.503550172 0.405809909 216286 0.995640032 4711
+0.362237304 0.3845478 162773 0.978436227 4801
+0.469313383 0.394368023 171023 1 4802
+0.540998042 0.335324526 162550 1 4803
+0.521502435 0.323364705 168763 1 4804
+0.698397577 0.401864082 152783 1 4805
+0.760784507 0.440193594 159105 0.997309953 4806
+0.622405708 0.473324448 204963 0.960773408 4901
+0.9999 0.500894845 197794 0.736190178 4902
+0.181778684 0.253108799 288133 0.966598758 4903
+0.674253583 0.389256299 214712 0.962759417 4904
+0.614326298 0.501416087 161006 0.964268412 4905
+0.323585689 0.375075191 254357 0.957229406 4906
+0.208625495 0.225940078 251310 0.931228363 4907
+0.0001 0.453342557 164529 0.550073239 4908
+0.9999 0.528748512 199645 0.687570438 4909
+0.9999 0.539846182 288999 0.803507971 4910
+0.9999 0.417394519 182046 0.73721477 4911
+0.9999 0.470034719 185765 0.729195489 4912
+0.52464658 0.362070054 189850 0.987358441 4913
+0.537006438 0.425015688 212011 0.978425648 4914
+0.9999 0.62938571 172778 0.542152357 4915
+0.9999 0.524462342 132612 0.788118722 4916
+0.9999 0.419283032 203495 0.732519227 4917
+0.9999 0.7519719 122599 0.770055221 4918
+0.322946101 0.331216335 164699 1 4919
+0.719777942 0.675657511 137028 0.958402662 4920
+0.0001 0.29105109 271296 0.751905668 4921
+0.317425251 0.370739758 190805 0.965404471 4922
+0.65782398 0.495337933 187900 0.941894625 4923
+0.9999 0.526330888 184973 0.778654182 4924
+0.720436454 0.513055146 165452 0.952191572 4925
+0.307318956 0.312475294 296049 0.950633172 4926
+0.9999 0.536854208 164771 0.637763927 4927
+0.9999 0.490031332 188541 0.6220822 5101
+0.606110811 0.414658457 175489 0.866669706 5102
+0.696955144 0.535152733 189587 0.998844858 5103
+0.257793695 0.347783089 208262 0.942005743 5104
+0.0001 0.331891984 180616 0.649377685 5105
+0.260214537 0.424565315 194917 0.893903559 5106
+0.587156534 0.556435704 177237 0.931583134 5107
+0.50403744 0.365562856 206736 0.922509868 5201
+0.285493284 0.375775605 227399 0.972167864 5202
+0.728993177 0.544984221 208940 0.87829999 5203
+0.683036685 0.422049612 207677 0.906754239 5204
+0.786288023 0.59530288 180068 0.907135082 5205
+0.753580272 0.342652202 241589 0.915939881 5206
+0.9999 0.823821545 149621 0.786320102 5207
+0.372515887 0.53673321 280441 0.980944298 5208
+0.473654211 0.387122124 197222 0.996638306 5301
+0.648687422 0.468540311 207440 1 5302
+0.278288811 0.32121861 213982 0.90124403 5305
+0.730922043 0.415072501 173707 0.967871185 5306
+0.197785154 0.316661447 171966 0.866421269 5401
+0.437656283 0.358389795 182907 0.968639801 5402
+0.57393074 0.376800954 188094 1 5403
+0.9999 0.422295719 157842 0.596349514 5404
+0.9999 0.477041304 199467 0.777411802 5405
+0.764770985 0.395534843 184540 0.876151512 5406
+0.198784843 0.327093244 205446 0.862815533 5407
+0.624008596 0.435729563 159319 0.951261306 5408
+0.9999 0.662122548 177446 0.711653123 5409
+0.745405912 0.514387965 173270 0.923345068 5601
+0.9999 0.483746588 167811 0.705293455 5602
+0.74344033 0.526088893 171414 0.943155168 5603
+0.61301285 0.58109045 138586 0.927691109 5604
+0.278829962 0.344622076 262267 0.976207453 6101
+0.733421922 0.561140895 138099 0.98627796 6102
+0.293340623 0.350595295 255166 0.967487831 6103
+0.0001 0.346719414 256037 0.805539824 6104
+0.322021455 0.440488011 245001 0.990106163 6105
+0.699298739 0.627917647 198233 0.96608032 6201
+0.627351463 0.512081802 234982 1 6202
+0.780437887 0.470739871 215703 1 6203
+0.269244075 0.442268997 216253 0.988444091 6204
+0.248692751 0.318696439 249256 0.969798119 6205
+0.361150503 0.425292194 235203 0.908343006 6206
+0.342147142 0.398373425 201773 1 6301
+0.652241051 0.335495025 199380 0.98394523 6302
+0.607740283 0.490526497 186573 1 6401
+0.444751441 0.44814375 172795 1 6402
+0.655054331 0.427173227 158060 0.982867266 6501
+0.414935082 0.360888243 180718 1 6502
+0.483210146 0.458212972 178273 0.976721096 6601
+0.0001 0.41179207 163449 0.613793905 6602
+0.731093347 0.551670551 173116 0.987153123 6603
+0.401957333 0.272618771 223697 0.978417234 6701
+0.582969844 0.407065988 211860 0.907868404 6702
+0.315935761 0.300247729 200228 0.94876341 6703
+0.449940473 0.385751247 173980 0.777267502 6801
+0.688845873 0.560997128 259914 0.892618328 7101
+0.395866096 0.409513772 234986 0.979198761 7102
+0.711769581 0.496125877 263028 0.979994525 7103
+0.9999 0.484820396 251324 0.720918018 7104
+0.798519313 0.720096409 206664 0.80914915 7105
+0.733517706 0.647865117 253596 0.949616713 7106
+0.684172809 0.539308548 252477 0.984184698 7107
+0.680790961 0.705977261 253494 0.945789644 7108
+0.729594052 0.575824082 216033 0.969861086 7109
+0.9999 0.558105469 182315 0.781614239 7110
+0.74403429 0.58654964 215414 0.907712591 7111
+0.471189231 0.498535365 266617 0.967331415 7112
+0.692414582 0.502316535 218647 0.951026083 7113
+0.374040127 0.402170002 277420 1 7114
+0.712258577 0.472468495 175290 0.950807234 7115
+0.785787344 0.549674034 222712 1 7116
+0.285331339 0.405941129 187271 0.968105046 7117
+0.699391901 0.534705698 189162 0.95023842 7118
+0.491245687 0.452880979 225079 1 7119
+0.275921106 0.346893579 230232 0.976562771 7120
+0.294530094 0.349460006 274452 0.936965298 7121
+0.272061497 0.353208035 245555 0.921398465 7122
+0.657024026 0.56463325 244874 0.91901141 7123
+0.752574325 0.65669179 162408 0.916660509 7124
+0.9999 0.679387152 108932 0.783773363 7125
+0.70341593 0.559238315 194300 0.928708183 7126
+0.694999576 0.551826715 229592 0.932615248 7127
+0.793096244 0.735522985 153813 0.90009297 7128
+0.858409941 0.806795299 114138 0.899893112 7129
+0.622409225 0.533738077 123866 0.937190189 7130
+0.736979306 0.652131915 148059 0.924874543 7131
+0.693365395 0.496017665 177409 0.932173678 7132
+0.275149316 0.369233429 225654 0.927481897 7133
+0.64378047 0.509176731 149563 0.956392958 7134
+0.282048732 0.33765009 260998 0.967015073 7135
+0.559706032 0.479850829 192812 0.958996328 7136
+0.339708269 0.385061443 280750 0.940160285 7137
+0.374040782 0.382617861 151765 0.923065265 7138
+0.235470966 0.284608185 233637 0.948146056 7139
+0.308268249 0.313325405 278723 0.940184341 7140
+0.320032507 0.408527762 296866 0.928270668 7141
+0.339475483 0.343296617 252126 0.920404877 7142
+0.263654768 0.312389404 290490 0.946597129 7143
+0.620585501 0.521456361 155129 0.9431312 7144
+0.244993493 0.33194524 229574 0.96031345 7145
+0.695885777 0.521170199 264735 0.976603018 7201
+0.373376787 0.444251627 221289 0.904093742 7202
+0.9999 0.615304828 233286 0.817382955 7203
+0.719846308 0.547806919 219121 0.687761556 7204
+0.498409599 0.489060581 237901 0.934300402 7205
+0.446129829 0.503365934 280162 0.981257273 7301
+0.9999 0.50331229 240316 0.729002647 7302
+0.501416087 0.520931602 218426 0.998992794 7303
+0.25470832 0.419362217 187871 1 7304
+0.763887763 0.485593706 204494 1 7305
+0.675994635 0.51101023 199178 0.935093233 7306
+0.763287663 0.675063014 240779 0.945726164 7307
+0.291336864 0.433416635 266125 0.927616721 7308
+0.373454362 0.378366828 191835 1 8101
+0.440966964 0.54586935 174670 0.991824584 8201
+0.9999 0.550465107 176409 0.820831137 8202
diff --git a/data/reg.txt b/data/reg.txt
index 603cbcc..56041ab 100644
--- a/data/reg.txt
+++ b/data/reg.txt
@@ -1,276 +1,276 @@
-"X" "Y" "N"
-0.2475 0.8875 40000
-0.18181818 0.95454545 4400
-0.13550136 0.80758808 36900
-0.1884058 0.85507246 6900
-0.072625698 0.97206704 89500
-0.11002445 0.96821516 245400
-0.13636364 0.95454545 4400
-0.028571429 1 14000
-0.086021505 0.97849462 9300
-0.10569106 0.97560976 12300
-0.10769231 0.95384615 13000
-0.2421875 0.953125 12800
-0.1233279 0.7548124 613000
-0.17105263 0.72368421 7600
-0.12121212 1 3300
-0.22273074 0.78032037 262200
-0.19007392 0.84899683 94700
-0.28571429 0.85714286 2800
-0.18181818 0.93181818 4400
-0.5125 0.48333333 24000
-0.095238095 0.95238095 2100
-0.38888889 0.72222222 1800
-0.23636364 0.87272727 5500
-0.36363636 0.88636364 4400
-0.069444444 0.77777778 7200
-0.24489796 0.91836735 4900
-0.16 0.96 7500
-0.16197183 0.94366197 14200
-0.12688698 0.84210526 245100
-0.03030303 1 6600
-0.16352201 0.94339623 15900
-0.27692308 0.78974359 19500
-0.50980392 0.78431373 5100
-0.1 0.95 2000
-0.17344173 0.72086721 36900
-0.10375276 0.9602649 45300
-0.29146919 0.87203791 42200
-0.25806452 0.83870968 6200
-0.090909091 1 2200
-0.4025974 0.87012987 7700
-0.11134454 0.8592437 47600
-0.2971246 0.87859425 31300
-0.13953488 0.96899225 12900
-0.10211268 0.67957746 28400
-0.22826087 0.89130435 9200
-0.063829787 0.86322188 32900
-0.125 0.975 4000
-0.13659148 0.77318296 159600
-0.085271318 0.96899225 12900
-0.1887396 0.93090211 156300
-0.068181818 0.96306818 35200
-0.066179159 0.92431444 273500
-0.16480687 0.79828326 116500
-0.28021978 0.72527473 18200
-0.23756906 0.7679558 18100
-0.2742616 0.89873418 23700
-0.06875 0.98125 16000
-0.073476703 0.96057348 55800
-0.73899371 0.29874214 95400
-0.21126761 0.94366197 7100
-0.24418605 0.89534884 8600
-0.22972973 0.91891892 7400
-0.275 0.575 4000
-0.13519814 0.93822844 85800
-0.22222222 0.97222222 3600
-0.11702128 0.96808511 9400
-0.15625 0.953125 6400
-0.17037037 0.91851852 27000
-0.21495327 0.95327103 10700
-0.2745098 0.94117647 15300
-0.35164835 0.85714286 9100
-0.22926829 0.83902439 20500
-0.19444444 0.92592593 10800
-0.42268041 0.75257732 9700
-0.22295082 0.61311475 30500
-0.32200772 0.58378378 129500
-0.19127086 0.75224647 77900
-0.21818182 0.90909091 5500
-0.076923077 0.97435897 3900
-0.28787879 0.86363636 6600
-0.43859649 0.71052632 11400
-0.39316239 0.87179487 11700
-0.5112782 0.66165414 13300
-0.29543635 0.82786229 124900
-0.58333333 0.45833333 7200
-0.46564885 0.29770992 13100
-0.2173913 1 18400
-0.32835821 0.70149254 13400
-0.19277108 0.92771084 8300
-0.26277372 0.92335766 27400
-0.44936709 0.90506329 15800
-0.27472527 0.87912088 9100
-0.12541806 0.95986622 119600
-0.1835443 0.81012658 15800
-0.21111111 0.93111111 45000
-0.10652921 0.96907216 29100
-0.10126582 0.98734177 7900
-0.37254902 0.74509804 15300
-0.10169492 0.98870056 17700
-0.58426966 0.7752809 8900
-0.41142857 0.65714286 17500
-0.39572193 0.85026738 18700
-0.32820245 0.58361597 383300
-0.28975265 0.58833922 56600
-0.25217391 0.73043478 11500
-0.46491228 0.90350877 11400
-0.28775835 0.71701113 62900
-0.37288136 0.88135593 5900
-0.37704918 0.68852459 12200
-0.20192308 0.93269231 10400
-0.046218487 0.98739496 23800
-0.2231405 1 12100
-0.4375 0.97916667 4800
-0.43956044 0.92307692 9100
-0.43434343 0.96969697 9900
-0.36945813 0.89408867 40600
-0.30718954 0.9869281 15300
-0.27906977 0.9379845 25800
-0.17605634 0.95422535 28400
-0.29652997 0.87697161 31700
-0.59322034 0.74576271 5900
-0.18394649 0.80602007 29900
-0.2970297 0.88118812 10100
-0.11353712 1 22900
-0.098360656 0.96721311 12200
-0.28813559 0.8559322 23600
-0.30837004 0.77092511 22700
-0.43209877 0.87654321 8100
-0.18421053 0.76315789 7600
-0.62162162 0.2972973 7400
-0.27659574 0.87234043 9400
-0.14741036 0.74302789 50200
-0.054945055 0.98901099 9100
-0.021276596 0.55319149 4700
-0.4 0.66923077 13000
-0.008264463 0.99173554 12100
-0.015151515 0.48484848 6600
-0.31155779 0.73869347 19900
-0.48461538 0.9 13000
-0.35664336 0.68531469 14300
-0.2962963 0.87037037 10800
-0.10532838 0.70384139 80700
-0.060509554 0.77707006 31400
-0.13300493 0.66995074 40600
-0.0625 0.79779412 27200
-0.35483871 0.41935484 3100
-0.10614525 0.37988827 17900
-0.40594059 0.45544554 10100
-0.078947368 0.89473684 41800
-0.26315789 0.55263158 15200
-0.021505376 0.47311828 9300
-0.3968254 0.66666667 6300
-0.17615176 0.77777778 36900
-0.29365079 0.42857143 25200
-0.26282051 0.52564103 31200
-0.24383917 0.44487678 77100
-0.28205128 0.82051282 3900
-0.054054054 0.54054054 3700
-0.097826087 0.95217391 46000
-0.11 0.86 10000
-0.31531532 0.82882883 22200
-0.29279279 0.83033033 66600
-0.44244604 0.6618705 27800
-0.2228164 0.84402852 112200
-0.36363636 0.68181818 15400
-0.11570248 0.73002755 72600
-0.46 0.64 5000
-0.029411765 0.97058824 3400
-0.37634409 0.75268817 18600
-0.40740741 0.72839506 8100
-0.18958333 0.71319444 144000
-0.45544554 0.7359736 30300
-0.23574144 0.72623574 26300
-0.021186441 0.59322034 23600
-0.053811659 0.88340807 22300
-0.52136752 0.76923077 11700
-0.48051948 0.66233766 7700
-0.33333333 0.66666667 3300
-0.15027322 0.83879781 36600
-0.08 0.91 10000
-0.18443804 0.86455331 34700
-0.41818182 0.70909091 5500
-0.18918919 0.87162162 14800
-0.34797297 0.64527027 29600
-0.091463415 0.89634146 16400
-0.051612903 0.76774194 15500
-0.022727273 0.54545455 8800
-0.010309278 0.77319588 9700
-0.41304348 0.78985507 13800
-0.2164557 0.7943038 158000
-0.19811321 0.94339623 10600
-0.23414634 0.64390244 20500
-0.32716049 0.7191358 32400
-0.25118483 0.73459716 42200
-0.52898551 0.88405797 13800
-0.12820513 0.37435897 39000
-0.20491803 0.88934426 24400
-0.30188679 0.69811321 5300
-0.34265734 0.61538462 14300
-0.42268041 0.65979381 9700
-0.39215686 0.39215686 5100
-0.29577465 0.86619718 14200
-0.37569061 0.72375691 36200
-0.11594203 0.56521739 6900
-0.072222222 0.91388889 36000
-0.25581395 0.85581395 21500
-0.50591017 0.53900709 42300
-0.18137255 0.68872549 40800
-0.14371257 0.7005988 50100
-0.097744361 0.52255639 26600
-0.3203125 0.421875 25600
-0.376 0.704 12500
-0.090534979 0.81481481 24300
-0.078125 0.6484375 12800
-0.04964539 0.74822695 28200
-0.17021277 0.72340426 4700
-0.043956044 0.97802198 9100
-0.33333333 0.625 2400
-0.18032787 0.73770492 24400
-0.37142857 0.86857143 17500
-0.22968907 0.78335005 99700
-0.55555556 0.57575758 9900
-0.37142857 0.84285714 7000
-0.010309278 0.72164948 9700
-0.35033259 0.51884701 45100
-0.07751938 0.82170543 25800
-0.34394904 0.63694268 31400
-0.044117647 0.91176471 13600
-0.0125 0.4375 8000
-0.26890756 0.60504202 11900
-0.2293578 0.72706422 43600
-0.56140351 0.73684211 5700
-0.16812609 0.5061296 57100
-0.46341463 0.70731707 8200
-0.35955056 0.86516854 8900
-0.37305699 0.51295337 19300
-0.40641711 0.9197861 18700
-0.55932203 0.61016949 5900
-0.31305115 0.61463845 113400
-0.17525773 0.71134021 19400
-0.33727811 0.64497041 16900
-0.30057803 0.82080925 17300
-0.59689922 0.75968992 12900
-0.43055556 0.72916667 14400
-0.37218045 0.67669173 26600
-0.38732394 0.66901408 14200
-0.37762238 0.93706294 14300
-0.48101266 0.59493671 7900
-0.52380952 0.73333333 10500
-0.37209302 0.74418605 43000
-0.44720497 0.7515528 16100
-0.15371901 0.60165289 121000
-0.26153846 0.66153846 26000
-0.46590909 0.75 8800
-0.21203438 0.64756447 34900
-0.55 0.66666667 6000
-0.34302326 0.72674419 17200
-0.22857143 0.78571429 21000
-0.2556391 0.59022556 26600
-0.55102041 0.68367347 9800
-0.14285714 0.875 33600
-0.53658537 0.68292683 4100
-0.48734177 0.59493671 15800
-0.41843972 0.73049645 14100
-0.29069767 0.70348837 17200
-0.1 0.60454545 22000
-0.5147929 0.81065089 33800
-0.090909091 0.625 26400
-0.29274843 0.61862131 111700
-0.29113924 0.7721519 7900
-0.18826135 0.65891473 90300
-0.41176471 0.52673797 37400
-0.24260355 0.76331361 16900
-0.5801105 0.72375691 18100
-0.24285714 0.57142857 42000
+X Y N W1 W2
+0.2475 0.8875 40000 0.55555556 0.99667774
+0.18181818 0.95454545 4400 0.75 1
+0.13550136 0.80758808 36900 0.64 0.8338558
+0.1884058 0.85507246 6900 0.69230769 0.89285714
+0.072625698 0.97206704 89500 0.61538462 1
+0.11002445 0.96821516 245400 0.71111111 1
+0.13636364 0.95454545 4400 0.66666667 1
+0.028571429 1 14000 1 1
+0.086021505 0.97849462 9300 0.75 1
+0.10569106 0.97560976 12300 0.76923077 1
+0.10769231 0.95384615 13000 0.57142857 1
+0.2421875 0.953125 12800 0.80645161 1
+0.1233279 0.7548124 613000 0.79365079 0.74934872
+0.17105263 0.72368421 7600 0.69230769 0.73015873
+0.12121212 1 3300 1 1
+0.22273074 0.78032037 262200 0.73116438 0.79440628
+0.19007392 0.84899683 94700 0.75555556 0.87092568
+0.28571429 0.85714286 2800 0.5 1
+0.18181818 0.93181818 4400 0.625 1
+0.5125 0.48333333 24000 0.38211382 0.58974359
+0.095238095 0.95238095 2100 0.5 1
+0.38888889 0.72222222 1800 0.28571429 1
+0.23636364 0.87272727 5500 0.53846154 0.97619048
+0.36363636 0.88636364 4400 0.6875 1
+0.069444444 0.77777778 7200 0.6 0.79104478
+0.24489796 0.91836735 4900 0.66666667 1
+0.16 0.96 7500 0.75 1
+0.16197183 0.94366197 14200 0.65217391 1
+0.12688698 0.84210526 245100 0.73311897 0.85794393
+0.03030303 1 6600 1 1
+0.16352201 0.94339623 15900 0.65384615 1
+0.27692308 0.78974359 19500 0.61111111 0.85815603
+0.50980392 0.78431373 5100 0.57692308 1
+0.1 0.95 2000 0.5 1
+0.17344173 0.72086721 36900 0.359375 0.79672131
+0.10375276 0.9602649 45300 0.61702128 1
+0.29146919 0.87203791 42200 0.56097561 1
+0.25806452 0.83870968 6200 0.375 1
+0.090909091 1 2200 1 1
+0.4025974 0.87012987 7700 0.67741935 1
+0.11134454 0.8592437 47600 0.52830189 0.90070922
+0.2971246 0.87859425 31300 0.59139785 1
+0.13953488 0.96899225 12900 0.77777778 1
+0.10211268 0.67957746 28400 0.62068966 0.68627451
+0.22826087 0.89130435 9200 0.71428571 0.94366197
+0.063829787 0.86322188 32900 0.57142857 0.88311688
+0.125 0.975 4000 0.8 1
+0.13659148 0.77318296 159600 0.48623853 0.81857765
+0.085271318 0.96899225 12900 0.63636364 1
+0.1887396 0.93090211 156300 0.63389831 1
+0.068181818 0.96306818 35200 0.45833333 1
+0.066179159 0.92431444 273500 0.62983425 0.94518403
+0.16480687 0.79828326 116500 0.53125 0.85097636
+0.28021978 0.72527473 18200 0.41176471 0.84732824
+0.23756906 0.7679558 18100 0.48837209 0.85507246
+0.2742616 0.89873418 23700 0.64615385 0.99418605
+0.06875 0.98125 16000 0.72727273 1
+0.073476703 0.96057348 55800 0.46341463 1
+0.73899371 0.29874214 95400 0.05106383 1
+0.21126761 0.94366197 7100 0.73333333 1
+0.24418605 0.89534884 8600 0.57142857 1
+0.22972973 0.91891892 7400 0.64705882 1
+0.275 0.575 4000 0.27272727 0.68965517
+0.13519814 0.93822844 85800 0.67241379 0.97978437
+0.22222222 0.97222222 3600 0.875 1
+0.11702128 0.96808511 9400 0.72727273 1
+0.15625 0.953125 6400 0.7 1
+0.17037037 0.91851852 27000 0.86956522 0.92857143
+0.21495327 0.95327103 10700 0.86956522 0.97619048
+0.2745098 0.94117647 15300 0.78571429 1
+0.35164835 0.85714286 9100 0.6875 0.94915254
+0.22926829 0.83902439 20500 0.68085106 0.88607595
+0.19444444 0.92592593 10800 0.61904762 1
+0.42268041 0.75257732 9700 0.58536585 0.875
+0.22295082 0.61311475 30500 0.30882353 0.70042194
+0.32200772 0.58378378 129500 0.32613909 0.70615034
+0.19127086 0.75224647 77900 0.65771812 0.77460317
+0.21818182 0.90909091 5500 0.58333333 1
+0.076923077 0.97435897 3900 1 0.97222222
+0.28787879 0.86363636 6600 0.52631579 1
+0.43859649 0.71052632 11400 0.56 0.828125
+0.39316239 0.87179487 11700 0.67391304 1
+0.5112782 0.66165414 13300 0.41176471 0.92307692
+0.29543635 0.82786229 124900 0.59891599 0.92386364
+0.58333333 0.45833333 7200 0.071428571 1
+0.46564885 0.29770992 13100 0.06557377 0.5
+0.2173913 1 18400 1 1
+0.32835821 0.70149254 13400 0.20454545 0.94444444
+0.19277108 0.92771084 8300 0.625 1
+0.26277372 0.92335766 27400 0.81944444 0.96039604
+0.44936709 0.90506329 15800 0.83098592 0.96551724
+0.27472527 0.87912088 9100 0.72 0.93939394
+0.12541806 0.95986622 119600 0.68 1
+0.1835443 0.81012658 15800 0.68965517 0.8372093
+0.21111111 0.93111111 45000 0.75789474 0.97746479
+0.10652921 0.96907216 29100 0.70967742 1
+0.10126582 0.98734177 7900 0.875 1
+0.37254902 0.74509804 15300 0.49122807 0.89583333
+0.10169492 0.98870056 17700 0.88888889 1
+0.58426966 0.7752809 8900 0.61538462 1
+0.41142857 0.65714286 17500 0.36111111 0.86407767
+0.39572193 0.85026738 18700 0.7027027 0.94690265
+0.32820245 0.58361597 383300 0.53338633 0.60815534
+0.28975265 0.58833922 56600 0.15243902 0.76616915
+0.25217391 0.73043478 11500 0.068965517 0.95348837
+0.46491228 0.90350877 11400 0.83018868 0.96721311
+0.28775835 0.71701113 62900 0.51933702 0.796875
+0.37288136 0.88135593 5900 0.68181818 1
+0.37704918 0.68852459 12200 0.36956522 0.88157895
+0.20192308 0.93269231 10400 0.66666667 1
+0.046218487 0.98739496 23800 0.72727273 1
+0.2231405 1 12100 1 1
+0.4375 0.97916667 4800 0.95238095 1
+0.43956044 0.92307692 9100 0.825 1
+0.43434343 0.96969697 9900 0.93023256 1
+0.36945813 0.89408867 40600 0.83333333 0.9296875
+0.30718954 0.9869281 15300 0.95744681 1
+0.27906977 0.9379845 25800 0.77777778 1
+0.17605634 0.95422535 28400 0.74 1
+0.29652997 0.87697161 31700 0.62765957 0.98206278
+0.59322034 0.74576271 5900 0.57142857 1
+0.18394649 0.80602007 29900 0.50909091 0.87295082
+0.2970297 0.88118812 10100 0.6 1
+0.11353712 1 22900 1 1
+0.098360656 0.96721311 12200 0.66666667 1
+0.28813559 0.8559322 23600 0.55882353 0.97619048
+0.30837004 0.77092511 22700 0.55714286 0.86624204
+0.43209877 0.87654321 8100 0.71428571 1
+0.18421053 0.76315789 7600 0.28571429 0.87096774
+0.62162162 0.2972973 7400 0.2826087 0.32142857
+0.27659574 0.87234043 9400 0.57692308 0.98529412
+0.14741036 0.74302789 50200 0.56756757 0.77336449
+0.054945055 0.98901099 9100 0.8 1
+0.021276596 0.55319149 4700 0 0.56521739
+0.4 0.66923077 13000 0.46153846 0.80769231
+0.008264463 0.99173554 12100 0 1
+0.015151515 0.48484848 6600 0 0.49230769
+0.31155779 0.73869347 19900 0.32258065 0.9270073
+0.48461538 0.9 13000 0.79365079 1
+0.35664336 0.68531469 14300 0.50980392 0.7826087
+0.2962963 0.87037037 10800 0.75 0.92105263
+0.10532838 0.70384139 80700 0.50588235 0.72714681
+0.060509554 0.77707006 31400 0.78947368 0.77627119
+0.13300493 0.66995074 40600 0.46296296 0.70170455
+0.0625 0.79779412 27200 0.70588235 0.80392157
+0.35483871 0.41935484 3100 0.27272727 0.5
+0.10614525 0.37988827 17900 0.21052632 0.4
+0.40594059 0.45544554 10100 0.36585366 0.51666667
+0.078947368 0.89473684 41800 0.72727273 0.90909091
+0.26315789 0.55263158 15200 0.425 0.59821429
+0.021505376 0.47311828 9300 0 0.48351648
+0.3968254 0.66666667 6300 0.44 0.81578947
+0.17615176 0.77777778 36900 0.55384615 0.82565789
+0.29365079 0.42857143 25200 0.31081081 0.47752809
+0.26282051 0.52564103 31200 0.46341463 0.54782609
+0.24383917 0.44487678 77100 0.40425532 0.45797599
+0.28205128 0.82051282 3900 0.45454545 0.96428571
+0.054054054 0.54054054 3700 0.5 0.54285714
+0.097826087 0.95217391 46000 0.73333333 0.97590361
+0.11 0.86 10000 0.63636364 0.88764045
+0.31531532 0.82882883 22200 0.45714286 1
+0.29279279 0.83033033 66600 0.76410256 0.85774947
+0.44244604 0.6618705 27800 0.43089431 0.84516129
+0.2228164 0.84402852 112200 0.772 0.8646789
+0.36363636 0.68181818 15400 0.48214286 0.79591837
+0.11570248 0.73002755 72600 0.58333333 0.74922118
+0.46 0.64 5000 0.47826087 0.77777778
+0.029411765 0.97058824 3400 0 1
+0.37634409 0.75268817 18600 0.42857143 0.94827586
+0.40740741 0.72839506 8100 0.48484848 0.89583333
+0.18958333 0.71319444 144000 0.58974359 0.74207369
+0.45544554 0.7359736 30300 0.43478261 0.98787879
+0.23574144 0.72623574 26300 0.41935484 0.82089552
+0.021186441 0.59322034 23600 0.4 0.5974026
+0.053811659 0.88340807 22300 0.5 0.90521327
+0.52136752 0.76923077 11700 0.6557377 0.89285714
+0.48051948 0.66233766 7700 0.54054054 0.775
+0.33333333 0.66666667 3300 0.54545455 0.72727273
+0.15027322 0.83879781 36600 0.58181818 0.88424437
+0.08 0.91 10000 0.25 0.9673913
+0.18443804 0.86455331 34700 0.484375 0.95053004
+0.41818182 0.70909091 5500 0.56521739 0.8125
+0.18918919 0.87162162 14800 0.75 0.9
+0.34797297 0.64527027 29600 0.45631068 0.74611399
+0.091463415 0.89634146 16400 0.73333333 0.91275168
+0.051612903 0.76774194 15500 0.5 0.78231293
+0.022727273 0.54545455 8800 0 0.55813953
+0.010309278 0.77319588 9700 0 0.78125
+0.41304348 0.78985507 13800 0.57894737 0.9382716
+0.2164557 0.7943038 158000 0.5497076 0.86187399
+0.19811321 0.94339623 10600 0.71428571 1
+0.23414634 0.64390244 20500 0.375 0.72611465
+0.32716049 0.7191358 32400 0.50943396 0.82110092
+0.25118483 0.73459716 42200 0.52830189 0.80379747
+0.52898551 0.88405797 13800 0.78082192 1
+0.12820513 0.37435897 39000 0.38 0.37352941
+0.20491803 0.88934426 24400 0.64 0.95360825
+0.30188679 0.69811321 5300 0.5625 0.75675676
+0.34265734 0.61538462 14300 0.44897959 0.70212766
+0.42268041 0.65979381 9700 0.41463415 0.83928571
+0.39215686 0.39215686 5100 0.35 0.41935484
+0.29577465 0.86619718 14200 0.69047619 0.94
+0.37569061 0.72375691 36200 0.47058824 0.87610619
+0.11594203 0.56521739 6900 0.375 0.59016393
+0.072222222 0.91388889 36000 0.57692308 0.94011976
+0.25581395 0.85581395 21500 0.70909091 0.90625
+0.50591017 0.53900709 42300 0.31775701 0.76555024
+0.18137255 0.68872549 40800 0.59459459 0.70958084
+0.14371257 0.7005988 50100 0.51388889 0.73193473
+0.097744361 0.52255639 26600 0.34615385 0.54166667
+0.3203125 0.421875 25600 0.2804878 0.48850575
+0.376 0.704 12500 0.46808511 0.84615385
+0.090534979 0.81481481 24300 0.54545455 0.84162896
+0.078125 0.6484375 12800 0.8 0.63559322
+0.04964539 0.74822695 28200 0.57142857 0.75746269
+0.17021277 0.72340426 4700 0 0.87179487
+0.043956044 0.97802198 9100 1 0.97701149
+0.33333333 0.625 2400 0.5 0.6875
+0.18032787 0.73770492 24400 0.40909091 0.81
+0.37142857 0.86857143 17500 0.73846154 0.94545455
+0.22968907 0.78335005 99700 0.45851528 0.88020833
+0.55555556 0.57575758 9900 0.34545455 0.86363636
+0.37142857 0.84285714 7000 0.65384615 0.95454545
+0.010309278 0.72164948 9700 0 0.72916667
+0.35033259 0.51884701 45100 0.3164557 0.62798635
+0.07751938 0.82170543 25800 1 0.80672269
+0.34394904 0.63694268 31400 0.37037037 0.77669903
+0.044117647 0.91176471 13600 0.66666667 0.92307692
+0.0125 0.4375 8000 0 0.44303797
+0.26890756 0.60504202 11900 0.375 0.68965517
+0.2293578 0.72706422 43600 0.51 0.79166667
+0.56140351 0.73684211 5700 0.625 0.88
+0.16812609 0.5061296 57100 0.3125 0.54526316
+0.46341463 0.70731707 8200 0.57894737 0.81818182
+0.35955056 0.86516854 8900 0.6875 0.96491228
+0.37305699 0.51295337 19300 0.59722222 0.46280992
+0.40641711 0.9197861 18700 0.80263158 1
+0.55932203 0.61016949 5900 0.45454545 0.80769231
+0.31305115 0.61463845 113400 0.53521127 0.6508344
+0.17525773 0.71134021 19400 0.52941176 0.75
+0.33727811 0.64497041 16900 0.52631579 0.70535714
+0.30057803 0.82080925 17300 0.65384615 0.89256198
+0.59689922 0.75968992 12900 0.5974026 1
+0.43055556 0.72916667 14400 0.56451613 0.85365854
+0.37218045 0.67669173 26600 0.50505051 0.77844311
+0.38732394 0.66901408 14200 0.50909091 0.77011494
+0.37762238 0.93706294 14300 0.83333333 1
+0.48101266 0.59493671 7900 0.31578947 0.85365854
+0.52380952 0.73333333 10500 0.65454545 0.82
+0.37209302 0.74418605 43000 0.59375 0.83333333
+0.44720497 0.7515528 16100 0.63888889 0.84269663
+0.15371901 0.60165289 121000 0.47849462 0.62402344
+0.26153846 0.66153846 26000 0.42647059 0.74479167
+0.46590909 0.75 8800 0.58536585 0.89361702
+0.21203438 0.64756447 34900 0.52702703 0.68
+0.55 0.66666667 6000 0.57575758 0.77777778
+0.34302326 0.72674419 17200 0.44067797 0.87610619
+0.22857143 0.78571429 21000 0.54166667 0.85802469
+0.2556391 0.59022556 26600 0.48529412 0.62626263
+0.55102041 0.68367347 9800 0.5 0.90909091
+0.14285714 0.875 33600 0.64583333 0.91319444
+0.53658537 0.68292683 4100 0.5 0.89473684
+0.48734177 0.59493671 15800 0.48051948 0.7037037
+0.41843972 0.73049645 14100 0.52542373 0.87804878
+0.29069767 0.70348837 17200 0.46 0.80327869
+0.1 0.60454545 22000 0.5 0.61616162
+0.5147929 0.81065089 33800 0.7183908 0.90853659
+0.090909091 0.625 26400 0.45833333 0.64166667
+0.29274843 0.61862131 111700 0.59327217 0.62911392
+0.29113924 0.7721519 7900 0.60869565 0.83928571
+0.18826135 0.65891473 90300 0.46470588 0.70395634
+0.41176471 0.52673797 37400 0.49350649 0.55
+0.24260355 0.76331361 16900 0.51219512 0.84375
+0.5801105 0.72375691 18100 0.61904762 0.86842105
+0.24285714 0.57142857 42000 0.41176471 0.62264151
diff --git a/data/wallace.txt b/data/wallace.txt
new file mode 100644
index 0000000..fb94f55
--- /dev/null
+++ b/data/wallace.txt
@@ -0,0 +1,1010 @@
+FIPS X Y
+1001 0.71 0.42
+1003 0.76 0.21
+1005 0.69 0.52
+1007 0.8 0.31
+1009 0.73 0.03
+1011 0.5 0.72
+1013 0.76 0.45
+1015 0.72 0.19
+1017 0.74 0.37
+1019 0.84 0.1
+1021 0.74 0.16
+1023 0.69 0.5
+1025 0.71 0.5
+1027 0.81 0.16
+1029 0.83 0.06
+1031 0.83 0.21
+1033 0.73 0.19
+1035 0.73 0.45
+1037 0.73 0.36
+1039 0.87 0.16
+1041 0.82 0.31
+1043 0.64 0.01
+1045 0.84 0.18
+1047 0.53 0.58
+1049 0.55 0.02
+1051 0.76 0.34
+1053 0.79 0.33
+1055 0.69 0.15
+1057 0.75 0.16
+1059 0.64 0.06
+1061 0.92 0.15
+1063 0.39 0.81
+1065 0.55 0.71
+1067 0.8 0.46
+1069 0.84 0.27
+1071 0.78 0.06
+1073 0.52 0.35
+1075 0.89 0.15
+1077 0.71 0.12
+1079 0.82 0.22
+1081 0.59 0.37
+1083 0.81 0.21
+1085 0.56 0.81
+1087 0.25 0.83
+1089 0.57 0.19
+1091 0.57 0.62
+1093 0.76 0.03
+1095 0.76 0.02
+1097 0.67 0.32
+1099 0.7 0.51
+1101 0.58 0.38
+1103 0.76 0.13
+1105 0.5 0.66
+1107 0.7 0.45
+1109 0.68 0.41
+1111 0.75 0.26
+1113 0.67 0.49
+1117 0.72 0.19
+1115 0.73 0.17
+1119 0.45 0.76
+1121 0.73 0.32
+1123 0.77 0.29
+1125 0.65 0.29
+1127 0.74 0.1
+1129 0.8 0.33
+1131 0.56 0.78
+1133 0.54 0.01
+5001 0.5 0.25
+5003 0.56 0.37
+5005 0.22 0.0001
+5007 0.25 0.0001
+5009 0.29 0.0001
+5011 0.53 0.35
+5013 0.55 0.35
+5015 0.23 0.0001
+5017 0.39 0.57
+5019 0.39 0.26
+5021 0.36 0.0001
+5023 0.4 0.0001
+5025 0.71 0.25
+5027 0.47 0.36
+5029 0.3 0.22
+5031 0.43 0.03
+5033 0.4 0.03
+5035 0.44 0.59
+5037 0.54 0.3
+5039 0.47 0.4
+5041 0.43 0.48
+5043 0.49 0.34
+5045 0.4 0.11
+5047 0.46 0.01
+5049 0.33 0.0001
+5051 0.34 0.11
+5053 0.6 0.07
+5055 0.37 0.0001
+5057 0.43 0.37
+5059 0.51 0.15
+5061 0.41 0.21
+5063 0.35 0.03
+5065 0.37 0.01
+5067 0.51 0.15
+5069 0.34 0.44
+5071 0.33 0.02
+5073 0.48 0.45
+5075 0.45 0.01
+5077 0.39 0.61
+5079 0.55 0.48
+5081 0.44 0.31
+5083 0.33 0.02
+5085 0.52 0.24
+5087 0.21 0.0001
+5089 0.27 0.0001
+5091 0.48 0.26
+5093 0.4 0.3
+5095 0.48 0.49
+5097 0.39 0.0001
+5099 0.45 0.36
+5101 0.2 0.0001
+5103 0.42 0.38
+5105 0.4 0.03
+5107 0.37 0.57
+5109 0.47 0.04
+5111 0.52 0.11
+5113 0.35 0.0001
+5115 0.32 0.03
+5117 0.56 0.19
+5119 0.32 0.21
+5121 0.38 0.01
+5125 0.49 0.06
+5127 0.36 0.0001
+5129 0.22 0.0001
+5131 0.32 0.07
+5133 0.39 0.09
+5135 0.38 0.0001
+5123 0.47 0.57
+5137 0.37 0.0001
+5139 0.46 0.3
+5141 0.33 0.01
+5143 0.23 0.01
+5145 0.42 0.04
+5147 0.48 0.41
+5149 0.37 0.04
+12001 0.31 0.26
+12003 0.72 0.22
+12005 0.62 0.15
+12007 0.6 0.23
+12009 0.28 0.11
+12011 0.16 0.16
+12013 0.76 0.16
+12015 0.19 0.06
+12017 0.36 0.18
+12019 0.44 0.14
+12021 0.28 0.14
+12023 0.55 0.3
+12025 0.15 0.15
+12027 0.5 0.22
+12029 0.74 0.14
+12031 0.36 0.23
+12033 0.54 0.21
+12035 0.46 0.38
+12037 0.61 0.21
+12039 0.49 0.59
+12041 0.74 0.11
+12043 0.55 0.34
+12045 0.72 0.24
+12047 0.58 0.45
+12049 0.56 0.09
+12051 0.49 0.24
+12053 0.4 0.21
+12055 0.33 0.21
+12057 0.33 0.14
+12059 0.87 0.04
+12061 0.24 0.21
+12063 0.7 0.31
+12065 0.51 0.59
+12067 0.76 0.12
+12069 0.34 0.19
+12071 0.28 0.16
+12073 0.39 0.33
+12075 0.62 0.3
+12077 0.77 0.15
+12079 0.57 0.48
+12081 0.24 0.15
+12083 0.42 0.35
+12085 0.24 0.2
+12087 0.29 0.1
+12089 0.56 0.25
+12091 0.59 0.07
+12093 0.53 0.16
+12095 0.27 0.15
+12097 0.36 0.1
+12099 0.19 0.23
+12101 0.3 0.11
+12103 0.16 0.09
+12105 0.42 0.18
+12107 0.47 0.31
+12113 0.67 0.07
+12115 0.15 0.1
+12117 0.3 0.25
+12109 0.41 0.27
+12111 0.26 0.32
+12119 0.57 0.26
+12121 0.66 0.27
+12123 0.66 0.25
+12125 0.72 0.27
+12127 0.25 0.17
+12129 0.71 0.28
+12131 0.72 0.14
+12133 0.75 0.19
+13001 0.63 0.23
+13003 0.61 0.29
+13005 0.69 0.13
+13007 0.62 0.59
+13009 0.38 0.4
+13011 0.67 0.07
+13013 0.53 0.18
+13015 0.49 0.15
+13017 0.54 0.33
+13019 0.73 0.15
+13021 0.42 0.33
+13023 0.68 0.28
+13025 0.76 0.13
+13027 0.64 0.49
+13029 0.6 0.37
+13031 0.5 0.37
+13033 0.37 0.66
+13035 0.49 0.47
+13037 0.51 0.65
+13039 0.51 0.4
+13043 0.59 0.35
+13045 0.54 0.19
+13047 0.69 0.02
+13049 0.6 0.32
+13051 0.32 0.34
+13053 0.58 0.18
+13055 0.56 0.09
+13057 0.45 0.04
+13059 0.23 0.25
+13061 0.48 0.62
+13063 0.5 0.1
+13065 0.64 0.38
+13067 0.39 0.07
+13069 0.6 0.27
+13071 0.68 0.24
+13073 0.46 0.36
+13075 0.68 0.3
+13077 0.51 0.36
+13079 0.55 0.58
+13081 0.63 0.43
+13083 0.73 0.02
+13085 0.53 0.0001
+13089 0.23 0.09
+13087 0.65 0.42
+13091 0.6 0.28
+13093 0.57 0.53
+13095 0.5 0.34
+13097 0.57 0.15
+13099 0.72 0.52
+13101 0.83 0.28
+13103 0.65 0.35
+13105 0.6 0.34
+13107 0.54 0.33
+13109 0.59 0.37
+13111 0.2 0.0001
+13113 0.57 0.3
+13115 0.46 0.14
+13117 0.54 0.0001
+13119 0.64 0.12
+13121 0.21 0.35
+13123 0.31 0.0001
+13125 0.76 0.29
+13127 0.43 0.28
+13129 0.51 0.06
+13131 0.66 0.37
+13133 0.35 0.53
+13135 0.51 0.08
+13137 0.53 0.05
+13139 0.41 0.11
+13141 0.3 0.75
+13143 0.59 0.08
+13145 0.47 0.55
+13147 0.67 0.25
+13149 0.64 0.23
+13151 0.45 0.43
+13153 0.51 0.22
+13155 0.68 0.37
+13157 0.56 0.13
+13159 0.42 0.54
+13161 0.67 0.21
+13163 0.4 0.56
+13165 0.49 0.5
+13167 0.71 0.33
+13169 0.5 0.5
+13171 0.51 0.4
+13173 0.66 0.3
+13175 0.52 0.37
+13177 0.53 0.63
+13179 0.39 0.42
+13181 0.59 0.49
+13183 0.59 0.33
+13185 0.51 0.32
+13187 0.49 0.02
+13193 0.5 0.63
+13195 0.67 0.19
+13197 0.66 0.6
+13189 0.42 0.42
+13191 0.4 0.58
+13199 0.47 0.5
+13201 0.82 0.29
+13205 0.65 0.51
+13207 0.44 0.48
+13209 0.63 0.41
+13211 0.47 0.48
+13213 0.46 0.01
+13215 0.46 0.24
+13217 0.45 0.35
+13219 0.55 0.22
+13221 0.67 0.45
+13223 0.6 0.09
+13225 0.42 0.59
+13227 0.37 0.04
+13229 0.66 0.22
+13231 0.6 0.45
+13233 0.53 0.16
+13235 0.59 0.43
+13237 0.43 0.54
+13239 0.61 0.64
+13241 0.53 0.01
+13243 0.48 0.62
+13245 0.26 0.31
+13247 0.48 0.27
+13249 0.57 0.56
+13251 0.44 0.52
+13253 0.77 0.38
+13255 0.45 0.28
+13257 0.55 0.14
+13259 0.56 0.71
+13261 0.53 0.53
+13263 0.45 0.7
+13265 0.36 0.62
+13267 0.65 0.32
+13269 0.6 0.48
+13271 0.59 0.34
+13273 0.5 0.64
+13275 0.51 0.41
+13277 0.58 0.3
+13279 0.6 0.28
+13281 0.21 0.0001
+13283 0.57 0.33
+13285 0.5 0.33
+13287 0.69 0.37
+13289 0.5 0.6
+13291 0.29 0.0001
+13293 0.55 0.28
+13295 0.61 0.05
+13297 0.58 0.3
+13299 0.58 0.25
+13301 0.44 0.63
+13303 0.43 0.57
+13305 0.6 0.21
+13307 0.69 0.64
+13309 0.56 0.33
+13311 0.49 0.04
+13313 0.34 0.05
+13315 0.68 0.33
+13317 0.48 0.51
+13319 0.55 0.48
+13321 0.7 0.48
+22001 0.57 0.2
+22003 0.58 0.24
+22005 0.57 0.32
+22007 0.47 0.41
+22009 0.55 0.28
+22011 0.56 0.22
+22013 0.56 0.49
+22015 0.59 0.25
+22017 0.42 0.36
+22019 0.46 0.21
+22021 0.61 0.28
+22023 0.64 0.06
+22025 0.64 0.35
+22027 0.55 0.5
+22029 0.61 0.46
+22031 0.49 0.57
+22033 0.45 0.32
+22035 0.4 0.61
+22037 0.54 0.54
+22039 0.64 0.27
+22041 0.76 0.41
+22043 0.63 0.24
+22045 0.42 0.29
+22047 0.44 0.49
+22049 0.6 0.32
+22051 0.46 0.15
+22053 0.5 0.21
+22059 0.66 0.12
+22055 0.39 0.24
+22057 0.51 0.12
+22061 0.48 0.42
+22063 0.81 0.15
+22065 0.42 0.65
+22067 0.6 0.47
+22069 0.47 0.44
+22071 0.33 0.37
+22073 0.48 0.32
+22075 0.75 0.28
+22077 0.47 0.54
+22079 0.46 0.3
+22081 0.66 0.48
+22083 0.68 0.44
+22085 0.66 0.24
+22087 0.69 0.07
+22089 0.48 0.27
+22091 0.53 0.56
+22093 0.42 0.49
+22095 0.44 0.52
+22097 0.5 0.43
+22099 0.49 0.37
+22101 0.41 0.31
+22103 0.55 0.27
+22105 0.62 0.34
+22107 0.49 0.65
+22109 0.47 0.17
+22111 0.64 0.37
+22113 0.53 0.13
+22115 0.64 0.13
+22117 0.7 0.34
+22119 0.62 0.35
+22121 0.49 0.49
+22123 0.78 0.22
+22125 0.4 0.66
+22127 0.64 0.31
+28001 0.5 0.5
+28003 0.69 0.13
+28005 0.62 0.54
+28007 0.69 0.45
+28009 0.61 0.47
+28011 0.44 0.67
+28013 0.88 0.27
+28015 0.67 0.58
+28017 0.79 0.39
+28019 0.8 0.3
+28021 0.33 0.76
+28023 0.78 0.39
+28025 0.64 0.51
+28027 0.34 0.68
+28029 0.59 0.52
+28031 0.76 0.35
+28033 0.64 0.61
+28035 0.61 0.28
+28037 0.71 0.41
+28039 0.91 0.12
+28041 0.83 0.23
+28043 0.61 0.49
+28045 0.67 0.16
+28047 0.62 0.16
+28049 0.53 0.4
+28051 0.41 0.72
+28053 0.59 0.7
+28055 0.48 0.67
+28057 0.84 0.06
+28059 0.75 0.2
+28061 0.7 0.5
+28063 0.33 0.75
+28065 0.6 0.55
+28067 0.68 0.26
+28069 0.75 0.59
+28071 0.54 0.34
+28073 0.83 0.16
+28075 0.73 0.35
+28077 0.73 0.38
+28079 0.72 0.41
+28081 0.68 0.25
+28083 0.49 0.64
+28085 0.73 0.31
+28087 0.62 0.38
+28089 0.43 0.72
+28091 0.7 0.34
+28093 0.45 0.7
+28095 0.75 0.35
+28097 0.69 0.45
+28099 0.82 0.22
+28101 0.81 0.32
+28103 0.56 0.72
+28105 0.57 0.44
+28107 0.52 0.56
+28109 0.73 0.23
+28111 0.79 0.28
+28113 0.58 0.44
+28115 0.78 0.19
+28117 0.81 0.12
+28119 0.56 0.63
+28121 0.75 0.37
+28123 0.75 0.38
+28125 0.49 0.7
+28127 0.72 0.35
+28129 0.85 0.23
+28131 0.79 0.24
+28133 0.52 0.67
+28135 0.6 0.64
+28137 0.61 0.58
+28139 0.79 0.18
+28141 0.82 0.05
+28143 0.34 0.79
+28145 0.77 0.18
+28147 0.66 0.45
+28149 0.51 0.47
+28151 0.41 0.55
+28153 0.81 0.36
+28155 0.84 0.25
+28157 0.38 0.71
+28159 0.77 0.43
+28161 0.66 0.44
+28163 0.61 0.59
+37001 0.39 0.17
+37003 0.26 0.07
+37005 0.24 0.03
+37007 0.45 0.48
+37009 0.1 0.01
+37011 0.15 0.01
+37013 0.49 0.37
+37015 0.44 0.59
+37017 0.46 0.41
+37019 0.38 0.35
+37021 0.25 0.11
+37023 0.26 0.07
+37025 0.26 0.16
+37027 0.25 0.07
+37029 0.55 0.42
+37031 0.27 0.12
+37033 0.47 0.48
+37035 0.22 0.09
+37037 0.31 0.31
+37039 0.13 0.02
+37041 0.46 0.47
+37043 0.12 0.01
+37045 0.43 0.22
+37047 0.45 0.33
+37049 0.47 0.28
+37051 0.33 0.25
+37053 0.57 0.32
+37055 0.33 0.07
+37057 0.32 0.11
+37059 0.32 0.12
+37061 0.5 0.38
+37063 0.32 0.32
+37065 0.41 0.52
+37067 0.23 0.24
+37069 0.57 0.44
+37071 0.33 0.13
+37073 0.44 0.54
+37075 0.12 0.0001
+37077 0.48 0.44
+37079 0.57 0.5
+37081 0.23 0.21
+37083 0.47 0.54
+37085 0.42 0.27
+37087 0.25 0.02
+37089 0.24 0.05
+37091 0.33 0.59
+37093 0.34 0.49
+37095 0.42 0.42
+37097 0.37 0.18
+37099 0.14 0.02
+37101 0.45 0.22
+37103 0.53 0.47
+37105 0.42 0.22
+37107 0.51 0.4
+37109 0.24 0.12
+37113 0.18 0.02
+37115 0.16 0.01
+37117 0.47 0.5
+37111 0.29 0.05
+37119 0.19 0.25
+37121 0.12 0.0001
+37123 0.29 0.25
+37125 0.27 0.26
+37127 0.48 0.4
+37129 0.34 0.28
+37131 0.38 0.64
+37133 0.45 0.13
+37135 0.21 0.24
+37137 0.42 0.37
+37139 0.47 0.4
+37141 0.48 0.48
+37143 0.51 0.47
+37145 0.46 0.35
+37147 0.41 0.44
+37149 0.27 0.13
+37151 0.27 0.08
+37153 0.43 0.3
+37155 0.34 0.3
+37157 0.39 0.21
+37159 0.28 0.17
+37161 0.27 0.12
+37163 0.28 0.36
+37165 0.34 0.41
+37167 0.26 0.11
+37169 0.32 0.1
+37171 0.22 0.06
+37173 0.16 0.01
+37175 0.27 0.05
+37177 0.32 0.44
+37179 0.35 0.21
+37181 0.46 0.44
+37183 0.26 0.26
+37185 0.43 0.63
+37187 0.39 0.45
+37189 0.12 0.01
+37191 0.44 0.37
+37193 0.15 0.06
+37195 0.49 0.4
+37197 0.25 0.05
+37199 0.14 0.01
+45001 0.55 0.32
+45003 0.32 0.26
+45005 0.24 0.63
+45007 0.53 0.2
+45009 0.34 0.56
+45011 0.4 0.43
+45013 0.18 0.38
+45015 0.35 0.5
+45017 0.32 0.67
+45019 0.24 0.36
+45021 0.54 0.21
+45023 0.33 0.4
+45025 0.43 0.37
+45027 0.27 0.68
+45029 0.33 0.51
+45031 0.37 0.44
+45033 0.32 0.46
+45035 0.33 0.49
+45037 0.26 0.58
+45039 0.22 0.6
+45041 0.31 0.43
+45043 0.26 0.52
+45045 0.25 0.18
+45047 0.41 0.3
+45049 0.28 0.54
+45051 0.46 0.27
+45053 0.35 0.62
+45055 0.37 0.4
+45057 0.38 0.27
+45059 0.35 0.3
+45061 0.39 0.66
+45063 0.35 0.17
+45067 0.22 0.55
+45069 0.33 0.48
+45065 0.34 0.62
+45071 0.35 0.35
+45073 0.51 0.11
+45075 0.34 0.6
+45077 0.33 0.1
+45079 0.14 0.32
+45081 0.44 0.37
+45083 0.37 0.22
+45085 0.29 0.47
+45087 0.46 0.3
+45089 0.25 0.66
+45091 0.35 0.28
+47001 0.2 0.03
+47003 0.49 0.12
+47005 0.47 0.03
+47007 0.23 0.06
+47009 0.2 0.05
+47011 0.3 0.05
+47013 0.18 0.01
+47015 0.48 0.02
+47017 0.37 0.14
+47019 0.21 0.01
+47021 0.63 0.07
+47023 0.47 0.13
+47025 0.15 0.02
+47027 0.23 0.02
+47029 0.15 0.03
+47031 0.43 0.04
+47033 0.64 0.24
+47035 0.24 0.0001
+47037 0.35 0.19
+47041 0.39 0.03
+47039 0.4 0.06
+47043 0.51 0.07
+47045 0.55 0.15
+47047 0.46 0.69
+47049 0.23 0.0001
+47051 0.54 0.09
+47053 0.47 0.22
+47055 0.53 0.18
+47057 0.14 0.01
+47059 0.2 0.03
+47061 0.46 0.0001
+47063 0.2 0.06
+47065 0.38 0.2
+47067 0.12 0.01
+47069 0.5 0.39
+47071 0.36 0.06
+47073 0.18 0.03
+47075 0.49 0.61
+47077 0.3 0.11
+47079 0.4 0.15
+47081 0.56 0.05
+47083 0.52 0.08
+47085 0.48 0.05
+47087 0.34 0.0001
+47089 0.15 0.04
+47091 0.1 0.01
+47093 0.2 0.09
+47095 0.52 0.23
+47097 0.53 0.38
+47099 0.38 0.02
+47101 0.39 0.02
+47103 0.58 0.13
+47105 0.25 0.02
+47111 0.28 0.01
+47113 0.45 0.34
+47115 0.43 0.06
+47117 0.55 0.12
+47119 0.56 0.2
+47107 0.22 0.05
+47109 0.4 0.07
+47121 0.27 0.05
+47123 0.14 0.04
+47125 0.39 0.19
+47127 0.6 0.09
+47129 0.27 0.02
+47131 0.5 0.12
+47133 0.29 0.01
+47135 0.39 0.04
+47137 0.13 0.0001
+47139 0.19 0.0001
+47141 0.3 0.02
+47143 0.37 0.04
+47145 0.3 0.04
+47147 0.49 0.18
+47149 0.46 0.14
+47151 0.18 0.0001
+47153 0.45 0.0001
+47155 0.14 0.01
+47157 0.33 0.36
+47159 0.42 0.05
+47161 0.42 0.03
+47163 0.25 0.02
+47165 0.46 0.11
+47167 0.59 0.39
+47169 0.41 0.17
+47171 0.17 0.0001
+47173 0.15 0.0001
+47175 0.45 0.01
+47177 0.49 0.05
+47179 0.22 0.04
+47181 0.29 0.02
+47183 0.48 0.07
+47185 0.37 0.03
+47187 0.5 0.2
+47189 0.5 0.15
+48001 0.34 0.3
+48003 0.36 0.02
+48005 0.38 0.18
+48007 0.15 0.04
+48009 0.18 0.0001
+48011 0.22 0.0001
+48013 0.15 0.01
+48015 0.25 0.21
+48017 0.22 0.04
+48019 0.24 0.01
+48021 0.19 0.31
+48023 0.2 0.04
+48025 0.11 0.02
+48027 0.17 0.11
+48029 0.09 0.07
+48031 0.15 0.03
+48033 0.29 0.0001
+48035 0.19 0.03
+48037 0.37 0.24
+48039 0.27 0.12
+48041 0.16 0.21
+48043 0.16 0.01
+48045 0.19 0.05
+48047 0.06 0.0001
+48049 0.19 0.03
+48051 0.21 0.31
+48053 0.16 0.02
+48055 0.16 0.15
+48057 0.2 0.05
+48059 0.24 0.0001
+48061 0.07 0.01
+48063 0.37 0.38
+48065 0.21 0.0001
+48067 0.39 0.3
+48069 0.22 0.04
+48071 0.37 0.22
+48073 0.39 0.26
+48075 0.23 0.06
+48077 0.21 0.01
+48079 0.28 0.05
+48081 0.18 0.0001
+48083 0.28 0.03
+48085 0.24 0.11
+48087 0.25 0.09
+48089 0.21 0.25
+48091 0.11 0.02
+48093 0.17 0.0001
+48095 0.18 0.0001
+48097 0.18 0.04
+48099 0.2 0.05
+48101 0.21 0.08
+48103 0.42 0.05
+48105 0.21 0.03
+48107 0.14 0.08
+48109 0.19 0.0001
+48111 0.21 0.01
+48113 0.15 0.14
+48115 0.2 0.06
+48123 0.15 0.14
+48117 0.15 0.02
+48119 0.25 0.14
+48121 0.17 0.06
+48125 0.19 0.05
+48127 0.11 0.01
+48129 0.16 0.05
+48131 0.03 0.0001
+48133 0.16 0.02
+48135 0.35 0.05
+48137 0.13 0.0001
+48141 0.08 0.03
+48139 0.24 0.24
+48143 0.15 0.01
+48145 0.24 0.33
+48147 0.23 0.1
+48149 0.27 0.14
+48151 0.11 0.05
+48153 0.23 0.07
+48155 0.17 0.09
+48157 0.21 0.2
+48159 0.25 0.08
+48161 0.26 0.39
+48163 0.13 0.01
+48165 0.29 0.03
+48167 0.2 0.21
+48169 0.23 0.05
+48171 0.11 0.0001
+48173 0.38 0.01
+48175 0.1 0.12
+48177 0.22 0.18
+48179 0.22 0.03
+48181 0.2 0.09
+48183 0.35 0.23
+48185 0.28 0.38
+48187 0.14 0.11
+48189 0.22 0.05
+48191 0.2 0.13
+48193 0.16 0.0001
+48195 0.2 0.0001
+48197 0.21 0.12
+48199 0.45 0.16
+48201 0.18 0.2
+48203 0.38 0.43
+48205 0.23 0.0001
+48207 0.19 0.06
+48209 0.1 0.06
+48211 0.15 0.0001
+48213 0.31 0.21
+48215 0.07 0.0001
+48217 0.25 0.16
+48219 0.24 0.06
+48221 0.19 0.01
+48223 0.3 0.13
+48225 0.33 0.38
+48227 0.27 0.04
+48229 0.19 0.0001
+48231 0.27 0.16
+48233 0.29 0.02
+48235 0.19 0.01
+48237 0.2 0.01
+48239 0.27 0.12
+48241 0.4 0.25
+48243 0.13 0.0001
+48245 0.28 0.23
+48247 0.03 0.0001
+48249 0.09 0.01
+48251 0.22 0.05
+48253 0.19 0.06
+48255 0.16 0.03
+48257 0.29 0.3
+48259 0.15 0.01
+48261 0.04 0.0001
+48263 0.3 0.03
+48265 0.16 0.04
+48267 0.19 0.0001
+48269 0.32 0.07
+48271 0.11 0.08
+48273 0.08 0.03
+48275 0.15 0.07
+48283 0.1 0.0001
+48277 0.27 0.19
+48279 0.23 0.08
+48281 0.16 0.03
+48285 0.27 0.1
+48287 0.21 0.24
+48289 0.29 0.38
+48291 0.35 0.23
+48293 0.25 0.28
+48295 0.12 0.0001
+48297 0.21 0.0001
+48299 0.16 0.01
+48301 0.49 0.04
+48303 0.18 0.08
+48305 0.19 0.06
+48313 0.32 0.33
+48315 0.34 0.52
+48317 0.43 0.04
+48319 0.11 0.01
+48321 0.21 0.21
+48323 0.07 0.0001
+48307 0.14 0.04
+48309 0.18 0.16
+48311 0.23 0.0001
+48325 0.14 0.01
+48327 0.12 0.01
+48329 0.24 0.09
+48331 0.24 0.18
+48333 0.18 0.0001
+48335 0.17 0.07
+48337 0.18 0.0001
+48339 0.37 0.23
+48341 0.25 0.0001
+48343 0.32 0.27
+48345 0.27 0.09
+48347 0.32 0.27
+48349 0.22 0.25
+48351 0.43 0.33
+48353 0.2 0.04
+48355 0.11 0.05
+48357 0.16 0.0001
+48359 0.29 0.0001
+48361 0.42 0.1
+48363 0.17 0.04
+48365 0.45 0.31
+48367 0.21 0.02
+48369 0.24 0.03
+48371 0.22 0.01
+48373 0.37 0.29
+48375 0.2 0.07
+48377 0.08 0.0001
+48379 0.25 0.1
+48381 0.17 0.0001
+48383 0.26 0.07
+48385 0.19 0.0001
+48387 0.3 0.24
+48389 0.21 0.04
+48391 0.15 0.09
+48393 0.22 0.0001
+48395 0.2 0.41
+48397 0.29 0.24
+48399 0.17 0.03
+48401 0.38 0.29
+48403 0.38 0.26
+48405 0.46 0.39
+48407 0.3 0.52
+48409 0.15 0.02
+48411 0.22 0.01
+48413 0.19 0.03
+48415 0.22 0.03
+48417 0.18 0.03
+48419 0.47 0.26
+48421 0.27 0.0001
+48423 0.31 0.27
+48425 0.23 0.0001
+48427 0.01 0.0001
+48429 0.17 0.04
+48431 0.14 0.01
+48433 0.24 0.04
+48435 0.16 0.01
+48437 0.18 0.04
+48439 0.15 0.11
+48441 0.17 0.05
+48443 0.25 0.0001
+48445 0.19 0.04
+48447 0.12 0.0001
+48449 0.33 0.18
+48451 0.16 0.05
+48453 0.1 0.13
+48455 0.36 0.27
+48457 0.39 0.21
+48459 0.42 0.26
+48461 0.29 0.04
+48463 0.16 0.01
+48465 0.1 0.03
+48467 0.31 0.08
+48469 0.16 0.09
+48471 0.25 0.33
+48473 0.23 0.54
+48475 0.32 0.03
+48477 0.12 0.32
+48479 0.03 0.0001
+48481 0.19 0.21
+48483 0.22 0.04
+48485 0.18 0.07
+48487 0.25 0.09
+48489 0.13 0.01
+48491 0.16 0.14
+48493 0.13 0.02
+48495 0.35 0.03
+48497 0.19 0.01
+48499 0.32 0.16
+48501 0.29 0.01
+48503 0.19 0.02
+48505 0.04 0.0001
+48507 0.1 0.01
diff --git a/eco.pdf b/eco.pdf
deleted file mode 100644
index 56b09ce..0000000
Binary files a/eco.pdf and /dev/null differ
diff --git a/man/census.Rd b/man/census.Rd
index 2394606..1c50f73 100644
--- a/man/census.Rd
+++ b/man/census.Rd
@@ -10,7 +10,7 @@
   This data set contains the proportion of the residents who are black,
   the proportion of those who can read, the total population as well as
   the actual black literacy rate and white literacy rate for 1040
-  counties in the US. The dataset was originally analyzed by Robison
+  counties in the US. The dataset was originally analyzed by Robinson
   (1950) at the state level. King (1997) recoded the 1910 census at
   county level. The data set only includes those who are older than 10
   years of age. 
diff --git a/man/eco.Rd b/man/eco.Rd
index c47c559..4a0650a 100644
--- a/man/eco.Rd
+++ b/man/eco.Rd
@@ -10,9 +10,9 @@
   Normal/Inverse-Wishart prior) for ecological inference in \eqn{2
   \times 2} tables via Markov chain Monte Carlo. It gives the in-sample
   predictions as well as the estimates of the model parameters. The
-  model and algorithm are described in Imai and Lu (2004). The
+  model and algorithm are described in Imai, Lu and Strauss (2006). The
   contextual effect can also be modeled by following the strategy
-  described in Imai and Lu (2005).
+  described in Imai, Lu, and Strauss (2006).
 }
 
 \usage{
@@ -59,9 +59,9 @@ eco(formula, data = parent.frame(), N = NULL, supplement = NULL,
   \item{nu0}{A positive integer representing the prior degrees of
   freedom of the variance matrix \eqn{\Sigma}. the default is \code{4}.
   } 
-  \item{S0}{A postive scalar or a positive definite matrix that specifies 
+  \item{S0}{A positive scalar or a positive definite matrix that specifies 
   the prior scale matrix for the variance matrix \eqn{\Sigma}. If it is 
-  a scalar, then the prior scale matrix will be a digonal matrix with 
+  a scalar, then the prior scale matrix will be a diagonal matrix with 
   the same dimensions as \eqn{\Sigma} and the diagonal elements all take value 
   of \code{S0}, otherwise \code{S0} needs to have same dimensions as 
   \eqn{\Sigma}. When \code{context=TRUE}, \eqn{\Sigma} is a 
@@ -79,7 +79,7 @@ eco(formula, data = parent.frame(), N = NULL, supplement = NULL,
   \item{Sigma.start}{A scalar or a positive definite matrix
    that specified the starting value of the variance matrix 
    \eqn{\Sigma}. If it is  a scalar, then the prior scale 
-  matrix will be a digonal matrix with the same dimensions 
+  matrix will be a diagonal matrix with the same dimensions 
   as \eqn{\Sigma} and the diagonal elements all take value 
   of \code{S0}, otherwise \code{S0} needs to have same dimensions as 
   \eqn{\Sigma}. When \code{context=TRUE}, \eqn{\Sigma} is a 
@@ -135,31 +135,31 @@ eco(formula, data = parent.frame(), N = NULL, supplement = NULL,
 data(reg)
 
 ## NOTE: convergence has not been properly assessed for the following
-## examples. See Imai and Lu (2004, 2005) for more complete analyses.
+## examples. See Imai, Lu and Strauss (2006) for more complete analyses.
 
 ## fit the parametric model with the default prior specification
-res <- eco(Y ~ X, data = reg, verbose = TRUE)
+\dontrun{res <- eco(Y ~ X, data = reg, verbose = TRUE)}
 ## summarize the results
-summary(res)
+\dontrun{summary(res)}
 
 ## obtain out-of-sample prediction
-out <- predict(res, verbose = TRUE)
+\dontrun{out <- predict(res, verbose = TRUE)}
 ## summarize the results
-summary(out)
+\dontrun{summary(out)}
 
 ## load the Robinson's census data
 data(census)
 
 ## fit the parametric model with contextual effects and N 
 ## using the default prior specification
-res1 <- eco(Y ~ X, N = N, context = TRUE, data = census, verbose = TRUE)
+\dontrun{res1 <- eco(Y ~ X, N = N, context = TRUE, data = census, verbose = TRUE)}
 ## summarize the results
-summary(res1)
+\dontrun{summary(res1)}
 
 ## obtain out-of-sample prediction
-out1 <- predict(res1, verbose = TRUE)
+\dontrun{out1 <- predict(res1, verbose = TRUE)}
 ## summarize the results
-summary(out1)
+\dontrun{summary(out1)}
 
 }
 
@@ -190,23 +190,18 @@ summary(out1)
 }
 
 \author{
-  Kosuke Imai, Department of Politics, Princeton University
+  Kosuke Imai, Department of Politics, Princeton University,
   \email{kimai at Princeton.Edu}, \url{http://imai.princeton.edu};
-  Ying Lu, Institute for Quantitative Social Sciences, 
-  Harvard University \email{ylu at Latte.Harvard.Edu}}
-
-\references{
-  Imai, Kosuke and Ying Lu. (2004) \dQuote{ Parametric and Nonparametric
-  Bayesian Models for Ecological Inference in \eqn{2 \times 2}
-  Tables.} Working Paper, Princeton University,
-  available at
-  \url{http://imai.princeton.edu/research/einonpar.html}
-
-  Imai, Kosuke and Ying Lu. (2005) \dQuote{An Incomplete Data Approach
-  to Ecological Inference.} Working Paper, Princeton University,
-  available at
-  \url{http://imai.princeton.edu/research/coarse.html} }
+  Ying Lu, Department of Sociology, University of Colorado at Boulder, 
+  \email{ying.lu at Colorado.Edu}
+  }
+
+  \references{
+  Imai, Kosuke, Ying Lu and Aaron Strauss. (2006) \dQuote{Bayesian and
+    Likelihood Inference for 2 x 2 Ecological Tables: An Incomplete Data
+    Approach.} Technical Report, Princeton University, available at 
+  \url{http://imai.princeton.edu/research/eiall.html}
 }
 
-\seealso{\code{ecoNP}, \code{predict.eco}, \code{summary.eco}}
+\seealso{\code{ecoML}, \code{ecoNP}, \code{predict.eco}, \code{summary.eco}}
 \keyword{models}
diff --git a/man/ecoBD.Rd b/man/ecoBD.Rd
index 67ee96a..4321ec7 100644
--- a/man/ecoBD.Rd
+++ b/man/ecoBD.Rd
@@ -83,9 +83,9 @@ ecoBD(formula, data = parent.frame(), N = NULL)
 data(reg)
 
 ## calculate the bounds
-res <- ecoBD(Y ~ X, N = N, data = reg)
+\dontrun{res <- ecoBD(Y ~ X, N = N, data = reg)}
 ## print the results
-print(res)
+\dontrun{print(res)}
 }
 
 \value{
diff --git a/man/ecoML.Rd b/man/ecoML.Rd
new file mode 100644
index 0000000..adb7f7d
--- /dev/null
+++ b/man/ecoML.Rd
@@ -0,0 +1,238 @@
+\name{ecoML}
+
+\alias{ecoML}
+
+\title{Fitting Parametric Models and Quantifying Missing Information
+ for Ecological Inference in 2x2 Tables}
+
+\description{
+  \code{ecoML} is used to fit parametric models for ecological 
+  inference in \eqn{2 \times 2} tables via Expectation Maximization (EM)
+  algorithms. It gives the point estimates of the parameters for models
+  based on different assumptions. The standard errors of the point
+  estimates are also computed via Supplemented EM algorithms. Moreover,
+  \code{ecoML} quantifies the amount of missing information associated
+  with each parameter and allows researchers to examine the impact of
+  missing information on parameter estimation in ecological
+  inference. The models and algorithms are described in Imai,
+  Lu and Strauss (2006).
+}
+
+\usage{
+   ecoML(formula, data = parent.frame(), N = NULL, supplement = NULL, 
+         theta.start = c(0,0,1,1,0), fix.rho = FALSE,
+         context = FALSE, sem = TRUE, epsilon = 10^(-10), 
+         maxit = 1000, loglik = TRUE, hyptest = FALSE, verbose = TRUE)
+}
+
+\arguments{
+  \item{formula}{A symbolic description of the model to be fit,
+    specifying the column and row margins of \eqn{2 \times
+      2} ecological tables. \code{Y ~ X} specifies \code{Y} as the
+    column margin and \code{X} as the row margin. Details and specific
+    examples are given below.
+  } 
+  \item{data}{An optional data frame in which to interpret the variables
+    in \code{formula}. The default is the environment in which
+    \code{ecoML} is called. 
+  }
+  \item{N}{An optional variable representing the size of the unit; e.g.,
+    the total number of voters.
+  }
+  \item{supplement}{An optional matrix of supplemental data. The matrix
+    has two columns, which contain additional individual-level data such
+    as survey data for \eqn{W_1} and \eqn{W_2}, respectively.  If
+    \code{NULL}, no additional individual-level data are included in the
+    model. The default is \code{NULL}.
+  }
+  \item{fix.rho}{Logical. If \code{TRUE}, the correlation
+    (when \code{context=FALSE}) or the partial correlation (when
+    \code{context=TRUE}) between \eqn{W_1} and \eqn{W_2} 
+    is held fixed throughout the estimation. For details, see 
+    Imai, Lu and Strauss (2006). The default is \code{FALSE}.
+  } 
+  \item{context}{Logical. If \code{TRUE}, the contextual effect is also
+    modeled. See Imai, Lu and Strauss (2006) for details. The default is
+    \code{FALSE}. 
+  }
+  \item{sem}{Logical. If \code{TRUE}, the standard errors of parameter
+   estimates are estimated via SEM algorithm. The default is
+    \code{TRUE}. 
+  }
+  \item{theta.start}{A numeric vector that specifies the starting values
+    for the mean, variance, and covariance. When \code{context = FALSE},
+    the elements of \code{theta.start} correspond to (\eqn{E(W_1)},
+    \eqn{E(W_2)}, \eqn{var(W_1)}, \eqn{var(W_2)},
+    \eqn{cor(W_1,W_2)}). When \code{context = TRUE}, the
+    elements of \code{theta.start} correspond to (\eqn{E(W_1)},
+    \eqn{E(W_2)}, \eqn{var(W_1)}, \eqn{var(W_2)}, \eqn{corr(W_1, X)},
+    \eqn{corr(W_2, X)}, \eqn{corr(W_1,W_2)}). Moreover, when
+    \code{fix.rho=TRUE}, \eqn{corr(W_1,W_2)} is set to be the
+    correlation between \eqn{W_1} and \eqn{W_2} when \code{context =
+      FALSE}, and the partial correlation between \eqn{W_1} and
+    \eqn{W_2} given \eqn{X} when \code{context = TRUE}. The default is
+    \code{c(0,0,1,1,0)}. 
+  }
+  \item{epsilon}{A positive number that specifies the convergence criterion
+    for EM algorithm. The square root of \code{epsilon} is the convergence 
+    criterion for SEM algorithm. The default is \code{10^(-10)}. 
+  } 
+  \item{maxit}{A positive integer specifying the maximum number of
+    iterations allowed if the convergence criterion is not yet met. The default is \code{1000}.
+  } 
+  \item{loglik}{Logical. If \code{TRUE}, the value of the log-likelihood
+    function at each iteration of EM is saved. The default is
+    \code{TRUE}.
+  } 
+  \item{hyptest}{Logical. If \code{TRUE}, the model is estimated under the null
+    hypothesis that the means of \eqn{W_1} and \eqn{W_2} are equal. 
+    The default is \code{FALSE}. 
+  } 
+  \item{verbose}{Logical. If \code{TRUE}, the progress of the EM and SEM
+    algorithms is printed to the screen. The default is \code{FALSE}.
+  }
+}
+
+\details{
+  When \code{sem} is \code{TRUE}, \code{ecoML} computes the observed-data 
+  information matrix for the parameters of interest based on the Supplemented 
+  EM (SEM) algorithm. The inverse of the observed-data information matrix can 
+  be used to estimate the variance-covariance matrix of the parameters 
+  estimated by the EM algorithm. It also computes the expected complete-data 
+  information matrix. Based on these two measures, one can further calculate 
+  the fraction of missing information associated with each parameter. See
+  Imai, Lu and Strauss (2006) for more details about the fraction of missing
+  information.
+ 
+  Moreover, when \code{hyptest=TRUE}, \code{ecoML} estimates the 
+  parametric model under the null hypothesis that \eqn{\mu_1 = \mu_2}. One 
+  can then construct a likelihood ratio test to assess the hypothesis of 
+  equal means. The associated fraction of missing information for the test 
+  statistic can also be calculated. See Imai, Lu and Strauss (2006) for 
+  details.
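+
+  Roughly speaking, for a single scalar parameter the fraction of missing
+  information can be written as
+  \deqn{F_{mis} = 1 - I_{obs}/I_{com},}
+  where \eqn{I_{obs}} and \eqn{I_{com}} denote the observed-data and the
+  expected complete-data information, respectively; the matrices returned
+  by \code{ecoML} (see \code{Iobs}, \code{Icom}, and \code{Fmis} below)
+  generalize this scalar quantity.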
+}
+\examples{
+
+## load the census data
+data(census)
+
+## NOTE: convergence has not been properly assessed for the following
+## examples. See Imai, Lu and Strauss (2006) for more complete analyses.
+## In the first example below, in the interest of time, only part of the
+## data set is analyzed and the convergence requirement is less stringent
+## than the default setting.
+
+## In the second example, the program is arbitrarily halted 100 iterations
+## into the simulation, before convergence.
+
+## load the Robinson's census data
+data(census)
+
+## fit the parametric model with the default model specifications
+\dontrun{res <- ecoML(Y ~ X, data = census[1:100,],epsilon=10^(-6), verbose = TRUE)}
+## summarize the results
+\dontrun{summary(res)}
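+
+## as an illustrative sketch, inspect the estimated fraction of missing
+## information for each parameter (the Fmis element described under Value)
+\dontrun{res$Fmis}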
+
+## obtain out-of-sample prediction
+\dontrun{out <- predict(res, verbose = TRUE)}
+## summarize the results
+\dontrun{summary(out)}
+
+## fit the parametric model with some individual 
+## level data using the default prior specification
+surv <- 1:600
+\dontrun{res1 <- ecoML(Y ~ X, context = TRUE, data = census[-surv,], 
+                   supplement = census[surv,c(4:5,1)], maxit=100, verbose = TRUE)}
+## summarize the results
+\dontrun{summary(res1)}
+
+}
+
+\value{
+  An object of class \code{ecoML} containing the following elements:
+  \item{call}{The matched call.}
+  \item{X}{The row margin, \eqn{X}.}
+  \item{Y}{The column margin, \eqn{Y}.}
+  \item{N}{The size of each table, \eqn{N}.}
+  \item{context}{The assumption under which the model is estimated. If 
+    \code{context = FALSE}, CAR assumption is adopted and no
+    contextual effect is modeled. If \code{context = TRUE}, NCAR
+    assumption is adopted, and contextual effect is modeled.}
+  \item{sem}{Whether SEM algorithm is used to estimate the standard
+    errors and observed information matrix for the parameter estimates.}
+  \item{fix.rho}{Whether the correlation or the partial correlation between
+    \eqn{W_1} and \eqn{W_2} is fixed in the estimation.}
+  \item{r12}{If \code{fix.rho = TRUE}, the value that \eqn{corr(W_1,
+      W_2)} is fixed to.}
+  \item{epsilon}{The precision criterion for EM convergence. 
+    \eqn{\sqrt{\epsilon}} is the precision criterion for SEM convergence.}
+  \item{theta.sem}{The ML estimates of \eqn{E(W_1)},\eqn{E(W_2)},
+    \eqn{var(W_1)},\eqn{var(W_2)}, and \eqn{cov(W_1,W_2)}. If
+    \code{context = TRUE}, \eqn{E(X)},\eqn{cov(W_1,X)}, 
+    \eqn{cov(W_2,X)} are also reported.}
+  \item{W}{In-sample estimation of \eqn{W_1} and \eqn{W_2}.}
+  \item{suff.stat}{The sufficient statistics for \code{theta.em}.}
+  \item{iters.em}{Number of EM iterations before convergence is achieved.}
+  \item{iters.sem}{Number of SEM iterations before convergence is achieved.}
+  \item{loglik}{The log-likelihood of the model when convergence is
+    achieved.}
+  \item{loglik.log.em}{A vector saving the value of the log-likelihood
+    function at each iteration of the EM algorithm.}
+  \item{mu.log.em}{A matrix saving the mean estimation at each 
+    iteration of EM.}
+  \item{Sigma.log.em}{A matrix saving the variance-covariance estimation 
+    at each iteration of EM.} 
+  Moreover, when \code{sem=TRUE}, \code{ecoML} also outputs the following
+  values:
+  \item{DM}{The matrix characterizing the rates of convergence of the EM 
+    algorithm. Such information is also used to calculate the observed-data
+    information matrix.}
+  \item{Icom}{The (expected) complete data information matrix estimated 
+    via SEM algorithm. When \code{context=FALSE, fix.rho=TRUE}, 
+    \code{Icom} is 4 by 4. When \code{context=FALSE, fix.rho=FALSE}, 
+    \code{Icom} is 5 by 5. When \code{context=TRUE}, \code{Icom} 
+    is 9 by 9.}
+  \item{Iobs}{The observed information matrix. The dimension of 
+    \code{Iobs} is the same as that of \code{Icom}.}
+  \item{Imiss}{The difference between \code{Icom} and \code{Iobs}. 
+    The dimension of \code{Imiss} is the same as that of \code{Icom}.}
+  \item{Vobs}{The (symmetrized) variance-covariance matrix of the ML parameter
+    estimates. The dimension of \code{Vobs} is the same as that of 
+    \code{Icom}.}
+  \item{Iobs}{The (expected) complete-data variance-covariance matrix. 
+    The dimension of \code{Iobs} is the same as that of \code{Icom}.}
+  \item{Vobs.original}{The estimated variance-covariance matrix of the 
+    ML parameter estimates. The dimension of \code{Vobs.original} is the 
+    same as that of \code{Icom}.}
+  \item{Fmis}{The fraction of missing information associated with each 
+    parameter estimation. }
+  \item{VFmis}{The proportion of increased variance associated with each 
+    parameter estimation due to observed data. }
+  \item{Ieigen}{The largest eigen value of \code{Imiss}.}
+  \item{DM}{The rate-of-convergence matrix of the SEM algorithm.}
+  \item{Icom.trans}{The complete-data information matrix for the Fisher 
+    transformed parameters.}
+  \item{Iobs.trans}{The observed-data information matrix for the Fisher 
+    transformed parameters.}
+  \item{Fmis.trans}{The fractions of missing information associated with 
+    the Fisher transformed parameters.}
+}
+
+\author{
+  Kosuke Imai, Department of Politics, Princeton University,
+  \email{kimai at Princeton.Edu}, \url{http://imai.princeton.edu};
+  Ying Lu, Department of Sociology, University of Colorado at Boulder, 
+  \email{ying.lu at Colorado.Edu};
+  Aaron Strauss, Department of Politics, Princeton University,
+ \email{abstraus at Princeton.Edu}.
+}
+
+\references{
+  Imai, Kosuke, Ying Lu and Aaron Strauss. (2006) \dQuote{Bayesian and
+    Likelihood Inference for 2 x 2 Ecological Tables: An Incomplete Data
+    Approach.} Technical Report, Princeton University, available at 
+  \url{http://imai.princeton.edu/research/eiall.html}
+}
+
+\seealso{\code{eco}, \code{ecoNP}, \code{summary.ecoML}}
+\keyword{models}
diff --git a/man/ecoNP.Rd b/man/ecoNP.Rd
index b730821..9fe3680 100644
--- a/man/ecoNP.Rd
+++ b/man/ecoNP.Rd
@@ -2,7 +2,7 @@
 
 \alias{ecoNP}
 
-\title{Fitting the Nonparametric Bayesian Model of Ecological Inference
+\title{Fitting the Nonparametric Bayesian Models of Ecological Inference
   in 2x2 Tables}  
 
 \description{
@@ -10,9 +10,8 @@
   a Dirichlet process prior) for ecological inference in \eqn{2 \times
   2} tables via Markov chain Monte Carlo. It gives the in-sample
   predictions as well as out-of-sample predictions for population
-  inference.  The model and algorithm are described in Imai and Lu
-  (2004). The contextual effect can also be modeled by following the
-  strategy described in Imai and Lu (2005).
+  inference.  The models and algorithms are described in Imai, Lu and
+  Strauss (2006). 
 }
 
 \usage{
@@ -59,9 +58,9 @@ ecoNP(formula, data = parent.frame(), N = NULL, supplement = NULL,
   \item{nu0}{A positive integer representing the prior degrees of
   freedom of the variance matrix \eqn{\Sigma}. the default is \code{4}.
   } 
-  \item{S0}{A postive scalar or a positive definite matrix that specifies 
+  \item{S0}{A positive scalar or a positive definite matrix that specifies 
   the prior scale matrix for the variance matrix \eqn{\Sigma}. If it is 
-  a scalar, then the prior scale matrix will be a digonal matrix with 
+  a scalar, then the prior scale matrix will be a diagonal matrix with 
   the same dimensions as \eqn{\Sigma} and the diagonal elements all take value 
   of \code{S0}, otherwise \code{S0} needs to have same dimensions as 
   \eqn{\Sigma}. When \code{context=TRUE}, \eqn{\Sigma} is a 
@@ -105,27 +104,11 @@ ecoNP(formula, data = parent.frame(), N = NULL, supplement = NULL,
     Markov chain; i.e. the number of Gibbs draws between the recorded
     values that are skipped. The default is \code{0}.
   } 
-  \item{verbose}{Logical. If \code{TRUE}, the progress of the gibbs 
+  \item{verbose}{Logical. If \code{TRUE}, the progress of the Gibbs 
    sampler is printed to the screen. The default is \code{FALSE}.
   }
 }
 
-\details{
-  An example of \eqn{2 \times 2} ecological table for racial voting is
-  given below: 
-  \tabular{lccc}{
-    \tab black voters  \tab white voters \tab \cr
-    Voted \tab \eqn{W_{1i}}  \tab \eqn{W_{2i}} \tab \eqn{Y_i} \cr
-    Not voted \tab \eqn{1-W_{1i}}  \tab \eqn{1-W_{2i}} \tab \eqn{1-Y_i}  \cr
-    \tab \eqn{X_i} \tab \eqn{1-X_i} \tab 
-  }
-  where \eqn{Y_i} and \eqn{X_i} represent the observed margins, and
-  \eqn{W_1} and \eqn{W_2} are unknown variables. All variables are
-  proportions and hence bounded between 0 and 1. For each \eqn{i}, the
-  following deterministic relationship holds,
-  \eqn{Y_i=X W_{1i}+(1-X_i)W_{2i}}.
-}
-
 \examples{
 
 ## load the registration data
@@ -133,7 +116,7 @@ data(reg)
 
 ## NOTE: We set the number of MCMC draws to be a very small number in
 ## the following examples; i.e., convergence has not been properly
-## assessed. See Imai and Lu (2004, 2005) for more complete examples.
+## assessed. See Imai, Lu and Strauss (2006) for more complete examples.
 
 ## fit the nonparametric model to give in-sample predictions
 ## store the parameters to make population inference later
@@ -157,14 +140,14 @@ data(census)
 
 ## fit the parametric model with contextual effects and N 
 ## using the default prior specification
-res1 <- ecoNP(Y ~ X, N = N, context = TRUE, param = TRUE, data = census,
-              n.draws = 25, verbose = TRUE)
+\dontrun{res1 <- ecoNP(Y ~ X, N = N, context = TRUE, param = TRUE, data = census,
+              n.draws = 25, verbose = TRUE)}
 ## summarize the results
-summary(res1)
+\dontrun{summary(res1)}
 
 ## out-of sample prediction 
-pres1 <- predict(res1)
-summary(pres1)
+\dontrun{pres1 <- predict(res1)}
+\dontrun{summary(pres1)}
 }
 
 \value{
@@ -201,22 +184,18 @@ summary(pres1)
 }
 
 \author{
-  Kosuke Imai, Department of Politics, Princeton University
-  \email{kimai at Princeton.Edu}, \url{http://www.princeton.edu/~kimai};
-  Ying Lu, Institute for Quantitative Social Sciences, 
-  Harvard University \email{ylu at Latte.Harvard.Edu}}
+  Kosuke Imai, Department of Politics, Princeton University,
+  \email{kimai at Princeton.Edu}, \url{http://imai.princeton.edu};
+  Ying Lu, Department of Sociology, University of Colorado at Boulder, 
+  \email{ying.lu at Colorado.Edu}
+}
 
 \references{
-  Imai, Kosuke and Ying Lu. (2004) \dQuote{ Parametric and Nonparametric
-  Bayesian Models for Ecological Inference in \eqn{2 \times 2}
-  Tables.} Proceedings of the American Statistical Association.
-\url{http://www.princeton.edu/~kimai/research/einonpar.html}
-
-Imai, Kosuke and Ying Lu. (2005) \dQuote{An Incomplete Data Approach
-  to Ecological Inference.} Working Paper, Princeton University,
-available at
-\url{http://www.princeton.edu/~kimai/research/einonpar.html} 
+  Imai, Kosuke, Ying Lu and Aaron Strauss. (2006) \dQuote{Bayesian and
+    Likelihood Inference for 2 x 2 Ecological Tables: An Incomplete Data
+    Approach.} Technical Report, Princeton University, available at 
+  \url{http://imai.princeton.edu/research/eiall.html}
 }
 
-\seealso{\code{eco}, \code{predict.eco}, \code{summary.ecoNP}}
+\seealso{\code{eco}, \code{ecoML}, \code{predict.eco}, \code{summary.ecoNP}}
 \keyword{models}
diff --git a/man/forgnlit30.Rd b/man/forgnlit30.Rd
new file mode 100644
index 0000000..668cac2
--- /dev/null
+++ b/man/forgnlit30.Rd
@@ -0,0 +1,37 @@
+\name{forgnlit30}
+
+\docType{data}
+
+\alias{forgnlit30}
+
+\title{Foreign-born literacy in 1930}
+
+\description{
+  This data set contains, on a state level, the proportion of
+  white residents ten years and older who are foreign born, and
+  the proportion of those residents who are illiterate.  Data come from
+  the 1930 census and were first analyzed by Robinson (1950).
+}
+
+\usage{data(forgnlit30)}
+
+\format{A data frame containing 5 variables and 48 observations
+ \tabular{lll}{
+    X \tab numeric \tab proportion of the white population at least 10
+     years of age that is foreign born \cr
+    Y \tab numeric \tab proportion of the white population at least 10
+     years of age that is illiterate \cr
+    W1 \tab numeric \tab proportion of the foreign-born white population
+     at least 10 years of age that is illiterate \cr
+    W2 \tab numeric \tab proportion of the native-born white population
+     at least 10 years of age that is illiterate \cr
+    ICPSR \tab numeric \tab the ICPSR state code
+  }
+}
+\references{
+ Robinson, W.S. (1950). ``Ecological Correlations and the Behavior
+ of Individuals.'' \emph{American Sociological Review}, vol. 15,
+ pp. 351-357.
+}
+
+\keyword{datasets}
diff --git a/man/forgnlit30c.Rd b/man/forgnlit30c.Rd
new file mode 100644
index 0000000..3478497
--- /dev/null
+++ b/man/forgnlit30c.Rd
@@ -0,0 +1,39 @@
+\name{forgnlit30c}
+
+\docType{data}
+
+\alias{forgnlit30c}
+
+\title{Foreign-born literacy in 1930, County Level}
+
+\description{
+  This data set contains, on a county level, the proportion of
+  white residents ten years and older who are foreign born, and
+  the proportion of those residents who are illiterate.  Data come from
+  the 1930 census and were first analyzed by Robinson (1950). Counties
+  with fewer than 100 foreign-born residents are dropped.
+}
+
+\usage{data(forgnlit30c)}
+
+\format{A data frame containing 6 variables and 1976 observations
+ \tabular{lll}{
+    X \tab numeric \tab proportion of the white population at least 10
+     years of age that is foreign born \cr
+    Y \tab numeric \tab proportion of the white population at least 10
+     years of age that is illiterate \cr
+    W1 \tab numeric \tab proportion of the foreign-born white population
+     at least 10 years of age that is illiterate \cr
+    W2 \tab numeric \tab proportion of the native-born white population
+     at least 10 years of age that is illiterate \cr
+    state \tab numeric \tab the ICPSR state code \cr
+    county \tab numeric \tab the ICPSR (within state) county code 
+  }
+}
+\references{
+ Robinson, W.S. (1950). ``Ecological Correlations and the Behavior
+ of Individuals.'' \emph{American Sociological Review}, vol. 15,
+ pp. 351-357.
+}
+
+\keyword{datasets}
diff --git a/man/housep88.Rd b/man/housep88.Rd
new file mode 100644
index 0000000..acacbe6
--- /dev/null
+++ b/man/housep88.Rd
@@ -0,0 +1,44 @@
+\name{housep88}
+
+\docType{data}
+
+\alias{housep88}
+
+\title{Electoral Results for the House and Presidential Races in 1988} 
+
+\description{
+  This data set contains, on a House district level, the percentage of the
+  vote for the Democratic House candidate, the percentage of the vote for
+  the Democratic presidential candidate (Dukakis), the number of voters who
+  voted for a major party candidate in the presidential race, and the ratio
+  of voters in the House race versus the number who cast a ballot for
+  President.  Eleven (11) uncontested races are not included.  The dataset
+  was compiled and analyzed by Burden and Kimball (1998). Complete data and
+  documentation are available as ICPSR study number 1140.
+}
+
+\usage{data(housep88)}
+
+\format{A data frame containing 5 variables and 424 observations 
+ \tabular{lll}{
+    X \tab numeric \tab proportion voting for the Democrat in the 
+        presidential race \cr
+    Y \tab numeric \tab proportion voting for the Democrat in the 
+        House race \cr
+    N \tab numeric \tab number of major party voters in the presidential
+        contest \cr
+    HPCT \tab numeric \tab House election turnout divided by presidential
+        election turnout (set to 1 if House turnout exceeds presidential
+         turnout) \cr
+    DIST \tab numeric \tab 4-digit ICPSR state and district code: first
+         2 digits for the state code, last two digits for the district
+         number (e.g., 2106=IL 6th)
+  }
+}
+\references{
+ Burden, Barry C. and David C. Kimball (1998). ``A New Approach To Ticket-
+  Splitting.'' \emph{The American Political Science Review}, vol. 92,
+  no. 3, pp. 533-544.
+}
+
+\keyword{datasets}
diff --git a/man/predict.eco.Rd b/man/predict.eco.Rd
index ed8e53d..9b0317d 100644
--- a/man/predict.eco.Rd
+++ b/man/predict.eco.Rd
@@ -13,8 +13,10 @@ Model for Ecological Inference in 2x2 Tables}
 }
 
 \usage{
-  \method{predict}{eco}(object, newdraw = NULL, subset = NULL, verbose = FALSE, ...)
-  \method{predict}{ecoX}(object, newdraw = NULL, subset = NULL, newdata = NULL, cond = FALSE, verbose = FALSE, ...)
+  \method{predict}{eco}(object, newdraw = NULL, subset = NULL,
+                   verbose = FALSE, ...)
+  \method{predict}{ecoX}(object, newdraw = NULL, subset = NULL,
+                   newdata = NULL, cond = FALSE, verbose = FALSE, ...)
 }
 
 \arguments{
@@ -70,9 +72,10 @@ Model for Ecological Inference in 2x2 Tables}
 \seealso{\code{eco}, \code{predict.ecoNP}}
 
 \author{
-  Kosuke Imai, Department of Politics, Princeton University
-  \email{kimai at Princeton.Edu}; Ying Lu, Institute for Quantitative
-  Social Sciences, Harvard University \email{ylu at Latte.Harvard.Edu}
-}
+  Kosuke Imai, Department of Politics, Princeton University,
+  \email{kimai at Princeton.Edu}, \url{http://imai.princeton.edu};
+  Ying Lu, Department of Sociology, University of Colorado at Boulder, 
+  \email{ying.lu at Colorado.Edu}
+  }
 
 \keyword{methods}
diff --git a/man/predict.ecoNP.Rd b/man/predict.ecoNP.Rd
index 9e2e4dd..0439112 100644
--- a/man/predict.ecoNP.Rd
+++ b/man/predict.ecoNP.Rd
@@ -13,8 +13,10 @@
 }
 
 \usage{
-  \method{predict}{ecoNP}(object, newdraw = NULL, subset = NULL, obs = NULL, verbose = FALSE, ...)
-  \method{predict}{ecoNPX}(object, newdraw = NULL, subset = NULL, obs = NULL, cond = FALSE, verbose = FALSE, ...)
+  \method{predict}{ecoNP}(object, newdraw = NULL, subset = NULL, obs = NULL,
+                   verbose = FALSE, ...)
+  \method{predict}{ecoNPX}(object, newdraw = NULL, subset = NULL, obs = NULL,
+                   cond = FALSE, verbose = FALSE, ...)
 }
 
 \arguments{
@@ -73,9 +75,10 @@
   \code{summary.ecoNP}}
 
 \author{
-  Kosuke Imai, Department of Politics, Princeton University
-  \email{kimai at Princeton.Edu}; Ying Lu, Institute for Quantitative
-  Social Sciences, Harvard University \email{ylu at Latte.Harvard.Edu}
+  Kosuke Imai, Department of Politics, Princeton University,
+  \email{kimai at Princeton.Edu}, \url{http://imai.princeton.edu};
+  Ying Lu, Department of Sociology, University of Colorado at Boulder, 
+  \email{ying.lu at Colorado.Edu}
 }
 
 \keyword{methods}
diff --git a/man/summary.eco.Rd b/man/summary.eco.Rd
index 60ddacf..4df760e 100644
--- a/man/summary.eco.Rd
+++ b/man/summary.eco.Rd
@@ -1,6 +1,7 @@
 \name{summary.eco}
 
 \alias{summary.eco}
+\alias{print.eco}
 \alias{print.summary.eco}
 
 \title{Summarizing the Results for the Bayesian Parametric Model for
@@ -63,10 +64,10 @@
 \seealso{\code{eco}, \code{predict.eco}}
 
 \author{
-  Kosuke Imai, Department of Politics, Princeton University
-  \email{kimai at Princeton.Edu}, \url{http://www.princeton.edu/~kimai};
-  Ying Lu, Institute for Quantitative Social Sciences, 
-  Harvard University \email{ylu at Latte.Harvard.Edu}
-}
+  Kosuke Imai, Department of Politics, Princeton University,
+  \email{kimai at Princeton.Edu}, \url{http://imai.princeton.edu};
+  Ying Lu, Department of Sociology, University of Colorado at Boulder, 
+  \email{ying.lu at Colorado.Edu}
+  }
 
 \keyword{methods}
diff --git a/man/summary.ecoML.Rd b/man/summary.ecoML.Rd
new file mode 100644
index 0000000..8e8bc6d
--- /dev/null
+++ b/man/summary.ecoML.Rd
@@ -0,0 +1,87 @@
+\name{summary.ecoML}
+
+\alias{summary.ecoML}
+\alias{print.summary.ecoML}
+
+\title{Summarizing the Results for the Maximum Likelihood Parametric Model for
+  Ecological Inference in 2x2 Tables}
+
+\description{
+  \code{summary} method for class \code{ecoML}.
+}
+
+\usage{
+  \method{summary}{ecoML}(object, CI = c(2.5, 97.5),  param = TRUE, units = FALSE,
+     subset = NULL, ...) 
+
+  \method{print}{summary.ecoML}(x, digits = max(3, getOption("digits") - 3), ...)
+}
+
+\arguments{
+  \item{object}{An output object from \code{ecoML}.}
+  \item{CI}{A vector of lower and upper bounds for the Bayesian credible
+    intervals used to summarize the results. The default is the
+    equal tail 95 percent credible interval.
+  }
+  \item{param}{Ignored.}
+  \item{subset}{A numeric vector indicating the subset of the units whose 
+    in-sample predictions are to be provided when \code{units} is 
+    \code{TRUE}. The default value is \code{NULL}, in which case the 
+    in-sample predictions for all units are provided.
+  }
+  \item{units}{Logical. If \code{TRUE}, the in-sample predictions for
+    each unit or for a subset of units will be provided. The default 
+    value is \code{FALSE}.
+  } 
+  
+  \item{x}{An object of class \code{summary.ecoML}.}
+  \item{digits}{the number of significant digits to use when printing.}
+ 
+  \item{...}{further arguments passed to or from other methods.}
+}
+
+\value{
+  \code{summary.ecoML} yields an object of class \code{summary.ecoML}
+  containing the following elements:
+  \item{call}{The call from \code{ecoML}.}
+  \item{sem}{Whether the SEM algorithm was executed, as specified by the
+    user upon calling \code{ecoML}.}
+  \item{fix.rho}{Whether the correlation parameter was fixed or allowed to
+    vary, as specified by the user upon calling \code{ecoML}.}
+  \item{epsilon}{The convergence threshold specified by the user upon
+    calling \code{ecoML}.}
+  \item{n.obs}{The number of units.}
+  \item{iters.em}{The number of iterations the EM algorithm cycled through
+    before convergence or reaching the maximum number of iterations
+    allowed.}
+  \item{iters.sem}{The number of iterations the SEM algorithm cycled through
+    before convergence or reaching the maximum number of iterations
+    allowed.}
+  \item{loglik}{The final observed log-likelihood.}
+  \item{rho}{A matrix of \code{iters.em} rows specifying the correlation
+    parameters at each iteration of the EM algorithm. The number of columns
+    depends on how many correlation parameters exist in the model. Column
+    order is the same as the order of the parameters in \code{param.table}.}
+  \item{param.table}{Final estimates of the parameter values for the model. 
+    Excludes parameters fixed by the user upon calling \code{ecoML}.}
+  \item{agg.table}{Aggregate estimates of the marginal means
+    of \eqn{W_1} and \eqn{W_2}.}
+  \item{agg.wtable}{Aggregate estimates of the marginal means
+    of \eqn{W_1} and \eqn{W_2} using \eqn{X} and \eqn{N} as weights.}
+  If \code{units = TRUE}, the following elements are also included:
+  \item{W.table}{Unit-level estimates for \eqn{W_1} and \eqn{W_2}.}
+
+  This object can be printed by \code{print.summary.ecoML}.
+}
+
+\seealso{\code{eco}, \code{predict.eco}}
+
+\author{
+  Kosuke Imai, Department of Politics, Princeton University,
+  \email{kimai at Princeton.Edu}, \url{http://imai.princeton.edu};
+  Ying Lu, Department of Sociology, University of Colorado at Boulder, 
+  \email{ying.lu at Colorado.Edu}; Aaron Strauss, Department of Politics,
+  Princeton University, \email{abstraus at Princeton.Edu}
+  }
+
+\keyword{methods}
diff --git a/man/summary.ecoNP.Rd b/man/summary.ecoNP.Rd
index 8c5d759..0e39f83 100644
--- a/man/summary.ecoNP.Rd
+++ b/man/summary.ecoNP.Rd
@@ -62,10 +62,10 @@ Ecological Inference in 2x2 Tables }
 \seealso{\code{ecoNP}, \code{predict.eco}}
 
 \author{
-  Kosuke Imai, Department of Politics, Princeton University
-  \email{kimai at Princeton.Edu}, \url{http://www.princeton.edu/~kimai};
-  Ying Lu, Institute for Quantitative Social Sciences, 
-  Harvard University \email{ylu at Latte.Harvard.Edu}
+  Kosuke Imai, Department of Politics, Princeton University,
+  \email{kimai at Princeton.Edu}, \url{http://imai.princeton.edu};
+  Ying Lu, Department of Sociology, University of Colorado at Boulder, 
+  \email{ying.lu at Colorado.Edu}
 }
 
 \keyword{methods}
diff --git a/man/wallace.Rd b/man/wallace.Rd
new file mode 100644
index 0000000..18cdbbf
--- /dev/null
+++ b/man/wallace.Rd
@@ -0,0 +1,37 @@
+\name{wallace}
+
+\docType{data}
+
+\alias{wallace}
+
+\title{Black voting rates for Wallace for President, 1968}
+
+\description{
+  This data set contains, on a county level, the proportion of
+  county residents who are Black and the proportion of presidential
+  votes cast for Wallace.  Demographic data is based on the 1960
+  census. Presidential returns are from ICPSR study 13.  County data
+  from 10 southern states (Alabama, Arkansas, Georgia, Florida,
+  Louisiana, Mississippi, North Carolina, South Carolina, Tennessee,
+  Texas) are included. (Virginia is excluded due
+  to the difficulty of matching counties between the datasets.)
+  This data set is analyzed in Wasserman and Segal (1973).
+}
+
+\usage{data(wallace)}
+
+\format{A data frame containing 3 variables and 1009 observations
+ \tabular{lll}{
+    X \tab numeric \tab proportion of the population that is Black \cr
+    Y \tab numeric \tab proportion of presidential votes cast for Wallace \cr
+    FIPS \tab numeric \tab the FIPS county code
+  }
+}
+
+\references{
+ Wasserman, Ira M. and David R. Segal (1973). ``Aggregation Effects in
+ the Ecological Study of Presidential Voting.'' \emph{American Journal
+ of Political Science}, vol. 17, pp. 177-181.
+}
+
+\keyword{datasets}
diff --git a/src/fintegrate.c b/src/fintegrate.c
new file mode 100644
index 0000000..936a69a
--- /dev/null
+++ b/src/fintegrate.c
@@ -0,0 +1,352 @@
+/******************************************************************
+  This file is a part of eco: R Package for Fitting
+  Bayesian Models of Ecological Inference for 2x2 Tables
+  by Ying Lu and Kosuke Imai
+  Copyright: GPL version 2 or later.
+*******************************************************************/
+
+#include <stddef.h>
+#include <stdio.h>
+#include <math.h>
+#include <Rmath.h>
+#include <R.h>
+#include <Rinternals.h>
+#include <R_ext/Utils.h>
+#include <R_ext/PrtUtil.h>
+#include "vector.h"
+#include "subroutines.h"
+#include "rand.h"
+#include "sample.h"
+#include "bayes.h"
+#include "macros.h"
+#include "fintegrate.h"
+//#include  <gsl/gsl_integration.h>
+
+//Bivariate normal distribution, with parameterization
+//see: http://mathworld.wolfram.com/BivariateNormalDistribution.html
+//see for param: http://www.math.uconn.edu/~binns/reviewII210.pdf
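+//NormConstT evaluates the bivariate normal density of (W1*,W2*) along a
+//unit's tomography line, reparameterized by t in [0,1] via the getW*FromT
+//helpers below; integrating it with paramIntegration() gives the
+//normalizing constant that setNormConst() stores in caseP.normcT.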
+void NormConstT(double *t, int n, void *param)
+{
+  int ii;
+  int dim=2;
+  double *mu=doubleArray(dim);
+  double **Sigma=doubleMatrix(dim,dim);
+  double *W1,*W1p,*W2,*W2p;
+  double X, Y, rho;
+  double dtemp, inp, pfact;
+  int imposs;
+
+  W1 = doubleArray(n);
+  W1p = doubleArray(n);
+  W2 = doubleArray(n);
+  W2p = doubleArray(n);
+
+  Param *pp=(Param *)param;
+  mu[0]= pp->caseP.mu[0];
+  mu[1]= pp->caseP.mu[1];
+  Sigma[0][0]=pp->setP->Sigma[0][0];
+  Sigma[1][1]=pp->setP->Sigma[1][1];
+  Sigma[0][1]=pp->setP->Sigma[0][1];
+  Sigma[1][0]=pp->setP->Sigma[1][0];
+  rho=Sigma[0][1]/sqrt(Sigma[0][0]*Sigma[1][1]);
+  //Rprintf("TESTING: %4g %4g %4g %4g", pp->caseP.mu[0], pp->caseP.mu[1], pp->setP->Sigma[0][0],pp->setP->Sigma[0][1]);
+  X=pp->caseP.X;
+  Y=pp->caseP.Y;
+  imposs=0;
+
+  dtemp=1/(2*M_PI*sqrt(Sigma[0][0]*Sigma[1][1]*(1-rho*rho)));
+
+  for (ii=0; ii<n; ii++)
+    {
+      imposs=0; inp=t[ii];
+      W1[ii]=getW1starFromT(t[ii],pp,&imposs);
+      if (!imposs) W2[ii]=getW2starFromT(t[ii],pp,&imposs);
+		  if (imposs==1) t[ii]=0;
+      else {
+          W1p[ii]=getW1starPrimeFromT(t[ii],pp);
+          W2p[ii]=getW2starPrimeFromT(t[ii],pp);
+          pfact=sqrt(W1p[ii]*W1p[ii]+W2p[ii]*W2p[ii]);
+          t[ii]=exp(-1/(2*(1-rho*rho))*
+                ((W1[ii]-mu[0])*(W1[ii]-mu[0])/Sigma[0][0]+
+                 (W2[ii]-mu[1])*(W2[ii]-mu[1])/Sigma[1][1]-
+                  2*rho*(W1[ii]-mu[0])*(W2[ii]-mu[1])
+                /sqrt(Sigma[0][0]*Sigma[1][1])))*dtemp*pfact;
+    //if (pp->setP->weirdness)
+     //   Rprintf("Normc... %d %d %5g -> %5g %5g => %5g with %5g imposs %d\n", ii, n, inp, W1[ii], W2[ii],t[ii],pfact,imposs);
+        //char ch;
+        //scanf(" %c", &ch );
+      }
+    }
+  Free(W1);
+  Free(W1p);
+  Free(W2);
+  Free(W2p);
+  Free(mu);
+  FreeMatrix(Sigma,dim);
+}
+
+/*
+ * Integrand for computing sufficient statistic
+ * Which statistic to estimate depends on param->suff (see macros.h)
+ */
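+/*
+ * Descriptive note: the suff codes select which integrand is evaluated
+ * against the case's (normalized) density: 0 -> E[W1*], 1 -> E[W2*],
+ * 2 -> E[W1*^2], 3 -> E[W1*W2*], 4 -> E[W2*^2], 5 -> E[W1], 6 -> E[W2],
+ * 7 -> the multivariate normal density used for the log-likelihood,
+ * and -1 -> the density itself.
+ */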
+void SuffExp(double *t, int n, void *param)
+{
+  int ii,imposs,suff,i,j;
+  Param *pp=(Param *)param;
+  int dim = (pp->setP->ncar==1) ? 3 : 2;
+  double *mu=doubleArray(dim);
+  double **Sigma=doubleMatrix(dim,dim);
+  double **InvSigma=doubleMatrix(dim,dim);/* inverse covariance matrix*/
+  //double Sigma[dim][dim];
+  //double InvSigma[dim][dim];
+  double *W1,*W1p,*W2,*W2p,*vtemp;
+  double inp,density,pfact,normc;
+
+  vtemp=doubleArray(dim);
+  W1 = doubleArray(n);
+  W1p = doubleArray(n);
+  W2 = doubleArray(n);
+  W2p = doubleArray(n);
+  mu[0]= pp->caseP.mu[0];
+  mu[1]= pp->caseP.mu[1];
+  for(i=0;i<dim;i++)
+    for(j=0;j<dim;j++) {
+      if (dim==3) {
+        Sigma[i][j]=pp->setP->Sigma3[i][j];
+        InvSigma[i][j]=pp->setP->InvSigma3[i][j];
+      }
+      else {
+        Sigma[i][j]=pp->setP->Sigma[i][j];
+        InvSigma[i][j]=pp->setP->InvSigma[i][j];
+      }
+    }
+  normc=pp->caseP.normcT;
+  suff=pp->caseP.suff;
+  imposs=0;
+
+   for (ii=0; ii<n; ii++)
+    {
+     imposs=0; inp=t[ii];
+      W1[ii]=getW1starFromT(t[ii],pp,&imposs);
+      if (!imposs) W2[ii]=getW2starFromT(t[ii],pp,&imposs);
+		  if (imposs==1) t[ii]=0;
+      else {
+          W1p[ii]=getW1starPrimeFromT(t[ii],pp);
+          W2p[ii]=getW2starPrimeFromT(t[ii],pp);
+          pfact=sqrt(W1p[ii]*W1p[ii]+W2p[ii]*W2p[ii]);
+          vtemp[0] = W1[ii];
+          vtemp[1] = W2[ii];
+          density=dBVNtomo(vtemp, pp, 0,normc);
+          t[ii] = density*pfact;
+          if (suff==0) t[ii]=W1[ii]*t[ii];
+          else if (suff==1) t[ii]=W2[ii]*t[ii];
+          else if (suff==2) t[ii]=W1[ii]*W1[ii]*t[ii];
+          else if (suff==3) t[ii]=W1[ii]*W2[ii]*t[ii];
+          else if (suff==4) t[ii]=W2[ii]*W2[ii]*t[ii];
+          else if (suff==5) t[ii]=invLogit(W1[ii])*t[ii];
+          else if (suff==6) t[ii]=invLogit(W2[ii])*t[ii];
+          else if (suff==7) {
+            if (dim == 3) {
+              //if(pp->setP->verbose>=2 && dim==3) Rprintf("InvSigma loglik: %5g %5g %5g %5g %5g %5g\n",InvSigma[0][0],InvSigma[0][1],InvSigma[1][0],InvSigma[1][1],InvSigma[1][2],InvSigma[2][2]);
+              vtemp[2]=logit(pp->caseP.X,"log-likelihood");
+              mu[0]=pp->setP->pdTheta[1];
+              mu[1]=pp->setP->pdTheta[2];
+              mu[2]=pp->setP->pdTheta[0];
+            }
+            t[ii]=dMVN(vtemp,mu,InvSigma,dim,0)*pfact;
+            //t[ii]=dMVN3(vtemp,mu,(double*)(&(InvSigma[0][0])),dim,0)*pfact;
+          }
+          else if (suff!=-1) Rprintf("Error Suff= %d",suff);
+        }
+    }
+  Free(W1);Free(W1p);Free(W2);Free(W2p);Free(mu);Free(vtemp);
+  FreeMatrix(Sigma,dim); FreeMatrix(InvSigma,dim);
+}
+
+//Returns the log likelihood of a particular case
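+//For a unit with an interior Y (and no individual-level data), the
+//contribution is the normal mass over the tomography line, obtained by
+//integrating SuffExp with suff=7; for survey cases (dataType==3) or
+//homogeneous units (Y near 0 or 1), W* is effectively known and the
+//(possibly 3-dimensional, NCAR) multivariate normal density is evaluated
+//directly.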
+double getLogLikelihood(Param* param) {
+  if (param->caseP.dataType==0  && !(param->caseP.Y>=.990 || param->caseP.Y<=.010)) {
+    param->caseP.suff=7;
+    return log(paramIntegration(&SuffExp,(void*)param));
+  }
+  else if (param->caseP.dataType==3 || (param->caseP.Y>=.990 || param->caseP.Y<=.010)) {
+    int dim=param->setP->ncar ? 3 : 2;
+    double *mu=doubleArray(dim);
+    double *vtemp=doubleArray(dim);
+    double **InvSig=doubleMatrix(dim,dim);/* inverse covariance matrix*/
+    int i,j;
+    for(i=0;i<dim;i++)
+      for(j=0;j<dim;j++) {
+        if (dim==3) {
+          InvSig[i][j]=param->setP->InvSigma3[i][j];
+        }
+        else {
+          InvSig[i][j]=param->setP->InvSigma[i][j];
+        }
+      }
+    double loglik;
+        vtemp[0] = param->caseP.Wstar[0];
+        vtemp[1] = param->caseP.Wstar[1];
+        mu[0]= param->caseP.mu[0];
+        mu[1]= param->caseP.mu[1];
+        if (param->setP->ncar) {
+          vtemp[2]=logit(param->caseP.X,"log-likelihood survey");
+          mu[0]=param->setP->pdTheta[1];
+          mu[1]=param->setP->pdTheta[2];
+          mu[2]=param->setP->pdTheta[0];
+          loglik=dMVN(vtemp,mu,InvSig,dim,1);
+        }
+        else {
+          loglik=dMVN(vtemp,mu,InvSig,dim,1);
+        }
+      Free(mu); Free(vtemp); FreeMatrix(InvSig,dim);
+      return loglik;
+      }
+  else {
+    Rprintf("Error.\n");
+    return 0;
+  }
+}
+
+//Finds W2star, given the equation
+//Y=XW1 + (1-X)W2 and the Wistar=logit(Wi)
+//imposs is set to 1 if the equation cannot be satisfied
+double getW2starFromW1star(double X, double Y, double W1star, int* imposs) {
+      double W1;
+      if (W1star>30) W1=1; //prevent overflow or underflow
+      else W1=1/(1+exp(-1*W1star));
+      double W2=Y/(1-X)-X*W1/(1-X);
+
+      if(W2>=1 || W2<=0) *imposs=1; //impossible pair of values
+      else W2=log(W2/(1-W2));
+      return W2;
+}
+
+double getW1starFromW2star(double X, double Y, double W2star, int* imposs) {
+      double W2;
+      if (W2star>30) W2=1; //prevent overflow or underflow
+      else W2=1/(1+exp(-1*W2star));
+      double W1=(Y-(1-X)*W2)/X;
+
+      if(W1>=1 || W1<=0) *imposs=1; //impossible pair of values
+      else W1=log(W1/(1-W1));
+      return W1;
+}
+
+double getW1FromW2(double X, double Y, double W2) {
+      return (Y-(1-X)*W2)/X;
+}
+
+
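+//Integrals over a unit's tomography line are computed after the change of
+//variables t -> (W1(t), W2(t)) with t in [0,1]: W1(t) moves linearly
+//between its sharp bounds while W2(t) follows from Y = X*W1 + (1-X)*W2.
+//The factor sqrt(W1*'(t)^2 + W2*'(t)^2) ("pfact" above) is the resulting
+//line-element (Jacobian) term on the logit scale.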
+//W1star(t)
+//W1(t)=(W1_ub - W1_lb)*t + W1_lb
+double getW1starFromT(double t, Param* param, int* imposs) {
+    double W1=(param->caseP.Wbounds[0][1] - param->caseP.Wbounds[0][0])*t + param->caseP.Wbounds[0][0];
+    if (W1==1 || W1==0) *imposs=1;
+    else W1=log(W1/(1-W1));
+    return W1;
+}
+//W2star(t)
+//W2(t)=(W2_lb - W2_ub)*t + W2_lb
+double getW2starFromT(double t, Param* param, int* imposs) {
+    double W2=(param->caseP.Wbounds[1][0] - param->caseP.Wbounds[1][1])*t + param->caseP.Wbounds[1][1];
+    if (W2==1 || W2==0) *imposs=1;
+    else W2=log(W2/(1-W2));
+    return W2;
+}
+//W1star'(t)
+//see paper for derivation: W1*'(t) = (1/W1(t)) * (w1_ub - w1_lb)/(1 - W1(t))
+double getW1starPrimeFromT(double t, Param* param) {
+    double m=(param->caseP.Wbounds[0][1] - param->caseP.Wbounds[0][0]);
+    double W1=m*t + param->caseP.Wbounds[0][0];
+    W1=(1/W1)*(m/(1-W1));
+    return W1;
+}
+//W2star'(t)
+//see paper for derivation: W2*'(t) = (1/W2(t)) * (w2_lb - w2_ub)/(1 - W2(t))
+double getW2starPrimeFromT(double t, Param* param) {
+    double m=(param->caseP.Wbounds[1][0] - param->caseP.Wbounds[1][1]);
+    double W2=m*t + param->caseP.Wbounds[1][1];
+    W2=(1/W2)*(m/(1-W2));
+    return W2;
+}
+
+//parameterized integration: bounds always from 0,1
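+//Descriptive note: this is a thin wrapper around R's adaptive quadrature
+//routine Rdqags (declared in R_ext/Applic.h), integrating f over
+//[0.00001, 0.99999] with tight absolute and relative tolerances; on an
+//integration error the result is still returned after printing diagnostics.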
+double paramIntegration(integr_fn f, void *ex) {
+  double epsabs=pow(10,-11), epsrel=pow(10,-11);
+  double result=9999, anserr=9999;
+  int limit=100;
+  int last, neval, ier;
+  int lenw=5*limit;
+  int *iwork=(int *) Calloc(limit, int);
+  double *work=(double *)Calloc(lenw, double);
+  double lb=0.00001; double ub=.99999;
+    Rdqags(f, ex, &lb, &ub, &epsabs, &epsrel, &result,
+      &anserr, &neval, &ier, &limit, &lenw, &last, iwork, work);
+
+  Free(iwork);
+  Free(work);
+  if (ier==0) return result;
+  else {
+    Param* p = (Param*) ex;
+    Rprintf("Integration error %d: Sf %d X %5g Y %5g [%5g,%5g] -> %5g +- %5g\n",ier,p->caseP.suff,p->caseP.X,p->caseP.Y,p->caseP.Wbounds[0][0],p->caseP.Wbounds[0][1],result,anserr);
+    char ch;
+    scanf("Hit enter to continue %c", &ch );
+    return result;
+  }
+
+}
+
+/* integrate normalizing constant and set it in param*/
+void setNormConst(Param* param) {
+    param->caseP.normcT=paramIntegration(&NormConstT,(void*)param);
+}
+
+
+/*
+ * Set the bounds on W1 and W2 in their parameter
+ */
+void setBounds(Param* param) {
+  double X,Y,w1_lb,w1_ub,w2_lb,w2_ub;
+  //int w1_inf,w2_inf;
+  double tol0=0.0001;
+  double tol1=0.9999;
+  X=param->caseP.X;
+  Y=param->caseP.Y;
+
+  //find bounds for W1
+  w1_ub=(Y-(1-X)*0)/X; //W2=0
+  if (w1_ub>tol1) w1_ub=1;
+  w1_lb=(Y-(1-X)*1)/X; //W2=1
+  if (w1_lb<tol0) w1_lb=0;
+
+  //find bounds for W2
+  w2_ub=Y/(1-X)-X*0/(1-X); //W1=0
+  if (w2_ub>tol1) w2_ub=1;
+  w2_lb=Y/(1-X)-X*1/(1-X); //W1=1
+  if (w2_lb<tol0) w2_lb=0;
+
+
+  /*
+  if (w1_lb==0 && w1_ub==1) w1_inf=2;
+  else if (w1_lb==0) w1_inf=-1;
+  else if (w1_ub==1) w1_inf=1;
+  else w1_inf=0;
+  //w1_lb=log(w1_lb/(1-w1_lb));
+  //w1_ub=log(w1_ub/(1-w1_ub));
+
+  if (w2_lb==0 && w2_ub==1) w2_inf=2;
+  else if (w2_lb==0) w2_inf=-1;
+  else if (w2_ub==1) w2_inf=1;
+  else w2_inf=0;
+  //w2_lb=log(w2_lb/(1-w2_lb));
+  //w2_ub=log(w2_ub/(1-w2_ub));
+  */
+  param->caseP.Wbounds[0][0]=w1_lb;
+  param->caseP.Wbounds[0][1]=w1_ub;
+  param->caseP.Wbounds[1][0]=w2_lb;
+  param->caseP.Wbounds[1][1]=w2_ub;
+  //param->W1_inf=w1_inf;
+  //param->W2_inf=w2_inf;
+
+}
diff --git a/src/fintegrate.h b/src/fintegrate.h
new file mode 100644
index 0000000..fbe9598
--- /dev/null
+++ b/src/fintegrate.h
@@ -0,0 +1,23 @@
+/******************************************************************
+  This file is a part of eco: R Package for Fitting Bayesian Models
+  of Ecological Inference for 2x2 Tables
+  by Kosuke Imai and Ying Lu
+  Copyright: GPL version 2 or later.
+*******************************************************************/
+#include <R_ext/Applic.h>
+
+void NormConstT(double *t, int n, void *param);
+void SuffExp(double *t, int n, void *param);
+double getLogLikelihood(Param* param) ;
+void setNormConst(Param* param);
+double getW2starFromW1star(double X, double Y, double W1, int* imposs);
+double getW1starFromW2star(double X, double Y, double W2, int* imposs);
+double getW1FromW2(double X, double Y, double W2);
+double getW1starFromT(double t, Param* param, int* imposs);
+double getW2starFromT(double t, Param* param, int* imposs);
+double getW1starPrimeFromT(double t, Param* param);
+double getW2starPrimeFromT(double t, Param* param);
+double paramIntegration(integr_fn f, void *ex);
+void setBounds(Param* param);
+
diff --git a/src/gibbsBase.c b/src/gibbsBase.c
index ed7782b..519d4f9 100644
--- a/src/gibbsBase.c
+++ b/src/gibbsBase.c
@@ -1,10 +1,5 @@
-/******************************************************************
-  This file is a part of eco: R Package for Fitting Bayesian Models 
-  of Ecological Inference for 2x2 Tables
-  by Kosuke Imai and Ying Lu
-  Copyright: GPL version 2 or later.
-*******************************************************************/
 
+#include <string.h>
 #include <stddef.h>
 #include <stdio.h>      
 #include <math.h>
diff --git a/src/gibbsBase2C.c b/src/gibbsBase2C.c
new file mode 100644
index 0000000..9eba601
--- /dev/null
+++ b/src/gibbsBase2C.c
@@ -0,0 +1,204 @@
+#include <stddef.h>
+#include <stdio.h>      
+#include <math.h>
+#include <Rmath.h>
+#include <R.h>
+#include "vector.h"
+#include "subroutines.h"
+#include "rand.h"
+#include "bayes.h"
+#include "sample.h"
+
+/* Normal Parametric Model for 2xC (with C > 2) Tables */
+void cBase2C(
+	     /*data input */
+	     double *pdX,     /* X: matrix */
+	     double *Y,       /* Y: vector */
+	     double *pdWmin,  /* lower bounds */
+	     double *pdWmax,  /* upper bounds */
+	     int *pin_samp,   /* sample size */
+	     int *pin_col,    /* number of columns */
+	     
+	     /*MCMC draws */
+	     int *reject,     /* whether to use rejection sampling */
+	     int *maxit,      /* max number of iterations for
+				 rejection sampling */
+	     int *n_gen,      /* number of gibbs draws */
+	     int *burn_in,    /* number of draws to be burned in */
+	     int *pinth,      /* keep every nth draw */
+	     int *verbose,    /* 1 for output monitoring */
+	     
+	     /* prior specification*/
+	     int *pinu0,      /* prior df parameter for InvWish */
+	     double *pdtau0,  /* prior scale parameter for Sigma */
+	     double *mu0,     /* prior mean for mu */
+	     double *pdS0,    /* prior scale for Sigma */
+
+	     /* starting values */
+	     double *mu,
+	     double *SigmaStart,
+	     
+	     /* storage */
+	     int *parameter,  /* 1 if save population parameter */
+	     double *pdSmu, 
+	     double *pdSSigma,
+	     double *pdSW
+	     ){	   
+  
+  /* some integers */
+  int n_samp = *pin_samp;    /* sample size */
+  int nth = *pinth;          /* keep every nth draw */
+  int n_col = *pin_col;      /* dimension */
+
+  /* prior parameters */ 
+  double tau0 = *pdtau0;                          /* prior scale for mu */
+  int nu0 = *pinu0;                               /* prior degrees of freedom */   
+  double **S0 = doubleMatrix(n_col, n_col);       /* prior scale for Sigma */
+
+  /* data */
+  double **X = doubleMatrix(n_samp, n_col);       /* X */
+  double **W = doubleMatrix(n_samp, n_col);       /* The W matrix */
+  double **Wstar = doubleMatrix(n_samp, n_col);   /* logit(W) */     
+
+  /* The lower and upper bounds of U = W*X/Y **/
+  double **minU = doubleMatrix(n_samp, n_col);
+  double **maxU = doubleMatrix(n_samp, n_col);    
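+  /* Descriptive note: since Y_i = sum_j X_ij * W_ij for each unit, the
+     rescaled quantities U_ij = W_ij * X_ij / Y_i lie on the simplex, so
+     candidate values of U can be drawn from a Dirichlet distribution and
+     mapped back to W; minU and maxU translate the supplied bounds on W
+     (pdWmin, pdWmax) into bounds on U. */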
+
+  /* model parameters */
+  double **Sigma = doubleMatrix(n_col, n_col);    /* The covariance matrix */
+  double **InvSigma = doubleMatrix(n_col, n_col); /* The inverse covariance matrix */
+
+  /* misc variables */
+  int i, j, k, main_loop;   /* used for various loops */
+  int itemp;
+  int itempM = 0; /* for mu */
+  int itempS = 0; /* for Sigma */
+  int itempW = 0; /* for W */
+  int itempC = 0; /* control nth draw */
+  int progress = 1, itempP = ftrunc((double) *n_gen/10);
+  double dtemp, dtemp1;
+  double *param = doubleArray(n_col);   /* Dirichlet parameters */
+  double *dvtemp = doubleArray(n_col);
+
+  /* get random seed */
+  GetRNGstate();
+  
+  /* read X */
+  itemp = 0;
+  for (j = 0; j < n_col; j++) 
+    for (i = 0; i < n_samp; i++) 
+      X[i][j] = pdX[itemp++];
+
+  /* read initial values of Sigma */
+  itemp = 0;
+  for (k = 0; k < n_col; k++) 
+    for (j = 0; j < n_col; j++) 
+      Sigma[j][k] = SigmaStart[itemp++];
+  dinv(Sigma, n_col, InvSigma);
+
+  /* compute bounds on U */
+  itemp = 0;
+  for (j = 0; j < n_col; j++) 
+    for (i = 0; i < n_samp; i++) 
+      minU[i][j] = fmax2(0, pdWmin[itemp++]*X[i][j]/Y[i]);
+  itemp = 0;
+  for (j = 0; j < n_col; j++) 
+    for (i = 0; i < n_samp; i++) 
+      maxU[i][j] = fmin2(1, pdWmax[itemp++]*X[i][j]/Y[i]);
+
+  /* initial values for W */
+  for (j = 0; j < n_col; j++)
+    param[j] = 1;
+  for (i = 0; i < n_samp; i++) {
+    k = 0; itemp = 1;
+    while (itemp > 0) { /* rejection sampling */
+      rDirich(dvtemp, param, n_col);
+      itemp = 0; k++;
+      for (j = 0; j < n_col; j++)
+	if (dvtemp[j] > maxU[i][j] || dvtemp[j] < minU[i][j])
+	  itemp++;
+      if (itemp == 0)
+	for (j = 0; j < n_col; j++)
+	  W[i][j] = dvtemp[j]*Y[i]/X[i][j];
+      if (k > *maxit) { /* if rejection sampling fails, then use
+			   midpoints of bounds sequentially */
+	itemp = 0;
+	dtemp = Y[i]; dtemp1 = 1;
+	for (j = 0; j < n_col-1; j++) {
+	  W[i][j] = 0.5*(fmax2(0,(X[i][j]/dtemp1+dtemp-1)*dtemp1/X[i][j])+
+			 fmin2(1,dtemp*dtemp1/X[i][j]));
+	  dtemp -= W[i][j]*X[i][j]/dtemp1;
+	  dtemp1 -= X[i][j];
+	}
+	W[i][n_col-1] = dtemp;
+      }
+    }
+    for (j = 0; j < n_col; j++) 
+      Wstar[i][j] = log(W[i][j])-log(1-W[i][j]);
+  }
+
+  /* read the prior */
+  itemp = 0;
+  for(k = 0; k < n_col; k++)
+    for(j = 0; j < n_col; j++) 
+      S0[j][k] = pdS0[itemp++];
+
+  /*** Gibbs sampler! ***/
+  if (*verbose)
+    Rprintf("Starting Gibbs sampler...\n");
+  for(main_loop = 0; main_loop < *n_gen; main_loop++){
+    /** update W, Wstar given mu, Sigma **/
+    for (i = 0; i < n_samp; i++){
+      rMH2c(W[i], X[i], Y[i], minU[i], maxU[i], mu, InvSigma, n_col,
+	    *maxit, *reject);
+      for (j = 0; j < n_col; j++) 
+	Wstar[i][j] = log(W[i][j])-log(1-W[i][j]);
+    }
+    
+    /* update mu, Sigma given wstar using effective sample of Wstar */
+    NIWupdate(Wstar, mu, Sigma, InvSigma, mu0, tau0, nu0, S0, n_samp, n_col);
+    
+    /*store Gibbs draw after burn-in and every nth draws */      
+    if (main_loop>=*burn_in){
+      itempC++;
+      if (itempC==nth){
+	for (j = 0; j < n_col; j++) {
+	  pdSmu[itempM++]=mu[j];
+	  for (k = 0; k < n_col; k++)
+	    if (j <=k)
+	      pdSSigma[itempS++]=Sigma[j][k];
+	}
+	for(i = 0; i < n_samp; i++)
+	  for (j = 0; j < n_col; j++)
+	    pdSW[itempW++] = W[i][j];
+	itempC=0;
+      }
+    } 
+    if (*verbose)
+      if (itempP == main_loop) {
+	Rprintf("%3d percent done.\n", progress*10);
+	itempP+=ftrunc((double) *n_gen/10); progress++;
+	R_FlushConsole();
+      }
+    R_CheckUserInterrupt();
+  } /* end of Gibbs sampler */ 
+
+  if(*verbose)
+    Rprintf("100 percent done.\n");
+
+  /** write out the random seed **/
+  PutRNGstate();
+
+  /* Freeing the memory */
+  FreeMatrix(S0, n_col);
+  FreeMatrix(X, n_samp);
+  FreeMatrix(W, n_samp);
+  FreeMatrix(Wstar, n_samp);
+  FreeMatrix(minU, n_samp);
+  FreeMatrix(maxU, n_samp);
+  FreeMatrix(Sigma, n_col);
+  FreeMatrix(InvSigma, n_col);
+  free(dvtemp);
+  free(param);
+} /* main */
+
diff --git a/src/gibbsBaseRC.c b/src/gibbsBaseRC.c
new file mode 100644
index 0000000..8e6ec51
--- /dev/null
+++ b/src/gibbsBaseRC.c
@@ -0,0 +1,285 @@
+#include <stddef.h>
+#include <stdio.h>      
+#include <math.h>
+#include <Rmath.h>
+#include <R_ext/Utils.h>
+#include <R.h>
+#include "vector.h"
+#include "subroutines.h"
+#include "rand.h"
+#include "bayes.h"
+#include "sample.h"
+
+/* Normal Parametric Model for RxC (with R >= 2, C >= 2) Tables */
+void cBaseRC(
+	     /*data input */
+	     double *pdX,     /* X */
+	     double *pdY,     /* Y */
+	     double *pdWmin,  /* lower bounds */
+	     double *pdWmax,  /* upper bounds */
+	     int *pin_samp,   /* sample size */
+	     int *pin_col,    /* number of columns */
+	     int *pin_row,    /* number of rows */
+
+	     /*MCMC draws */
+	     int *reject,     /* whether to use rejection sampling */
+	     int *maxit,      /* max number of iterations for
+				 rejection sampling */
+	     int *n_gen,      /* number of gibbs draws */
+	     int *burn_in,    /* number of draws to be burned in */
+	     int *pinth,      /* keep every nth draw */
+	     int *verbose,    /* 1 for output monitoring */
+	     
+	     /* prior specification*/
+	     int *pinu0,      /* prior df parameter for InvWish */
+	     double *pdtau0,  /* prior scale parameter for Sigma */
+	     double *mu0,     /* prior mean for mu */
+	     double *pdS0,    /* prior scale for Sigma */
+
+	     /* starting values */
+	     double *pdMu,
+	     double *pdSigma,
+	     
+	     /* storage */
+	     int *parameter,  /* 1 if save population parameter */
+	     double *pdSmu, 
+	     double *pdSSigma,
+	     double *pdSW
+	     ){	   
+  
+  /* some integers */
+  int n_samp = *pin_samp;    /* sample size */
+  int nth = *pinth;          /* keep every nth draw */
+  int n_col = *pin_col;      /* number of columns */
+  int n_dim = *pin_row-1;    /* number of rows - 1 */
+
+  /* prior parameters */ 
+  double tau0 = *pdtau0;                     /* prior scale */
+  int nu0 = *pinu0;                          /* prior degrees of freedom */   
+  double **S0 = doubleMatrix(n_col, n_col);  /* prior scale for InvWish */
+
+  /* data */
+  double **Y = doubleMatrix(n_samp, n_dim);               /* Y */
+  double **X = doubleMatrix(n_samp, n_col);               /* X */
+  double ***W = doubleMatrix3D(n_samp, n_dim, n_col);     /* W */
+  double ***Wstar = doubleMatrix3D(n_col, n_samp, n_dim); /* logratio(W) */       
+  double **Wsum = doubleMatrix(n_samp, n_col);            /* sum_{r=1}^{R-1} W_{irc} */
+  double **SWstar = doubleMatrix(n_col, n_dim);
+
+  /* The lower and upper bounds of U = W*X/Y **/
+  double ***minU = doubleMatrix3D(n_samp, n_dim, n_col);
+  double *maxU = doubleArray(n_col);
+
+  /* model parameters */
+  double **mu = doubleMatrix(n_col, n_dim);                 /* mean */
+  double ***Sigma = doubleMatrix3D(n_col, n_dim, n_dim);    /* covariance */
+  double ***InvSigma = doubleMatrix3D(n_col, n_dim, n_dim); /* inverse */
+
+  /* misc variables */
+  int i, j, k, l, main_loop;   /* used for various loops */
+  int itemp, counter;
+  int itempM = 0;           /* for mu */
+  int itempS = 0;           /* for Sigma */
+  int itempW = 0;           /* for W */
+  int itempC = 0;           /* control nth draw */
+  int progress = 1, itempP = ftrunc((double) *n_gen/10);
+  double dtemp, dtemp1;
+  double *param = doubleArray(n_col);   /* Dirichlet parameters */
+  double *dvtemp = doubleArray(n_col);
+  double *dvtemp1 = doubleArray(n_col);
+
+  /* get random seed */
+  GetRNGstate();
+  
+  /* read X */
+  itemp = 0;
+  for (k = 0; k < n_col; k++) 
+    for (i = 0; i < n_samp; i++) 
+      X[i][k] = pdX[itemp++];
+
+  /* read Y */
+  itemp = 0;
+  for (j = 0; j < n_dim; j++) 
+    for (i = 0; i < n_samp; i++) 
+      Y[i][j] = pdY[itemp++];
+
+  /* compute bounds on U */
+  itemp = 0; 
+  for (k = 0; k < n_col; k++) 
+    for (j = 0; j < n_dim; j++) 
+      for (i = 0; i < n_samp; i++) 
+	minU[i][j][k] = fmax2(0, (X[i][k]+Y[i][j]-1)/Y[i][j]);
+
+  /* initial values for mu and Sigma */
+  itemp = 0;
+  for (k = 0; k < n_col; k++)
+    for (j = 0; j < n_dim; j++)
+      mu[k][j] = pdMu[itemp++]; 
+  itemp = 0;
+  for (k = 0; k < n_col; k++)
+    for (j = 0; j < n_dim; j++) 
+      for (i = 0; i < n_dim; i++) 
+	Sigma[k][j][i] = pdSigma[itemp++];
+  for (k = 0; k < n_col; k++)
+    dinv(Sigma[k], n_dim, InvSigma[k]);
+  
+  /* initial values for W */
+  for (k = 0; k < n_col; k++)
+    param[k] = 1.0;
+  for (i = 0; i < n_samp; i++) {
+    for (k = 0; k < n_col; k++)
+      Wsum[i][k] = 0.0;
+    for (j = 0; j < n_dim; j++) {
+      counter = 0; itemp = 1; 
+      while (itemp > 0) { /* first try rejection sampling */
+	rDirich(dvtemp, param, n_col);
+	itemp = 0;
+	for (k = 0; k < n_col; k++) {
+	  if (dvtemp[k] < minU[i][j][k] || 
+	      dvtemp[k] > fmin2(1, X[i][k]*(1-Wsum[i][k])/Y[i][j]))
+	    itemp++;
+	}
+	if (itemp < 1) 
+	  for (k = 0; k < n_col; k++) {
+	    W[i][j][k] = dvtemp[k]*Y[i][j]/X[i][k];
+	    Wsum[i][k] += W[i][j][k];
+	  }
+	counter++;
+	if (counter > *maxit && itemp > 0) { /* if rejection sampling fails, then
+				   use midpoints of bounds */
+	  itemp = 0;
+	  dtemp = Y[i][j]; dtemp1 = 1;
+	  for (k = 0; k < n_col-1; k++) {
+	    W[i][j][k] = 0.25*(fmax2(0,(X[i][k]/dtemp1+dtemp-1)*dtemp1/X[i][k])+
+			      fmin2(1-Wsum[i][k],dtemp*dtemp1/X[i][k]));
+	    dtemp -= W[i][j][k]*X[i][k]/dtemp1;
+	    dtemp1 -= X[i][k];
+	    Wsum[i][k] += W[i][j][k];
+	  }
+	  W[i][j][n_col-1] = dtemp;
+	  Wsum[i][n_col-1] += dtemp;
+	}
+	R_CheckUserInterrupt();
+      }
+      for (l = 0; l < n_dim; l++) 
+	for (k = 0; k < n_col; k++)
+	  Wstar[k][i][l] = log(W[i][l][k])-log(1-Wsum[i][k]);
+    }
+  }
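+
+  /* Summary of the initialization above: each row of W is initialized by
+     drawing U = W*X/Y from a flat Dirichlet and accepting only draws that
+     satisfy the deterministic bounds
+       max(0, (X_ik + Y_ij - 1)/Y_ij) <= U_ijk <= min(1, X_ik*(1 - Wsum_ik)/Y_ij),
+     with accepted draws mapped back through W = U*Y/X.  If no draw is
+     accepted within *maxit attempts, a deterministic point inside the bounds
+     is used instead (the fallback branch above). */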
+
+  /* read the prior */
+  itemp = 0;
+  for(k = 0; k < n_dim; k++)
+    for(j = 0; j < n_dim; j++) 
+      S0[j][k] = pdS0[itemp++];
+
+  /*** Gibbs sampler! ***/
+  if (*verbose)
+    Rprintf("Starting Gibbs sampler...\n");
+  for(main_loop = 0; main_loop < *n_gen; main_loop++){
+    /** update W, Wstar given mu, Sigma **/
+    for (i = 0; i < n_samp; i++) {
+      /* sampling W through Metropolis Step for each row */
+      for (j = 0; j < n_dim; j++) {
+	/* computing upper bounds for U */
+	for (k = 0; k < n_col; k++) {
+	  Wsum[i][k] -= W[i][j][k];
+	  maxU[k] = fmin2(1, X[i][k]*(1-Wsum[i][k])/Y[i][j]);
+	}
+	/** MH step **/
+	/* Sample a candidate draw of W from truncated Dirichlet */
+	l = 0; itemp = 1;
+	while (itemp > 0) {
+	  rDirich(dvtemp, param, n_col);
+	  itemp = 0;
+	  for (k = 0; k < n_col; k++) 
+	    if (dvtemp[k] > maxU[k] || dvtemp[k] < minU[i][j][k])
+	      itemp++;
+	  l++;
+	  if (l > *maxit)
+	    error("Rejection algorithm failed because the bounds are too tight.\n Increase maxit or use the Gibbs sampler instead.");
+	}
+	/* get W and its log-ratio transformation */
+	for (k = 0; k < n_col; k++) {
+	  dvtemp[k] = dvtemp[k]*Y[i][j]/X[i][k];
+	  dvtemp1[k] = Wsum[i][k]+dvtemp[k];
+	}
+	for (k = 0; k < n_col; k++) 
+	  for (l = 0; l < n_dim; l++) 
+	    if (l == j)
+	      SWstar[k][l] = log(dvtemp[k])-log(1-dvtemp1[k]);
+	    else
+	      SWstar[k][l] = log(W[i][j][k])-log(1-dvtemp1[k]);
+	/* computing acceptance ratio */
+	dtemp = 0; dtemp1 = 0;
+	for (k= 0; k < n_col; k++) {
+	  dtemp += dMVN(SWstar[k], mu[k], InvSigma[k], n_dim, 1);
+	  dtemp1 += dMVN(Wstar[k][i], mu[k], InvSigma[k], n_dim, 1);
+	  dtemp -= log(dvtemp[k]);
+	  dtemp1 -= log(W[i][j][k]);
+	}
+	if (unif_rand() < fmin2(1, exp(dtemp-dtemp1))) 
+	  for (k = 0; k < n_col; k++)
+	    W[i][j][k] = dvtemp[k]; 
+	/* updating Wsum and Wstar with new draws */
+	for (k = 0; k < n_col; k++) {
+	  Wsum[i][k] += W[i][j][k];
+	  for (l = 0; l < n_dim; l++) 
+	    Wstar[k][i][l] = log(W[i][l][k])-log(1-Wsum[i][k]);
+	}
+      }
+    }    
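+    /* Note on the Metropolis step in the loop above: a candidate U is drawn
+       from a flat Dirichlet restricted to [minU, maxU] and rescaled to
+       W = U*Y/X; dtemp and dtemp1 accumulate, over columns, the multivariate
+       normal log-density of the candidate and current draws on the log-ratio
+       scale minus log(W), and the candidate is accepted with probability
+       min(1, exp(dtemp - dtemp1)). */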
+    
+    /* update mu, Sigma given wstar using effective sample of Wstar */
+    for (k = 0; k < n_col; k++)
+      NIWupdate(Wstar[k], mu[k], Sigma[k], InvSigma[k], mu0, tau0,
+		nu0, S0, n_samp, n_dim); 
+    
+    /*store Gibbs draw after burn-in and every nth draws */     
+    if (main_loop >= *burn_in){
+      itempC++;
+      if (itempC==nth){
+	for (k = 0; k < n_col; k++) 
+	  for (j = 0; j < n_dim; j++) {
+	    pdSmu[itempM++]=mu[k][j];
+	    for (i = 0; i < n_dim; i++)
+	      if (j <= i)
+		pdSSigma[itempS++]=Sigma[k][j][i];
+	  }
+	for(i = 0; i < n_samp; i++)
+	  for (k = 0; k < n_col; k++)
+	    for (j = 0; j < n_dim; j++)
+	      pdSW[itempW++] = W[i][j][k];
+	itempC=0;
+      }
+    }
+    
+    if (*verbose)
+      if (itempP == main_loop) {
+	Rprintf("%3d percent done.\n", progress*10);
+	itempP+=ftrunc((double) *n_gen/10); progress++;
+	R_FlushConsole();
+      }
+    R_CheckUserInterrupt();
+  } /* end of Gibbs sampler */ 
+  if (*verbose)
+    Rprintf("100 percent done.\n");
+
+  /** write out the random seed **/
+  PutRNGstate();
+
+  /* Freeing the memory */
+  FreeMatrix(S0, n_col);
+  FreeMatrix(X, n_samp);
+  FreeMatrix(Y, n_samp);
+  Free3DMatrix(W, n_samp, n_dim);
+  Free3DMatrix(Wstar, n_col, n_samp);
+  FreeMatrix(Wsum, n_samp);
+  Free3DMatrix(minU, n_samp, n_dim);
+  FreeMatrix(mu, n_col);
+  Free3DMatrix(Sigma, n_col, n_dim);
+  Free3DMatrix(InvSigma, n_col, n_dim);
+  FreeMatrix(SWstar, n_col);
+  free(param);
+  free(dvtemp);
+  free(dvtemp1);
+  free(maxU);
+} /* main */
+
diff --git a/src/gibbsDP.c b/src/gibbsDP.c
index dc8e521..2356322 100644
--- a/src/gibbsDP.c
+++ b/src/gibbsDP.c
@@ -1,10 +1,4 @@
-/******************************************************************
-  This file is a part of eco: R Package for Fitting Bayesian Models 
-  of Ecological Inference for 2x2 Tables
-  by Kosuke Imai and Ying Lu
-  Copyright: GPL version 2 or later.
-*******************************************************************/
-
+#include <string.h>
 #include <stddef.h>
 #include <stdio.h>      
 #include <math.h>
@@ -346,7 +340,7 @@ void cDPeco(
   }
   
   /*store Gibbs draws after burn_in */
-  R_CheckUserInterrupt();
+   R_CheckUserInterrupt();
   if (main_loop>=*burn_in) {
      itempC++;
     if (itempC==nth){
@@ -374,7 +368,7 @@ void cDPeco(
     if (itempP == main_loop) {
       Rprintf("%3d percent done.\n", progress*10);
       itempP+=ftrunc((double) *n_gen/10); progress++;
-      R_FlushConsole();
+       R_FlushConsole();
     }
   } /*end of MCMC for DP*/
   
@@ -382,7 +376,7 @@ void cDPeco(
     Rprintf("100 percent done.\n");
   
   /** write out the random seed **/
-  PutRNGstate();
+   PutRNGstate();
   
   /* Freeing the memory */
   FreeMatrix(S0, n_dim);
diff --git a/src/gibbsEM.c b/src/gibbsEM.c
new file mode 100644
index 0000000..83b7de2
--- /dev/null
+++ b/src/gibbsEM.c
@@ -0,0 +1,1434 @@
+#include <stddef.h>
+#include <stdio.h>
+#include <math.h>
+#include <R.h>
+#include <Rmath.h>
+#include <R_ext/PrtUtil.h>
+#include "vector.h"
+#include "subroutines.h"
+#include "rand.h"
+#include "sample.h"
+#include "bayes.h"
+#include "macros.h"
+#include "fintegrate.h"
+
+
+void readData(Param* params, int n_dim, double* pdX, double* sur_W, double* x1_W1, double* x0_W2,
+                int n_samp, int s_samp, int x1_samp, int x0_samp);
+void ecoSEM(double* optTheta, double* pdTheta, Param* params, double Rmat_old[7][7], double Rmat[7][7]);
+void ecoEStep(Param* params, double* suff);
+void ecoMStep(double* Suff, double* pdTheta, Param* params);
+void ecoMStepNCAR(double* Suff, double* pdTheta, Param* params);
+void ecoMStepCCAR(double* Suff, double* pdTheta, Param* params);
+void MStepHypTest(Param* params, double* pdTheta);
+void initTheta(double* pdTheta_in,Param* params, double* pdTheta);
+void initNCAR(Param* params, double* pdTheta);
+void setHistory(double* t_pdTheta, double loglik, int iter,setParam* setP,double history_full[][10]);
+int closeEnough(double* pdTheta, double* pdTheta_old, int len, double maxerr);
+int semDoneCheck(setParam* setP);
+void gridEStep(Param* params, int n_samp, int s_samp, int x1_samp, int x0_samp, double* suff, int verbose, double minW1, double maxW1);
+void transformTheta(double* pdTheta, double* t_pdTheta, int len, setParam* setP);
+void untransformTheta(double* t_pdTheta,double* pdTheta, int len, setParam* setP);
+void ncarFixedRhoTransform(double* pdTheta);
+void ncarFixedRhoUnTransform(double* pdTheta);
+
+void cEMeco(
+	    /*data input */
+	    double *pdX,         /* data (X, Y) */
+	    double *pdTheta_in,  /* Theta^t
+				    CAR: mu1, mu2, var1, var2, rho
+				    NCAR: mu1, mu2, var1, var2, p13, p23, p12 */
+	    int *pin_samp,       /* sample size */
+
+	    /* loop variables */
+	    int *iteration_max,          /* maximum number of iterations */
+	    double *convergence,          /* abs value limit before stopping */
+
+	    /*incorporating survey data */
+	    int *survey,         /* 1 if survey data (W_1, W_2) is available,
+				    0 if not */
+	    int *sur_samp,       /*sample size of survey data*/
+	    double *sur_W,       /*set of known W_1, W_2 */
+
+	    /* incorporating homogeneous areas */
+	    int *x1,       /* 1 if X=1 type areas available W_1 known,
+			      W_2 unknown */
+	    int *sampx1,   /* number X=1 type areas */
+	    double *x1_W1, /* values of W_1 for X1 type areas */
+
+	    int *x0,       /* 1 if X=0 type areas available W_2 known,
+			      W_1 unknown */
+	    int *sampx0,   /* number X=0 type areas */
+	    double *x0_W2, /* values of W_2 for X0 type areas */
+
+	    /* bounds of W1 */
+	    double *minW1, double *maxW1,
+
+	    /* options */
+	    int *flag,    /*0th (rightmost) bit: 1 = NCAR, 0=normal; 1st bit: 1 = fixed rho, 0 = not fixed rho*/
+	    int *verbosiosity,    /*How much to print out, 0=silent, 1=cycle, 2=data*/
+      int *calcLoglik,    /* 1 to compute the log-likelihood each cycle, 0 to skip */
+	    int *hypTest_L,   /* number of hypothesis constraints */
+	    double *optTheta,  /*optimal theta obtained from previous EM result; if set, then we're doing SEM*/
+
+	    /* storage */
+      //Theta under CAR: mu1,mu2,s1,s2,p12
+      //Theta under NCAR: mu_3, mu_1, mu_2, sig_3, sig_1, sig_2, r_13, r_23, r_12
+	    double *pdTheta,  /*EM result for Theta^(t+1) */
+	    double *Suff,      /* output sufficient statistics (E(W_1i|Y_i),
+				  E(W_1i*W_1i|Y_i), ...) once the algorithm converges */
+      double *inSample, /* In Sample info */
+      double *DMmatrix,  /* DM matrix for SEM*/
+      int *itersUsed, /* number of iterations used */
+      double *history /* history of the (transformed) parameters and the log-likelihood */
+	    ){
+
+  int n_samp  = *pin_samp;    /* sample size */
+  int s_samp  = *survey ? *sur_samp : 0;     /* sample size of survey data */
+  int x1_samp = *x1 ? *sampx1 : 0;       /* sample size for X=1 */
+  int x0_samp = *x0 ? *sampx0 : 0;       /* sample size for X=0 */
+  int t_samp=n_samp+s_samp+x1_samp+x0_samp;  /* total sample size*/
+  int n_dim=2;        /* dimensions */
+
+  setParam setP;
+  //set options
+  setP.ncar=bit(*flag,0);
+  setP.fixedRho=bit(*flag,1);
+  setP.sem=bit(*flag,2) & (optTheta[2]!=-1.1);
+  setP.ccar=0; setP.ccar_nvar=0;
+
+  //hard-coded hypothesis test
+  //hypTest is the number of constraints.  hypTest==0 when we're not checking a hypothesis
+  setP.hypTest=(*hypTest_L);
+  if (setP.hypTest>1) error("Unable to do hypothesis testing with more than one constraint");
+  if (setP.hypTest==1) {
+    setP.hypTestCoeff=doubleMatrix(setP.ncar ? 3 : 2,setP.hypTest);
+    setP.hypTestCoeff[0][0]=1; setP.hypTestCoeff[1][0]=-1;
+    if (setP.ncar) setP.hypTestCoeff[2][0]=0;
+    setP.hypTestResult=0;
+  }
+
+  setP.verbose=*verbosiosity;
+  if (setP.verbose>=1) Rprintf("OPTIONS::  Ncar: %s; Fixed Rho: %s; SEM: %s\n",setP.ncar==1 ? "Yes" : "No",
+   setP.fixedRho==1 ? "Yes" : "No",setP.sem==1 ? "Second run" : (bit(*flag,2)==1 ? "First run" : "No"));
+  setP.calcLoglik=*calcLoglik;
+  setP.convergence=*convergence;
+  setP.t_samp=t_samp; setP.n_samp=n_samp; setP.s_samp=s_samp; setP.x1_samp=x1_samp; setP.x0_samp=x0_samp;
+  int param_len=setP.ccar ? setP.ccar_nvar : (setP.ncar ? 9 : 5);
+  setP.param_len=param_len;
+  setP.pdTheta=doubleArray(param_len);
+  setP.suffstat_len=(setP.ncar ? 9 : 5);
+  setP.SigmaK=doubleMatrix(param_len,param_len); //CCAR
+  setP.InvSigmaK=doubleMatrix(param_len,param_len); //CCAR
+
+  /* model parameters */
+  //double **Sigma=doubleMatrix(n_dim,n_dim);/* inverse covariance matrix*/
+  //double **InvSigma=doubleMatrix(n_dim,n_dim);/* inverse covariance matrix*/
+
+  double *pdTheta_old=doubleArray(param_len);
+  double *t_pdTheta=doubleArray(param_len); //transformed theta
+  double *t_pdTheta_old=doubleArray(param_len);
+  double Rmat_old[7][7];
+  double Rmat[7][7];
+  double history_full[*iteration_max+1][10];
+
+  /* misc variables */
+  int i, j,main_loop, start;   /* used for various loops */
+
+  /* get random seed */
+  GetRNGstate();
+
+  //assign param
+  Param* params=(Param*) R_alloc(t_samp,sizeof(Param));
+
+  for(i=0;i<t_samp;i++) params[i].setP=&setP;
+  readData(params, n_dim, pdX, sur_W, x1_W1, x0_W2, n_samp, s_samp, x1_samp, x0_samp);
+
+
+
+/***Begin main loop ***/
+main_loop=1;start=1;
+while (main_loop<=*iteration_max && (start==1 ||
+        (setP.sem==0 && !closeEnough(t_pdTheta,t_pdTheta_old,param_len,*convergence)) ||
+        (setP.sem==1 && !semDoneCheck((setParam*)&setP)))) {
+//while (main_loop<=*iteration_max && (start==1 || !closeEnough(transformTheta(pdTheta),transformTheta(pdTheta_old),param_len,*convergence))) {
+
+  setP.iter=main_loop;
+  if (start) {
+    initTheta(pdTheta_in,params,pdTheta);
+    transformTheta(pdTheta,t_pdTheta,param_len, &setP);
+    setHistory(t_pdTheta,0,0,(setParam*)&setP,history_full);
+    if (!setP.ncar) {
+      for(i=0;i<t_samp;i++) {
+        params[i].caseP.mu[0] = pdTheta[0];
+        params[i].caseP.mu[1] = pdTheta[1];
+      }
+      setP.Sigma[0][0] = pdTheta[2];
+      setP.Sigma[1][1] = pdTheta[3];
+      setP.Sigma[0][1] = pdTheta[4]*sqrt(pdTheta[2]*pdTheta[3]);
+      setP.Sigma[1][0] = setP.Sigma[0][1];
+      dinv2D((double*)&setP.Sigma[0][0], 2, (double*)&setP.InvSigma[0][0], "Start of main loop");
+    }
+    else {
+      if (setP.fixedRho) ncarFixedRhoTransform(pdTheta);
+      initNCAR(params,pdTheta);
+      if (setP.fixedRho) ncarFixedRhoUnTransform(pdTheta);
+    }
+    start=0;
+  }
+  for(i=0;i<param_len;i++) setP.pdTheta[i]=pdTheta[i];
+
+  if (setP.verbose>=1) {
+    Rprintf("cycle %d/%d:",main_loop,*iteration_max);
+    for(i=0;i<param_len;i++)
+      if (setP.varParam[i])
+        Rprintf(" %.3f",pdTheta[i]);
+    if (setP.calcLoglik==1 && main_loop>2)
+      Rprintf(" Prev LL: %5g",Suff[setP.suffstat_len]);
+    Rprintf("\n");
+  }
+  //keep the old theta around for comparison
+  for(i=0;i<param_len;i++) pdTheta_old[i]=pdTheta[i];
+  transformTheta(pdTheta_old,t_pdTheta_old,param_len,&setP);
+
+
+  ecoEStep(params, Suff);
+  if (!setP.ncar)
+    ecoMStep(Suff,pdTheta,params);
+  else
+    ecoMStepNCAR(Suff,pdTheta,params);
+  transformTheta(pdTheta,t_pdTheta,param_len,&setP);
+  //char ch;
+  //scanf(" %c", &ch );
+
+  //if we're in the second run through of SEM
+  if (setP.sem==1) {
+    ecoSEM(optTheta, pdTheta, params, Rmat_old, Rmat);
+  }
+  else {
+    setHistory(t_pdTheta,(main_loop<=1) ? 0 : Suff[setP.suffstat_len],main_loop,(setParam*)&setP,history_full);
+  }
+
+
+  if (setP.verbose>=2) {
+    Rprintf("theta and suff\n");
+    if (param_len>5) {
+      Rprintf("%10g%10g%10g%10g%10g%10g%10g%10g%10g\n",pdTheta[0],pdTheta[1],pdTheta[2],pdTheta[3],pdTheta[4],pdTheta[5],pdTheta[6],pdTheta[7],pdTheta[8]);
+    }
+    else {
+      Rprintf("%10g%10g%10g%10g%10g (%10g)\n",pdTheta[0],pdTheta[1],pdTheta[2],pdTheta[3],pdTheta[4],pdTheta[4]*sqrt(pdTheta[2]*pdTheta[3]));
+    }
+    Rprintf("%10g%10g%10g%10g%10g\n",Suff[0],Suff[1],Suff[2],Suff[3],Suff[4]);
+    Rprintf("Sig: %10g%10g%10g\n",setP.Sigma[0][0],setP.Sigma[1][1],setP.Sigma[0][1]);
+    if (setP.ncar) Rprintf("Sig3: %10g%10g%10g%10g\n",setP.Sigma3[0][0],setP.Sigma3[1][1],setP.Sigma3[2][2]);
+    //char x;
+    //R_ReadConsole("hit enter\n",(char*)&x,4,0);
+  }
+  main_loop++;
+  R_FlushConsole();
+  R_CheckUserInterrupt();
+}
+
+/***End main loop ***/
+//finish up: record results and loglik
+Param* param;
+Suff[setP.suffstat_len]=0.0;
+for(i=0;i<param_len;i++) setP.pdTheta[i]=pdTheta[i];
+for(i=0;i<t_samp;i++) {
+   param=&(params[i]);
+  if(i<n_samp) {
+   for(j=0;j<2;j++) inSample[i*2+j]=param->caseP.W[j];
+    //setBounds(param);
+    //setNormConst(param);
+  }
+  Suff[setP.suffstat_len]+=getLogLikelihood(param);
+}
+
+if (setP.verbose>=1) {
+  Rprintf("Final Theta:");
+    for(i=0;i<param_len;i++) Rprintf(" %.3f",pdTheta[i]);
+    if (setP.calcLoglik==1 && main_loop>2) {
+      Rprintf(" Final LL: %5g",Suff[setP.suffstat_len]);
+      history_full[main_loop-1][param_len]=Suff[setP.suffstat_len];
+    }
+    Rprintf("\n");
+  }
+
+//set the DM matrix (only matters for SEM)
+if (setP.sem==1) {
+  int DMlen=0;
+  for(i=0; i<param_len;i++)
+    if(setP.varParam[i]) DMlen++;
+  for(i=0;i<DMlen;i++)
+    for(j=0;j<DMlen;j++)
+      DMmatrix[i*DMlen+j]=Rmat[i][j];
+}
+
+*itersUsed=main_loop;
+for(i=0;i<(*itersUsed);i++) {
+  for(j=0;j<(param_len+1);j++)
+    history[i*(param_len+1)+j]=history_full[i][j];
+}
+
+
+/* write out the random seed */
+PutRNGstate();
+
+/* Freeing the memory */
+Free(pdTheta_old);
+//FreeMatrix(Rmat_old,5);
+//FreeMatrix(Rmat,5);
+}
+
+//initializes Theta, varParam, and semDone
+//input: pdTheta_in,params
+//mutates: params.setP, pdTheta
+//NCAR theta: mu_3, mu_1, mu_2, sig_3, sig_1, sig_2, r_13, r_23, r_12
+void initTheta(double* pdTheta_in,Param* params, double* pdTheta) {
+  setParam* setP=params[0].setP;
+  int param_len=setP->param_len;
+  int i;
+  if (!setP->ncar) {
+    for(i=0;i<param_len;i++) {
+      pdTheta[i]=pdTheta_in[i];
+      setP->varParam[i]=1;
+    }
+    if (setP->fixedRho) setP->varParam[4]=0;
+  }
+  else {
+    //constants
+    double lx,mu3sq;
+    pdTheta[0]=0; mu3sq=0;
+    for(i=0;i<setP->t_samp;i++) {
+      lx=logit(params[i].caseP.X,"initpdTheta0");
+      pdTheta[0] += lx;
+      mu3sq += lx*lx;
+    }
+    pdTheta[0] = pdTheta[0]/setP->t_samp;
+    mu3sq = mu3sq/setP->t_samp;
+    pdTheta[3] = mu3sq-pdTheta[0]*pdTheta[0]; //variance
+    //fill from pdTheta_in
+    pdTheta[1]=pdTheta_in[0];
+    pdTheta[2]=pdTheta_in[1];
+    pdTheta[4]=pdTheta_in[2];
+    pdTheta[5]=pdTheta_in[3];
+    pdTheta[6]=pdTheta_in[4];
+    pdTheta[7]=pdTheta_in[5];
+    pdTheta[8]=pdTheta_in[6];
+    for(i=0;i<param_len;i++) setP->varParam[i]=1;
+    setP->varParam[0]=0;setP->varParam[3]=0;
+    //if (setP->fixedRho) setP->varParam[8]=0;
+  }
+  int varlen=0;
+  for(i=0; i<param_len;i++)
+    if(setP->varParam[i]) varlen++;
+  for(i=0; i<varlen;i++)
+      setP->semDone[i]=0;
+}
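+
+/* Note on initTheta under NCAR: mu_3 (pdTheta[0]) and sig_3 (pdTheta[3]) are
+   set to the sample mean and variance of logit(X) over all t_samp observations
+   and are then held fixed (varParam[0]=varParam[3]=0); the remaining seven
+   entries are copied from pdTheta_in in the order mu_1, mu_2, sig_1, sig_2,
+   r_13, r_23, r_12. */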
+
+/**
+  * The E-step for parametric ecological inference
+  * Takes in a Param array of length t_samp (= n_samp + s_samp + x1_samp + x0_samp)
+  * Suff should be an array of length suffstat_len + 1 (the last slot holds the log-likelihood)
+  * On exit: suff holds the sufficient statistics and loglik as follows
+  * CAR: (0) E[W1*] (1) E[W2*] (2) E[W1*^2] (3) E[W2*^2] (4) E[W1*W2*] (5) loglik
+  * NCAR: (0) X, (1) W1, (2) W2, (3) X^2, (4) W1^2, (5) W2^2, (6) X*W1, (7) X*W2, (8) W1*W2, (9) loglik
+ **/
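+/* Under CAR (absent the optional hypothesis-test offset) these moments give
+   the closed-form M-step in ecoMStep():
+     mu1     = E[W1*],
+     sigma11 = E[W1*^2] - E[W1*]^2,
+     rho     = (E[W1*W2*] - E[W1*]E[W2*]) / sqrt(sigma11*sigma22),
+   and analogously for mu2 and sigma22. */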
+
+
+
+void ecoEStep(Param* params, double* suff) {
+
+int t_samp,n_samp,s_samp,x1_samp,x0_samp,i,j,verbose;
+double loglik,testdens,temp0,temp1; /* temp0, temp1 hold conditional means and must be doubles */
+Param* param; setParam* setP; caseParam* caseP;
+setP=params[0].setP;
+verbose=setP->verbose;
+
+t_samp=setP->t_samp;
+n_samp=setP->n_samp;
+x1_samp=setP->x1_samp;
+x0_samp=setP->x0_samp;
+s_samp=setP->s_samp;
+
+  double **Wstar=doubleMatrix(t_samp,5);     /* pseudo data(transformed)*/
+loglik=0;
+if (verbose>=3 && !setP->sem) Rprintf("E-step start\n");
+  for (i = 0; i<n_samp; i++) {
+    param = &(params[i]);
+    caseP=&(param->caseP);
+    if (caseP->Y>=.990 || caseP->Y<=.010) { //if Y is near the edge, then W1 and W2 are very constrained
+      Wstar[i][0]=logit(caseP->Y,"Y maxmin W1");
+      Wstar[i][1]=logit(caseP->Y,"Y maxmin W2");
+      Wstar[i][2]=Wstar[i][0]*Wstar[i][0];
+      Wstar[i][3]=Wstar[i][0]*Wstar[i][1];
+      Wstar[i][4]=Wstar[i][1]*Wstar[i][1];
+      caseP->Wstar[0]=Wstar[i][0];
+      caseP->Wstar[1]=Wstar[i][1];
+      caseP->W[0]=caseP->Y;
+      caseP->W[1]=caseP->Y;
+      if (setP->calcLoglik==1 && setP->iter>1) loglik+=getLogLikelihood(param);
+      //Rprintf("Skipping %d, Y=%5g",i,caseP->Y);
+    }
+    else {
+      setBounds(param); //I think you only have to do this once...check later
+      /*if (verbose>=2 && setP->iter==12 && i==422) {
+        Rprintf("Bounds: %5g %5g %5g %5g\n",caseP->Wbounds[0][0],caseP->Wbounds[0][1],caseP->Wbounds[1][0],caseP->Wbounds[1][1]);
+        setP->weirdness=1;
+      }
+      else setP->weirdness=0;*/
+
+      setNormConst(param);
+      for (j=0;j<5;j++) {
+        caseP->suff=j;
+        Wstar[i][j]=paramIntegration(&SuffExp,param);
+        if (j<2)
+          caseP->Wstar[j]=Wstar[i][j];
+      }
+      caseP->suff=5;
+      caseP->W[0]=paramIntegration(&SuffExp,param);
+      caseP->suff=6;
+      caseP->W[1]=paramIntegration(&SuffExp,param);
+      caseP->suff=-1;
+      testdens=paramIntegration(&SuffExp,param);
+      if (setP->calcLoglik==1 && setP->iter>1) loglik+=getLogLikelihood(param);
+
+  //report E0 if norm const is extremely high or low
+  //if((caseP->mu[1] > 1.57685) && (caseP->mu[2]<-1.973)) {
+//Rprintf("HIT! %d %5g %5g %5g %5g %5g %5g %5g %5g err:%5g\n", i, caseP->X, caseP->Y, caseP->mu[0], caseP->mu[1], caseP->normcT,Wstar[i][0],Wstar[i][1],Wstar[i][2],fabs(caseP->W[0]-getW1FromW2(caseP->X, caseP->Y,caseP->W[1])));
+  //}
+  //if (fabs(caseP->normcT)<pow(10,-7) || fabs(caseP->normcT)>pow(10,10)) {
+   // Rprintf("E0 %d %5g %5g %5g %5g %5g %5g %5g %5g err:%5g\n", i, caseP->X, caseP->Y, caseP->mu[0], caseP->mu[1], caseP->normcT,Wstar[i][0],Wstar[i][1],Wstar[i][2],fabs(caseP->W[0]-getW1FromW2(caseP->X, caseP->Y,caseP->W[1])));
+  //}
+   //report error E1 if E[W1],E[W2] is not on the tomography line
+  if (fabs(caseP->W[0]-getW1FromW2(caseP->X, caseP->Y,caseP->W[1]))>0.011) {
+    Rprintf("E1 %d %5g %5g %5g %5g %5g %5g %5g %5g err:%5g\n", i, caseP->X, caseP->Y, caseP->mu[0], caseP->mu[1], caseP->normcT,Wstar[i][0],Wstar[i][1],Wstar[i][2],fabs(caseP->W[0]-getW1FromW2(caseP->X, caseP->Y,caseP->W[1])));
+    char ch;
+    scanf("Hit enter to continue %c\n", &ch );
+  }
+  //report error E2 if Jensen's inequality doesn't hold
+  if (Wstar[i][4]<pow(Wstar[i][1],2) || Wstar[i][2]<pow(Wstar[i][0],2))
+     Rprintf("E2 %d %5g %5g %5g %5g %5g %5g %5g %5g\n", i, caseP->X, caseP->Y, caseP->normcT, caseP->mu[1],Wstar[i][0],Wstar[i][1],Wstar[i][2],Wstar[i][4]);
+  //used for debugging if necessary
+  if (verbose>=2 && !setP->sem && ((i<10 && verbose>=3) || (caseP->mu[1] < -1.7 && caseP->mu[0] > 1.4)))
+     Rprintf("%d %5.2f %5.2f %5.2f %5.2f %5.2f %5.2f %5.2f %5.2f %5.2f\n", i, caseP->X, caseP->Y, caseP->mu[0], caseP->mu[1], param->setP->Sigma[0][1], caseP->normcT, caseP->W[0],caseP->W[1],Wstar[i][2]);
+    }
+  }
+
+    /* analytically compute E{W2_i|Y_i} given W1_i, mu and Sigma in X=1 homogeneous areas */
+    for (i=n_samp; i<n_samp+x1_samp; i++) {
+      temp0=params[i].caseP.Wstar[0];
+      temp1=params[i].caseP.mu[1]+setP->Sigma[0][1]/setP->Sigma[0][0]*(temp0-params[i].caseP.mu[0]);
+      Wstar[i][0]=temp0;
+      Wstar[i][1]=temp1;
+      Wstar[i][2]=temp0*temp0;
+      Wstar[i][3]=temp0*temp1;
+      Wstar[i][4]=temp1*temp1;
+    }
+
+  /* analytically compute E{W1_i|Y_i} given W2_i, mu and Sigma in X=0 homogeneous areas */
+    for (i=n_samp+x1_samp; i<n_samp+x1_samp+x0_samp; i++) {
+      temp1=params[i].caseP.Wstar[1];
+      temp0=params[i].caseP.mu[0]+setP->Sigma[0][1]/setP->Sigma[1][1]*(temp1-params[i].caseP.mu[1]);
+      Wstar[i][0]=temp0;
+      Wstar[i][1]=temp1;
+      Wstar[i][2]=temp0*temp0;
+      Wstar[i][3]=temp0*temp1;
+      Wstar[i][4]=temp1*temp1;
+    }
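+    /* Both blocks above impute the unknown component by its conditional-normal
+       mean, e.g. E[W1*|W2*] = mu1 + (Sigma12/Sigma22)*(W2* - mu2); the
+       corresponding second moments are filled in with products of these means. */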
+
+    /* Use the values given by the survey data */
+    //Calculate loglik also
+    for (i=n_samp+x1_samp+x0_samp; i<n_samp+x1_samp+x0_samp+s_samp; i++) {
+      param = &(params[i]);
+      caseP=&(param->caseP);
+      Wstar[i][0]=caseP->Wstar[0];
+      Wstar[i][1]=caseP->Wstar[1];
+      Wstar[i][2]=Wstar[i][0]*Wstar[i][0];
+      Wstar[i][3]=Wstar[i][0]*Wstar[i][1];
+      Wstar[i][4]=Wstar[i][1]*Wstar[i][1];
+      if (setP->calcLoglik==1 && setP->iter>1) loglik+=getLogLikelihood(param);
+    }
+
+
+
+  /*Calculate sufficient statistics */
+  for (j=0; j<setP->suffstat_len; j++)
+    suff[j]=0;
+
+
+  //CAR: (0) E[W1*] (1) E[W2*] (2) E[W1*^2] (3) E[W2*^2] (4) E[W1*W2*] (5) loglik
+  //NCAR: (0) X, (1) W1, (2) W2, (3) X^2, (4) W1^2, (5) W2^2, (6) x*W1, (7) X*W2, (8) W1*W2, (9) loglik
+  /* compute sufficient statistics */
+  for (i=0; i<t_samp; i++) {
+    if (!setP->ncar) {
+      suff[0] += Wstar[i][0];  /* sumE(W_i1|Y_i) */
+      suff[1] += Wstar[i][1];  /* sumE(W_i2|Y_i) */
+      suff[2] += Wstar[i][2];  /* sumE(W_i1^2|Y_i) */
+      suff[3] += Wstar[i][4];  /* sumE(W_i2^2|Y_i) */
+      suff[4] += Wstar[i][3];  /* sumE(W_i1*W_i2|Y_i) */
+    }
+    else if (setP->ncar) {
+      double lx= logit(params[i].caseP.X,"mstep X");
+      suff[0] += lx;
+      suff[1] += Wstar[i][0];
+      suff[2] += Wstar[i][1];
+      suff[3] += lx*lx;
+      suff[4] += Wstar[i][2];
+      suff[5] += Wstar[i][4];
+      suff[6] += params[i].caseP.Wstar[0]*lx;
+      suff[7] += params[i].caseP.Wstar[1]*lx;
+      suff[8] += Wstar[i][3];
+    }
+  }
+
+  for(j=0; j<setP->suffstat_len; j++)
+    suff[j]=suff[j]/t_samp;
+//Rprintf("%5g suff0,2,4 %5g %5g %5g\n",setP->pdTheta[6],suff[0],suff[2],suff[4]);
+  //if(verbose>=1) Rprintf("Log liklihood %15g\n",loglik);
+  suff[setP->suffstat_len]=loglik;
+
+FreeMatrix(Wstar,t_samp);
+}
+
+//Standard M-Step
+//input: Suff
+//output or mutated: pdTheta, params
+void ecoMStep(double* Suff, double* pdTheta, Param* params) {
+
+int i;
+setParam* setP=params[0].setP;
+
+  pdTheta[0]=Suff[0];  /*mu1*/
+  pdTheta[1]=Suff[1];  /*mu2*/
+
+if (setP->hypTest>0) {
+  MStepHypTest(params,pdTheta);
+}
+
+  if (!setP->fixedRho) { //standard
+    pdTheta[2]=Suff[2]-2*Suff[0]*pdTheta[0]+pdTheta[0]*pdTheta[0];  //sigma11
+    pdTheta[3]=Suff[3]-2*Suff[1]*pdTheta[1]+pdTheta[1]*pdTheta[1];  //sigma22
+    pdTheta[4]=Suff[4]-Suff[0]*pdTheta[1]-Suff[1]*pdTheta[0]+pdTheta[0]*pdTheta[1]; //sigma12
+    pdTheta[4]=pdTheta[4]/sqrt(pdTheta[2]*pdTheta[3]); /*rho*/
+  }
+  else { //fixed rho
+
+    double Imat[2][2];
+    Imat[0][0]=Suff[2]-2*pdTheta[0]*Suff[0]+pdTheta[0]*pdTheta[0];  //I_11
+    Imat[1][1]=Suff[3]-2*Suff[1]*pdTheta[1]+pdTheta[1]*pdTheta[1];  //I_22
+    Imat[0][1]=Suff[4]-Suff[0]*pdTheta[1]-Suff[1]*pdTheta[0]+pdTheta[0]*pdTheta[1];  //I_12
+    pdTheta[2]=(Imat[0][0]-pdTheta[4]*Imat[0][1]*pow(Imat[0][0]/Imat[1][1],0.5))/(1-pdTheta[4]*pdTheta[4]); //sigma11
+    pdTheta[3]=(Imat[1][1]-pdTheta[4]*Imat[0][1]*pow(Imat[1][1]/Imat[0][0],0.5))/(1-pdTheta[4]*pdTheta[4]); //sigma22
+    //sigma12 will be determined below by rho
+  }
+
+    //set Sigma
+  setP->Sigma[0][0] = pdTheta[2];
+  setP->Sigma[1][1] = pdTheta[3];
+  setP->Sigma[0][1] = pdTheta[4]*sqrt(pdTheta[2]*pdTheta[3]);
+  setP->Sigma[1][0] = setP->Sigma[0][1];
+
+  //if(setP->verbose>=3) Rprintf("Sigma mstep: %5g %5g %5g %5g\n",setP->Sigma[0][0],setP->Sigma[0][1],setP->Sigma[1][0],setP->Sigma[1][1]);
+  dinv2D((double*)(&(setP->Sigma[0][0])), 2, (double*)(&(setP->InvSigma[0][0])),"regular M-step");
+
+  /* assign each data point the new mu (same for all points) */
+  for(i=0;i<setP->t_samp;i++) {
+    params[i].caseP.mu[0]=pdTheta[0];
+    params[i].caseP.mu[1]=pdTheta[1];
+  }
+}
+
+
+//M-Step under NCAR
+//NCAR: (0) X, (1) W1, (2) W2, (3) X^2, (4) W1^2, (5) W2^2, (6) x*W1, (7) X*W2, (8) W1*W2, (9) loglik
+void ecoMStepNCAR(double* Suff, double* pdTheta, Param* params) {
+
+  setParam* setP=params[0].setP;
+  //double Sigma[2][2]=setP->Sigma;
+  //double[2][2] InvSigma=setP->InvSigma;
+  //double[3][3] Sigma3=setP->Sigma3;   /* covariance matrix*/
+  //double[3][3] InvSigma3=setP->Sigma3;   /* inverse covariance matrix*/
+  int ii,i,j,verbose,t_samp;
+  verbose=setP->verbose;
+  t_samp=setP->t_samp;
+
+
+  //set E[XW*]
+  double XW1=Suff[6];
+  double XW2=Suff[7];
+
+
+
+  //for(i = 0;i<9; i++) Rprintf("%f5.2\n",pdTheta[i]);
+  if (!setP->fixedRho) { //variable rho
+
+
+    //pdTheta[0] is const
+    pdTheta[1]=Suff[1];  /*mu1*/
+    pdTheta[2]=Suff[2];  /*mu2*/
+
+    //set variances and correlations
+    //pdTheta[3] is const
+    pdTheta[4]=Suff[4]-2*Suff[1]*pdTheta[1]+pdTheta[1]*pdTheta[1]; //s11
+    pdTheta[5]=Suff[5]-2*Suff[2]*pdTheta[2]+pdTheta[2]*pdTheta[2]; //s22
+    pdTheta[6]=(XW1 - pdTheta[0]*Suff[1])/sqrt((Suff[4] - Suff[1]*Suff[1])*pdTheta[3]); //rho_13
+    pdTheta[7]=(XW2 - pdTheta[0]*Suff[2])/sqrt((Suff[5] - Suff[2]*Suff[2])*pdTheta[3]); //rho_23
+    pdTheta[8]=Suff[8]-Suff[1]*pdTheta[2]-Suff[2]*pdTheta[1]+pdTheta[1]*pdTheta[2]; //sigma12
+    pdTheta[8]=pdTheta[8]/sqrt(pdTheta[4]*pdTheta[5]); //rho_12
+
+
+    //reference: (0) mu_3, (1) mu_1, (2) mu_2, (3) sig_3, (4) sig_1, (5) sig_2, (6) r_13, (7) r_23, (8) r_12
+    //variances
+    setP->Sigma3[0][0] = pdTheta[4];
+    setP->Sigma3[1][1] = pdTheta[5];
+    setP->Sigma3[2][2] = pdTheta[3];
+
+    //covariances
+    setP->Sigma3[0][1] = pdTheta[8]*sqrt(pdTheta[4]*pdTheta[5]);
+    setP->Sigma3[0][2] = pdTheta[6]*sqrt(pdTheta[4]*pdTheta[3]);
+    setP->Sigma3[1][2] = pdTheta[7]*sqrt(pdTheta[5]*pdTheta[3]);
+
+    //symmetry
+    setP->Sigma3[1][0] = setP->Sigma3[0][1];
+    setP->Sigma3[2][0] = setP->Sigma3[0][2];
+    setP->Sigma3[2][1] = setP->Sigma3[1][2];
+              //if (verbose>=2) {
+            //Rprintf("Sigma3: %5g %5g %5g %5g %5g\n",setP->Sigma3[0][0],setP->Sigma3[0][1],setP->Sigma3[1][1],setP->Sigma3[1][2],setP->Sigma3[2][2]);
+          //}
+
+  }
+  else { //fixed rho
+    //reference: (0) mu_3, (1) mu_1, (2) mu_2, (3) sig_3, (4) sig_1 | 3, (5) sig_2 | 3, (6) beta1, (7) beta2, (8) r_12 | 3
+
+    ncarFixedRhoTransform(pdTheta); //need the fixed param (pdTheta[8]) to be the conditional correlation
+
+    //CODE BLOCK D
+    //compute beta based on previous sigma
+    //beta is mu1,beta1,mu2,beta, which are pdTheta 1,2,6,7
+    double **InvSigma=doubleMatrix(2,2);
+    double **Zmat=doubleMatrix(4,2);
+    double **Zmat_t=doubleMatrix(2,4);
+    double **tmp41=doubleMatrix(4,1);
+    double **tmp42=doubleMatrix(4,2);
+    double **tmp44=doubleMatrix(4,4);
+    double **tmp21=doubleMatrix(2,1);
+    double **denom=doubleMatrix(4,4);
+    double **numer=doubleMatrix(4,1);
+    for (i=0;i<4;i++) {
+      for(j=0;j<4;j++) {
+        if (j<2) {
+          if (i<2) InvSigma[i][j]=setP->InvSigma[i][j];
+          Zmat[i][j]=0; Zmat_t[j][i]=0;
+        }
+        denom[i][j]=0;
+      }
+      numer[i][0]=0;
+    }
+//Rprintf("InvSigma %5g %5g %5g\n",InvSigma[0][0],InvSigma[1][1],InvSigma[0][1]);
+    for(ii=0;ii<setP->t_samp;ii++) {
+        double lx=logit(params[ii].caseP.X,"NCAR beta");
+        for(j=0;j<2;j++) {
+          Zmat_t[j][j*2+1]=lx - pdTheta[0];
+          Zmat_t[j][j*2]=1;
+          Zmat[j*2+1][j]=lx - pdTheta[0];
+          Zmat[j*2][j]=1;
+        }
+        matrixMul(Zmat,InvSigma,4,2,2,2,tmp42);
+        matrixMul(tmp42,Zmat_t,4,2,2,4,tmp44);
+        for (i=0;i<4;i++)
+          for(j=0;j<4;j++)
+            denom[i][j]+=tmp44[i][j];
+        //for (i=0;i<2;i++) tmp21[i][0]=(params[ii].caseP.Wstar[i] - pdTheta[i+1]); //Wtilde ??
+        for (i=0;i<2;i++) tmp21[i][0]=params[ii].caseP.Wstar[i]; //Wstar
+        //matrixMul(Zmat,InvSigma,4,2,2,2,tmp42);  //no need to repeat calculation
+        matrixMul(tmp42,tmp21,4,2,2,1,tmp41);
+        for (i=0;i<4;i++) numer[i][0]+=tmp41[i][0];
+    }
+    dinv(denom,4,denom);
+    matrixMul(denom,numer,4,4,4,1,numer);
+
+    pdTheta[1]=numer[0][0]; //mu1
+    pdTheta[6]=numer[1][0]; //beta1
+    pdTheta[2]=numer[2][0]; //mu2
+    pdTheta[7]=numer[3][0]; //beta2
+    //pdTheta[8] is constant
+//Rprintf("Compare Suff1 %5g to pdT1 %5g \n",Suff[1],pdTheta[1]);
+//Rprintf("Compare Suff2 %5g to pdT2 %5g \n",Suff[2],pdTheta[2]);
+
+    if (setP->hypTest>0) {
+      MStepHypTest(params,pdTheta);
+    }
+
+    //CAR: (0) E[W1*] (1) E[W2*] (2) E[W1*^2] (3) E[W2*^2] (4) E[W1*W2*] (5) loglik
+    //NCAR: (0) X, (1) W1, (2) W2, (3) X^2, (4) W1^2, (5) W2^2, (6) x*W1, (7) X*W2, (8) W1*W2, (9) loglik
+    //0->1, 1->2, 2->4, 3->5, 4->8
+
+
+    //CODE BLOCK C
+    //Compute sigma conditional on beta
+    //reference: (0) mu_3, (1) mu_1, (2) mu_2, (3) sig_3, (4) sig_1 | 3, (5) sig_2 | 3, (6) beta1, (7) beta2, (8) r_12 | 3
+    double Smat[2][2]; //the S matrix (divided by n) in the paper
+    double Tmat[2][2]; //the T matrix (divided by n) in the paper
+    double S1=Suff[1]; //S_1 = Sufficient stat of W1* - beta1 * (sum of [(X_i - \mu3)]) ; second term goes to zero
+    double S2=Suff[2]; //S_2 =  Sufficient stat of W2*
+
+    Smat[0][0]=Suff[4] - 2*pdTheta[6]*(XW1 - pdTheta[0]*Suff[1]) + pdTheta[6]*pdTheta[6]*pdTheta[3];  //S_11
+    Smat[1][1]=Suff[5] - 2*pdTheta[7]*(XW2 - pdTheta[0]*Suff[2]) + pdTheta[7]*pdTheta[7]*pdTheta[3];  //S_22
+    Smat[0][1]=Suff[8] - pdTheta[6]*(XW2 - pdTheta[0]*Suff[2]) - pdTheta[7]*(XW1 - pdTheta[0]*Suff[1]) + pdTheta[6]*pdTheta[7]*pdTheta[3] ;  //S_12
+    Tmat[0][0]=Smat[0][0] - S1*S1;
+    Tmat[1][1]=Smat[1][1] - S2*S2;
+    Tmat[0][1]=Smat[0][1] - S1*S2;
+    pdTheta[4]=(Tmat[0][0]-pdTheta[8]*Tmat[0][1]*pow(Tmat[0][0]/Tmat[1][1],0.5))/(1-pdTheta[8]*pdTheta[8]); //sigma11 | 3
+    pdTheta[5]=(Tmat[1][1]-pdTheta[8]*Tmat[0][1]*pow(Tmat[1][1]/Tmat[0][0],0.5))/(1-pdTheta[8]*pdTheta[8]); //sigma22 | 3
+
+    //variances
+    //CODE BLOCK B
+    setP->Sigma3[0][0] = pdTheta[4] + pdTheta[6]*pdTheta[6]*pdTheta[3];
+    setP->Sigma3[1][1] = pdTheta[5] + pdTheta[7]*pdTheta[7]*pdTheta[3];
+    setP->Sigma3[2][2] = pdTheta[3];
+
+    //covariances
+    setP->Sigma3[0][1] = (pdTheta[8]*sqrt(pdTheta[4]*pdTheta[5]) + pdTheta[6]*pdTheta[7]*pdTheta[3])/
+                          (sqrt((pdTheta[4] + pdTheta[6]*pdTheta[6]*pdTheta[3])*(pdTheta[5] + pdTheta[7]*pdTheta[7]*pdTheta[3])));//rho_12 unconditional
+    setP->Sigma3[0][1] = setP->Sigma3[0][1]*sqrt(setP->Sigma3[0][0]*setP->Sigma3[1][1]); //sig_12
+    setP->Sigma3[0][2] = pdTheta[6]*sqrt((pdTheta[3])/(pdTheta[4] + pdTheta[6]*pdTheta[6]*pdTheta[3]))*sqrt(setP->Sigma3[0][0]*setP->Sigma3[2][2]);
+    setP->Sigma3[1][2] = pdTheta[7]*sqrt((pdTheta[3])/(pdTheta[5] + pdTheta[7]*pdTheta[7]*pdTheta[3]))*sqrt(setP->Sigma3[1][1]*setP->Sigma3[2][2]);
+
+    //symmetry
+    setP->Sigma3[1][0] = setP->Sigma3[0][1];
+    setP->Sigma3[2][0] = setP->Sigma3[0][2];
+    setP->Sigma3[2][1] = setP->Sigma3[1][2];
+  }
+  dinv2D((double*)(&(setP->Sigma3[0][0])), 3, (double*)(&(setP->InvSigma3[0][0])),"NCAR M-step S3");
+  initNCAR(params,pdTheta);
+  if (setP->fixedRho) ncarFixedRhoUnTransform(pdTheta);
+}
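+
+/* Note on ecoMStepNCAR: with variable rho, the updates are sample-moment
+   estimates on the logit scale, e.g.
+     rho_13 = Cov(X*, W1*) / sqrt(Var(W1*) * sig_3),
+   computed above as (E[X*W1*] - mu_3*E[W1*]) / sqrt((E[W1*^2]-E[W1*]^2)*sig_3).
+   With fixed rho, (mu_1, beta_1, mu_2, beta_2) solve the GLS system accumulated
+   in CODE BLOCK D using the current InvSigma, and the conditional variances
+   are then recovered from the T matrix in CODE BLOCK C. */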
+
+//M-Step under CCAR
+void ecoMStepCCAR(double* Suff, double* pdTheta, Param* params) {
+    setParam* setP=params[0].setP;
+    int k=setP->ccar_nvar;
+    int ii,i,j,verbose,t_samp;
+    verbose=setP->verbose;
+    t_samp=setP->t_samp;
+    double **InvSigma=doubleMatrix(2,2);
+    double **Z_i=doubleMatrix(k,2);
+    double **Z_i_t=doubleMatrix(2,k);
+    double **tmpk1=doubleMatrix(k,1);
+    double **tmpk2=doubleMatrix(k,2);
+    double **tmpkk=doubleMatrix(k,k);
+    double **tmp21=doubleMatrix(2,1);
+    double **tmp21_b=doubleMatrix(2,1);
+    double **tmp12=doubleMatrix(1,2);
+    double **tmp22=doubleMatrix(2,2);
+    double **denom=doubleMatrix(k,k);
+    double **numer=doubleMatrix(k,1);
+//betas
+    for (i=0;i<k;i++) {
+      for(j=0;j<k;j++) {
+        if (j<2) {
+          if (i<2) InvSigma[i][j]=setP->InvSigma[i][j];
+        }
+        denom[i][j]=0;
+      }
+      numer[i][0]=0;
+    }
+//Rprintf("InvSigma %5g %5g %5g\n",InvSigma[0][0],InvSigma[1][1],InvSigma[0][1]);
+    for(ii=0;ii<setP->t_samp;ii++) {
+      for (i=0;i<k;i++) {
+        for(j=0;j<k;j++) {
+          Z_i[i][j]=params[ii].caseP.Z_i[i][j];
+          Z_i_t[i][j]=params[ii].caseP.Z_i[j][i];
+        }
+      }
+        matrixMul(Z_i,InvSigma,k,2,2,2,tmpk2);
+        matrixMul(tmpk2,Z_i_t,k,2,2,k,tmpkk);
+        for (i=0;i<k;i++)
+          for(j=0;j<k;j++)
+            denom[i][j]+=tmpkk[i][j];
+        for (i=0;i<2;i++) tmp21[i][0]=params[ii].caseP.Wstar[i]; //Wstar
+        matrixMul(tmpk2,tmp21,k,2,2,1,tmpk1);
+        for (i=0;i<k;i++) numer[i][0]+=tmpk1[i][0];
+    }
+    dinv(denom,k,denom);
+    matrixMul(denom,numer,k,k,k,1,numer);
+    for(i=0; i<k;i++) pdTheta[i]=numer[i][0]; //betas
+
+
+    if (setP->hypTest>0) {
+      MStepHypTest(params,pdTheta);
+    }
+
+//conditional Sigma
+    //start at 0
+    for(i=0; i<2;i++)
+      for(j=0; j<2;j++)
+        setP->Sigma[i][j] = 0;
+
+
+    for(ii=0;ii<setP->t_samp;ii++) {
+      for (i=0;i<k;i++) {
+        for(j=0;j<k;j++) {
+          Z_i_t[i][j]=params[ii].caseP.Z_i[j][i];
+        }
+      }
+      matrixMul(Z_i_t,numer,2,k,k,1,tmp21_b);
+      for (i=0;i<2;i++) tmp21[i][0]=params[ii].caseP.Wstar[i]; //Wstar
+      for (i=0;i<2;i++) tmp21[i][0] = tmp21[i][0] - tmp21_b[i][0]; //Wstar - Z_t*B
+      for (i=0;i<2;i++) tmp12[0][i] = tmp21[i][0]; //invserse
+      matrixMul(tmp21,tmp12,2,1,1,2,tmp22);
+      for(i=0; i<2;i++)
+        for(j=0; j<2;j++)
+          setP->Sigma[i][j] += tmp22[i][j];
+    }
+    dinv2D((double*)(&(setP->Sigma[0][0])), 2, (double*)(&(setP->InvSigma[0][0])),"CCAR M-step S2");
+
+    //variances
+    //CODE BLOCK B
+    setP->Sigma3[0][0] = pdTheta[4] + pdTheta[6]*pdTheta[6]*pdTheta[3];
+    setP->Sigma3[1][1] = pdTheta[5] + pdTheta[7]*pdTheta[7]*pdTheta[3];
+    setP->Sigma3[2][2] = pdTheta[3];
+
+    //covariances
+    setP->Sigma3[0][1] = (pdTheta[8]*sqrt(pdTheta[4]*pdTheta[5]) + pdTheta[6]*pdTheta[7]*pdTheta[3])/
+                          (sqrt((pdTheta[4] + pdTheta[6]*pdTheta[6]*pdTheta[3])*(pdTheta[5] + pdTheta[7]*pdTheta[7]*pdTheta[3])));//rho_12 unconditional
+    setP->Sigma3[0][1] = setP->Sigma3[0][1]*sqrt(setP->Sigma3[0][0]*setP->Sigma3[1][1]); //sig_12
+    setP->Sigma3[0][2] = pdTheta[6]*sqrt((pdTheta[3])/(pdTheta[4] + pdTheta[6]*pdTheta[6]*pdTheta[3]))*sqrt(setP->Sigma3[0][0]*setP->Sigma3[2][2]);
+    setP->Sigma3[1][2] = pdTheta[7]*sqrt((pdTheta[3])/(pdTheta[5] + pdTheta[7]*pdTheta[7]*pdTheta[3]))*sqrt(setP->Sigma3[1][1]*setP->Sigma3[2][2]);
+
+    //symmetry
+    setP->Sigma3[1][0] = setP->Sigma3[0][1];
+    setP->Sigma3[2][0] = setP->Sigma3[0][2];
+    setP->Sigma3[2][1] = setP->Sigma3[1][2];
+
+  dinv2D((double*)(&(setP->Sigma3[0][0])), 3, (double*)(&(setP->InvSigma3[0][0])),"NCAR M-step S3");
+  initNCAR(params,pdTheta);
+
+}
+
+/**
+ * Extra M-Step for hypothesis testing
+ * Mutates pdTheta
+ */
+void MStepHypTest(Param* params, double* pdTheta) {
+  setParam* setP=params[0].setP;
+  double offset,denom;
+  int dim,i,j,l,k;
+  dim=setP->ncar ? 3 : 2;
+  l=setP->hypTest;
+  double** Sigma=doubleMatrix(dim,dim);
+  double** temp_LbyD=doubleMatrix(l,dim);
+  double** temp_DbyL=doubleMatrix(dim,l);
+  double** temp_LbyL=doubleMatrix(l,l);
+
+  for(i=0;i<dim;i++)
+    for(j=0;j<dim;j++) {
+      if (dim==3) {
+        Sigma[i][j]=setP->Sigma3[i][j];
+      }
+      else {
+        Sigma[i][j]=setP->Sigma[i][j];
+      }
+    }
+  //transpose
+  double** hypTestCoeffT=doubleMatrix(l,dim);
+  for(i=0;i<dim;i++) hypTestCoeffT[0][i]=setP->hypTestCoeff[i][0];
+
+  //numerator
+  for(k=0;k<2;k++) temp_DbyL[k][0]=0;
+  for(i=0;i<setP->t_samp;i++) {
+    temp_DbyL[0][0]+=params[i].caseP.Wstar[0];
+    temp_DbyL[1][0]+=params[i].caseP.Wstar[1];
+  }
+  matrixMul(hypTestCoeffT,temp_DbyL,l,dim,dim,l,temp_LbyL);
+  temp_LbyL[0][0]=temp_LbyL[0][0]-(setP->t_samp*setP->hypTestResult);
+  matrixMul(Sigma,setP->hypTestCoeff,dim,dim,dim,l,temp_DbyL);
+  for(k=0;k<2;k++) temp_DbyL[k][0]*=temp_LbyL[0][0];
+
+  //denominator
+  //matrixMul(hypTestCoeffT,InvSigma,l,dim,dim,dim,temp_LbyD);
+  matrixMul(hypTestCoeffT,Sigma,l,dim,dim,dim,temp_LbyD);
+  matrixMul(temp_LbyD,setP->hypTestCoeff,l,dim,dim,l,temp_LbyL);
+  denom=setP->t_samp*temp_LbyL[0][0];
+
+  //offset theta
+  for(k=0;k<2;k++) {
+   offset=temp_DbyL[k][0]/denom;
+   int kindex= (setP->ncar) ? (k+1) : k;
+   pdTheta[kindex]=pdTheta[kindex]-offset;
+  }
+
+}
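+
+/* In effect, MStepHypTest applies the constrained-M-step offset
+     theta_k -= [Sigma c]_k * (c' * sum_i W*_i - n*r) / (n * c' * Sigma * c),
+   where c = hypTestCoeff, r = hypTestResult and n = t_samp; under NCAR the
+   offset is applied to the mu_1, mu_2 slots (index k+1). */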
+
+
+//NCAR initialize
+//note that for fixed rho, the input is the UNTRANSFORMED PARAMETERS
+void initNCAR(Param* params, double* pdTheta) {
+  setParam* setP=params[0].setP;
+    int i;
+  if (!setP->fixedRho) { //variable rho
+    //reference: (0) mu_3, (1) mu_1, (2) mu_2, (3) sig_3, (4) sig_1, (5) sig_2, (6) r_13, (7) r_23, (8) r_12
+
+    setP->Sigma[0][0]= pdTheta[4]*(1 - pdTheta[6]*pdTheta[6]);
+    setP->Sigma[1][1]= pdTheta[5]*(1 - pdTheta[7]*pdTheta[7]);
+    setP->Sigma[0][1]= (pdTheta[8] - pdTheta[6]*pdTheta[7])/sqrt((1 - pdTheta[6]*pdTheta[6])*(1 - pdTheta[7]*pdTheta[7])); //correlation
+    setP->Sigma[0][1]= setP->Sigma[0][1]*sqrt(setP->Sigma[0][0]*setP->Sigma[1][1]); //covar
+    setP->Sigma[1][0]= setP->Sigma[0][1]; //symmetry
+    dinv2D((double*)(&(setP->Sigma[0][0])), 2, (double*)(&(setP->InvSigma[0][0])),"NCAR M-step S2");
+    //assign each data point the new mu (different for each point)
+    for(i=0;i<setP->t_samp;i++) {
+      params[i].caseP.mu[0]=pdTheta[1] + pdTheta[6]*sqrt(pdTheta[4]/pdTheta[3])*(logit(params[i].caseP.X,"initNCAR mu0")-pdTheta[0]);
+      params[i].caseP.mu[1]=pdTheta[2] + pdTheta[7]*sqrt(pdTheta[5]/pdTheta[3])*(logit(params[i].caseP.X,"initNCAR mu1")-pdTheta[0]);
+      if(setP->verbose>=2 && !setP->sem && (i<3 || i==422))
+      //if(setP->verbose>=2  && i<3)
+        Rprintf("mu primes for %d: %5g %5g (mu2: %5g p7: %5g p5: %5g X-T: %5g)\n",i,params[i].caseP.mu[0],params[i].caseP.mu[1],pdTheta[2],pdTheta[7],pdTheta[5],logit(params[i].caseP.X,"initNCAR mu0")-pdTheta[0]);
+    }
+  }
+  else { //fixed rho
+    //reference: (0) mu_3, (1) mu_1, (2) mu_2, (3) sig_3, (4) sig_1 | 3, (5) sig_2 | 3, (6) beta1, (7) beta2, (8) r_12 | 3
+    //CODE BLOCK A
+    setP->Sigma[0][0]= pdTheta[4];
+    setP->Sigma[1][1]= pdTheta[5];
+    setP->Sigma[0][1]= pdTheta[8]*sqrt(pdTheta[4]*pdTheta[5]); //covar
+    setP->Sigma[1][0]= setP->Sigma[0][1]; //symmetry
+    dinv2D((double*)(&(setP->Sigma[0][0])), 2, (double*)(&(setP->InvSigma[0][0])),"NCAR M-step S2");
+
+    for(i=0;i<setP->t_samp;i++) {
+      params[i].caseP.mu[0]=pdTheta[1] + pdTheta[6]*(logit(params[i].caseP.X,"initNCAR mu0")-pdTheta[0]);
+      params[i].caseP.mu[1]=pdTheta[2] + pdTheta[7]*(logit(params[i].caseP.X,"initNCAR mu1")-pdTheta[0]);
+      if(setP->verbose>=2 && !setP->sem && (i<3 || i==422))
+      //if(setP->verbose>=2  && i<3)
+        Rprintf("mu primes for %d: %5g %5g (mu2: %5g p7: %5g p5: %5g X-T: %5g)\n",i,params[i].caseP.mu[0],params[i].caseP.mu[1],pdTheta[2],pdTheta[7],pdTheta[5],logit(params[i].caseP.X,"initNCAR mu0")-pdTheta[0]);
+    }
+
+  }
+}
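+
+/* initNCAR (variable rho) sets Sigma to the covariance of (W1*, W2*) given X*:
+     Sigma11 = sig_1*(1 - r_13^2),  Sigma22 = sig_2*(1 - r_23^2),
+     corr    = (r_12 - r_13*r_23)/sqrt((1 - r_13^2)*(1 - r_23^2)),
+   and gives each observation the conditional mean
+     mu_k(i) = mu_k + r_k3*sqrt(sig_k/sig_3)*(logit(X_i) - mu_3).
+   Under fixed rho the same assignments use the already-conditional parameters,
+   with beta_k in place of r_k3*sqrt(sig_k/sig_3). */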
+
+//CCAR initialize
+void initCCAR(Param* params, double* pdTheta) {
+  setParam* setP=params[0].setP;
+    int i;
+  if (!setP->fixedRho) { //variable rho
+    //reference: (0) mu_3, (1) mu_1, (2) mu_2, (3) sig_3, (4) sig_1, (5) sig_2, (6) r_13, (7) r_23, (8) r_12
+
+    setP->Sigma[0][0]= pdTheta[4]*(1 - pdTheta[6]*pdTheta[6]);
+    setP->Sigma[1][1]= pdTheta[5]*(1 - pdTheta[7]*pdTheta[7]);
+    setP->Sigma[0][1]= (pdTheta[8] - pdTheta[6]*pdTheta[7])/sqrt((1 - pdTheta[6]*pdTheta[6])*(1 - pdTheta[7]*pdTheta[7])); //correlation
+    setP->Sigma[0][1]= setP->Sigma[0][1]*sqrt(setP->Sigma[0][0]*setP->Sigma[1][1]); //covar
+    setP->Sigma[1][0]= setP->Sigma[0][1]; //symmetry
+    dinv2D((double*)(&(setP->Sigma[0][0])), 2, (double*)(&(setP->InvSigma[0][0])),"NCAR M-step S2");
+    //assign each data point the new mu (different for each point)
+    for(i=0;i<setP->t_samp;i++) {
+      params[i].caseP.mu[0]=pdTheta[1] + pdTheta[6]*sqrt(pdTheta[4]/pdTheta[3])*(logit(params[i].caseP.X,"initNCAR mu0")-pdTheta[0]);
+      params[i].caseP.mu[1]=pdTheta[2] + pdTheta[7]*sqrt(pdTheta[5]/pdTheta[3])*(logit(params[i].caseP.X,"initNCAR mu1")-pdTheta[0]);
+      if(setP->verbose>=2 && !setP->sem && (i<3 || i==422))
+      //if(setP->verbose>=2  && i<3)
+        Rprintf("mu primes for %d: %5g %5g (mu2: %5g p7: %5g p5: %5g X-T: %5g)\n",i,params[i].caseP.mu[0],params[i].caseP.mu[1],pdTheta[2],pdTheta[7],pdTheta[5],logit(params[i].caseP.X,"initNCAR mu0")-pdTheta[0]);
+    }
+  }
+  else { //fixed rho
+  }
+}
+
+/*
+ * input: optTheta,pdTheta,params,Rmat
+ * output: param_len x param_len matrices Rmat and Rmat_old
+ * optTheta is optimal theta
+ * pdTheta is current theta
+ * Rmat_old contains the input Rmat
+ */
+ void ecoSEM(double* optTheta, double* pdTheta, Param* params, double Rmat_old[7][7], double Rmat[7][7]) {
+    //assume we have optTheta, ie \hat{phi}
+    //pdTheta is phi^{t+1}
+    int i,j,verbose,len,param_len;
+    setParam setP_sem=*(params[0].setP);
+    param_len=setP_sem.param_len;
+    double *SuffSem=doubleArray(setP_sem.suffstat_len+1); //sufficient stats
+    double phiTI[param_len]; //phi^t_i
+    double phiTp1I[param_len]; //phi^{t+1}_i
+    double t_optTheta[param_len]; //transformed optimal
+    double t_phiTI[param_len]; //transformed phi^t_i
+    double t_phiTp1I[param_len]; //transformed phi^{t+1}_i
+    Param* params_sem=(Param*) Calloc(params->setP->t_samp,Param);
+    verbose=setP_sem.verbose;
+    //determine length of R matrix
+    len=0;
+    for(j=0; j<param_len;j++)
+      if(setP_sem.varParam[j]) len++;
+
+    //first, save old Rmat
+    for(i=0;i<len;i++)
+      for(j=0;j<len;j++)
+        Rmat_old[i][j]=Rmat[i][j];
+
+    for(i=0;i<len;i++) {
+      if (!setP_sem.semDone[i]) { //we're not done with this row
+        //step 1: set phi^t_i
+        if (verbose>=2) Rprintf("Theta(%d):",(i+1));
+        int switch_index_ir=0; int switch_index_it;
+        for(j=0;j<param_len;j++) {
+          if (!setP_sem.varParam[j]) //const
+            phiTI[j]=optTheta[j];
+          else {
+            if (i==switch_index_ir) {
+              phiTI[j]=pdTheta[j]; //current value
+              switch_index_it=j;
+            }
+            else phiTI[j]=optTheta[j]; //optimal value
+            switch_index_ir++;
+          }
+          if (verbose>=2) Rprintf(" %5g ", phiTI[j]);
+        }
+        //if (setP_sem.fixedRho) {
+        //  phiTI[len-1]=pdTheta[len-1];
+        //  phiTp1I[len-1]=pdTheta[len-1];
+        // if (verbose>=2) Rprintf(" %5g ", phiTI[len-1]);
+        //}
+        if (verbose>=2) Rprintf("\n");
+        for(j=0;j<param_len;j++) phiTp1I[j]=phiTI[j]; //init next iteration
+
+        //step 2: run an E-step and an M-step with phi^t_i
+        //initialize params
+        if (!setP_sem.ncar) {
+          for(j=0;j<setP_sem.t_samp;j++) {
+            params_sem[j].setP=&setP_sem;
+            params_sem[j].caseP=params[j].caseP;
+            params_sem[j].caseP.mu[0] = phiTI[0];
+            params_sem[j].caseP.mu[1] = phiTI[1];
+          }
+          setP_sem.Sigma[0][0] = phiTI[2];
+          setP_sem.Sigma[1][1] = phiTI[3];
+          setP_sem.Sigma[0][1] = phiTI[4]*sqrt(phiTI[2]*phiTI[3]);
+          setP_sem.Sigma[1][0] = setP_sem.Sigma[0][1];
+          dinv2D((double*)(&(setP_sem.Sigma[0][0])), 2, (double*)(&(setP_sem.InvSigma[0][0])), "SEM: CAR init ");
+        }
+        else {
+          for(j=0;j<setP_sem.t_samp;j++) {
+            params_sem[j].setP=&setP_sem;
+            params_sem[j].caseP=params[j].caseP;
+          }
+          setP_sem.Sigma3[0][0] = phiTI[4];
+          setP_sem.Sigma3[1][1] = phiTI[5];
+          setP_sem.Sigma3[2][2] = phiTI[3];
+
+          //covariances
+          setP_sem.Sigma3[0][1] = phiTI[8]*sqrt(phiTI[4]*phiTI[5]);
+          setP_sem.Sigma3[0][2] = phiTI[6]*sqrt(phiTI[4]*phiTI[3]);
+          setP_sem.Sigma3[1][2] = phiTI[7]*sqrt(phiTI[5]*phiTI[3]);
+
+          //symmetry
+          setP_sem.Sigma3[1][0] = setP_sem.Sigma3[0][1];
+          setP_sem.Sigma3[2][0] = setP_sem.Sigma3[0][2];
+          setP_sem.Sigma3[2][1] = setP_sem.Sigma3[1][2];
+          if (verbose>=2) {
+            Rprintf("Sigma3: %5g %5g %5g %5g %5g %5g; %5g %5g\n",setP_sem.Sigma3[0][0],setP_sem.Sigma3[0][1],setP_sem.Sigma3[1][1],setP_sem.Sigma3[0][2],setP_sem.Sigma3[1][2],setP_sem.Sigma3[2][2],*(&(setP_sem.Sigma3[0][0])+0),*(&(setP_sem.Sigma3[0][0])+8));
+          }
+          dinv2D((double*)(&(setP_sem.Sigma3[0][0])), 3, (double*)(&(setP_sem.InvSigma3[0][0])),"SEM: NCAR Sig3 init");
+          if (verbose>=2) {
+            Rprintf("Check 1");
+          }
+          if (setP_sem.fixedRho) ncarFixedRhoTransform(phiTI);
+          initNCAR(params_sem,phiTI);
+          if (setP_sem.fixedRho) ncarFixedRhoUnTransform(phiTI);
+          if (verbose>=2) {
+            Rprintf("Check 2");
+          }
+        }
+
+        //if (verbose>=2) {
+        //  Rprintf("Sigma: %5g %5g %5g %5g\n",setP_sem.Sigma[0][0],setP_sem.Sigma[0][1],setP_sem.Sigma[1][0],setP_sem.Sigma[1][1]);
+        //}
+
+        ecoEStep(params_sem, SuffSem);
+        if (!params[0].setP->ncar)
+          ecoMStep(SuffSem,phiTp1I,params_sem);
+        else
+          ecoMStepNCAR(SuffSem,phiTp1I,params_sem);
+
+        //step 3: create new R matrix row
+        transformTheta(phiTp1I,t_phiTp1I,setP_sem.param_len,&setP_sem);
+        transformTheta(optTheta,t_optTheta,setP_sem.param_len,&setP_sem);
+        transformTheta(phiTI,t_phiTI,setP_sem.param_len,&setP_sem);
+        /*if (verbose>=2) {
+          Rprintf("T+1:");
+          for (j=0;j<param_len;j++) Rprintf(" %5g ", phiTp1I[j]);
+          Rprintf("\nOpt:");
+          for (j=0;j<param_len;j++) Rprintf(" %5g ", optTheta[j]);
+          Rprintf("\n 2nd item: %5g %5g %5g %5g", t_phiTp1I[2], t_optTheta[2], t_phiTI[switch_index_it], t_optTheta[switch_index_it]);
+        }*/
+        int index_jr=0;
+        for(j = 0; j<param_len; j++) {
+          if (setP_sem.varParam[j]) {
+            Rmat[i][index_jr]=(t_phiTp1I[j]-t_optTheta[j])/(t_phiTI[switch_index_it]-t_optTheta[switch_index_it]);
+            index_jr++;
+          }
+        }
+
+        //step 4: check for difference
+        params[0].setP->semDone[i]=closeEnough((double*)Rmat[i],(double*)Rmat_old[i],len,sqrt(params[0].setP->convergence));
+
+      }
+      else { //keep row the same
+        for(j = 0; j<len; j++)
+          Rmat[i][j]=Rmat_old[i][j];
+      }
+    }
+    if(verbose>=1) {
+      for(i=0;i<len;i++) {
+        Rprintf("\nR Matrix row %d (%s): ", (i+1), (params[0].setP->semDone[i]) ? "    Done" : "Not done");
+        for(j=0;j<len;j++) {
+          Rprintf(" %5.2f ",Rmat[i][j]);
+        }
+      }
+      Rprintf("\n\n");
+    }
+    Free(SuffSem);
+    Free(params_sem);
+ }
+
+
+
+/*
+ * Read in the data set and population params
+ */
+ void readData(Param* params, int n_dim, double* pdX, double* sur_W, double* x1_W1, double* x0_W2,
+                int n_samp, int s_samp, int x1_samp, int x0_samp) {
+     /* read the data set */
+int itemp,i,j,surv_dim;
+double dtemp;
+setParam* setP=params[0].setP;
+
+  /** Packing Y, X  **/
+  itemp = 0;
+  for (j = 0; j < n_dim; j++)
+    for (i = 0; i < n_samp; i++) {
+      params[i].caseP.data[j] = pdX[itemp++];
+    }
+
+  for (i = 0; i < n_samp; i++) {
+    params[i].caseP.dataType=0;
+    params[i].caseP.X=params[i].caseP.data[0];
+    params[i].caseP.Y=params[i].caseP.data[1];
+    //fix X edge cases
+    params[i].caseP.X=(params[i].caseP.X >= 1) ? .9999 : ((params[i].caseP.X <= 0) ? 0.0001 : params[i].caseP.X);
+    //fix Y edge cases
+    params[i].caseP.Y=(params[i].caseP.Y >= 1) ? .9999 : ((params[i].caseP.Y <= 0) ? 0.0001 : params[i].caseP.Y);
+  }
+
+  /* read homogeneous areas information */
+    for (i=n_samp; i<n_samp+x1_samp; i++) {
+      params[i].caseP.dataType=1;
+      params[i].caseP.W[0]=(x1_W1[i] == 1) ? .9999 : ((x1_W1[i]==0) ? .0001 : x1_W1[i]);
+      params[i].caseP.Wstar[0]=logit(params[i].caseP.W[0],"X1 read");
+    }
+
+    for (i=n_samp+x1_samp; i<n_samp+x1_samp+x0_samp; i++) {
+      params[i].caseP.dataType=2;
+      params[i].caseP.W[1]=(x0_W2[i] == 1) ? .9999 : ((x0_W2[i]==0) ? .0001 : x0_W2[i]);
+      params[i].caseP.Wstar[1]=logit(params[i].caseP.W[1],"X0 read");
+    }
+
+
+  /*read the survey data */
+    itemp=0;
+    surv_dim=n_dim + (setP->ncar ? 1 : 0); //if NCAR, the survey data will include X's
+    for (j=0; j<surv_dim; j++) {
+      for (i=n_samp+x1_samp+x0_samp; i<n_samp+x1_samp+x0_samp+s_samp; i++) {
+        dtemp=sur_W[itemp++];
+        params[i].caseP.dataType=3;
+        if (j<n_dim) {
+          params[i].caseP.W[j]=(dtemp == 1) ? .9999 : ((dtemp==0) ? .0001 : dtemp);
+          params[i].caseP.Wstar[j]=logit(params[i].caseP.W[j],"Survey read");
+        }
+        else { //if given the X (NCAR), we set X and construct Y
+          params[i].caseP.X=(dtemp == 1) ? .9999 : ((dtemp==0) ? .0001 : dtemp);
+          params[i].caseP.Y=params[i].caseP.X*params[i].caseP.W[0]+(1-params[i].caseP.X);
+        }
+      }
+    }
+
+    if (setP->verbose>=2) {
+      Rprintf("Y X\n");
+      for(i=0;i<5;i++) Rprintf("%5d%14g%14g\n",i,params[i].caseP.Y,params[i].caseP.X);
+      if (s_samp>0) {
+        Rprintf("SURVEY data\nY X\n");
+        int s_max=fmin2(n_samp+x1_samp+x0_samp+s_samp,n_samp+x1_samp+x0_samp+5);
+        for(i=n_samp+x1_samp+x0_samp; i<s_max; i++) Rprintf("%5d%14g%14g\n",i,params[i].caseP.Y,params[i].caseP.X);
+      }
+    }
+
+ }
+
+/*
+ * Parameterizes the elements of theta
+ * Input: pdTheta
+ * Mutates: t_pdTheta
+ */
+void transformTheta(double* pdTheta, double* t_pdTheta, int len, setParam* setP) {
+  if (len<=5) {
+    t_pdTheta[0]=pdTheta[0];
+    t_pdTheta[1]=pdTheta[1];
+    t_pdTheta[2]=log(pdTheta[2]);
+    t_pdTheta[3]=log(pdTheta[3]);
+    t_pdTheta[4]=.5*(log(1+pdTheta[4])-log(1-pdTheta[4]));
+  }
+  else {
+    t_pdTheta[0]=pdTheta[0];
+    t_pdTheta[1]=pdTheta[1];
+    t_pdTheta[2]=pdTheta[2];
+    t_pdTheta[3]=log(pdTheta[3]);
+    t_pdTheta[4]=log(pdTheta[4]);
+    t_pdTheta[5]=log(pdTheta[5]);
+    t_pdTheta[6]=.5*(log(1+pdTheta[6])-log(1-pdTheta[6]));
+    t_pdTheta[7]=.5*(log(1+pdTheta[7])-log(1-pdTheta[7]));
+    t_pdTheta[8]=.5*(log(1+pdTheta[8])-log(1-pdTheta[8]));
+  }
+}
+
+void untransformTheta(double* t_pdTheta,double* pdTheta, int len, setParam* setP) {
+  if (len<=5) {
+    pdTheta[0]=t_pdTheta[0];
+    pdTheta[1]=t_pdTheta[1];
+    pdTheta[2]=exp(t_pdTheta[2]);
+    pdTheta[3]=exp(t_pdTheta[3]);
+    pdTheta[4]=(exp(2*t_pdTheta[4])-1)/(exp(2*t_pdTheta[4])+1);
+  }
+  else {
+    pdTheta[0]=t_pdTheta[0];
+    pdTheta[1]=t_pdTheta[1];
+    pdTheta[2]=t_pdTheta[2];
+    pdTheta[3]=exp(t_pdTheta[3]);
+    pdTheta[4]=exp(t_pdTheta[4]);
+    pdTheta[5]=exp(t_pdTheta[5]);
+    if (!setP->fixedRho) {
+      pdTheta[6]=(exp(2*t_pdTheta[6])-1)/(exp(2*t_pdTheta[6])+1);
+      pdTheta[7]=(exp(2*t_pdTheta[7])-1)/(exp(2*t_pdTheta[7])+1);
+    }
+    else {
+      pdTheta[6]=t_pdTheta[6];
+      pdTheta[7]=t_pdTheta[7];
+    }
+    pdTheta[8]=(exp(2*t_pdTheta[8])-1)/(exp(2*t_pdTheta[8])+1);
+  }
+}
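+
+/* transformTheta/untransformTheta map variances to the log scale and
+   correlations to the Fisher z scale,
+     z = 0.5*log((1+rho)/(1-rho)),   rho = (exp(2z)-1)/(exp(2z)+1),
+   which is the scale on which closeEnough() and the recorded SEM history
+   operate in the main EM loop. */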
+
+/**
+ * untransforms theta under ncar
+ * input reference:  (0) mu_3, (1) mu_1, (2) mu_2, (3) sig_3, (4) sig_1 | 3, (5) sig_2 | 3, (6) beta1, (7) beta2, (8) r_12 | 3
+ * output reference: (0) mu_3, (1) mu_1, (2) mu_2, (3) sig_3, (4) sig_1, (5) sig_2, (6) r_13, (7) r_23, (8) r_12
+ * mutates: pdTheta
+ **/
+void ncarFixedRhoUnTransform(double* pdTheta) {
+  double* tmp=doubleArray(9);
+  int i;
+  for (i=0;i<9;i++) tmp[i]=pdTheta[i];
+  pdTheta[0]=tmp[0];
+  pdTheta[1]=tmp[1];
+  pdTheta[2]=tmp[2];
+  pdTheta[3]=tmp[3];
+  pdTheta[4]=tmp[4] + tmp[6]*tmp[6]*tmp[3];
+  pdTheta[5]=tmp[5] + tmp[7]*tmp[7]*tmp[3];
+  pdTheta[6]=(tmp[6]*sqrt(tmp[3]))/(sqrt(pdTheta[4]));
+  pdTheta[7]=(tmp[7]*sqrt(tmp[3]))/(sqrt(pdTheta[5]));
+  pdTheta[8]=(tmp[8]*sqrt(tmp[4]*tmp[5]) + tmp[6]*tmp[7]*tmp[3])/(sqrt(pdTheta[4]*pdTheta[5]));
+  Free(tmp);
+}
+
+/**
+ * transforms theta under ncar
+ * input reference:  (0) mu_3, (1) mu_1, (2) mu_2, (3) sig_3, (4) sig_1, (5) sig_2, (6) r_13, (7) r_23, (8) r_12
+ * output reference: (0) mu_3, (1) mu_1, (2) mu_2, (3) sig_3, (4) sig_1 | 3, (5) sig_2 | 3, (6) beta1, (7) beta2, (8) r_12 | 3
+ * mutates: pdTheta
+ **/
+void ncarFixedRhoTransform(double* pdTheta) {
+  double* tmp=doubleArray(9);
+  int i;
+  for (i=0;i<9;i++) tmp[i]=pdTheta[i];
+  pdTheta[0]=tmp[0];
+  pdTheta[1]=tmp[1];
+  pdTheta[2]=tmp[2];
+  pdTheta[3]=tmp[3];
+  pdTheta[4]=tmp[4] - tmp[6]*tmp[6]*tmp[4];
+  pdTheta[5]=tmp[5] - tmp[7]*tmp[7]*tmp[5];
+  pdTheta[6]=tmp[6]*sqrt(tmp[4]/tmp[3]);
+  pdTheta[7]=tmp[7]*sqrt(tmp[5]/tmp[3]);
+  pdTheta[8]=(tmp[8] - tmp[6]*tmp[7])/(sqrt((1 - tmp[6]*tmp[6])*(1 - tmp[7]*tmp[7])));
+  Free(tmp);
+}
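+
+/* The fixed-rho transform/untransform pair moves between the unconditional
+   NCAR parameters and their conditional (given X*) counterparts:
+     sig_1|3 = sig_1*(1 - r_13^2),          beta_1 = r_13*sqrt(sig_1/sig_3),
+     r_12|3  = (r_12 - r_13*r_23)/sqrt((1 - r_13^2)*(1 - r_23^2)),
+   and back:
+     sig_1 = sig_1|3 + beta_1^2*sig_3,      r_13 = beta_1*sqrt(sig_3/sig_1),
+     r_12  = (r_12|3*sqrt(sig_1|3*sig_2|3) + beta_1*beta_2*sig_3)/sqrt(sig_1*sig_2). */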
+
+
+/**
+ * Input transformed theta, loglikelihood, iteration
+ * Mutates: history_full
+ **/
+void setHistory(double* t_pdTheta, double loglik, int iter,setParam* setP,double history_full[][10]) {
+  //calc len
+  /* if you don't want to record the constant m3 and s3 under NCAR, use the commented-out code below */
+  /*int i,j;
+  int len=0;
+  for(j=0; j<setP->param_len;j++)
+    if(setP->varParam[j]) len++;
+  i=0;
+  for(j=0;j<setP->param_len;j++)
+    if(setP->varParam[j]) {
+      history_full[iter][i]=t_pdTheta[j];
+      i++;
+    }*/
+  int len=setP->param_len;
+  int j;
+  for(j=0;j<len;j++)
+      history_full[iter][j]=t_pdTheta[j];
+  if (iter>0) history_full[iter-1][len]=loglik;
+}
+
+/*
+ * Determines whether we have converged
+ * Takes in the current and old (one step previous) array of theta values
+ * maxerr is the maximum difference two corresponding values can have before the
+ *  function returns false
+ */
+int closeEnough(double* pdTheta, double* pdTheta_old, int len, double maxerr) {
+  int j;
+  for(j = 0; j<len; j++)
+    if (fabs(pdTheta[j]-pdTheta_old[j])>=maxerr) return 0;
+  return 1;
+}
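
[Editor's note] A toy illustration (not part of the commit) of how a tolerance check like closeEnough() typically terminates an iterative fit; the fixed-point update below is only a stand-in for the real M-step.

/* Editor's sketch (not part of the commit). */
#include <stdio.h>
#include <math.h>

static int close_enough(double *a, double *b, int len, double maxerr) {
  for (int j = 0; j < len; j++)
    if (fabs(a[j] - b[j]) >= maxerr) return 0;
  return 1;
}

int main(void) {
  double theta = 5.0, theta_old;
  int iter = 0;
  do {                                   /* toy update converging to sqrt(2) */
    theta_old = theta;
    theta = 0.5*(theta + 2.0/theta);
    iter++;
  } while (!close_enough(&theta, &theta_old, 1, 1e-10) && iter < 100);
  printf("converged to %.10f after %d iterations\n", theta, iter);
  return 0;
}
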
+
+int semDoneCheck(setParam* setP) {
+  int varlen=0; int j;
+  for(j=0; j<setP->param_len;j++)
+    if(setP->varParam[j]) varlen++;
+  for(j=0;j<varlen;j++)
+    if(setP->semDone[j]==0) return 0;
+  return 1;
+}
+
+void gridEStep(Param* params, int n_samp, int s_samp, int x1_samp, int x0_samp, double* suff, int verbose, double minW1, double maxW1) {
+
+  int n_dim=2;
+  int n_step=5000;    /* The default size of grid step */
+  int ndraw=10000;
+  int trapod=0;       /* 1 to use trapezoidal approximation in the numerical integration */
+  int *n_grid=intArray(n_samp);                /* grid size */
+  double **W1g=doubleMatrix(n_samp, n_step);   /* grids for W1 */
+  double **W2g=doubleMatrix(n_samp, n_step);   /* grids for W2 */
+  double *vtemp=doubleArray(n_dim);
+  int *mflag=intArray(n_step);
+  double *prob_grid=doubleArray(n_step);
+  double *prob_grid_cum=doubleArray(n_step);
+  double **X=doubleMatrix(n_samp,n_dim);     /* Y and covariates */
+
+  int itemp,i,j,k,t_samp;
+  double dtemp,dtemp1,temp0,temp1;
+
+  t_samp=n_samp+x1_samp+x0_samp+s_samp;
+
+  double **W=doubleMatrix(t_samp,n_dim);     /* W1 and W2 matrix */
+  double **Wstar=doubleMatrix(t_samp,5);     /* pseudo data (transformed) */
+
+  for (i=0;i<n_samp;i++)
+    for(j=0;j<n_dim;j++)
+      X[i][j]=params[i].caseP.data[j];
+
+  GridPrep(W1g, W2g, X, (double*)&maxW1, (double*)&minW1, n_grid, n_samp, n_step);
+
+  for (i=0; i<n_step; i++) {
+    mflag[i]=0;
+  }
+
+
+  //update W, Wstar given mu, Sigma in regular areas
+  for (i=0;i<n_samp;i++){
+    if ( params[i].caseP.Y!=0 && params[i].caseP.Y!=1 ) {
+      // project BVN(mu, Sigma) onto the ith tomography line
+      dtemp=0;
+      for (j=0;j<n_grid[i];j++){
+        vtemp[0]=log(W1g[i][j])-log(1-W1g[i][j]);
+        vtemp[1]=log(W2g[i][j])-log(1-W2g[i][j]);
+        prob_grid[j]=dMVN(vtemp, params[i].caseP.mu, (double**)(params[i].setP->InvSigma), 2, 1) -
+          log(W1g[i][j])-log(W2g[i][j])-log(1-W1g[i][j])-log(1-W2g[i][j]);
+        prob_grid[j]=exp(prob_grid[j]);
+        dtemp+=prob_grid[j];
+        prob_grid_cum[j]=dtemp;
+      }
+      for (j=0;j<n_grid[i];j++){
+        prob_grid_cum[j]/=dtemp; //standardize prob.grid
+      }
+      // MC numerical integration, compute E(W_i|Y_i, X_i, theta)
+      //2 sample ndraw W_i on the ith tomo line
+      //   use inverse CDF method to draw
+      //   0-1 by 1/ndraw approx uniform distribution
+      //3 compute Wstar_i from W_i
+      j=0;
+      itemp=1;
+
+      for (k=0; k<ndraw; k++){
+        j=findInterval(prob_grid_cum, n_grid[i],
+		      (double)(1+k)/(ndraw+1), 1, 1, itemp, mflag);
+        itemp=j-1;
+
+
+        if ((W1g[i][j]==0) || (W1g[i][j]==1))
+          Rprintf("W1g%5d%5d%14g", i, j, W1g[i][j]);
+        if ((W2g[i][j]==0) || (W2g[i][j]==1))
+          Rprintf("W2g%5d%5d%14g", i, j, W2g[i][j]);
+
+        if (j==0 || trapod==0) {
+          W[i][0]=W1g[i][j];
+          W[i][1]=W2g[i][j];
+        }
+        else if (j>=1 && trapod==1) {
+          if (prob_grid_cum[j]!=prob_grid_cum[(j-1)]) {
+            dtemp1=((double)(1+k)/(ndraw+1)-prob_grid_cum[(j-1)])/(prob_grid_cum[j]-prob_grid_cum[(j-1)]);
+            W[i][0]=dtemp1*(W1g[i][j]-W1g[i][(j-1)])+W1g[i][(j-1)];
+            W[i][1]=dtemp1*(W2g[i][j]-W2g[i][(j-1)])+W2g[i][(j-1)];
+          }
+          else if (prob_grid_cum[j]==prob_grid_cum[(j-1)]) {
+            W[i][0]=W1g[i][j];
+            W[i][1]=W2g[i][j];
+          }
+        }
+        temp0=log(W[i][0])-log(1-W[i][0]);
+        temp1=log(W[i][1])-log(1-W[i][1]);
+        Wstar[i][0]+=temp0;
+        Wstar[i][1]+=temp1;
+        Wstar[i][2]+=temp0*temp0;
+        Wstar[i][3]+=temp0*temp1;
+        Wstar[i][4]+=temp1*temp1;
+      }
+    }
+  }
+
+  // compute E_{W_i|Y_i} for n_samp
+  for (i=0; i<n_samp; i++) {
+    if ( X[i][1]!=0 && X[i][1]!=1 ) {
+      Wstar[i][0]/=ndraw;  //E(W1i)
+      Wstar[i][1]/=ndraw;  //E(W2i)
+      Wstar[i][2]/=ndraw;  //E(W1i^2)
+      Wstar[i][3]/=ndraw;  //E(W1iW2i)
+      Wstar[i][4]/=ndraw;  //E(W2i^2)
+    }
+  } //for x0type, x1type and survey data, E-step is either the observed value or the analytical expectation
+
+  /* compute sufficient statistics */
+  for (j=0; j<5; j++)
+    suff[j]=0;
+
+  for (i=0; i<t_samp; i++) {
+    suff[0]+=Wstar[i][0];  /* sumE(W_i1|Y_i) */
+    suff[1]+=Wstar[i][1];  /* sumE(W_i2|Y_i) */
+    suff[2]+=Wstar[i][2];  /* sumE(W_i1^2|Y_i) */
+    suff[3]+=Wstar[i][4];  /* sumE(W_i2^2|Y_i) */
+    suff[4]+=Wstar[i][3];  /* sumE(W_i1^W_i2|Y_i) */
+  }
+
+
+  for(j=0; j<5; j++)
+    suff[j]=suff[j]/t_samp;
+
+  Free(n_grid);Free(vtemp);Free(mflag);Free(prob_grid);Free(prob_grid_cum);
+  FreeMatrix(W1g,n_samp);FreeMatrix(W2g,n_samp);FreeMatrix(X,n_samp);
+  FreeMatrix(W,t_samp);FreeMatrix(Wstar,t_samp);
+
+}
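
[Editor's note] The grid E-step above approximates E[W | data] by discretizing the tomography line, turning the density on the grid into a cumulative distribution, and averaging inverse-CDF draws taken at equally spaced probabilities. A one-dimensional toy version (not part of the commit, all names illustrative):

/* Editor's sketch (not part of the commit). */
#include <stdio.h>
#include <math.h>

#define N_GRID 5000
#define N_DRAW 10000

int main(void) {
  double grid[N_GRID], cum[N_GRID], total = 0;
  /* unnormalized density on (0,1): a Beta(3,2)-shaped curve */
  for (int j = 0; j < N_GRID; j++) {
    grid[j] = (j + 0.5)/N_GRID;
    double dens = grid[j]*grid[j]*(1 - grid[j]);
    total += dens;
    cum[j] = total;
  }
  for (int j = 0; j < N_GRID; j++) cum[j] /= total;    /* standardize to a CDF */

  double ew = 0;
  int j = 0;
  for (int k = 0; k < N_DRAW; k++) {
    double u = (double)(k + 1)/(N_DRAW + 1);           /* equally spaced probabilities */
    while (j < N_GRID - 1 && cum[j] < u) j++;          /* inverse-CDF lookup */
    ew += grid[j];
  }
  ew /= N_DRAW;
  printf("E[W] approx %g (exact Beta(3,2) mean = 0.6)\n", ew);
  return 0;
}
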
diff --git a/src/gibbsXBase.c b/src/gibbsXBase.c
index 8cd3392..c4bfffe 100644
--- a/src/gibbsXBase.c
+++ b/src/gibbsXBase.c
@@ -1,10 +1,4 @@
-/******************************************************************
-  This file is a part of eco: R Package for Fitting Bayesian Models 
-  of Ecological Inference for 2x2 Tables
-  by Kosuke Imai and Ying Lu
-  Copyright: GPL version 2 or later.
-*******************************************************************/
-
+#include <string.h>
 #include <stddef.h>
 #include <stdio.h>      
 #include <math.h>
diff --git a/src/gibbsXDP.c b/src/gibbsXDP.c
index b2c15d1..c8f0d94 100644
--- a/src/gibbsXDP.c
+++ b/src/gibbsXDP.c
@@ -1,10 +1,4 @@
-/******************************************************************
-  This file is a part of eco: R Package for Estimating Fitting 
-  Bayesian Models of Ecological Inference for 2X2 tables
-  by Ying Lu and Kosuke Imai
-  Copyright: GPL version 2 or later.
-*******************************************************************/
-
+#include <string.h>
 #include <stddef.h>
 #include <stdio.h>      
 #include <math.h>
diff --git a/src/gibbsZBase.c b/src/gibbsZBase.c
new file mode 100644
index 0000000..513790b
--- /dev/null
+++ b/src/gibbsZBase.c
@@ -0,0 +1,408 @@
+#include <string.h>
+#include <stddef.h>
+#include <stdio.h>      
+#include <math.h>
+#include <Rmath.h>
+#include <R.h>
+#include "vector.h"
+#include "subroutines.h"
+#include "rand.h"
+#include "sample.h"
+
+void cBaseecoZ(
+	      /*data input */
+	      double *pdX,     /* data (X, Y) */
+	      double *pdZ,     /* covariates Z */
+	      int *pinZp,      /* dimension of Z
+				  if =1, =gibbsBase
+			             =2 and Z=X, gibbsXBase
+			             >2 or Z!=X, regression*/
+	      int *pin_samp,   /* sample size */
+	      /*MCMC draws */
+	      int *n_gen,      /* number of gibbs draws */
+	      int *burn_in,    /* number of draws to be burned in */
+	      int *pinth,        /* keep every nth draw */
+	      int *verbose,    /* 1 for output monitoring */
+
+	      /* prior specification for imputation, (beta, Sigma)~N-InvWish*/
+	      /* prior for Sigma~InvWish(nu, S)*/
+	      int *pinu0,       /* prior df parameter for InvWish */
+	      double *pdS0,     /* prior scale for Sigma */
+	      /* prior for beta~N(b0, Sigma*A0^-1) */
+	      double *pdbeta0,      /* prior mean for beta*/
+	      double *pdA0,   /* prior PRECISION=1/SCALE parameter for beta*/
+
+	      /* starting values */
+	      double *betastart, double *Sigmastart,
+
+	      /*incorporating survey data */
+	      int *survey,      /*1 if survey data available (set of W_1, W_2)
+				  0 not*/
+	      int *sur_samp,    /*sample size of survey data*/
+	      double *sur_W,    /*set of known W_1, W_2 */ 
+	      
+	      /*incorporating homogeneous areas */
+	      int *x1,       /* 1 if X=1 type areas available W_1 known, W_2 unknown */
+	      int *sampx1,   /* number X=1 type areas */
+	      double *x1_W1, /* values of W_1 for X1 type areas */
+
+	      int *x0,       /* 1 if X=0 type areas available W_2 known, W_1 unknown */
+	      int *sampx0,   /* number X=0 type areas */
+	      double *x0_W2, /* values of W_2 for X0 type areas */
+
+	      /* bounds of W1 */
+	      double *minW1, double *maxW1,
+
+	      /* storage */
+	      int *parameter,/* 1 if save population parameter */
+	      int *Grid,
+
+	      /* storage for Gibbs draws of beta and Sigma, packed */
+	      double *pdSBeta, double *pdSSigma,
+	      /* storage for Gibbs draws of W*/
+	      double *pdSW1, double *pdSW2
+	      ){	   
+  
+  int n_samp = *pin_samp; /* sample size */
+  int nth = *pinth;  
+  int s_samp = *sur_samp; /* sample size of survey data */ 
+  int x1_samp = *sampx1;
+  int x0_samp = *sampx0;
+  int t_samp = n_samp+s_samp+x1_samp+x0_samp;  /* total sample size */ 
+  int n_dim = 2;          /* The dimension of the ecological table */
+  int n_cov = *pinZp;     /* The dimension of the covariates */
+  int n_step = 1000;
+  
+  /* priors */
+  double *beta0 = doubleArray(n_cov); /* prior mean of beta */
+  double **S0 = doubleMatrix(n_dim, n_dim); /* prior scale for Sigma */
+  double **A0 = doubleMatrix(n_cov, n_cov); /* prior precision for beta */ 
+  int nu0 = *pinu0;                         /* prior df for Sigma */   
+
+  /* data */
+  double **X = doubleMatrix(n_samp, n_dim); /* The Y and X */
+
+  /*The known W1 and W2 matrix*/
+  double **S_W = doubleMatrix(s_samp, n_dim); 
+  double **S_Wstar=doubleMatrix(s_samp, n_dim); 
+
+  /* pseudo data Wstar */
+  double **W = doubleMatrix(t_samp, n_dim);
+  double **Wstar = doubleMatrix(t_samp, n_dim);
+  double *Wstar_bar = doubleArray(n_dim);
+
+  /* The covariates and W */ 
+  double **Z = doubleMatrix(t_samp*n_dim+n_cov, n_cov+1);
+  /* Z * Cholesky factor of covariance matrix */
+  double **Zstar = doubleMatrix(t_samp*n_dim+n_cov, n_cov+1);
+
+  /* grids */
+  double **W1g = doubleMatrix(n_samp, n_step); /* grids for W1 */
+  double **W2g = doubleMatrix(n_samp, n_step); /* grids for W2 */
+  int *n_grid = intArray(n_samp);              /* grid size */
+
+  /* parameters for Wstar under Normal baseline model */
+  double *beta = doubleArray(n_cov); /* vector of regression coefficients */
+  double **mu = doubleMatrix(t_samp, n_dim); 
+  double **Sigma = doubleMatrix(n_dim, n_dim);
+  double **InvSigma = doubleMatrix(n_dim, n_dim);
+
+  /*posterior parameters for beta and Sigma*/
+  double *mbeta = doubleArray(n_cov);         /* posterior mean of beta*/
+  double **Vbeta = doubleMatrix(n_cov,n_cov); /* posterior variance of beta */
+
+  /* matrices used for sweep */
+  /* quantities used in sweep */
+  double **SS = doubleMatrix(n_cov+1, n_cov+1); /* the sum of square matrix */
+  double *epsilon = doubleArray(t_samp*n_dim);  /* The error term */
+  double **R = doubleMatrix(n_dim, n_dim);      /* ee' */
+ 
+  /* misc variables */
+  int i, j, k, t, l, main_loop;   /* used for various loops */
+  int itemp;
+  int itempA=0; /* counter for alpha */
+  int itempB=0; 
+  int itempC=0; /* counter to control nth draw */
+  int itempS=0; /* counter for storage */
+
+  int progress = 1, itempP = ftrunc((double) *n_gen/10);
+  double dtemp, dtemp1;
+  double *vtemp = doubleArray(n_dim);
+  double **mtemp = doubleMatrix(n_dim, n_dim);
+  double **mtemp1 = doubleMatrix(n_dim, n_dim);
+  double **mtemp2 = doubleMatrix(n_cov, n_cov);
+
+  /* get random seed */
+  GetRNGstate();
+
+  /**read prior information*/
+  itemp=0;
+  for (k=0; k<n_cov; k++) {
+    beta0[k]=pdbeta0[k];
+    for (j=0; j<n_cov; j++) 
+      A0[j][k]=pdA0[itemp++];
+  }
+  itemp=0;
+  for (k=0; k<n_dim; k++)
+    for (j=0; j<n_dim; j++) 
+      S0[j][k]=pdS0[itemp++];
+    
+  /* read the data set */
+  /** Packing Y, X  **/
+  itemp = 0;
+  for (j = 0; j < n_dim; j++) 
+    for (i = 0; i < n_samp; i++) 
+      X[i][j] = pdX[itemp++];
+  
+  /**read Z **/
+  for (i=0; i<t_samp*n_dim+n_cov; i++)
+    for(j=0; j<=n_cov;j++) {
+      Z[i][j]=0;
+      Zstar[i][j]=0;
+    }
+  itemp = 0;
+  for (k=0; k<n_cov; k++)
+    for (j=0; j<n_dim; j++)
+      for (i=0; i<t_samp; i++)
+	Z[j*t_samp+i][k]=pdZ[itemp++];
+  
+  /* add prior information to Z*/
+  dcholdc(A0, n_cov, mtemp2);  /*Cholesky decomposition*/
+
+  for (j=0; j<n_cov; j++) {
+    Zstar[t_samp*n_dim+j][n_cov]=beta0[j];
+    for (k=0; k<n_cov; k++){
+      Zstar[t_samp*n_dim+j][n_cov]+=mtemp2[j][k]*beta0[j];
+      Zstar[t_samp*n_dim+j][k]=mtemp2[j][k];
+    }
+  }      
+    
+  /* initialize W, Wstar for n_samp*/
+  for (i=0; i< n_samp; i++) {
+    if (X[i][1]!=0 && X[i][1]!=1) {
+      W[i][0]=runif(minW1[i], maxW1[i]);
+      W[i][1]=(X[i][1]-X[i][0]*W[i][0])/(1-X[i][0]);
+    }
+    if (X[i][1]==0)
+      for (j=0; j<n_dim; j++) W[i][j]=0.0001;
+    if (X[i][1]==1)
+      for (j=0; j<n_dim; j++) W[i][j]=0.9999;
+    for (j=0; j<n_dim; j++)
+      Wstar[i][j]=log(W[i][j])-log(1-W[i][j]);
+  }
+  
+  /*read homogeneous areas information */
+  if (*x1==1) 
+    for (i=0; i<x1_samp; i++) {
+      W[(n_samp+i)][0]=x1_W1[i];
+      if (W[(n_samp+i)][0]==0) W[(n_samp+i)][0]=0.0001;
+      if (W[(n_samp+i)][0]==1) W[(n_samp+i)][0]=0.9999;
+      Wstar[(n_samp+i)][0]=log(W[(n_samp+i)][0])-log(1-W[(n_samp+i)][0]);
+    }
+
+  if (*x0==1) 
+    for (i=0; i<x0_samp; i++) {
+      W[(n_samp+x1_samp+i)][1]=x0_W2[i];
+      if (W[(n_samp+x1_samp+i)][1]==0) W[(n_samp+x1_samp+i)][1]=0.0001;
+      if (W[(n_samp+x1_samp+i)][1]==1) W[(n_samp+x1_samp+i)][1]=0.9999;
+      Wstar[(n_samp+x1_samp+i)][1]=log(W[(n_samp+x1_samp+i)][1])-log(1-W[(n_samp+x1_samp+i)][1]);
+    }
+
+  /*read the survey data */
+  if (*survey==1) {
+    itemp = 0;
+    for (j=0; j<n_dim; j++)
+      for (i=0; i<s_samp; i++) {
+	S_W[i][j]=sur_W[itemp++];
+	if (S_W[i][j]==0) S_W[i][j]=0.0001;
+	if (S_W[i][j]==1) S_W[i][j]=0.9999;
+	S_Wstar[i][j]=log(S_W[i][j])-log(1-S_W[i][j]);
+	W[(n_samp+x1_samp+x0_samp+i)][j]=S_W[i][j];
+	Wstar[(n_samp+x1_samp+x0_samp+i)][j]=S_Wstar[i][j];
+	Z[(i+n_samp+x1_samp+x0_samp)*n_dim+j][n_cov]=Wstar[(i+n_samp+x1_samp+x0_samp)][j];
+      }
+  }
+
+  /* calculate grids */
+  if (*Grid)
+    GridPrep(W1g, W2g, X, maxW1, minW1, n_grid, n_samp, n_step);
+
+  /* starting values of beta and Sigma */
+  itemp = 0;
+  for(j=0;j<n_cov;j++)
+    beta[j] = betastart[j];
+  for(j=0;j<n_dim;j++)
+    for(k=0;k<n_dim;k++)
+      Sigma[j][k]=Sigmastart[itemp++];
+  dinv(Sigma, n_dim, InvSigma);
+
+  /***Gibbs for  normal prior ***/
+  for(main_loop=0; main_loop<*n_gen; main_loop++){
+    for (i=0; i<t_samp; i++)
+      for (j=0; j<n_dim; j++) 
+	mu[i][j]=0;
+    /**update W, Wstar given mu, Sigma in regular areas**/
+    for (i=0;i<t_samp;i++)
+      for (j=0; j<n_dim; j++)
+	for (k=0; k<n_cov; k++) 
+	  mu[i][j]+=Z[i*n_dim+j][k]*beta[k];
+    
+    for (i=0; i<n_samp; i++) {
+      if ( X[i][1]!=0 && X[i][1]!=1 ) {
+	/*1 project BVN(mu, Sigma) onto the ith tomography line */
+	/*2 sample W_i on the ith tomo line */
+	if (*Grid)
+	  rGrid(W[i], W1g[i], W2g[i], n_grid[i], mu[i], InvSigma, n_dim);
+	else
+	  rMH(W[i], X[i], minW1[i], maxW1[i], mu[i], InvSigma, n_dim);
+      } 
+      /*3 compute Wstar_i from W_i*/
+      Wstar[i][0]=log(W[i][0])-log(1-W[i][0]);
+      Wstar[i][1]=log(W[i][1])-log(1-W[i][1]);
+      Z[i*n_dim][n_cov]=Wstar[i][0];
+      Z[i*n_dim+1][n_cov]=Wstar[i][1];
+    }
+    
+    /*update W2 given W1, mu and Sigma in x1 homogeneous areas */
+    if (*x1==1)
+      for (i=0; i<x1_samp; i++) {
+	dtemp=mu[n_samp+i][1]+Sigma[0][1]/Sigma[0][0]*(Wstar[n_samp+i][0]-mu[n_samp+i][0]);
+	dtemp1=Sigma[1][1]*(1-Sigma[0][1]*Sigma[0][1]/(Sigma[0][0]*Sigma[1][1]));
+	dtemp1=sqrt(dtemp1);
+	Wstar[n_samp+i][1]=rnorm(dtemp, dtemp1);
+	W[n_samp+i][1]=exp(Wstar[n_samp+i][1])/(1+exp(Wstar[n_samp+i][1]));
+	Z[(i+n_samp)*n_dim][n_cov]=Wstar[(i+n_samp)][0];
+	Z[(i+n_samp)*n_dim+1][n_cov]=Wstar[(i+n_samp)][1];
+      }
+
+    /*update W1 given W2, mu and Sigma in x0 homogeneous areas */
+    if (*x0==1)
+      for (i=0; i<x0_samp; i++) {
+	dtemp=mu[n_samp+x1_samp+i][0]+Sigma[0][1]/Sigma[1][1]*(Wstar[n_samp+x1_samp+i][1]-mu[n_samp+x1_samp+i][1]);
+	dtemp1=Sigma[0][0]*(1-Sigma[0][1]*Sigma[0][1]/(Sigma[0][0]*Sigma[1][1]));
+	dtemp1=sqrt(dtemp1);
+	Wstar[n_samp+x1_samp+i][0]=rnorm(dtemp, dtemp1);
+	W[n_samp+x1_samp+i][0]=exp(Wstar[n_samp+x1_samp+i][0])/(1+exp(Wstar[n_samp+x1_samp+i][0]));
+	Z[(i+n_samp+x1_samp)*n_dim][n_cov]=Wstar[(i+n_samp+x1_samp)][0];
+	Z[(i+n_samp+x1_samp)*n_dim+1][n_cov]=Wstar[(i+n_samp+x1_samp)][1];
+      }
+
+    dcholdc(InvSigma, n_dim, mtemp);
+    for (i=0; i<t_samp*n_dim; i++)
+      for (j=0; j<=n_cov; j++)
+	Zstar[i][j]=0;
+    for (i=0; i<t_samp; i++)
+      for(j=0; j<n_dim; j++)
+	for(k=0; k<n_dim; k++)
+	  for(l=0; l<=n_cov; l++)
+	    Zstar[i*n_dim+k][l]+=mtemp[k][j]*Z[i*n_dim+j][l];
+
+    /*construct SS matrix for SWEEP */
+    for (j=0; j<=n_cov; j++)
+      for (k=0; k<=n_cov; k++)
+	SS[j][k]=0;
+    for(i=0; i<(t_samp*n_dim); i++)
+      for(k=0; k<=n_cov; k++)
+	for(l=0; l<=n_cov; l++)
+	  SS[k][l]+=Zstar[i][k]*Zstar[i][l];
+    for(j=0; j<n_cov; j++)
+      for(k=0; k<=n_cov; k++)
+	for(l=0; l<=n_cov; l++)
+	  SS[k][l]+=Zstar[t_samp*n_dim+j][k]*Zstar[t_samp*n_dim+j][l]; /* prior rows are stored at t_samp*n_dim+j */
+
+    /*SWEEP to get posterior mean and variance for beta */
+    for (j=0; j<n_cov; j++) 
+      SWP(SS,j,n_cov+1);
+
+    /*draw beta given Sigma and W */
+    for (j=0; j<n_cov; j++) {
+      mbeta[j]=SS[j][n_cov]; 
+      for (k=0; k<n_cov; k++)
+	Vbeta[j][k]=-SS[j][k];
+    }
+    rMVN(beta, mbeta, Vbeta, n_cov);
+
+    /*draw Sigma given beta and Wstar */
+    for(i=0; i<t_samp; i++)
+      for(j=0; j<n_dim; j++) {
+	epsilon[i*n_dim+j]=Z[i*n_dim+j][n_cov];
+	for (k=0;k<n_cov; k++)
+	  epsilon[i*n_dim+j]-=Z[i*n_dim+j][k]*beta[k];
+      }
+    for(j=0; j<n_dim; j++)
+      for(k=0; k<n_dim; k++) 
+	R[j][k]=0;
+    for (i=0; i<t_samp; i++)
+      for(j=0; j<n_dim; j++)
+	for(k=0; k<n_dim; k++) 
+	  R[j][k]+=epsilon[i*n_dim+j]*epsilon[i*n_dim+k];
+    for(j=0; j<n_dim; j++)
+      for (k=0; k<n_dim; k++)
+	mtemp[j][k]=S0[j][k]+R[j][k];
+    dinv(mtemp, n_dim, mtemp1);
+    rWish(InvSigma, mtemp1, nu0+t_samp, n_dim);
+    dinv(InvSigma, n_dim, Sigma);
+    
+    /*store Gibbs draws after burn-in, keeping every nth draw */
+    R_CheckUserInterrupt();
+    if (main_loop>=*burn_in){
+      itempC++;
+      if (itempC==nth){
+	for (j=0; j<n_cov; j++)
+	  pdSBeta[itempA++]=beta[j];
+	for (j=0; j<n_dim; j++)
+	  for (k=j; k<n_dim; k++)
+	    pdSSigma[itempB++]=Sigma[j][k];
+	for(i=0; i<(n_samp+x1_samp+x0_samp); i++){
+	  pdSW1[itempS]=W[i][0];
+	  pdSW2[itempS]=W[i][1];
+	  itempS++;
+	}
+	itempC=0;
+      }
+    } /*end of storage after burn-in*/
+    if (*verbose)
+      if (itempP == main_loop) {
+	Rprintf("%3d percent done.\n", progress*10);
+	itempP+=ftrunc((double) *n_gen/10); progress++;
+      R_FlushConsole();
+      }
+  } /*end of MCMC for normal */ 
+  
+  /** write out the random seed **/
+  PutRNGstate();
+
+  /* Freeing the memory */
+  FreeMatrix(X, n_samp);
+  FreeMatrix(W, t_samp);
+  FreeMatrix(Wstar, t_samp);
+  FreeMatrix(S_W, s_samp);
+  FreeMatrix(S_Wstar, s_samp);
+  /* minW1 and maxW1 are owned by the caller (passed in from R); do not free them here */
+  free(n_grid);
+  FreeMatrix(S0, n_dim);
+  FreeMatrix(W1g, n_samp);
+  FreeMatrix(W2g, n_samp);
+  FreeMatrix(mu,t_samp);
+  FreeMatrix(Sigma,n_dim);
+  FreeMatrix(InvSigma, n_dim);
+  FreeMatrix(Z, t_samp*n_dim+n_cov);
+  FreeMatrix(Zstar, t_samp*n_dim+n_cov);
+  Free(Wstar_bar);
+  Free(vtemp);
+  FreeMatrix(mtemp, n_dim);
+  FreeMatrix(mtemp1, n_dim);
+  FreeMatrix(mtemp2, n_cov);
+  Free(beta);
+  Free(beta0);
+  FreeMatrix(A0, n_cov);
+  FreeMatrix(SS, n_cov+1);
+  Free(mbeta);
+  FreeMatrix(Vbeta, n_cov);
+  Free(epsilon);
+  FreeMatrix(R, n_dim);
+  
+} /* main */
+
diff --git a/src/macros.h b/src/macros.h
new file mode 100644
index 0000000..109a193
--- /dev/null
+++ b/src/macros.h
@@ -0,0 +1,82 @@
+# ifndef MACROS_H
+# define MACROS_H
+
+
+/****************/
+/** structures **/
+/****************/
+/* parameters and observed data */
+struct Param_old{
+  double mu[2];
+  double Sigma[2][2];
+  double InvSigma[2][2];
+  double Sigma3[3][3];
+  double InvSigma3[3][3];
+  int NCAR;
+  double data[2]; //collect the data
+  double X; //X,Y here for ease of use
+  double Y;
+  double normcT; //normalized const on tomog line (integrating with parameterization)
+  double W[2]; //if W is known, also handy place to store E[W1] when we calculate it each step
+  double Wstar[2]; //place to store E[W1*] when we calculate it each step
+  double W1_lb; //lower and upper bounds for W1 and W2 (not starred)
+  double W1_ub;
+  double W2_lb;
+  double W2_ub;
+  int W1_inf; //inf: 0->(lb,ub), -1->(-inf,ub), 1->(lb,inf), 2->(-inf,inf)
+  int W2_inf;
+  int suff; //the sufficient stat we're calculating: 0->W1, 1->W2,2->W1^2,3->W1W2,4->W2^2,7->Log Lik, 5/6,-1 ->test case
+};
+
+typedef struct Param_old Param_old;
+
+struct caseParam {
+  double mu[2];
+  double data[2]; //collect the data
+  double X; //X,Y here for ease of use
+  double Y;
+  double normcT; //normalized const on tomog line (integrating with parameterization)
+  double W[2]; //if W is known, also handy place to store E[W1] when we calculate it each step
+  double Wstar[2]; //place to store E[W1*] when we calculate it each step
+  double Wbounds[2][2];  //[i][j] is {j:lower,upper}-bound of W{i+1}
+  int suff; //the sufficient stat we're calculating: 0->W1, 1->W2,2->W1^2,3->W1W2,4->W2^2,7->Log Lik, 5/6,-1 ->test case
+  int dataType; //0=unknown, 1=(X==1),2=(X==0),3=survey
+  double** Z_i; //CCAR: k x 2
+};
+
+typedef struct caseParam caseParam;
+
+struct setParam {
+  int n_samp, t_samp, s_samp,x1_samp,x0_samp,param_len,suffstat_len; //types of data sizes
+  int iter, ncar, ccar, ccar_nvar, fixedRho, sem, hypTest, verbose, calcLoglik; //options
+  int semDone[7]; //whether that row of the R matrix is done
+  int varParam[9]; //whether the parameter is included in the R matrix
+  double convergence;
+  double Sigma[2][2];
+  double InvSigma[2][2];
+  double Sigma3[3][3];
+  double InvSigma3[3][3];
+  double** SigmaK; //for CCAR
+  double** InvSigmaK;
+  double** hypTestCoeff;
+  double hypTestResult;
+  double* pdTheta;
+};
+
+typedef struct setParam setParam;
+
+struct Param {
+  setParam* setP; //pointer to the singleton structure
+  caseParam caseP;
+};
+
+typedef struct Param Param;
+
+/***************************/
+/** typedef functions     **/
+/***************************/
+
+//typedef void integr_fn(double *x, int n, void *ex); //is already defined in Applic.h
+typedef double gsl_fn(double x, void *ex);
+
+# endif
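
[Editor's note] A stand-alone sketch (not part of the commit) of how the structures defined in macros.h are meant to compose: one shared setParam holding model-level settings, and an array of Param records that each point back to it while carrying their own caseParam. Field values below are placeholders.

/* Editor's sketch (not part of the commit). */
#include <stdio.h>
#include "macros.h"

int main(void) {
  setParam setP;
  setP.n_samp = 2;
  setP.ncar = 0;
  setP.verbose = 1;

  Param units[2];
  for (int i = 0; i < 2; i++) {
    units[i].setP = &setP;            /* every unit shares the same settings */
    units[i].caseP.X = 0.4 + 0.1*i;   /* per-unit data live in caseP */
    units[i].caseP.Y = 0.5;
  }
  printf("unit 0 shares settings with unit 1: %s\n",
         units[0].setP == units[1].setP ? "yes" : "no");
  return 0;
}
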
diff --git a/src/preBaseX.c b/src/preBaseX.c
index 1d1c614..4c475f1 100644
--- a/src/preBaseX.c
+++ b/src/preBaseX.c
@@ -1,10 +1,4 @@
-/******************************************************************
-  This file is a part of eco: R Package for Fitting Bayesian Models 
-  of Ecological Inference for 2x2 Tables
-  by Kosuke Imai and Ying Lu
-  Copyright: GPL version 2 or later.
-*******************************************************************/
-
+#include <string.h>
 #include <stddef.h>
 #include <stdio.h>      
 #include <math.h>
diff --git a/src/preDP.c b/src/preDP.c
index 8118ef5..2eae461 100644
--- a/src/preDP.c
+++ b/src/preDP.c
@@ -1,11 +1,5 @@
-/******************************************************************
-  This file is a part of eco: R Package for Fitting Bayesian Models 
-  of Ecological Inference for 2x2 Tables
-  by Kosuke Imai and Ying Lu
-  Copyright: GPL version 2 or later.
-*******************************************************************/
-
 #include <stddef.h>
+#include <string.h>
 #include <stdio.h>      
 #include <math.h>
 #include <Rmath.h>
diff --git a/src/preDPX.c b/src/preDPX.c
index 1ee4eb1..9004364 100644
--- a/src/preDPX.c
+++ b/src/preDPX.c
@@ -1,10 +1,4 @@
-/******************************************************************
-  This file is a part of eco: R Package for Fitting Bayesian Models 
-  of Ecological Inference for 2x2 Tables
-  by Kosuke Imai and Ying Lu
-  Copyright: GPL version 2 or later.
-*******************************************************************/
-
+#include <string.h>
 #include <stddef.h>
 #include <stdio.h>      
 #include <math.h>
diff --git a/src/rand.c b/src/rand.c
index 00aea1e..3e34d66 100644
--- a/src/rand.c
+++ b/src/rand.c
@@ -1,5 +1,5 @@
 /******************************************************************
-  This file is a part of eco: R Package for Fitting Bayesian Models 
+  This file is a part of eco: R Package for Fitting Bayesian Models
   of Ecological Inference for 2x2 Tables
   by Kosuke Imai and Ying Lu
   Copyright: GPL version 2 or later.
@@ -10,20 +10,23 @@
 #include <math.h>
 #include <Rmath.h>
 #include <R_ext/Utils.h>
+#include <R_ext/PrtUtil.h>
 #include <R.h>
 #include "vector.h"
 #include "subroutines.h"
 #include "rand.h"
 #include "sample.h"
+#include "macros.h"
+#include "fintegrate.h"
 
 /* Multivariate Normal density */
-double dMVN(			
+double dMVN(
 	double *Y,		/* The data */
 	double *MEAN,		/* The parameters */
-	double **SIG_INV,         /* inverse of the covariance matrix */	
+	double **SIG_INV,         /* inverse of the covariance matrix */
 	int dim,                /* dimension */
 	int give_log){          /* 1 if log_scale 0 otherwise */
-  
+
   int j,k;
   double value=0.0;
 
@@ -36,13 +39,14 @@ double dMVN(
   value=-0.5*value-0.5*dim*log(2*M_PI)+0.5*ddet(SIG_INV, dim, 1);
 
 
-  if(give_log)  
+  if(give_log)
     return(value);
   else
     return(exp(value));
 
 }
 
+
 /* the density of Multivariate T-distribution */
 double dMVT(
             double *Y,          /* The data */
@@ -73,7 +77,7 @@ double dMVT(
 
 
 /* Sample from the MVN dist */
-void rMVN(                      
+void rMVN(
 	  double *Sample,         /* Vector for the sample */
 	  double *mean,           /* The vector of means */
 	  double **Var,           /* The matrix Variance */
@@ -82,9 +86,9 @@ void rMVN(
   int j,k;
   double **Model = doubleMatrix(size+1, size+1);
   double cond_mean;
-    
+
   /* draw from mult. normal using SWP */
-  for(j=1;j<=size;j++){       
+  for(j=1;j<=size;j++){
     for(k=1;k<=size;k++)
       Model[j][k]=Var[j-1][k-1];
     Model[0][j]=mean[j-1];
@@ -98,7 +102,7 @@ void rMVN(
     for(k=1;k<j;k++) cond_mean+=Sample[k-1]*Model[j][k];
     Sample[j-1]=(double)norm_rand()*sqrt(Model[j][j])+cond_mean;
   }
-  
+
   FreeMatrix(Model,size+1);
 }
 
@@ -108,7 +112,7 @@ void rMVN(
    a Sample Covariance Matrix'' Journal of the American Statistical
    Association, Vol. 61, No. 313. (Mar., 1966), pp. 199-203. */
 
-void rWish(                  
+void rWish(
 	   double **Sample,        /* The matrix with to hold the sample */
 	   double **S,             /* The parameter */
 	   int df,                 /* the degrees of freedom */
@@ -120,7 +124,7 @@ void rWish(
   double **C = doubleMatrix(size, size);
   double **N = doubleMatrix(size, size);
   double **mtemp = doubleMatrix(size, size);
-  
+
   for(i=0;i<size;i++) {
     V[i]=rchisq((double) df-i-1);
     B[i][i]=V[i];
@@ -139,7 +143,7 @@ void rWish(
 	  for(k=0;k<j;k++)
 	    B[j][j]+=N[k][j]*N[k][j];
       }
-      else { 
+      else {
 	B[i][j]=N[i][j]*sqrt(V[i]);
 	if(i>0)
 	  for(k=0;k<i;k++)
@@ -148,7 +152,7 @@ void rWish(
       B[j][i]=B[i][j];
     }
   }
-  
+
   dcholdc(S, size, C);
   for(i=0;i<size;i++)
     for(j=0;j<size;j++)
@@ -174,7 +178,7 @@ void rDirich(
 {
   int j;
   double dtemp=0;
-  
+
   for (j=0; j<size; j++) {
     Sample[j] = rgamma(theta[j], 1.0);
     dtemp += Sample[j];
@@ -182,3 +186,69 @@ void rDirich(
   for (j=0 ; j<size; j++)
     Sample[j] /= dtemp;
 }
+
+/** density function on tomography line Y=XW_1+ (1-X)W_2
+ * Note: assumes that the two points given, W1* and W2*,
+ * are on the tomography line
+ */
+double dBVNtomo(double *Wstar,  /* Wstar values */
+		void* pp,     //parameter
+		int give_log, /* 1 if log-scale, 0 otherwise */
+		double normc)  //Normalization factor
+
+{
+  int dim=2;
+  double *MEAN=doubleArray(dim);
+  double **SIGMA=doubleMatrix(dim,dim);
+  double density;
+  double rho, dtemp;
+
+    Param *param=(Param *)pp;
+    MEAN[0]=param->caseP.mu[0];
+    MEAN[1]=param->caseP.mu[1];
+    SIGMA[0][0]=param->setP->Sigma[0][0];
+    SIGMA[1][1]=param->setP->Sigma[1][1];
+    SIGMA[0][1]=param->setP->Sigma[0][1];
+    SIGMA[1][0]=param->setP->Sigma[1][0];
+
+
+    rho=SIGMA[0][1]/sqrt(SIGMA[0][0]*SIGMA[1][1]);
+    dtemp=1/(2*M_PI*sqrt(SIGMA[0][0]*SIGMA[1][1]*(1-rho*rho)));
+
+
+    density=-1/(2*(1-rho*rho))*
+    ((Wstar[0]-MEAN[0])*(Wstar[0]-MEAN[0])/SIGMA[0][0]+
+     +(Wstar[1]-MEAN[1])*(Wstar[1]-MEAN[1])/SIGMA[1][1]
+     -2*rho*(Wstar[0]-MEAN[0])*(Wstar[1]-MEAN[1])/sqrt(SIGMA[0][0]*SIGMA[1][1]))
+    +log(dtemp)-log(normc);
+
+     if (give_log==0) density=exp(density);
+      /*Rprintf("s11 %5g s22 %5g normc %5g dtemp %5g ldensity %5g\n", SIGMA[0][0],SIGMA[1][1],normc, dtemp, density);
+      char ch;
+     scanf(" %c", &ch );*/
+
+    Free(MEAN);
+    FreeMatrix(SIGMA,dim);
+
+     return density;
+
+
+}
+
+double invLogit(double x) {
+  /* 1/(1+exp(-x)) approaches 1 (not 0) for large positive x */
+  if (x>30) return 1;
+  else return (1/(1+exp(-1*x)));
+}
+
+double logit(double x,char* emsg) {
+  if (x>=1 || x<=0) {
+    Rprintf(emsg);
+    Rprintf(": %5g is out of logit range\n",x);
+  }
+  return log(x/(1-x));
+}
+
+int bit(int t, int n) {
+  t=t>>n;
+  return (t % 2);
+}
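
[Editor's note] dBVNtomo() above evaluates a bivariate normal on the W* (logit) scale along the tomography line Y = X*W1 + (1-X)*W2, up to the normalizing constant normc. A stand-alone sketch (not part of the commit) of the tomography-line bookkeeping it relies on: the admissible W1 range, the implied W2, and the logit map. The numeric values are placeholders.

/* Editor's sketch (not part of the commit). */
#include <stdio.h>
#include <math.h>

int main(void) {
  double X = 0.3, Y = 0.45;                 /* row margin and observed outcome */
  /* deterministic bounds on W1 given (X, Y), so that W2 stays in (0,1) */
  double lb = fmax(0.0, (Y - (1 - X))/X), ub = fmin(1.0, Y/X);
  double W1 = 0.5*(lb + ub);                /* any point on the tomography line */
  double W2 = (Y - X*W1)/(1 - X);           /* implied by the accounting identity */
  double Wstar1 = log(W1/(1 - W1));         /* logit transform, as in the sampler */
  double Wstar2 = log(W2/(1 - W2));
  printf("W1 in [%g, %g]; chose W1=%g -> W2=%g\n", lb, ub, W1, W2);
  printf("logit scale: W1*=%g, W2*=%g\n", Wstar1, Wstar2);
  printf("check: X*W1 + (1-X)*W2 = %g (should equal Y=%g)\n",
         X*W1 + (1 - X)*W2, Y);
  return 0;
}
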
diff --git a/src/rand.h b/src/rand.h
index ed5b569..3e5e2fb 100644
--- a/src/rand.h
+++ b/src/rand.h
@@ -1,13 +1,18 @@
 /******************************************************************
-  This file is a part of eco: R Package for Fitting Bayesian Models 
+  This file is a part of eco: R Package for Fitting Bayesian Models
   of Ecological Inference for 2x2 Tables
   by Kosuke Imai and Ying Lu
   Copyright: GPL version 2 or later.
 *******************************************************************/
 
-double dMVN(double *Y, double *MEAN, double **SIGMA, int dim, int give_log);
+double dMVN(double *Y, double *MEAN, double **SIG_INV, int dim, int give_log);
 double dMVT(double *Y, double *MEAN, double **SIG_INV, int nu, int dim, int give_log);
 void rMVN(double *Sample, double *mean, double **inv_Var, int size);
 void rWish(double **Sample, double **S, int df, int size);
 void rDirich(double *Sample, double *theta, int size);
+double dBVNtomo(double *Wstar, void* pp, int give_log, double normc);
+double invLogit(double x);
+double logit(double x,char* emsg);
+int bit(int t, int n);
+
 
diff --git a/src/subroutines.c b/src/subroutines.c
index 68f2a67..b946ea4 100644
--- a/src/subroutines.c
+++ b/src/subroutines.c
@@ -1,5 +1,5 @@
 /******************************************************************
-  This file is a part of eco: R Package for Fitting Bayesian Models 
+  This file is a part of eco: R Package for Fitting Bayesian Models
   of Ecological Inference for 2x2 Tables
   by Kosuke Imai and Ying Lu
   Copyright: GPL version 2 or later.
@@ -10,10 +10,45 @@
 #include <string.h>
 #include <math.h>
 #include <Rmath.h>
+#include <R.h>
 #include <R_ext/Lapack.h>
-#include <R.h> 
 #include "vector.h"
 #include "rand.h"
+#include "subroutines.h"
+
+
+/*
+ * Computes the dot product of two vectors
+ */
+double dotProduct(double* a, double* b, int size) {
+  int i; double ans=0;
+  for (i=0; i<size; i++) {
+    ans+=a[i]*b[i];
+  }
+  return ans;
+}
+
+/*
+ * Multiply two matrices (A,B) with dims r1,c1,r2,c2
+ * mutates C to return the answer
+ */
+void matrixMul(double** A, double** B, int r1, int c1, int r2, int c2, double** C) {
+  int i,j,k;
+  double tmp[r1][c2];
+  if (c1!=r2) error("Matrix multiplication: %d != %d", c1, r2);
+  else {
+    for (i=0; i<r1; i++)
+      for (j=0; j<c2; j++) {
+        double entry=0;
+        for(k=0;k<r2;k++) entry += A[i][k]*B[k][j];
+        tmp[i][j]=entry;
+      }
+    for (i=0; i<r1; i++)
+      for (j=0; j<c2; j++) {
+        C[i][j]=tmp[i][j];
+      }
+  }
+}
 
 /*  The Sweep operator */
 void SWP(
@@ -23,7 +58,7 @@ void SWP(
 {
   int i,j;
 
-  if (X[k][k] < 10e-20) 
+  if (X[k][k] < 10e-20)
     error("SWP: singular matrix.\n");
   else
     X[k][k]=-1/X[k][k];
@@ -36,7 +71,7 @@ void SWP(
     for(j=0;j<size;j++)
       if(i!=k && j!=k)
 	X[i][j]=X[i][j]+X[i][k]*X[k][j]/X[k][k];
-  
+
 }
 
 
@@ -48,19 +83,27 @@ void dinv(double **X,
   int i,j, k, errorM;
   double *pdInv = doubleArray(size*size);
 
-  for (i = 0, j = 0; j < size; j++) 
-    for (k = 0; k <= j; k++) 
+  for (i = 0, j = 0; j < size; j++)
+    for (k = 0; k <= j; k++)
       pdInv[i++] = X[k][j];
   F77_CALL(dpptrf)("U", &size, pdInv, &errorM);
   if (!errorM) {
     F77_CALL(dpptri)("U", &size, pdInv, &errorM);
     if (errorM) {
-      Rprintf("LAPACK dpptri failed, %d\n", errorM);
+      if (errorM>0) {
+        Rprintf("The matrix being inverted is singular. Error code %d\n", errorM);
+      } else {
+        Rprintf("The matrix being inverted contained an illegal value. Error code %d.\n", errorM);
+      }
       error("Exiting from dinv().\n");
     }
   }
   else {
-    Rprintf("LAPACK dpptrf failed, %d\n", errorM);
+    if (errorM>0) {
+      Rprintf("The matrix being inverted was not positive definite. Error code %d\n", errorM);
+    } else {
+      Rprintf("The matrix being inverted contained an illegal value. Error code %d.\n", errorM);
+    }
     error("Exiting from dinv().\n");
   }
   for (i = 0, j = 0; j < size; j++) {
@@ -70,7 +113,136 @@ void dinv(double **X,
     }
   }
 
-  free(pdInv);
+  Free(pdInv);
+}
+
+/* inverting a matrix via the positive definite (Cholesky) routine
+ * Uses special syntax since we don't know dimensions of array
+ * Prevents memory errors for matrices created with double[][]
+ */
+void dinv2D(double* X,
+	  int	size,
+	  double* X_inv,char* emsg)
+{
+  int i,j, k, errorM, skip;
+  double *pdInv = doubleArray(size*size);
+  skip=0;
+
+  for (i = 0, j = 0; j < size; j++)
+    for (k = 0; k <= j; k++)
+      //pdInv[i++] = X[k][j];
+      pdInv[i++] = *(X+k*size+j);
+
+//Rprintf("test: %5g %5g %d",pdInv[0],pdInv[(size == 3) ? 5 : 2],i);
+  F77_CALL(dpptrf)("U", &size, pdInv, &errorM);
+  if (!errorM) {
+    F77_CALL(dpptri)("U", &size, pdInv, &errorM);
+    if (errorM) {
+      Rprintf(emsg);
+    if (errorM>0) {
+      Rprintf(": The matrix being inverted is singular. Error code %d\n", errorM);
+    } else {
+      Rprintf(": The matrix being inverted contained an illegal value. Error code %d.\n", errorM);
+    }
+      error("Exiting from dinv2D().\n");
+    }
+  }
+  else {
+    Rprintf(emsg);
+    if (errorM>0) {
+      /* The matrix is not positive definite.
+       * This error does occur with proper data, when the likelihood curve is flat,
+       * usually with the combination of NCAR and SEM.  At one point we tried
+       * inverting the matrix via an alternative method that does not rely on
+       * positive definiteness (see below), but that just led to further errors.
+       * Instead, the program halts as gracefully as possible.
+       */
+      //Inverting the matrix anyway:
+      //Rprintf(": Warning, the matrix being inverted was not positive definite on minor order %d.\n", errorM);
+      //dinv2D_sym(X,size,X_inv,emsg);
+      //skip=1;
+      Rprintf(": Error, the matrix being inverted was not positive definite on minor order %d.\n", errorM);
+      error("The program cannot continue; try a different model or including supplemental data.\n");
+    } else {
+      Rprintf(": The matrix being inverted contained an illegal value. Error code %d.\n", errorM);
+      error("Exiting from dinv2D().\n");
+    }
+  }
+
+  if (skip==0) {
+  for (i = 0, j = 0; j < size; j++) {
+    for (k = 0; k <= j; k++) {
+      *(X_inv+size*j+k) = pdInv[i];
+      *(X_inv+size*k+j) = pdInv[i++];
+    }
+  }
+  }
+
+  Free(pdInv);
+}
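
[Editor's note] dinv()/dinv2D() above hand LAPACK's dpptrf/dpptri the upper triangle in column-packed order. A stand-alone sketch (not part of the commit) of that packing and unpacking convention on a 3x3 symmetric matrix, with no LAPACK call:

/* Editor's sketch (not part of the commit). */
#include <stdio.h>

int main(void) {
  double X[3][3] = {{4, 1, 2}, {1, 5, 3}, {2, 3, 6}};
  double packed[6];
  int i = 0;
  for (int j = 0; j < 3; j++)          /* same loop order as dinv(): columns, */
    for (int k = 0; k <= j; k++)       /* then rows up to the diagonal        */
      packed[i++] = X[k][j];
  /* packed is now {X00, X01, X11, X02, X12, X22} = {4, 1, 5, 2, 3, 6} */
  for (i = 0; i < 6; i++) printf("%g ", packed[i]);
  printf("\n");

  /* unpack symmetrically, mirroring the loop that rebuilds X_inv */
  double Y[3][3];
  i = 0;
  for (int j = 0; j < 3; j++)
    for (int k = 0; k <= j; k++) {
      Y[j][k] = packed[i];
      Y[k][j] = packed[i++];
    }
  printf("Y[2][0] = %g (matches X[0][2] = %g)\n", Y[2][0], X[0][2]);
  return 0;
}
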
+
+
+/* inverting a matrix, assumes symmetric but not necessarily positive definite
+ * Uses special syntax since we don't know dimensions of array
+ * Prevents memory errors for matrices created with double[][]
+*/
+void dinv2D_sym(double* X,
+	  int	size,
+	  double* X_inv,char* emsg)
+{
+  int i,j, k, errorM, size2;
+  size2=size*size;
+  double *pdInv = doubleArray(size2);
+  double *B= doubleArray(size2);
+  int *factor_out = intArray(size);
+
+  //init pdInv and B.  B is identity
+  for (i = 0, j = 0; j < size; j++)
+    for (k = 0; k < size; k++) {
+      if (j==k) B[i]=1;
+      else B[i]=0;
+      pdInv[i]=*(X+k*size+j);
+      i++;
+    }
+
+  //for (i = 0, j = 0; j < size; j++)
+  //  for (k = 0; k <= j; k++) {
+  //    pdInv[i++] = *(X+k*size+j);
+  //  }
+
+  double *work0 = doubleArray(size2);
+  int test=-1;
+  F77_CALL(dsysv)("U", &size, &size, pdInv, &size, factor_out, B, &size, work0, &test, &errorM);
+  int lwork=(int)work0[0];
+  Free(work0);
+
+  //Rprintf("work size %d\n",lwork);
+  double *work = doubleArray(lwork);
+  //Rprintf("In A: %5g %5g %5g %5g\n",pdInv[0],pdInv[1],pdInv[2],pdInv[3]);
+  //Rprintf("In B: %5g %5g %5g %5g\n",B[0],B[1],B[2],B[3]);
+  F77_CALL(dsysv)("U", &size, &size, pdInv, &size, factor_out, B, &size, work, &lwork, &errorM);
+  Free(work);
+  //Rprintf("Out1: %5g %5g %5g %5g %d\n",B[0],B[1],B[2],B[3],errorM);
+
+  if (errorM) {
+    Rprintf(emsg);
+    if (errorM>0) {
+      Rprintf(": The matrix being inverted is singular. Error code %d\n", errorM);
+    } else {
+      Rprintf(": The matrix being inverted contained an illegal value. Error code %d.\n", errorM);
+    }
+    error("Exiting from dinv2D_sym() (dsytrf).\n");
+  }
+
+  for (i = 0, j = 0; j < size; j++) {
+    for (k = 0; k < size; k++) {
+      *(X_inv+size*j+k) = B[i++];
+    }
+  }
+
+  free(factor_out);
+  Free(B);
+  Free(pdInv);
 }
 
 
@@ -81,12 +253,16 @@ void dcholdc(double **X, int size, double **L)
   int i, j, k, errorM;
   double *pdTemp = doubleArray(size*size);
 
-  for (j = 0, i = 0; j < size; j++) 
-    for (k = 0; k <= j; k++) 
+  for (j = 0, i = 0; j < size; j++)
+    for (k = 0; k <= j; k++)
       pdTemp[i++] = X[k][j];
   F77_CALL(dpptrf)("U", &size, pdTemp, &errorM);
   if (errorM) {
-    Rprintf("LAPACK dpptrf failed, %d\n", errorM);
+    if (errorM>0) {
+      Rprintf("The matrix being inverted was not positive definite. Error code %d\n", errorM);
+    } else {
+      Rprintf("The matrix being inverted contained an illegal value. Error code %d.\n", errorM);
+    }
     error("Exiting from dcholdc().\n");
   }
   for (j = 0, i = 0; j < size; j++) {
@@ -98,8 +274,8 @@ void dcholdc(double **X, int size, double **L)
     }
   }
 
-  free(pdTemp);
-} 
+  Free(pdTemp);
+}
 
 /* calculate the determinant of the positive definite symmetric matrix
    using the Cholesky decomposition  */
@@ -108,7 +284,7 @@ double ddet(double **X, int size, int give_log)
   int i;
   double logdet=0.0;
   double **pdTemp = doubleMatrix(size, size);
-  
+
   dcholdc(X, size, pdTemp);
   for(i = 0; i < size; i++)
     logdet += log(pdTemp[i][i]);
@@ -121,3 +297,74 @@ double ddet(double **X, int size, int give_log)
 }
 
 
+/* calculate the determinant of the positive definite symmetric matrix
+   using the Cholesky decomposition; use with double[][] */
+double ddet2D(double** X, int size, int give_log)
+{
+  int i;
+  double logdet=0.0;
+  double **pdTemp = doubleMatrix(size, size);
+
+  dcholdc2D((double*)(&X[0][0]), size, (double*)(&pdTemp[0][0]));
+  for(i = 0; i < size; i++)
+    logdet += log(pdTemp[i][i]);
+
+  FreeMatrix(pdTemp, size);
+  if(give_log)
+    return(2.0*logdet);
+  else
+    return(exp(2.0*logdet));
+}
+
+/*double ddet2Db(double* X, int size, int give_log)
+{
+  int i;
+  double logdet=0.0;
+  double **pdTemp = doubleMatrix(size, size);
+
+  dcholdc2D(X, size, (double*)(&pdTemp[0][0]));
+  for(i = 0; i < size; i++)
+    logdet += log(pdTemp[i][i]);
+
+  FreeMatrix(pdTemp, size);
+  if(give_log)
+    return(2.0*logdet);
+  else
+    return(exp(2.0*logdet));
+}*/
+
+/* Cholesky decomposition */
+/* returns lower triangular matrix; use with double[][] */
+void dcholdc2D(double *X, int size, double *L)
+{
+  int i, j, k, errorM;
+  double *pdTemp = doubleArray(size*size);
+
+  for (j = 0, i = 0; j < size; j++)
+    for (k = 0; k <= j; k++)
+      pdTemp[i++] = *(X+size*k+j); //pdTemp[i++] = X[k][j];
+  F77_CALL(dpptrf)("U", &size, pdTemp, &errorM);
+  if (errorM) {
+    if (errorM>0) {
+      Rprintf("The matrix being inverted was not positive definite. Error code %d\n", errorM);
+    } else {
+      Rprintf("The matrix being inverted contained an illegal value. Error code %d.\n", errorM);
+    }
+    error("Exiting from dcholdc2D().\n");
+  }
+  for (j = 0, i = 0; j < size; j++) {
+    for (k = 0; k < size; k++) {
+      if(j<k)
+        *(L+size*j+k)=0.0; //L[j][k] = 0.0;
+      else
+        *(L+size*j+k)=pdTemp[i++]; //L[j][k] = pdTemp[i++];
+    }
+  }
+
+  Free(pdTemp);
+}
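
[Editor's note] ddet()/ddet2D() above compute the determinant of a symmetric positive definite matrix as the squared product of the Cholesky factor's diagonal. A 2x2 stand-alone check (not part of the commit) with a hand-coded Cholesky factor:

/* Editor's sketch (not part of the commit). */
#include <stdio.h>
#include <math.h>

int main(void) {
  double a = 4, b = 1, d = 3;                 /* X = [[a, b], [b, d]], SPD */
  double l00 = sqrt(a);                       /* lower-triangular Cholesky */
  double l10 = b/l00;
  double l11 = sqrt(d - l10*l10);
  double det_chol = (l00*l11)*(l00*l11);      /* (product of the diagonal)^2 */
  double det_direct = a*d - b*b;
  printf("det via Cholesky = %g, direct = %g\n", det_chol, det_direct);
  printf("log-det = %g (what ddet() returns when give_log=1)\n",
         2*(log(l00) + log(l11)));
  return 0;
}
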
+
diff --git a/src/subroutines.h b/src/subroutines.h
index 528a266..6d10107 100644
--- a/src/subroutines.h
+++ b/src/subroutines.h
@@ -1,5 +1,5 @@
 /******************************************************************
-  This file is a part of eco: R Package for Fitting Bayesian Models 
+  This file is a part of eco: R Package for Fitting Bayesian Models
   of Ecological Inference for 2x2 Tables
   by Kosuke Imai and Ying Lu
   Copyright: GPL version 2 or later.
@@ -7,5 +7,10 @@
 
 void SWP( double **X, int k, int size);
 void dinv(double **X, int size, double **X_inv);
+void dinv2D(double *X, int size, double *X_inv,char* emsg);
+void dinv2D_sym(double *X, int size, double *X_inv,char* emsg);
 void dcholdc(double **X, int size, double **L);
 double ddet(double **X, int size, int give_log);
+double ddet2D(double **X, int size, int give_log);
+void dcholdc2D(double *X, int size, double *L);
+void matrixMul(double **A, double **B, int r1, int c1, int r2, int c2, double **C);
diff --git a/src/vector.c b/src/vector.c
index 8c62425..cb93c7b 100644
--- a/src/vector.c
+++ b/src/vector.c
@@ -1,5 +1,5 @@
 /******************************************************************
-  This file is a part of eco: R Package for Fitting Bayesian Models 
+  This file is a part of eco: R Package for Fitting Bayesian Models
   of Ecological Inference for 2x2 Tables
   by Kosuke Imai and Ying Lu
   Copyright: GPL version 2 or later.
@@ -9,14 +9,17 @@
 #include <assert.h>
 #include <stdio.h>
 #include <R_ext/Utils.h>
+#include <R_ext/PrtUtil.h>
 #include <R.h>
 
 int* intArray(int num) {
   int *iArray = (int *)malloc(num * sizeof(int));
   if (iArray)
     return iArray;
-  else 
+  else {
     error("Out of memory error in intArray\n");
+    return NULL;
+  }
 }
 
 int** intMatrix(int row, int col) {
@@ -25,63 +28,77 @@ int** intMatrix(int row, int col) {
   if (iMatrix) {
     for (i = 0; i < row; i++) {
       iMatrix[i] = (int *)malloc(col *  sizeof(int));
-      if (!iMatrix[i]) 
+      if (!iMatrix[i])
 	error("Out of memory error in intMatrix\n");
     }
     return iMatrix;
   }
-  else 
+  else {
     error("Out of memory error in intMatrix\n");
+    return NULL;
+  }
 }
 
 double* doubleArray(int num) {
-  double *dArray = (double *)malloc(num * sizeof(double));
+  //double *dArray = (double *)malloc(num * sizeof(double));
+  double *dArray = Calloc(num,double);
   if (dArray)
     return dArray;
-  else
+  else {
     error("Out of memory error in doubleArray\n");
+    return NULL;
+  }
 }
 
 double** doubleMatrix(int row, int col) {
   int i;
-  double **dMatrix = (double **)malloc((size_t)(row * sizeof(double *)));
+  //double **dMatrix = (double **)malloc((size_t)(row * sizeof(double *)));
+  double **dMatrix = Calloc(row,double*);
   if (dMatrix) {
     for (i = 0; i < row; i++) {
-      dMatrix[i] = (double *)malloc((size_t)(col * sizeof(double)));
-      if (!dMatrix[i])
-	error("Out of memory error in doubleMatrix\n");
+      dMatrix[i] = Calloc(col,double);
+      if (!dMatrix[i]) {
+        error("Out of memory error in doubleMatrix\n");
+        return NULL;
+      }
     }
     return dMatrix;
   }
-  else
+  else {
     error("Out of memory error in doubleMatrix\n");
+    return NULL;
+  }
 }
 
 double*** doubleMatrix3D(int x, int y, int z) {
   int i;
   double ***dM3 = (double ***)malloc(x * sizeof(double **));
   if (dM3) {
-    for (i = 0; i < x; i++) 
+    for (i = 0; i < x; i++)
       dM3[i] = doubleMatrix(y, z);
     return dM3;
   }
-  else 
+  else {
     error("Out of memory error in doubleMatrix3D\n");
+    return NULL;
+  }
 }
 
 long* longArray(int num) {
   long *lArray = (long *)malloc(num * sizeof(long));
   if (lArray)
     return lArray;
-  else 
+  else {
     error("Out of memory error in longArray\n");
+    return NULL;
+  }
 }
 
 void FreeMatrix(double **Matrix, int row) {
   int i;
   for (i = 0; i < row; i++)
-    free(Matrix[i]);
-  free(Matrix);
+    Free(Matrix[i]);
+  Free(Matrix);
 }
 
 void FreeintMatrix(int **Matrix, int row) {
@@ -97,7 +114,7 @@ void Free3DMatrix(double ***Matrix, int index, int row) {
     FreeMatrix(Matrix[i], row);
   free(Matrix);
 }
-		
-	
-		
-			
+
+
+
+

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-science/packages/r-cran-eco.git


