[r-cran-bayesm] 13/44: Import Upstream version 2.0-1

Andreas Tille tille at debian.org
Thu Sep 7 11:16:20 UTC 2017


This is an automated email from the git hooks/post-receive script.

tille pushed a commit to branch master
in repository r-cran-bayesm.

commit c5946ac6ece6052db61102e336b4667ad8b5171e
Author: Andreas Tille <tille at debian.org>
Date:   Thu Sep 7 13:08:41 2017 +0200

    Import Upstream version 2.0-1
---
 DESCRIPTION                |  13 ++--
 NAMESPACE                  |   3 +-
 R/clusterMix.R             | 131 ++++++++++++++++++++++++++++++++
 R/llmnl.R                  |   3 +-
 R/llmnp.R                  |   3 +-
 R/mixDenBi.R               |  74 ++++++++++++++++++
 R/mnlHess.R                |   5 +-
 R/{llmnp.R => mnpProb.R}   |  33 ++++----
 R/momMix.R                 |   3 +-
 R/rbiNormGibbs.R           | 116 ++++++++++++++++++++++++++++
 R/rhierMnlRwMixture.R      |  20 +++--
 R/rhierNegbinRw.R          |  19 +++--
 R/rmixGibbs.R              |   3 +
 R/rmnlIndepMetrop.R        |  10 ++-
 R/rnmixGibbs.R             |   8 +-
 R/rsurGibbs.R              | 183 +++++++++++++++++++++++++++++++++++++++++++++
 R/runireg.R                | 153 ++++++++++++++++++++++++++++++-------
 R/runiregGibbs.R           |   2 +-
 inst/doc/bayesm-manual.pdf | Bin 390397 -> 374023 bytes
 man/bank.Rd                |   2 +-
 man/breg.Rd                |   5 +-
 man/clusterMix.Rd          |  88 ++++++++++++++++++++++
 man/condMom.Rd             |   2 +-
 man/createX.Rd             |   4 +-
 man/eMixMargDen.Rd         |   2 +-
 man/ghkvec.Rd              |   3 +-
 man/init.rmultiregfp.Rd    |   2 +-
 man/llmnl.Rd               |   8 +-
 man/llmnp.Rd               |  11 +--
 man/llnhlogit.Rd           |   2 +-
 man/lndIChisq.Rd           |   2 +-
 man/lndIWishart.Rd         |   2 +-
 man/lndMvn.Rd              |   2 +-
 man/lndMvst.Rd             |   2 +-
 man/logMargDenNR.Rd        |   2 +-
 man/mixDen.Rd              |  10 ++-
 man/mixDenBi.Rd            |  58 ++++++++++++++
 man/mnlHess.Rd             |   8 +-
 man/mnpProb.Rd             |  50 +++++++++++++
 man/momMix.Rd              |   2 +-
 man/numEff.Rd              |   2 +-
 man/rbiNormGibbs.Rd        |  40 ++++++++++
 man/rbprobitGibbs.Rd       |   2 +-
 man/rdirichlet.Rd          |   2 +-
 man/rhierBinLogit.Rd       |   4 +-
 man/rhierLinearModel.Rd    |   2 +-
 man/rhierMnlRwMixture.Rd   |   3 +-
 man/rhierNegbinRw.Rd       |   2 +-
 man/rivGibbs.Rd            |   8 ++
 man/rmixGibbs.Rd           |   2 +-
 man/rmnlIndepMetrop.Rd     |   4 +-
 man/rmnpGibbs.Rd           |   2 +-
 man/rmultireg.Rd           |   2 +-
 man/rmultiregfp.Rd         |   2 +-
 man/rmvpGibbs.Rd           |   2 +-
 man/rmvst.Rd               |   2 +-
 man/rnegbinRw.Rd           |   2 +-
 man/rnmixGibbs.Rd          |  53 ++++++++++++-
 man/rscaleUsage.Rd         |   5 +-
 man/rsurGibbs.Rd           |  98 ++++++++++++++++++++++++
 man/rtrun.Rd               |   2 +-
 man/runireg.Rd             |  64 +++++++---------
 man/runiregGibbs.Rd        |  16 ++--
 man/rwishart.Rd            |   2 +-
 man/simmnl.Rd              |   2 +-
 man/simmnlwX.Rd            |   2 +-
 man/simmnp.Rd              |   2 +-
 man/simmvp.Rd              |   2 +-
 man/simnhlogit.Rd          |   2 +-
 69 files changed, 1198 insertions(+), 184 deletions(-)

diff --git a/DESCRIPTION b/DESCRIPTION
index 13c949f..78c4d49 100755
--- a/DESCRIPTION
+++ b/DESCRIPTION
@@ -1,6 +1,6 @@
 Package: bayesm
-Version: 1.1-2
-Date: 2005-07-05
+Version: 2.0-1
+Date: 2005-10-05
 Title:Bayesian Inference for Marketing/Micro-econometrics 
 Author: Peter Rossi <peter.rossi at ChicagoGsb.edu>, 
         Rob McCulloch <robert.mcculloch at ChicagoGsb.edu>.
@@ -10,10 +10,11 @@ Description: bayesm covers many important models used
   in marketing and micro-econometrics applications. 
   The package includes:
   Bayes Regression (univariate or multivariate dep var),
+  Bayes Seemingly Unrelated Regression (SUR),
   Multinomial Logit (MNL) and Multinomial Probit (MNP),
   Multivariate Probit,
-  Negative Binomial Regression,
-  Multivariate Mixtures of Normals,
+  Negative Binomial (Poisson) Regression,
+  Multivariate Mixtures of Normals (including clustering),
   Hierarchical Linear Models with normal prior and covariates,
   Hierarchical Multinomial Logits with mixture of normals prior
      and covariates,
@@ -24,7 +25,7 @@ Description: bayesm covers many important models used
   Analyis of Multivariate Ordinal survey data with scale
      usage heterogeneity (as in Rossi et al, JASA (01)).
   For further reference, consult our book, Bayesian Statistics and
-  Marketing by Allenby, McCulloch and Rossi. 
+  Marketing by Rossi, Allenby and McCulloch. 
 License: GPL (version 2 or later)
 URL: http://gsbwww.uchicago.edu/fac/peter.rossi/research/bsm.html
-Packaged: Tue Jul  5 16:42:19 2005; per
+Packaged: Thu Sep 29 11:05:02 2005; per
diff --git a/NAMESPACE b/NAMESPACE
index 5ebdf40..63770f8 100755
--- a/NAMESPACE
+++ b/NAMESPACE
@@ -6,7 +6,8 @@ rmixture,rmultireg,rmultiregfp,rwishart,rmvst,rtrun,rbprobitGibbs,runireg,
 runiregGibbs,simmnl,simmnp,simmvp,simnhlogit,rmnpGibbs,rmixGibbs,rnmixGibbs,
 rmvpGibbs,rhierLinearModel,rhierMnlRwMixture,simmnlwX,rivGibbs,
 rmnlIndepMetrop,rscaleUsage,ghkvec,condMom,logMargDenNR,init.rmultiregfp,
-rhierBinLogit,rnegbinRw,rhierNegbinRw)
+rhierBinLogit,rnegbinRw,rhierNegbinRw,rbiNormGibbs,clusterMix,rsurGibbs,
+mixDenBi,mnpProb)
 
 
 
diff --git a/R/clusterMix.R b/R/clusterMix.R
new file mode 100755
index 0000000..c3ebab1
--- /dev/null
+++ b/R/clusterMix.R
@@ -0,0 +1,131 @@
+clusterMix=
+function(zdraw,cutoff=.9,SILENT=FALSE){
+#
+#
+# revision history:
+#   written by p. rossi 9/05
+#
+# purpose: cluster observations based on draws of indicators of 
+#   normal mixture components
+#
+# arguments:
+#   zdraw is a R x nobs matrix of draws of indicators (typically output from rnmixGibbs)
+#   the rth row of zdraw contains rth draw of indicators for each observation
+#   each element of zdraw takes on up to p values for up to p groups. The maximum
+#   number of groups is nobs.  Typically, however, the number of groups will be small
+#   and equal to the number of components used in the normal mixture fit.
+#
+#   cutoff is a cutoff used in determining one clustering scheme; it must be 
+#   a number between .5 and 1.
+#
+# output:
+#   two clustering schemes each with a vector of length nobs which gives the assignment
+#   of each observation to a cluster
+#
+#   clustera (finds zdraw with similarity matrix closest to posterior mean of similarity)
+#   clusterb (finds clustering scheme by assigning ones if posterior mean of similarity matrix
+#             > cutoff and computing associated z )
+#
+# define needed functions
+#
+# ------------------------------------------------------------------------------------------   
+
+ztoSim=function(z){
+#
+# function to convert indicator vector to Similarity matrix
+# Sim is n x n matrix, Sim[i,j]=1 if pair(i,j) are in same group
+# z is n x 1 vector of indicators (1,...,p)
+#
+# p.rossi 9/05
+#
+n=length(z)
+zvec=c(rep(z,n))
+zcomp=z%x%c(rep(1,n))
+Sim=as.numeric((zvec==zcomp))
+dim(Sim)=c(n,n)
+return(Sim)
+}
+Simtoz=function(Sim){
+#
+# function to convert Similarity matrix to indicator vector
+#  Sim is n x n matrix, Sim[i,j]=1 if pair(i,j) are in same group
+#  z is vector of indicators from (1,...,p) of group memberships (dim n)
+#
+#
+# p.rossi 9/05
+n=ncol(Sim)
+z=double(n)
+i=1
+groupn=1
+while (i <= n){
+  validind=z==0
+  if(sum(Sim[validind,i]==1)>=1) {
+     z[validind]=as.numeric(Sim[validind,i]==1)*groupn
+     groupn=groupn+1
+  }
+  i=i+1
+}
+return(z)
+} 
+# ----------------------------------------------------------------------------------------
+#
+# check arguments
+#
+pandterm=function(message) { stop(message,call.=FALSE) }
+if(missing(zdraw)) {pandterm("Requires zdraw argument -- R x n matrix of indicator draws")}
+#
+# check validity of zdraw rows -- must be integers in the range 1:nobs
+#
+nobs=ncol(zdraw)
+R=nrow(zdraw)
+if(sum(zdraw %in% (1:nobs)) < ncol(zdraw)*nrow(zdraw))
+   {pandterm("Bad zdraw argument -- all elements must be integers in 1:nobs")}
+cat("Table of zdraw values pooled over all rows",fill=TRUE)
+print(table(zdraw))
+#
+# check validity of cutoff
+if(cutoff > 1 || cutoff < .5) {pandterm(paste("cutoff invalid, = ",cutoff))}
+
+#
+# compute posterior mean of Similarity matrix
+#
+#
+if(!SILENT){
+   cat("Computing Posterior Expectation of Similarity Matrix",fill=TRUE)
+   cat("processing draws ...",fill=TRUE); fsh()
+}
+Pmean=matrix(0,nrow=nobs,ncol=nobs)
+R=nrow(zdraw)
+for (r in 1:R) {
+   Pmean=Pmean+ztoSim(zdraw[r,])
+   if(!SILENT) {if(r%%100 == 0) {cat("  ",r,fill=TRUE); fsh()}}
+}
+Pmean=Pmean/R
+
+#
+# now find index for draw which minimizes discrepancy between
+# post exp of similarity and sim implied by that z
+if(!SILENT){
+  cat(" ",fill=TRUE)
+  cat("Look for zdraw which minimizes loss",fill=TRUE)
+  cat("processing draws ...",fill=TRUE); fsh()
+}
+loss=double(R)
+for (r in 1:R){
+  loss[r]=sum(abs(Pmean-ztoSim(zdraw[r,]))) 
+  if(!SILENT) {if(r%%100 == 0) {cat("  ",r,fill=TRUE);fsh()}}
+}
+index=which(loss==min(loss))
+clustera=zdraw[index[1],]
+#
+# now do clustering by assigning Similarity to any (i,j) pair for which
+# Pmean > cutoff
+Sim=matrix(as.numeric(Pmean >= cutoff),ncol=nobs)
+clusterb=Simtoz(Sim)
+return(list(clustera=clustera,clusterb=clusterb))
+}
+   
+      
+
+
+
diff --git a/R/llmnl.R b/R/llmnl.R
index ef49256..7016a13 100755
--- a/R/llmnl.R
+++ b/R/llmnl.R
@@ -1,7 +1,8 @@
 llmnl= 
-function(y,X,beta) 
+function(beta,y,X) 
 {
 #    p. rossi 2004
+#    changed order of arguments to put beta first 9/05
 #
 # Purpose:evaluate log-like for MNL
 #
diff --git a/R/llmnp.R b/R/llmnp.R
index abc9c81..5fa9cd7 100755
--- a/R/llmnp.R
+++ b/R/llmnp.R
@@ -1,10 +1,11 @@
 llmnp=
-function(X,y,beta,Sigma,r) 
+function(beta,Sigma,X,y,r) 
 {
 #
 # revision history:
 #   edited by rossi 2/8/05
 #   adde 1.0e-50 before taking log to avoid -Inf 6/05
+#   changed order of arguments to put beta first 9/05
 #
 # purpose:
 #   function to evaluate MNP likelihood using GHK
diff --git a/R/mixDenBi.R b/R/mixDenBi.R
new file mode 100755
index 0000000..68802ae
--- /dev/null
+++ b/R/mixDenBi.R
@@ -0,0 +1,74 @@
+mixDenBi=
+function(i,j,xi,xj,pvec,comps) 
+{
+# Revision History:
+#   P. Rossi 6/05
+#
+# purpose: compute marg bivariate density implied by mixture of multivariate normals specified
+#			by pvec,comps
+#
+# arguments:
+#     i,j:  index of two variables
+#     xi specifies a grid of points for var i
+#     xj specifies a grid of points for var j
+#     pvec: prior probabilities of normal components
+#     comps: list, each member is a list comp with ith normal component ~ N(comp[[1]],Sigma), 
+#            Sigma = t(R)%*%R, R^{-1} = comp[[2]]
+# Output:
+#     matrix with values of density on grid
+#
+# ---------------------------------------------------------------------------------------------
+# define function needed
+#
+bivcomps=function(i,j,comps)
+{
+# purpose: obtain marginal means and standard deviations from list of normal components
+# arguments:
+#     i,j: index of elements for bivariate marginal
+#     comps: list, each member is a list comp with ith normal component ~N(comp[[1]],Sigma), 
+#            Sigma = t(R)%*%R, R^{-1} = comp[[2]]
+# returns:
+#  a list with relevant mean vectors and rooti for each component
+#  [[2]]=$sigma a matrix whose ith row is the standard deviations for the ith component
+#
+result=NULL
+nc = length(comps)
+dim = length(comps[[1]][[1]])
+ind=matrix(c(i,j,i,j,i,i,j,j),ncol=2)
+for(comp in 1:nc) {
+   mu = comps[[comp]][[1]][c(i,j)]
+   root= backsolve(comps[[comp]][[2]],diag(dim))
+   Sigma=crossprod(root)
+   sigma=matrix(Sigma[ind],ncol=2)
+   rooti=backsolve(chol(sigma),diag(2))
+   result[[comp]]=list(mu=mu,rooti=rooti)
+}
+return(result)
+}
+normden=
+function(x,mu,rooti)
+{
+#
+# function to evaluate MV Normal density with mean mu, var Sigma
+# and with sigma^-1=rooti%*%t(rooti)   
+# rooti is in the inverse of upper triangular chol root of sigma
+#          note: this is the UL decomp of sigmai not LU!
+#                Sigma=root'root   root=inv(rooti)
+#
+z=as.vector(t(rooti)%*%(x-mu))
+exp(-.5*length(x)*log(2*pi)-.5*(z%*%z) + sum(log(diag(rooti))))
+}
+# ----------------------------------------------------------------------------------------------
+nc = length(comps)
+marmoms=bivcomps(i,j,comps)
+den = matrix(0.0,nrow=length(xi),ncol=length(xj))
+for(indi in 1:length(xi)) {
+   for(indj in 1:length(xj)) {
+      for(comp in 1:nc) {
+          den[indi,indj] = den[indi,indj] + normden(c(xi[indi],xj[indj]),marmoms[[comp]]$mu,
+                                marmoms[[comp]]$rooti)*pvec[comp]
+      }
+    } 
+}
+return(den)
+}
diff --git a/R/mnlHess.R b/R/mnlHess.R
index a3cfb2c..6a225f9 100755
--- a/R/mnlHess.R
+++ b/R/mnlHess.R
@@ -1,14 +1,15 @@
 mnlHess =
-function(y,X,beta) 
+function(beta,y,X) 
 {
 #   p.rossi 2004
+#   changed argument order 9/05
 #
 # Purpose: compute mnl -Expected[Hessian]  
 #
 # Arguments:
+#   beta is k vector of coefs
 #   y is n vector with element = 1,...,j indicating which alt chosen
 #   X is nj x k matrix of xvalues for each of j alt on each of n occasions
-#   beta is k vector of coefs
 #
 # Output:  -Hess evaluated at beta
 #
diff --git a/R/llmnp.R b/R/mnpProb.R
similarity index 63%
copy from R/llmnp.R
copy to R/mnpProb.R
index abc9c81..3ceda5b 100755
--- a/R/llmnp.R
+++ b/R/mnpProb.R
@@ -1,23 +1,22 @@
-llmnp=
-function(X,y,beta,Sigma,r) 
+mnpProb=
+function(beta,Sigma,X,r=100) 
 {
 #
 # revision history:
-#   edited by rossi 2/8/05
-#   adde 1.0e-50 before taking log to avoid -Inf 6/05
+#  written by Rossi 9/05
 #
 # purpose:
-#   function to evaluate MNP likelihood using GHK
+#   function to compute MNP probabilities for a given X matrix (corresponding
+#   to "one" observation)
 #
 # arguments: 
-#   X is n*(p-1) x k array of covariates (including intercepts)
+#   X is p-1 x k array of covariates (including intercepts)
 #      note: X is from the "differenced" system
-#   y is vector of n indicators of multinomial response
 #   beta is k x 1  with k = ncol(X)
 #   Sigma is p-1 x p-1 
 #   r is the number of random draws to use in GHK
 #
-# output -- value of log-likelihood
+# output -- probabilities
 # for each observation w = Xbeta + e   e ~N(0,Sigma)
 #   if y=j (j<p)
 #      w_j > max(w_-j) and w_j >0
@@ -41,31 +40,25 @@ ghkvec = function(L,trunpt,above,r){
    .C('ghk_vec',as.integer(n),as.double(L),as.double(trunpt),as.integer(above),as.integer(dim),
    as.integer(r),res=double(n))$res}
 #   
-# compute means for each observation
-#
 pm1=ncol(Sigma)
 k=length(beta)
 mu=matrix(X%*%beta,nrow=pm1)
-logl=0.0
 above=rep(0,pm1)
+prob=double(pm1+1)
 for (j in 1:pm1) {
-   muj=mu[,y==j]
    Aj=-diag(pm1)
    Aj[,j]=rep(1,pm1)
-   trunpt=as.vector(-Aj%*%muj)
+   trunpt=as.vector(-Aj%*%mu)
    Lj=t(chol(Aj%*%Sigma%*%t(Aj)))
 #     note: rob's routine expects lower triangular root
-   logl=logl + sum(log(ghkvec(Lj,trunpt,above,r)+1.0e-50))
+   prob[j]=ghkvec(Lj,trunpt,above,r)
 #     note:  ghkvec does an entire vector of n probs each with different truncation points but the
 #            same cov matrix.  
 }
 #
-# now do obs for y=p
+# now do pth alternative
 #
-trunpt=as.vector(-mu[,y==(pm1+1)])
-Lj=t(chol(Sigma))
-above=rep(1,pm1)
-logl=logl+sum(log(ghkvec(Lj,trunpt,above,r)+1.0e-50))
-return(logl)
+prob[pm1+1]=1-sum(prob[1:pm1])
+return(prob)
 
 }
diff --git a/R/momMix.R b/R/momMix.R
index 039c2eb..e89d342 100755
--- a/R/momMix.R
+++ b/R/momMix.R
@@ -5,6 +5,7 @@ function(probdraw,compdraw)
 # Revision History:
 #   R. McCulloch 11/04
 #   P. Rossi 3/05  put in backsolve fixed documentation
+#   P. Rossi 9/05 fixed error in mom -- return var not sigma
 #
 # purpose: compute moments of normal mixture averaged over MCMC draws
 #
@@ -43,7 +44,7 @@ for(i in 1:nc) {
    sigma=t(root)%*%root
    var=var+prob[i]*sigma+prob[i]*(mui-mu)%o%(mui-mu)
 }
-list(mu=mu,sigma=sigma)
+list(mu=mu,sigma=var)
 }
 #---------------------------------------------------------------------------------------
 dim=length(compdraw[[1]][[1]][[1]])
diff --git a/R/rbiNormGibbs.R b/R/rbiNormGibbs.R
new file mode 100755
index 0000000..7607c33
--- /dev/null
+++ b/R/rbiNormGibbs.R
@@ -0,0 +1,116 @@
+rbiNormGibbs=function(initx=2,inity=-2,rho,burnin=100,R=500)
+{
+#
+# revision history:
+#     P. Rossi 1/05
+#
+# purpose:
+#    illustrate the function of bivariate normal gibbs sampler
+#
+# arguments:
+#   initx,inity  initial values for draw sequence
+#   rho  correlation
+#   burnin draws to be discarded in final paint
+#   R -- number of draws
+#
+# output:
+#   opens graph window and paints all moves and normal contours
+#   list containing draw matrix
+#
+# model:
+#  theta is bivariate normal with zero means, unit variances and correlation rho
+#
+# define needed functions
+#
+kernel=
+function(x,mu,rooti){
+# function to evaluate -.5*log of MV Normal density kernel with mean mu, var Sigma
+# and with sigma^-1=rooti%*%t(rooti)   
+# rooti is in the inverse of upper triangular chol root of sigma
+#          note: this is the UL decomp of sigmai not LU!
+#                Sigma=root'root   root=inv(rooti)
+z=as.vector(t(rooti)%*%(x-mu))
+(z%*%z)
+}
+#
+pandterm=function(message) {stop(message,call.=FALSE)}
+#
+# check input arguments
+#
+if(missing(rho)) {pandterm("Requires rho argument ")}
+#
+# print out settings
+#
+cat("Bivariate Normal Gibbs Sampler",fill=TRUE)
+cat("rho= ",rho,fill=TRUE)
+cat("initial x,y coordinates= (",initx,",",inity,")",fill=TRUE)
+cat("burn-in= ",burnin," R= ",R,fill=TRUE)
+cat(" ",fill=TRUE)
+cat(" ",fill=TRUE)
+
+sd=(1-rho**2)**(.5)
+sigma=matrix(c(1,rho,rho,1),ncol=2)
+rooti=backsolve(chol(sigma),diag(2))
+mu=c(0,0)
+
+x=seq(-3.5,3.5,length=100)
+y=x
+z=matrix(double(100*100),ncol=100)
+for (i in 1:length(x)) 
+{
+   for(j in 1:length(y))
+   {
+   z[i,j]=kernel(c(x[i],y[j]),mu,rooti)
+   }
+}
+prob=c(.1,.3,.5,.7,.9,.99)
+lev=qchisq(prob,2)
+
+
+par(mfrow=c(1,1))
+contour(x,y,z,levels=lev,labels=prob,
+   xlab="theta1",ylab="theta2",drawlabels=TRUE,col="green",labcex=1.3,lwd=2.0)
+title(paste("Gibbs Sampler with Intermediate Moves: Rho =",rho))
+
+points(initx,inity,pch="B",cex=1.5)
+
+oldx=initx
+oldy=inity
+continue="y"
+r=0
+draws=matrix(double(R*2),ncol=2)
+draws[1,]=c(initx,inity)
+cat(" ")
+cat("Starting Gibbs Sampler ....",fill=TRUE)
+cat("(hit enter or y to display moves one-at-a-time)",fill=TRUE)
+cat("('go' to paint all moves without stopping to prompt)",fill=TRUE)
+cat(" ",fill=TRUE)
+while(continue != "n"&& r < R)
+{
+  if(continue != "go") continue=readline("cont?")
+  newy=sd*rnorm(1) + rho*oldx
+  lines(c(oldx,oldx),c(oldy,newy),col="magenta",lwd=1.5)
+  newx=sd*rnorm(1)+rho*newy
+  lines(c(oldx,newx),c(newy,newy),col="magenta",lwd=1.5)	
+  oldy=newy
+  oldx=newx
+  r=r+1
+  draws[r,]=c(newx,newy)
+}
+continue=readline("Show Comparison to iid Sampler?")
+if(continue != "n" & continue != "No" & continue != "no"){
+   par(mfrow=c(1,2))
+   contour(x,y,z,levels=lev,
+      xlab="theta1",ylab="theta2",drawlabels=TRUE,labels=prob,labcex=1.1,col="green",lwd=2.0)
+   title(paste("Gibbs Draws: Rho =",rho))
+   points(draws[(burnin+1):R,],pch=20,col="magenta",cex=.7)
+
+   idraws=t(chol(sigma))%*%matrix(rnorm(2*(R-burnin)),nrow=2)
+   idraws=t(idraws)
+   contour(x,y,z,levels=lev,
+      xlab="theta1",ylab="theta2",drawlabels=TRUE,labels=prob,labcex=1.1,col="green",lwd=2.0)
+   title(paste("IID draws: Rho =",rho))
+   points(idraws,pch=20,col="magenta",cex=.7)
+}
+return(draws)
+}
diff --git a/R/rhierMnlRwMixture.R b/R/rhierMnlRwMixture.R
index f4f4995..b2e9794 100755
--- a/R/rhierMnlRwMixture.R
+++ b/R/rhierMnlRwMixture.R
@@ -5,6 +5,7 @@ function(Data,Prior,Mcmc)
 # revision history:
 #   changed 12/17/04 by rossi to fix bug in drawdelta when there is zero/one unit
 #   in a mixture component
+#   added loglike output, changed to reflect new argument order in llmnl, mnlHess 9/05
 #
 # purpose: run hierarchical mnl logit model with mixture of normals 
 #   using RW and cov(RW inc) = (hess_i + Vbeta^-1)^-1
@@ -32,6 +33,7 @@ function(Data,Prior,Mcmc)
 #   probdraw is R/keep x ncomp matrix of draws of probs of mixture components
 #   compdraw is a list of list of lists (length R/keep)
 #      compdraw[[rep]] is the repth draw of components for mixtures
+#   loglike  log-likelihood at each kept draw
 #
 # Priors:
 #    beta_i = D %*% z[i,] + u_i
@@ -175,6 +177,7 @@ betadraw=array(double((floor(R/keep))*nlgt*nvar),dim=c(nlgt,nvar,floor(R/keep)))
 probdraw=matrix(double((floor(R/keep))*ncomp),ncol=ncomp)
 oldbetas=matrix(double(nlgt*nvar),ncol=nvar)
 oldll=double(nlgt)
+loglike=double(floor(R/keep))
 oldcomp=NULL
 compdraw=NULL
 #--------------------------------------------------------------------------------------------------
@@ -184,7 +187,7 @@ compdraw=NULL
 llmnlFract=
 function(beta,y,X,betapooled,rootH,w){
 z=as.vector(rootH%*%(beta-betapooled))
-llmnl(y,X,beta)+w*(-.5*(z%*%z))
+llmnl(beta,y,X)+w*(-.5*(z%*%z))
 }
 
 mnlRwMetropOnce=
@@ -200,7 +203,7 @@ function(y,X,oldbeta,oldll,s,inc.root,betabar,rootpi){
 # oldbeta is the current
      stay=0
      betac=oldbeta + s*t(inc.root)%*%(matrix(rnorm(ncol(X)),ncol=1))
-     cll=llmnl(y,X,betac)
+     cll=llmnl(betac,y,X)
      clpost=cll+lndMvn(betac,betabar,rootpi)
      ldiff=clpost-oldll-lndMvn(oldbeta,betabar,rootpi)
      alpha=min(1,exp(ldiff))
@@ -261,7 +264,7 @@ betainit=c(rep(0,nvar))
 out=optim(betainit,llmnl,method="BFGS",control=list( fnscale=-1,trace=0,reltol=1e-6), 
      X=Xpooled,y=ypooled)
 betapooled=out$par
-H=mnlHess(ypooled,Xpooled,betapooled)
+H=mnlHess(betapooled,ypooled,Xpooled)
 rootH=chol(H)
 for (i in 1:nlgt) 
 {
@@ -269,7 +272,7 @@ for (i in 1:nlgt)
    out=optim(betapooled,llmnlFract,method="BFGS",control=list( fnscale=-1,trace=0,reltol=1e-4), 
    X=lgtdata[[i]]$X,y=lgtdata[[i]]$y,betapooled=betapooled,rootH=rootH,w=w)
    if(out$convergence == 0) 
-     { hess=mnlHess(lgtdata[[i]]$y,lgtdata[[i]]$X,out$par)
+     { hess=mnlHess(out$par,lgtdata[[i]]$y,lgtdata[[i]]$X)
        lgtdata[[i]]=c(lgtdata[[i]],list(converge=1,betafmle=out$par,hess=hess)) }
    else
      { lgtdata[[i]]=c(lgtdata[[i]],list(converge=0,betafmle=c(rep(0,nvar)),
@@ -336,7 +339,7 @@ for(rep in 1:R)
          else {
             betabar=oldcomp[[ind[lgt]]]$mu }
          if (rep == 1) 
-            { oldll[lgt]=llmnl(lgtdata[[lgt]]$y,lgtdata[[lgt]]$X,oldbetas[lgt,])}  
+            { oldll[lgt]=llmnl(oldbetas[lgt,],lgtdata[[lgt]]$y,lgtdata[[lgt]]$X)}  
          #   compute inc.root
          inc.root=chol(chol2inv(chol(lgtdata[[lgt]]$hess+rootpi%*%t(rootpi))))
          metropout=mnlRwMetropOnce(lgtdata[[lgt]]$y,lgtdata[[lgt]]$X,oldbetas[lgt,],
@@ -360,12 +363,15 @@ for(rep in 1:R)
    if((mkeep*keep) == (floor(mkeep)*keep))
       { betadraw[,,mkeep]=oldbetas 
         probdraw[mkeep,]=oldprob
+        loglike[mkeep]=sum(oldll)
         if(drawdelta) Deltadraw[mkeep,]=olddelta
         compdraw[[mkeep]]=oldcomp }
         
 }
 ctime=proc.time()[3]
 cat(" Total Time Elapsed: ",round((ctime-itime)/60,2),fill=TRUE)
-return(if(drawdelta) {list(Deltadraw=Deltadraw,betadraw=betadraw,probdraw=probdraw,compdraw=compdraw)} 
-else {list(betadraw=betadraw,probdraw=probdraw,compdraw=compdraw)})
+return(if(drawdelta) 
+   {list(Deltadraw=Deltadraw,betadraw=betadraw,probdraw=probdraw,compdraw=compdraw,loglike=loglike)} 
+else 
+   {list(betadraw=betadraw,probdraw=probdraw,compdraw=compdraw,loglike=loglike)})
 }
diff --git a/R/rhierNegbinRw.R b/R/rhierNegbinRw.R
index 6e44821..9050b14 100755
--- a/R/rhierNegbinRw.R
+++ b/R/rhierNegbinRw.R
@@ -4,6 +4,7 @@ function(Data, Prior, Mcmc) {
 #   Revision History
 #	  Sridhar Narayanan - 05/2005
 #         P. Rossi 6/05
+#         fixed error with nobs not specified and changed llnegbinFract 9/05
 #
 #   Model
 #       (y_i|lambda_i,alpha) ~ Negative Binomial(Mean = lambda_i, Overdispersion par = alpha)
@@ -45,7 +46,7 @@ function(Data, Prior, Mcmc) {
 #
 
 llnegbin = 
-function(par,X,y, nvar, nreg) {
+function(par,X,y, nvar) {
 # Computes the log-likelihood
     beta = par[1:nvar]
     alpha = exp(par[nvar+1])
@@ -53,11 +54,11 @@ function(par,X,y, nvar, nreg) {
 }
 
 llnegbinFract = 
-function(par,X,y,Xpooled, ypooled, power, nvar, nreg) {
+function(par,X,y,Xpooled, ypooled, power, nvar,lnalpha)  {
 # Computes the fractional log-likelihood at the unit level
 #    = l_i * l_bar^power
-    par = c(par,mle$par[nvar+1])
-    llnegbin(par,X,y,nvar,nreg) + power*llnegbin(par,Xpooled,ypooled, nvar, nreg) 
+    theta = c(par,lnalpha)
+    llnegbin(theta,X,y,nvar) + power*llnegbin(theta,Xpooled,ypooled, nvar) 
 }
 
 lpostbetai = 
@@ -124,6 +125,7 @@ for (i in 1:nreg) {
     ypooled = c(ypooled,regdata[[i]]$y)
     Xpooled = rbind(Xpooled,regdata[[i]]$X)
 }
+nobs= length(ypooled)
 
 nvar=ncol(Xpooled)
 #
@@ -165,7 +167,7 @@ if(is.null(Mcmc$s_beta)) {cat("Using default s_beta = 2.93/sqrt(nvar)",fill=TRUE
 if(is.null(Mcmc$c)) {cat("Using default c = 2"); c=2} 
     else {c = Mcmc$c}
 
-#
+#
 # print out problem
 #
 cat(" ",fill=TRUE)
@@ -197,7 +199,8 @@ cat(" ",fill=TRUE)
 par = rep(0,(nvar+1))
 cat("initializing Metropolis candidate densities for ",nreg,"units ...",fill=TRUE)
 fsh()
-mle = optim(par,llnegbin, X=Xpooled, y=ypooled, nvar=nvar, nreg=nreg, method="L-BFGS-B", upper=c(Inf,Inf,Inf,log(100000000)), hessian=TRUE, control=list(fnscale=-1))
+mle = optim(par,llnegbin, X=Xpooled, y=ypooled, nvar=nvar, 
+      method="L-BFGS-B", upper=c(Inf,Inf,Inf,log(100000000)), hessian=TRUE, control=list(fnscale=-1))
 fsh()
 beta_mle=mle$par[1:nvar]
 alpha_mle = exp(mle$par[nvar+1])
@@ -217,7 +220,9 @@ hess_i=NULL
 # Find the individual candidate hessian
 for (i in 1:nreg) {
     power = length(regdata[[i]]$y)/(c*nobs)
-    mle2 = optim(mle$par[1:nvar],llnegbinFract, X=regdata[[i]]$X, y=regdata[[i]]$y, Xpooled=Xpooled, ypooled=ypooled, power=power, nvar=nvar, nreg=nreg, method="BFGS", hessian=TRUE, control=list(fnscale=-1, trace=0))
+    mle2 = optim(mle$par[1:nvar],llnegbinFract, X=regdata[[i]]$X, y=regdata[[i]]$y, Xpooled=Xpooled, 
+           ypooled=ypooled, power=power, nvar=nvar, lnalpha=mle$par[nvar+1], 
+           method="BFGS", hessian=TRUE, control=list(fnscale=-1, trace=0))
     if (mle2$convergence==0)
         hess_i[[i]] = list(hess=-mle2$hessian)
     else
diff --git a/R/rmixGibbs.R b/R/rmixGibbs.R
index af4f416..cdb753e 100755
--- a/R/rmixGibbs.R
+++ b/R/rmixGibbs.R
@@ -11,6 +11,9 @@ function(y,Bbar,A,nu,V,a,p,z,comps)
 # arguments:
 #     y: data, rows are observations, assumed to be iid draws from normal mixture
 #     Bbar,A,nu,V: common prior for mean and variance of each normal component
+#
+#     note: Bbar should be a matrix. usually with only one row
+#
 #        beta ~ N(betabar,Sigma (x) A^-1)
 #                   betabar=vec(Bbar)
 #          Sigma ~ IW(nu,V) or Sigma^-1 ~ W(nu, V^-1)
diff --git a/R/rmnlIndepMetrop.R b/R/rmnlIndepMetrop.R
index 313c978..effe9e2 100755
--- a/R/rmnlIndepMetrop.R
+++ b/R/rmnlIndepMetrop.R
@@ -5,6 +5,7 @@ function(Data,Prior,Mcmc)
 # revision history:
 #   p. rossi 1/05
 #   2/9/05 fixed error in Metrop eval
+#   changed to reflect new argument order in llmnl,mnlHess 9/05
 #
 # purpose: 
 #   draw from posterior for MNL using Independence Metropolis
@@ -46,6 +47,9 @@ nobs=length(y)
 # check data for validity
 #
 if(length(y) != (nrow(X)/p) ) {pandterm("length(y) ne nrow(X)/p")}
+if(sum(y %in% (1:p)) < nobs) {pandterm("invalid values in y vector -- must be integers in 1:p")}
+cat(" table of y values",fill=TRUE)
+print(table(y))
 #
 # check for Prior
 #
@@ -101,7 +105,7 @@ beta=c(rep(0,nvar))
 mle=optim(beta,llmnl,X=X,y=y,method="BFGS",hessian=TRUE,control=list(fnscale=-1))
 beta=mle$par
 betastar=mle$par
-mhess=mnlHess(y,X,beta)
+mhess=mnlHess(beta,y,X)
 candcov=chol2inv(chol(mhess))
 root=chol(candcov)
 rooti=backsolve(root,diag(nvar))
@@ -116,7 +120,7 @@ itime=proc.time()[3]
 cat("MCMC Iteration (est time to end - min) ",fill=TRUE)
 fsh()
 
-oldlpost=llmnl(y,X,beta)+lndMvn(beta,betabar,rootpi)
+oldlpost=llmnl(beta,y,X)+lndMvn(beta,betabar,rootpi)
 oldlimp=lndMvst(beta,nu,betastar,rooti)
 #       note: we don't need the determinants as they cancel in
 #       computation of acceptance prob
@@ -125,7 +129,7 @@ naccept=0
 for (rep in 1:R) 
 {
    betac=rmvst(nu,betastar,root)
-   clpost=llmnl(y,X,betac)+lndMvn(betac,betabar,rootpi)
+   clpost=llmnl(betac,y,X)+lndMvn(betac,betabar,rootpi)
    climp=lndMvst(betac,nu,betastar,rooti)
    ldiff=clpost+oldlimp-oldlpost-climp
    alpha=min(1,exp(ldiff))
diff --git a/R/rnmixGibbs.R b/R/rnmixGibbs.R
index 5d464d6..07eff16 100755
--- a/R/rnmixGibbs.R
+++ b/R/rnmixGibbs.R
@@ -4,6 +4,8 @@ function(Data,Prior,Mcmc)
 #
 # Revision History: 
 #   P. Rossi 3/05
+#   add check to see if Mubar is a vector  9/05
+#   fixed bug in saving comps draw compdraw[[mkeep]]=  9/05
 #
 # purpose: do Gibbs sampling inference for a mixture of multivariate normals
 #
@@ -50,13 +52,13 @@ dimy=ncol(y)
 #
 # check for Prior
 #
-if(missing(Prior)) {pandterm("requires Prior argument <min of Prior$ncomp>")}
+if(missing(Prior)) {pandterm("requires Prior argument ")}
 else
    {
     if(is.null(Prior$ncomp)) {pandterm("requires number of mix comps -- Prior$ncomp")}
        else {ncomp=Prior$ncomp}
     if(is.null(Prior$Mubar)) {Mubar=matrix(rep(0,dimy),nrow=1)} 
-       else {Mubar=Prior$Mubar}
+       else {Mubar=Prior$Mubar; if(is.vector(Mubar)) {Mubar=matrix(Mubar,nrow=1)}}
     if(is.null(Prior$A)) {A=matrix(c(.01),ncol=1)} 
        else {A=Prior$A}
     if(is.null(Prior$nu)) {nu=dimy+2} 
@@ -153,7 +155,7 @@ for(rep in 1:R)
       mkeep=rep/keep
       pdraw[mkeep,]=p
       zdraw[mkeep,]=z
-      compdraw[[rep]]=compsd
+      compdraw[[mkeep]]=compsd
       }
 }
 return(list(probdraw=pdraw,zdraw=zdraw,compdraw=compdraw))
diff --git a/R/rsurGibbs.R b/R/rsurGibbs.R
new file mode 100755
index 0000000..666cc84
--- /dev/null
+++ b/R/rsurGibbs.R
@@ -0,0 +1,183 @@
+rsurGibbs=
+function(Data,Prior,Mcmc)
+{
+# 
+# revision history:
+#          P. Rossi 9/05
+# Purpose:
+#   implement Gibbs Sampler for SUR
+# 
+# Arguments:
+#   Data -- regdata
+#           regdata is a list of lists of data for each regression
+#           regdata[[i]] contains data for regression equation i
+#           regdata[[i]]$y is y, regdata[[i]]$X is X
+#           note: each regression can have differing numbers of X vars
+#                 but you must have same no of obs in each equation. 
+#   Prior -- list of prior hyperparameters
+#     betabar,A      prior mean, prior precision
+#     nu, V          prior on Sigma
+#   Mcmc -- list of MCMC parms
+#     R number of draws
+#     keep -- thinning parameter
+# 
+# Output: 
+#   list of betadraw,Sigmadraw
+#
+# Model:
+#   y_i = X_ibeta + e_i  
+#          y is nobs x 1
+#          X is nobs x k_i
+#          beta is k_i x 1 vector of coefficients
+#          i=1,nreg total regressions
+#
+#         (e_{1,k},...,e_{nreg,k}) ~ N(0,Sigma)  k=1,...,nobs
+#
+#   we can also write as stacked regression
+#   y = Xbeta+e
+#       y is nobs*nreg x 1,X is nobs*nreg x (sum(k_i))
+#   routine draws beta -- the stacked vector of all coefficients
+#
+# Priors:  beta ~ N(betabar,A^-1)
+#          Sigma ~ IW(nu,V)
+# 
+#
+# check arguments
+#
+pandterm=function(message) {stop(message,call.=FALSE)}
+if(missing(Data)) {pandterm("Requires Data argument -- list of regdata")}
+    if(is.null(Data$regdata)) {pandterm("Requires Data element regdata")}
+    regdata=Data$regdata
+#
+# check regdata for validity
+#
+nreg=length(regdata)
+nobs=length(regdata[[1]]$y)
+nvar=0
+indreg=double(nreg+1)
+y=NULL
+for (reg in 1:nreg) {
+   if(length(regdata[[reg]]$y) != nobs || nrow(regdata[[reg]]$X) != nobs)
+      {pandterm(paste("incorrect dimensions for regression",reg))}
+   else
+      {indreg[reg]=nvar+1
+       nvar=nvar+ncol(regdata[[reg]]$X); y=c(y,regdata[[reg]]$y)}
+} 
+indreg[nreg+1]=nvar+1
+#
+# check for Prior
+#
+if(missing(Prior))
+   { betabar=c(rep(0,nvar)); A=.01*diag(nvar); nu=nreg+3; V=nu*diag(nreg)}
+else
+   {
+    if(is.null(Prior$betabar)) {betabar=c(rep(0,nvar))} 
+       else {betabar=Prior$betabar}
+    if(is.null(Prior$A)) {A=.01*diag(nvar)} 
+       else {A=Prior$A}
+    if(is.null(Prior$nu)) {nu=nreg+3}
+       else {nu=Prior$nu}
+    if(is.null(Prior$V)) {V=nu*diag(nreg)}
+       else {V=Prior$V}
+   }
+#
+# check dimensions of Priors
+#
+if(ncol(A) != nrow(A) || ncol(A) != nvar || nrow(A) != nvar) 
+   {pandterm(paste("bad dimensions for A",dim(A)))}
+if(length(betabar) != nvar)
+   {pandterm(paste("betabar wrong length, length= ",length(betabar)))}
+#
+# check MCMC argument
+#
+if(missing(Mcmc)) {pandterm("requires Mcmc argument")}
+else
+   {
+    if(is.null(Mcmc$R)) 
+       {pandterm("requires Mcmc element R")} else {R=Mcmc$R}
+    if(is.null(Mcmc$keep)) {keep=1} else {keep=Mcmc$keep}
+   }
+#
+# print out problem
+#
+cat(" ", fill=TRUE)
+cat("Starting Gibbs Sampler for SUR Regression Model",fill=TRUE)
+cat("  with ",nreg," regressions",fill=TRUE)
+cat("  and  ",nobs," observations for each regression",fill=TRUE)
+cat(" ", fill=TRUE)
+cat("Prior Parms: ",fill=TRUE)
+cat("betabar",fill=TRUE)
+print(betabar)
+cat("A",fill=TRUE)
+print(A)
+cat("nu = ",nu,fill=TRUE)
+cat("V = ",fill=TRUE)
+print(V)
+cat(" ", fill=TRUE)
+cat("MCMC parms: ",fill=TRUE)
+cat("R= ",R," keep= ",keep,fill=TRUE)
+cat(" ",fill=TRUE)
+
+Sigmadraw=matrix(double(floor(R*nreg*nreg/keep)),ncol=nreg*nreg)
+betadraw=matrix(double(floor(R*nvar/keep)),ncol=nvar)
+
+
+#
+# set initial value of Sigma
+#
+E=matrix(double(nobs*nreg),ncol=nreg)
+for (reg in 1:nreg) {
+    E[,reg]=lm(y~.-1,data=data.frame(y=regdata[[reg]]$y,regdata[[reg]]$X))$residuals
+}
+Sigma=crossprod(E)/nobs
+L=t(backsolve(chol(Sigma),diag(nreg)))
+Y=y
+dim(Y)=c(nobs,nreg)
+Xti=matrix(0,ncol=nvar,nrow=nreg*nobs)
+
+itime=proc.time()[3]
+cat("MCMC Iteration (est time to end - min) ",fill=TRUE)
+fsh()
+
+for (rep in 1:R)
+{
+#
+#     first draw beta | Sigma
+#
+# compute Xtilde
+#
+  for (reg in 1:nreg){
+     Xti[,indreg[reg]:(indreg[reg+1]-1)]=L[,reg]%x%regdata[[reg]]$X
+  }
+  IR=backsolve(chol(crossprod(Xti)+A),diag(nvar))
+#
+# compute ytilde
+  yti=as.vector(Y%*%t(L))
+  btilde=crossprod(t(IR))%*%(crossprod(Xti,yti)+A%*%betabar)
+  beta = btilde + IR%*%rnorm(nvar)
+#
+#    now draw Sigma | beta
+#
+  for(reg in 1:nreg){
+     E[,reg]=regdata[[reg]]$y-regdata[[reg]]$X%*%beta[indreg[reg]:(indreg[reg+1]-1)]
+  }
+  Sigma=rwishart(nu+nobs,chol2inv(chol(crossprod(E)+V)))$IW
+  L=t(backsolve(chol(Sigma),diag(nreg)))
+#
+#       print time to completion and draw # every 100th draw
+#
+  if(rep%%100 == 0)
+    {ctime=proc.time()[3]
+    timetoend=((ctime-itime)/rep)*(R-rep)
+    cat(" ",rep," (",round(timetoend/60,1),")",fill=TRUE)
+    fsh()}
+
+  if(rep%%keep == 0) 
+    {mkeep=rep/keep; betadraw[mkeep,]=beta; Sigmadraw[mkeep,]=Sigma}
+}
+ctime = proc.time()[3]
+cat('  Total Time Elapsed: ',round((ctime-itime)/60,2),'\n')
+
+
+return(list(betadraw=betadraw,Sigmadraw=Sigmadraw))
+}
diff --git a/R/runireg.R b/R/runireg.R
index 8651ff5..5f26e61 100755
--- a/R/runireg.R
+++ b/R/runireg.R
@@ -1,35 +1,118 @@
 runireg=
-function(y,X,betabar,A,nu,ssq){
-#   
-#    revision history:
-#      p. rossi 1/11/05 -- fix error in sum of squares
-#  
-# purpose: 
-#          draw from posterior for a univariate regression model
-#          with natural conjugate prior
-#
-# arguments: 
-#      y is n x 1
-#      X is n x k
-#      betabar is prior mean 
-#      A is prior precision
-#      nu, ssq are parameters of prior on sigmasq
-# output:
-#      list of beta, sigmasq draws
-#         beta is k x 1 vector of coefficients
-# model:
-#      Y=Xbeta+e  var(e_i) = sigmasq
-#      priors:  beta| sigmasq ~ N(betabar,sigmasq*A^-1)
-#                sigmasq ~ (nu*ssq)/chisq_nu
-n=length(y)
-k=ncol(X)
+function(Data,Prior,Mcmc)
+{
+# 
+# revision history:
+#          P. Rossi 1/17/05
+#          revised 9/05 to put in Data,Prior,Mcmc calling convention
+# Purpose:
+#   perform iid draws from posterior of regression model using
+#     conjugate prior
+# 
+# Arguments:
+#   Data -- list of data 
+#           y,X
+#   Prior -- list of prior hyperparameters
+#     betabar,A      prior mean, prior precision
+#     nu, ssq        prior on sigmasq
+#   Mcmc -- list of MCMC parms
+#     R number of draws
+#     keep -- thinning parameter
+# 
+# Output: 
+#   list of beta, sigmasq
+#
+# Model:
+#   y = Xbeta + e  e ~N(0,sigmasq)
+#          y is n x 1
+#          X is n x k
+#          beta is k x 1 vector of coefficients
+#
+# Priors:  beta ~ N(betabar,sigmasq*A^-1)
+#          sigmasq ~ (nu*ssq)/chisq_nu
+# 
+#
+# check arguments
+#
+pandterm=function(message) {stop(message,call.=FALSE)}
+if(missing(Data)) {pandterm("Requires Data argument -- list of y and X")}
+    if(is.null(Data$X)) {pandterm("Requires Data element X")}
+    X=Data$X
+    if(is.null(Data$y)) {pandterm("Requires Data element y")}
+    y=Data$y
+nvar=ncol(X)
+nobs=length(y)
+#
+# check data for validity
+#
+if(nobs != nrow(X) ) {pandterm("length(y) ne nrow(X)")}
+#
+# check for Prior
+#
+if(missing(Prior))
+   { betabar=c(rep(0,nvar)); A=.01*diag(nvar); nu=3; ssq=var(y)}
+else
+   {
+    if(is.null(Prior$betabar)) {betabar=c(rep(0,nvar))} 
+       else {betabar=Prior$betabar}
+    if(is.null(Prior$A)) {A=.01*diag(nvar)} 
+       else {A=Prior$A}
+    if(is.null(Prior$nu)) {nu=3}
+       else {nu=Prior$nu}
+    if(is.null(Prior$ssq)) {ssq=var(y)}
+       else {ssq=Prior$ssq}
+   }
+#
+# check dimensions of Priors
+#
+if(ncol(A) != nrow(A) || ncol(A) != nvar || nrow(A) != nvar) 
+   {pandterm(paste("bad dimensions for A",dim(A)))}
+if(length(betabar) != nvar)
+   {pandterm(paste("betabar wrong length, length= ",length(betabar)))}
+#
+# check MCMC argument
+#
+if(missing(Mcmc)) {pandterm("requires Mcmc argument")}
+else
+   {
+    if(is.null(Mcmc$R)) 
+       {pandterm("requires Mcmc element R")} else {R=Mcmc$R}
+    if(is.null(Mcmc$keep)) {keep=1} else {keep=Mcmc$keep}
+   }
+#
+# print out problem
+#
+cat(" ", fill=TRUE)
+cat("Starting IID Sampler for Univariate Regression Model",fill=TRUE)
+cat("  with ",nobs," observations",fill=TRUE)
+cat(" ", fill=TRUE)
+cat("Prior Parms: ",fill=TRUE)
+cat("betabar",fill=TRUE)
+print(betabar)
+cat("A",fill=TRUE)
+print(A)
+cat("nu = ",nu," ssq= ",ssq,fill=TRUE)
+cat(" ", fill=TRUE)
+cat("MCMC parms: ",fill=TRUE)
+cat("R= ",R," keep= ",keep,fill=TRUE)
+cat(" ",fill=TRUE)
+
+sigmasqdraw=double(floor(Mcmc$R/keep))
+betadraw=matrix(double(floor(Mcmc$R*nvar/keep)),ncol=nvar)
+
+itime=proc.time()[3]
+cat("IID Iteration (est time to end - min) ",fill=TRUE)
+fsh()
+
+for (rep in 1:Mcmc$R){
+
 #
 # first draw Sigma
 #
 RA=chol(A)
 W=rbind(X,RA)
 z=c(y,as.vector(RA%*%betabar))
-IR=backsolve(chol(crossprod(W)),diag(k))
+IR=backsolve(chol(crossprod(W)),diag(nvar))
 #      W'W=R'R ;  (W'W)^-1 = IR IR'  -- this is UL decomp
 btilde=crossprod(t(IR))%*%crossprod(W,z)
 res=z-W%*%btilde
@@ -38,10 +121,24 @@ s=t(res)%*%res
 # first draw Sigma
 #
 #
-sigmasq=(nu*ssq + s)/rchisq(1,nu+n)
+sigmasq=(nu*ssq + s)/rchisq(1,nu+nobs)
 #
 # now draw beta given Sigma
 #	
-beta = btilde + as.vector(sqrt(sigmasq))*IR%*%rnorm(k)
-list(beta=beta,sigmasq=sigmasq)
+beta = btilde + as.vector(sqrt(sigmasq))*IR%*%rnorm(nvar)
+#
+#       print time to completion and draw # every 100th draw
+#
+  if(rep%%100 == 0)
+    {ctime=proc.time()[3]
+    timetoend=((ctime-itime)/rep)*(R-rep)
+    cat(" ",rep," (",round(timetoend/60,1),")",fill=TRUE)
+    fsh()}
+
+  if(rep%%keep == 0) 
+    {mkeep=rep/keep; betadraw[mkeep,]=beta; sigmasqdraw[mkeep]=sigmasq}
+}
+ctime = proc.time()[3]
+cat('  Total Time Elapsed: ',round((ctime-itime)/60,2),'\n')
+return(list(betadraw=betadraw,sigmasqdraw=sigmasqdraw))
 }
diff --git a/R/runiregGibbs.R b/R/runiregGibbs.R
index 5bf6e94..83d09ff 100755
--- a/R/runiregGibbs.R
+++ b/R/runiregGibbs.R
@@ -50,7 +50,7 @@ if(nobs != nrow(X) ) {pandterm("length(y) ne nrow(X)")}
 # check for Prior
 #
 if(missing(Prior))
-   { betabar=c(rep(0,nvar)); A=.01*diag(nvar)}
+   { betabar=c(rep(0,nvar)); A=.01*diag(nvar); nu=3; ssq=var(y)}
 else
    {
     if(is.null(Prior$betabar)) {betabar=c(rep(0,nvar))} 
diff --git a/inst/doc/bayesm-manual.pdf b/inst/doc/bayesm-manual.pdf
index bfcc87b..15557ff 100755
Binary files a/inst/doc/bayesm-manual.pdf and b/inst/doc/bayesm-manual.pdf differ
diff --git a/man/bank.Rd b/man/bank.Rd
index f4ff8fc..23e64af 100755
--- a/man/bank.Rd
+++ b/man/bank.Rd
@@ -53,7 +53,7 @@
   Markets," \emph{JMR}, 392-403.
 }
 \references{ Appendix A, \emph{Bayesian Statistics and Marketing}
-  by Allenby, McCulloch, and Rossi. \cr
+  by Rossi,Allenby and McCulloch. \cr
   \url{http://gsbwww.uchicago.edu/fac/peter.rossi/research/bsm.html}
 }
 \examples{
diff --git a/man/breg.Rd b/man/breg.Rd
index 793951f..7ca09f3 100755
--- a/man/breg.Rd
+++ b/man/breg.Rd
@@ -31,7 +31,7 @@ breg(y, X, betabar, A)
 }
 
 \references{ For further discussion, see \emph{Bayesian Statistics and Marketing}
-  by Allenby, McCulloch, and Rossi. \cr
+  by Rossi,Allenby and McCulloch. \cr
   \url{http://gsbwww.uchicago.edu/fac/peter.rossi/research/bsm.html}
 }
 
@@ -42,6 +42,9 @@ breg(y, X, betabar, A)
 \section{Warning}{
   This routine is a utility routine that does \strong{not} check the
   input arguments for proper dimensions and type.
+
+  In particular, X must be a matrix. If you have a vector for X, coerce it
+  into a matrix with one column
 }
 
 \examples{
diff --git a/man/clusterMix.Rd b/man/clusterMix.Rd
new file mode 100755
index 0000000..7aee099
--- /dev/null
+++ b/man/clusterMix.Rd
@@ -0,0 +1,88 @@
+\name{clusterMix}
+\alias{clusterMix}
+\concept{normal mixture}
+\concept{clustering}
+\title{ Cluster Observations Based on Indicator MCMC Draws }
+\description{
+  \code{clusterMix} uses MCMC draws of indicator variables from a normal
+  component mixture model to cluster observations based on a similarity matrix.
+}
+\usage{
+clusterMix(zdraw, cutoff = 0.9, SILENT = FALSE)
+}
+\arguments{
+  \item{zdraw}{ R x nobs array of draws of indicators }
+  \item{cutoff}{ cutoff probability for similarity  (def=.9)}
+  \item{SILENT}{ logical flag for silent operation (def= FALSE) }
+}
+\details{
+
+   define a similarity matrix, Sim, Sim[i,j]=1 if observations i and j are in same component.
+   Compute the posterior mean of Sim over indicator draws.
+
+   clustering is achieved by two means:
+
+   Method A:
+   Find the indicator draw whose similarity matrix minimizes, loss(E[Sim]-Sim(z)),  
+   where loss is absolute deviation.
+
+   Method B:
+   Define a Similarity matrix by setting any element of E[Sim] = 1 if E[Sim] > cutoff.
+   Compute the clustering scheme associated with this "winsorized" Similarity matrix.
+}
+\value{
+  \item{clustera}{indicator function for clustering based on method A above}
+  \item{clusterb}{indicator function for clustering based on method B above}
+}
+\references{ For further discussion, see \emph{Bayesian Statistics and Marketing}
+  by Rossi, Allenby and McCulloch Chapter 3. \cr
+  \url{http://gsbwww.uchicago.edu/fac/peter.rossi/research/bsm.html}
+}
+
+\author{ Peter Rossi, Graduate School of Business, University of Chicago
+  \email{Peter.Rossi at ChicagoGsb.edu}.
+}
+
+\section{Warning}{
+  This routine is a utility routine that does \strong{not} check the
+  input arguments for proper dimensions and type.
+}
+\seealso{ \code{\link{rnmixGibbs}}  }
+
+\keyword{ models }
+\keyword{ multivariate }
+\examples{
+##
+if(nchar(Sys.getenv("LONG_TEST")) != 0) 
+{
+## simulate data from mixture of normals
+n=500
+pvec=c(.5,.5)
+mu1=c(2,2)
+mu2=c(-2,-2)
+Sigma1=matrix(c(1,.5,.5,1),ncol=2)
+Sigma2=matrix(c(1,.5,.5,1),ncol=2)
+comps=NULL
+comps[[1]]=list(mu1,backsolve(chol(Sigma1),diag(2)))
+comps[[2]]=list(mu2,backsolve(chol(Sigma2),diag(2)))
+dm=rmixture(n,pvec,comps)
+## run MCMC on normal mixture
+R=2000
+Data=list(y=dm$x)
+ncomp=2
+Prior=list(ncomp=ncomp,a=c(rep(100,ncomp)))
+Mcmc=list(R=R,keep=1)
+out=rnmixGibbs(Data=Data,Prior=Prior,Mcmc=Mcmc)
+begin=500
+end=R
+## find clusters
+outclusterMix=clusterMix(out$zdraw[begin:end,])
+##
+## check on clustering versus "truth"
+##  note: there could be switched labels
+##
+table(outclusterMix$clustera,dm$z)
+table(outclusterMix$clusterb,dm$z)
+}
+##
+}
diff --git a/man/condMom.Rd b/man/condMom.Rd
index 84621d2..656857c 100755
--- a/man/condMom.Rd
+++ b/man/condMom.Rd
@@ -29,7 +29,7 @@ condMom(x, mu, sigi, i)
   \item{cvar }{ cond variance}
 }
 \references{ For further discussion, see \emph{Bayesian Statistics and Marketing}
-  by Allenby, McCulloch, and Rossi. \cr
+  by Rossi, Allenby and McCulloch. \cr
   \url{http://gsbwww.uchicago.edu/fac/peter.rossi/research/bsm.html}
 }
 
diff --git a/man/createX.Rd b/man/createX.Rd
index e3a53c6..e34db70 100755
--- a/man/createX.Rd
+++ b/man/createX.Rd
@@ -19,7 +19,7 @@ createX(p, na, nd, Xa, Xd, INT = TRUE, DIFF = FALSE, base = p)
 \arguments{
   \item{p}{ integer - number of choice alternatives }
   \item{na}{ integer - number of alternative-specific vars in Xa  }
-  \item{nd}{ integer - number of non-alternive specific vars }
+  \item{nd}{ integer - number of non-alternative specific vars }
   \item{Xa}{ n x p*na matrix of alternative-specific vars }
   \item{Xd}{ n x nd matrix of non-alternative specific vars }
   \item{INT}{ logical flag for inclusion of intercepts }
@@ -33,7 +33,7 @@ createX(p, na, nd, Xa, Xd, INT = TRUE, DIFF = FALSE, base = p)
 }
 
 \references{ For further discussion, see \emph{Bayesian Statistics and Marketing}
-  by Allenby, McCulloch, and Rossi. \cr
+  by Rossi, Allenby and McCulloch. \cr
   \url{http://gsbwww.uchicago.edu/fac/peter.rossi/research/bsm.html}
 }
 
diff --git a/man/eMixMargDen.Rd b/man/eMixMargDen.Rd
index d49c819..1337063 100755
--- a/man/eMixMargDen.Rd
+++ b/man/eMixMargDen.Rd
@@ -33,7 +33,7 @@ eMixMargDen(grid, probdraw, compdraw)
   an array of the same dimension as grid with density values.
 }
 \references{ For further discussion, see \emph{Bayesian Statistics and Marketing}
-  by Allenby, McCulloch, and Rossi. \cr
+  by Rossi, Allenby and McCulloch. \cr
   \url{http://gsbwww.uchicago.edu/fac/peter.rossi/research/bsm.html}
 }
 
diff --git a/man/ghkvec.Rd b/man/ghkvec.Rd
index 651a09e..f77c1bf 100755
--- a/man/ghkvec.Rd
+++ b/man/ghkvec.Rd
@@ -1,6 +1,7 @@
 \name{ghkvec}
 \alias{ghkvec}
 \concept{multivariate normal distribution}
+\concept{GHK method}
 \concept{integral}
 
 \title{ Compute GHK approximation to Multivariate Normal Integrals }
@@ -28,7 +29,7 @@ ghkvec(L, trunpt, above, r)
   example below for an example with two integrals at different truncation points.
 }
 \references{ For further discussion, see \emph{Bayesian Statistics and Marketing}
-  by Allenby, McCulloch, and Rossi, Chapter 3. \cr
+  by Rossi,Allenby and McCulloch,  Chapter 2. \cr
   \url{http://gsbwww.uchicago.edu/fac/peter.rossi/research/bsm.html}
 }
 
diff --git a/man/init.rmultiregfp.Rd b/man/init.rmultiregfp.Rd
index c51bc7d..5067b22 100755
--- a/man/init.rmultiregfp.Rd
+++ b/man/init.rmultiregfp.Rd
@@ -32,7 +32,7 @@ init.rmultiregfp(X, A, Bbar, nu, V)
   \item{V}{location matrix for IWishart prior}
 }
 \references{ For further discussion, see \emph{Bayesian Statistics and Marketing}
-  by Allenby, McCulloch, and Rossi, Chapter 2. \cr
+  by Rossi, Allenby and McCulloch, Chapter 2. \cr
   \url{http://gsbwww.uchicago.edu/fac/peter.rossi/research/bsm.html}
 }
 
diff --git a/man/llmnl.Rd b/man/llmnl.Rd
index 702a360..6daae28 100755
--- a/man/llmnl.Rd
+++ b/man/llmnl.Rd
@@ -8,13 +8,13 @@
   \code{llmnl} evaluates log-likelihood for the multinomial logit model.
 }
 \usage{
-llmnl(y, X, beta)
+llmnl(beta,y, X)
 }
 
 \arguments{
+  \item{beta}{ k x 1 coefficient vector }
   \item{y}{ n x 1 vector of obs on y (1,\ldots, p) }
   \item{X}{ n*p x k Design matrix (use \code{createX} to make) }
-  \item{beta}{ k x 1 coefficient vector }
 }
 \details{
   Let \eqn{mu_i=X_i \beta}, then \eqn{Pr(y_i=j) = exp(mu_{i,j})/\sum_kexp(mu_{i,k})}.\cr
@@ -27,7 +27,7 @@ llmnl(y, X, beta)
   value of log-likelihood (sum of log prob of observed multinomial outcomes).
 }
 \references{ For further discussion, see \emph{Bayesian Statistics and Marketing}
-  by Allenby, McCulloch, and Rossi. \cr
+  by Rossi, Allenby and McCulloch. \cr
   \url{http://gsbwww.uchicago.edu/fac/peter.rossi/research/bsm.html}
 }
 
@@ -44,7 +44,7 @@ llmnl(y, X, beta)
 
 \examples{
 ##
-\dontrun{ll=llmnl(y,X,beta)}
+\dontrun{ll=llmnl(beta,y,X)}
 }
 
 \keyword{ models }
diff --git a/man/llmnp.Rd b/man/llmnp.Rd
index abc7f4a..58324d4 100755
--- a/man/llmnp.Rd
+++ b/man/llmnp.Rd
@@ -1,6 +1,7 @@
 \name{llmnp}
 \alias{llmnp}
 \concept{multinomial probit}
+\concept{GHK method}
 \concept{likelihood}
 
 \title{ Evaluate Log Likelihood for Multinomial Probit Model  }
@@ -9,14 +10,14 @@
 }
 
 \usage{
-llmnp(X, y, beta, Sigma, r)
+llmnp(beta, Sigma, X, y, r)
 }
 
 \arguments{
-  \item{X}{ X is n*(p-1) x k array. X is from differenced system. }
-  \item{y}{ y is vector of n indicators of multinomial response (1, \ldots, p). }
   \item{beta}{ k x 1 vector of coefficients }
   \item{Sigma}{ (p-1) x (p-1) Covariance matrix of errors }
+  \item{X}{ X is n*(p-1) x k array. X is from differenced system. }
+  \item{y}{ y is vector of n indicators of multinomial response (1, \ldots, p). }
   \item{r}{ number of draws used in GHK }
 }
 
@@ -43,7 +44,7 @@ llmnp(X, y, beta, Sigma, r)
   value of log-likelihood (sum of log prob of observed multinomial outcomes).
 }
 \references{ For further discussion, see \emph{Bayesian Statistics and Marketing}
-  by Allenby, McCulloch, and Rossi, Chapters 2 and 4. \cr
+  by Rossi, Allenby and McCulloch, Chapters 2 and 4. \cr
   \url{http://gsbwww.uchicago.edu/fac/peter.rossi/research/bsm.html}
 }
 
@@ -60,7 +61,7 @@ llmnp(X, y, beta, Sigma, r)
 
 \examples{
 ##
-\dontrun{ll=llmnp(X,y,beta,Sigma,r)}
+\dontrun{ll=llmnp(beta,Sigma,X,y,r)}
 }
 
 \keyword{ models }
diff --git a/man/llnhlogit.Rd b/man/llnhlogit.Rd
index 6d1b0aa..15320f3 100755
--- a/man/llnhlogit.Rd
+++ b/man/llnhlogit.Rd
@@ -34,7 +34,7 @@ llnhlogit(theta, choice, lnprices, Xexpend)
 }
 
 \references{ For further discussion, see \emph{Bayesian Statistics and Marketing}
-  by Allenby, McCulloch, and Rossi,Chapter 4. \cr
+  by Rossi, Allenby and McCulloch,Chapter 4. \cr
   \url{http://gsbwww.uchicago.edu/fac/peter.rossi/research/bsm.html}
 }
 
diff --git a/man/lndIChisq.Rd b/man/lndIChisq.Rd
index e60be72..1644249 100755
--- a/man/lndIChisq.Rd
+++ b/man/lndIChisq.Rd
@@ -23,7 +23,7 @@ lndIChisq(nu, ssq, x)
   log density value
 }
 \references{ For further discussion, see \emph{Bayesian Statistics and Marketing}
-  by Allenby, McCulloch, and Rossi, Chapter 2. \cr
+  by Rossi, Allenby and McCulloch, Chapter 2. \cr
   \url{http://gsbwww.uchicago.edu/fac/peter.rossi/research/bsm.html}
 }
 
diff --git a/man/lndIWishart.Rd b/man/lndIWishart.Rd
index 7461505..538e8b4 100755
--- a/man/lndIWishart.Rd
+++ b/man/lndIWishart.Rd
@@ -23,7 +23,7 @@ lndIWishart(nu, S, IW)
   log density value
 }
 \references{ For further discussion, see \emph{Bayesian Statistics and Marketing}
-  by Allenby, McCulloch, and Rossi, Chapter 2. \cr
+  by Rossi, Allenby and McCulloch, Chapter 2. \cr
   \url{http://gsbwww.uchicago.edu/fac/peter.rossi/research/bsm.html}
 }
 
diff --git a/man/lndMvn.Rd b/man/lndMvn.Rd
index d74c40c..ddfc49e 100755
--- a/man/lndMvn.Rd
+++ b/man/lndMvn.Rd
@@ -26,7 +26,7 @@ lndMvn(x, mu, rooti)
   log density value
 }
 \references{ For further discussion, see \emph{Bayesian Statistics and Marketing}
-  by Allenby, McCulloch, and Rossi, Chapter 2. \cr
+  by Rossi, Allenby and McCulloch, Chapter 2. \cr
   \url{http://gsbwww.uchicago.edu/fac/peter.rossi/research/bsm.html}
 }
 
diff --git a/man/lndMvst.Rd b/man/lndMvst.Rd
index 4fe2feb..b51d9f7 100755
--- a/man/lndMvst.Rd
+++ b/man/lndMvst.Rd
@@ -27,7 +27,7 @@ lndMvst(x, nu, mu, rooti)
   log density value
 }
 \references{ For further discussion, see \emph{Bayesian Statistics and Marketing}
-  by Allenby, McCulloch, and Rossi, Chapter 2. \cr
+  by Rossi, Allenby and McCulloch, Chapter 2. \cr
   \url{http://gsbwww.uchicago.edu/fac/peter.rossi/research/bsm.html}
 }
 
diff --git a/man/logMargDenNR.Rd b/man/logMargDenNR.Rd
index 5d3893c..3b6a5fe 100755
--- a/man/logMargDenNR.Rd
+++ b/man/logMargDenNR.Rd
@@ -21,7 +21,7 @@ logMargDenNR(ll)
   approximation to log marginal density value.
 }
 \references{ For further discussion, see \emph{Bayesian Statistics and Marketing}
-  by Allenby, McCulloch, and Rossi, Chapter 6. \cr
+  by Rossi, Allenby and McCulloch, Chapter 6. \cr
   \url{http://gsbwww.uchicago.edu/fac/peter.rossi/research/bsm.html}
 }
 \author{ Peter Rossi, Graduate School of Business, University of Chicago,
diff --git a/man/mixDen.Rd b/man/mixDen.Rd
index d215120..048c978 100755
--- a/man/mixDen.Rd
+++ b/man/mixDen.Rd
@@ -27,7 +27,7 @@ mixDen(x, pvec, comps)
   an array of the same dimension as grid with density values.
 }
 \references{ For further discussion, see \emph{Bayesian Statistics and Marketing}
-  by Allenby, McCulloch, and Rossi, Chapter 5. \cr
+  by Rossi, Allenby and McCulloch, Chapter 3. \cr
   \url{http://gsbwww.uchicago.edu/fac/peter.rossi/research/bsm.html}
 }
 
@@ -42,5 +42,13 @@ mixDen(x, pvec, comps)
 
 \seealso{ \code{\link{rnmixGibbs}}  }
 
+\examples{
+\dontrun{
+##
+##  see examples in rnmixGibbs documentation
+##
+}
+}
+
 \keyword{ models }
 \keyword{ multivariate }
diff --git a/man/mixDenBi.Rd b/man/mixDenBi.Rd
new file mode 100755
index 0000000..6b47a34
--- /dev/null
+++ b/man/mixDenBi.Rd
@@ -0,0 +1,58 @@
+\name{mixDenBi}
+\alias{mixDenBi}
+\concept{normal mixture}
+\concept{marginal distribution}
+\concept{density}
+
+\title{ Compute Bivariate Marginal Density for a Normal Mixture }
+\description{
+  \code{mixDenBi} computes the implied bivariate marginal density from a mixture of
+  normals with specified mixture probabilities and component parameters. 
+}
+\usage{
+mixDenBi(i, j, xi, xj, pvec, comps)
+}
+\arguments{
+  \item{i}{ index of first variable }
+  \item{j}{ index of second variable }
+  \item{xi}{ grid of values of first variable }
+  \item{xj}{ grid of values of second variable }
+  \item{pvec}{ normal mixture probabilities }
+  \item{comps}{ list of lists of components }
+}
+\details{
+  length(comps) is the number of mixture components.  comps[[j]] is a list of
+  parameters of the jth component. comps[[j]]\$mu is mean vector; comps[[j]]\$rooti
+  is the UL decomp of \eqn{Sigma^{-1}}.
+}
+
+\value{
+  an array (length(xi)=length(xj) x 2) with density value
+}
+\references{ For further discussion, see \emph{Bayesian Statistics and Marketing}
+  by Rossi, Allenby and McCulloch, Chapter 3. \cr
+  \url{http://gsbwww.uchicago.edu/fac/peter.rossi/research/bsm.html}
+}
+
+\author{ Peter Rossi, Graduate School of Business, University of Chicago
+  \email{Peter.Rossi at ChicagoGsb.edu}.
+}
+
+\section{Warning}{
+  This routine is a utility routine that does \strong{not} check the
+  input arguments for proper dimensions and type.
+}
+
+\seealso{ \code{\link{rnmixGibbs}}, \code{\link{mixDen}}  }
+
+\examples{
+\dontrun{
+##
+##  see examples in rnmixGibbs documentation
+##
+}
+}
+
+\keyword{ models }
+\keyword{ multivariate }
+
diff --git a/man/mnlHess.Rd b/man/mnlHess.Rd
index 84674d0..f09b43e 100755
--- a/man/mnlHess.Rd
+++ b/man/mnlHess.Rd
@@ -9,12 +9,12 @@
   \code{mnlHess} computes -Expected[Hessian] for Multinomial Logit Model
 }
 \usage{
-mnlHess(y, X, beta)
+mnlHess(beta,y, X)
 }
 \arguments{
+  \item{beta}{ k x 1 vector of coefficients }
   \item{y}{ n x 1 vector of choices, (1, \ldots,p) }
   \item{X}{ n*p x k Design matrix }
-  \item{beta}{ k x 1 vector of coefficients }
 }
 \details{
   See \code{\link{llmnl}} for information on structure of X array.  Use \code{\link{createX}} to make X.
@@ -23,7 +23,7 @@ mnlHess(y, X, beta)
   k x k matrix
 }
 \references{ For further discussion, see \emph{Bayesian Statistics and Marketing}
-  by Allenby, McCulloch, and Rossi, Chapter 3. \cr
+  by Rossi, Allenby and McCulloch, Chapter 3. \cr
   \url{http://gsbwww.uchicago.edu/fac/peter.rossi/research/bsm.html}
 }
 
@@ -39,6 +39,6 @@ mnlHess(y, X, beta)
 \seealso{ \code{\link{llmnl}}, \code{\link{createX}}, \code{\link{rmnlIndepMetrop}} }
 \examples{
 ##
-\dontrun{mnlHess(y,X,beta)}
+\dontrun{mnlHess(beta,y,X)}
 }
 \keyword{ models }
diff --git a/man/mnpProb.Rd b/man/mnpProb.Rd
new file mode 100755
index 0000000..cb27d53
--- /dev/null
+++ b/man/mnpProb.Rd
@@ -0,0 +1,50 @@
+\name{mnpProb}
+\alias{mnpProb}
+\concept{MNP}
+\concept{Multinomial Probit Model}
+\concept{GHK}
+\concept{market share simulator}
+\title{ Compute MNP Probabilities }
+\description{
+  \code{mnpProb} computes MNP probabilities for a given X matrix corresponding to one 
+   observation.  This function can be used with output from \code{rmnpGibbs} to simulate
+   the posterior distribution of market shares or fitted probabilities.
+}
+\usage{
+mnpProb(beta, Sigma, X, r)
+}
+\arguments{
+  \item{beta}{ MNP coefficients }
+  \item{Sigma}{ Covariance matrix of latents }
+  \item{X}{ X array for one observation -- use \code{createX} to make }
+  \item{r}{ number of draws used in GHK (def: 100)}
+}
+\details{
+  see \code{\link{rmnpGibbs}} for definition of the model and the interpretation of
+  the beta, Sigma parameters. Uses the GHK method to compute choice probabilities.
+  To simulate a distribution of probabilities, loop over the beta, Sigma draws from
+  \code{rmnpGibbs} output.
+}
+\value{
+ p x 1 vector of choice probabilities 
+}
+\references{ For further discussion, see \emph{Bayesian Statistics and Marketing}
+  by Rossi,Allenby and McCulloch,  Chapters 2 and 4. \cr
+  \url{http://gsbwww.uchicago.edu/fac/peter.rossi/research/bsm.html}
+}
+
+\author{ Peter Rossi, Graduate School of Business, University of Chicago,
+  \email{Peter.Rossi at ChicagoGsb.edu}.
+}
+\seealso{ \code{\link{rmnpGibbs}}, \code{\link{createX}} }
+\examples{
+##
+## example of computing MNP probabilities
+##  here I'm thinking of Xa as having the prices of each of the 3 alternatives
+Xa=matrix(c(1,.5,1.5),nrow=1)
+X=createX(p=3,na=1,nd=NULL,Xa=Xa,Xd=NULL,DIFF=TRUE)
+beta=c(1,-1,-2)  ## beta contains two intercepts and the price coefficient
+Sigma=matrix(c(1,.5,.5,1),ncol=2)
+mnpProb(beta,Sigma,X)
+}
+\keyword{ models }
diff --git a/man/momMix.Rd b/man/momMix.Rd
index e8f382d..f6b67a9 100755
--- a/man/momMix.Rd
+++ b/man/momMix.Rd
@@ -34,7 +34,7 @@ momMix(probdraw, compdraw)
 }
 
 \references{ For further discussion, see \emph{Bayesian Statistics and Marketing}
-  by Allenby, McCulloch, and Rossi, Chapter 5. \cr
+  by Rossi, Allenby and McCulloch, Chapter 5. \cr
   \url{http://gsbwww.uchicago.edu/fac/peter.rossi/research/bsm.html}
 }
 
diff --git a/man/numEff.Rd b/man/numEff.Rd
index fd08f58..bb61103 100755
--- a/man/numEff.Rd
+++ b/man/numEff.Rd
@@ -26,7 +26,7 @@ numEff(x, m = as.integer(min(length(x), (100/sqrt(5000)) * sqrt(length(x)))))
 }
 
 \references{ For further discussion, see \emph{Bayesian Statistics and Marketing}
-  by Allenby, McCulloch, and Rossi, Chapter 3. \cr
+  by Rossi, Allenby and McCulloch, Chapter 3. \cr
   \url{http://gsbwww.uchicago.edu/fac/peter.rossi/research/bsm.html}
 }
 
diff --git a/man/rbiNormGibbs.Rd b/man/rbiNormGibbs.Rd
new file mode 100755
index 0000000..d60a12e
--- /dev/null
+++ b/man/rbiNormGibbs.Rd
@@ -0,0 +1,40 @@
+\name{rbiNormGibbs}
+\alias{rbiNormGibbs}
+\concept{bayes}
+\concept{Gibbs Sampling}
+\concept{MCMC}
+\concept{normal distribution}
+\title{ Illustrate Bivariate Normal Gibbs Sampler }
+\description{
+  \code{rbiNormGibbs} implements a Gibbs Sampler for the bivariate normal distribution.
+  Intermediate moves are shown and the output is contrasted with the iid sampler.
+  This function is designed for illustrative/teaching purposes.
+}
+\usage{
+rbiNormGibbs(initx = 2, inity = -2, rho, burnin = 100, R = 500)
+}
+\arguments{
+  \item{initx}{ initial value of parameter on x axis (def: 2) }
+  \item{inity}{initial value of parameter on y axis (def: -2)  }
+  \item{rho}{ correlation for bivariate normals }
+  \item{burnin}{burn-in number of draws (def:100)  }
+  \item{R}{ number of MCMC draws (def:500) }
+}
+\details{
+  (theta1,theta2) ~ N((0,0), Sigma=matrix(c(1,rho,rho,1),ncol=2))
+}
+\value{
+ R x 2 array of draws
+}
+\references{ For further discussion, see \emph{Bayesian Statistics and Marketing}
+  by Rossi, Allenby and McCulloch, Chapters 2 and 3. \cr
+  \url{http://gsbwww.uchicago.edu/fac/peter.rossi/research/bsm.html}
+}
+\author{ Peter Rossi, Graduate School of Business, University of Chicago,
+  \email{Peter.Rossi at ChicagoGsb.edu}.
+}
+\examples{
+##
+\dontrun{ out=rbiNormGibbs(rho=.95) }
+}
+\keyword{ distribution}
diff --git a/man/rbprobitGibbs.Rd b/man/rbprobitGibbs.Rd
index 34f2cae..a50aa3e 100755
--- a/man/rbprobitGibbs.Rd
+++ b/man/rbprobitGibbs.Rd
@@ -41,7 +41,7 @@ rbprobitGibbs(Data, Prior, Mcmc)
 }
 
 \references{ For further discussion, see \emph{Bayesian Statistics and Marketing}
-  by Allenby, McCulloch, and Rossi, Chapter 3. \cr
+  by Rossi, Allenby and McCulloch, Chapter 3. \cr
   \url{http://gsbwww.uchicago.edu/fac/peter.rossi/research/bsm.html}
 }
 
diff --git a/man/rdirichlet.Rd b/man/rdirichlet.Rd
index c2b2e8b..8998322 100755
--- a/man/rdirichlet.Rd
+++ b/man/rdirichlet.Rd
@@ -18,7 +18,7 @@ rdirichlet(alpha)
   Vector of draws from Dirichlet
 }
 \references{ For further discussion, see \emph{Bayesian Statistics and Marketing}
-  by Allenby, McCulloch, and Rossi. \cr
+  by Rossi, Allenby and McCulloch, Chapter 2. \cr
   \url{http://gsbwww.uchicago.edu/fac/peter.rossi/research/bsm.html}
 }
 
diff --git a/man/rhierBinLogit.Rd b/man/rhierBinLogit.Rd
index 4c2fe40..a1ee6f9 100755
--- a/man/rhierBinLogit.Rd
+++ b/man/rhierBinLogit.Rd
@@ -5,7 +5,7 @@
 \concept{hierarchical models}
 \concept{binary logit}
 
-\title{ MCMC Algorithm for Hierachical Binary Logit }
+\title{ MCMC Algorithm for Hierarchical Binary Logit }
 \description{
   \code{rhierBinLogit} implements an MCMC algorithm for hierarchical binary logits with
   a normal heterogeneity distribution.  This is a hybrid sampler with a RW Metropolis step
@@ -64,7 +64,7 @@ rhierBinLogit(Data, Prior, Mcmc)
 \note{ Some experimentation with the Metropolis scaling parameter (sbeta) may be required. }
 
 \references{ For further discussion, see \emph{Bayesian Statistics and Marketing}
-  by Allenby, McCulloch, and Rossi, Chapter 5. \cr
+  by Rossi, Allenby and McCulloch, Chapter 5. \cr
   \url{http://gsbwww.uchicago.edu/fac/peter.rossi/research/bsm.html}
 }
 
diff --git a/man/rhierLinearModel.Rd b/man/rhierLinearModel.Rd
index fbbc82f..659c87c 100755
--- a/man/rhierLinearModel.Rd
+++ b/man/rhierLinearModel.Rd
@@ -56,7 +56,7 @@ rhierLinearModel(Data, Prior, Mcmc)
   \item{Vbetadraw}{R/keep x nvar*nvar array of Vbeta draws}
 }
 \references{ For further discussion, see \emph{Bayesian Statistics and Marketing}
-  by Allenby, McCulloch, and Rossi, Chapter 3. \cr
+  by Rossi, Allenby and McCulloch, Chapter 3. \cr
   \url{http://gsbwww.uchicago.edu/fac/peter.rossi/research/bsm.html}
 }
 
diff --git a/man/rhierMnlRwMixture.Rd b/man/rhierMnlRwMixture.Rd
index 7a2c414..a0f1dac 100755
--- a/man/rhierMnlRwMixture.Rd
+++ b/man/rhierMnlRwMixture.Rd
@@ -63,6 +63,7 @@ rhierMnlRwMixture(Data, Prior, Mcmc)
   \item{betadraw}{ nlgt x nvar x R/keep array of draws of betas}
   \item{probdraw}{ R/keep x ncomp matrix of draws of probs of mixture components (pvec)}
   \item{compdraw}{ list of list of lists (length R/keep)}
+  \item{loglike}{ log-likelihood for each kept draw (length R/keep)}
 }
 \note{
   More on \code{compdraw} component of return value list: \cr
@@ -82,7 +83,7 @@ rhierMnlRwMixture(Data, Prior, Mcmc)
 
 } 
 \references{ For further discussion, see \emph{Bayesian Statistics and Marketing}
-  by Allenby, McCulloch, and Rossi, Chapter 5. \cr
+  by Rossi, Allenby and McCulloch, Chapter 5. \cr
   \url{http://gsbwww.uchicago.edu/fac/peter.rossi/research/bsm.html}
 }
 
diff --git a/man/rhierNegbinRw.Rd b/man/rhierNegbinRw.Rd
index 999b466..da33cc8 100755
--- a/man/rhierNegbinRw.Rd
+++ b/man/rhierNegbinRw.Rd
@@ -72,7 +72,7 @@ rhierNegbinRw(Data, Prior, Mcmc)
 
 \seealso{ \code{\link{rnegbinRw}} }
 \references{ For further discussion, see \emph{Bayesian Statistics and Marketing}
-  by Allenby, McCulloch, and Rossi, Chapter 5. \cr
+  by Rossi, Allenby and McCulloch, Chapter 5. \cr
   \url{http://gsbwww.uchicago.edu/fac/peter.rossi/research/bsm.html}
 }
 
diff --git a/man/rivGibbs.Rd b/man/rivGibbs.Rd
index 133dc34..bc8da32 100755
--- a/man/rivGibbs.Rd
+++ b/man/rivGibbs.Rd
@@ -52,6 +52,14 @@ rivGibbs(Data, Prior, Mcmc)
   \item{gammadraw}{R/keep x dim(gamma) array of gamma draws }
   \item{Sigmadraw}{R/keep x 4 array of Sigma draws}
 }
+\references{ For further discussion, see \emph{Bayesian Statistics and Marketing}
+  by Rossi, Allenby and McCulloch, Chapter 5. \cr
+  \url{http://gsbwww.uchicago.edu/fac/peter.rossi/research/bsm.html}
+}
+
+\author{ Rob McCulloch and Peter Rossi, Graduate School of Business, University of Chicago,
+  \email{Peter.Rossi at ChicagoGsb.edu}.
+}
 \examples{
 ##
 if(nchar(Sys.getenv("LONG_TEST")) != 0) {R=2000} else {R=10}
diff --git a/man/rmixGibbs.Rd b/man/rmixGibbs.Rd
index 3398f8f..fdaadcc 100755
--- a/man/rmixGibbs.Rd
+++ b/man/rmixGibbs.Rd
@@ -23,7 +23,7 @@ rmixGibbs(y, Bbar, A, nu, V, a, p, z, comps)
 }
 \value{
   a list containing:
-  \item{p}{draw mixture probilities }
+  \item{p}{draw mixture probabilities }
   \item{z}{draw of indicators of each component}
   \item{comps}{new draw of normal component parameters }
 }
diff --git a/man/rmnlIndepMetrop.Rd b/man/rmnlIndepMetrop.Rd
index 70161a1..6e0e423 100755
--- a/man/rmnlIndepMetrop.Rd
+++ b/man/rmnlIndepMetrop.Rd
@@ -25,7 +25,7 @@ rmnlIndepMetrop(Data, Prior, Mcmc)
   \itemize{
     \item{\code{p}}{number of alternatives}
     \item{\code{y}}{ nobs vector of multinomial outcomes (1,\ldots, p)}
-    \item{\code{X}}{nobs*m x nvar matrix}
+    \item{\code{X}}{nobs*p x nvar matrix}
     \item{\code{A}}{ nvar x nvar pds prior prec matrix (def: .01I)}
     \item{\code{betabar}}{ nvar x 1 prior mean (def: 0)}
     \item{\code{R}}{ number of MCMC draws}
@@ -39,7 +39,7 @@ rmnlIndepMetrop(Data, Prior, Mcmc)
   \item{acceptr}{acceptance rate of Metropolis draws}
 }
 \references{ For further discussion, see \emph{Bayesian Statistics and Marketing}
-  by Allenby, McCulloch, and Rossi, Chapter 5. \cr
+  by Rossi, Allenby and McCulloch, Chapter 5. \cr
   \url{http://gsbwww.uchicago.edu/fac/peter.rossi/research/bsm.html}
 }
 \author{ Peter Rossi, Graduate School of Business, University of Chicago,
diff --git a/man/rmnpGibbs.Rd b/man/rmnpGibbs.Rd
index d4f14f4..c9f7cd4 100755
--- a/man/rmnpGibbs.Rd
+++ b/man/rmnpGibbs.Rd
@@ -60,7 +60,7 @@ rmnpGibbs(Data, Prior, Mcmc)
 }
 
 \references{ For further discussion, see \emph{Bayesian Statistics and Marketing}
-  by Allenby, McCulloch, and Rossi, Chapter 4. \cr
+  by Rossi, Allenby and McCulloch, Chapter 4. \cr
   \url{http://gsbwww.uchicago.edu/fac/peter.rossi/research/bsm.html}
 }
 
diff --git a/man/rmultireg.Rd b/man/rmultireg.Rd
index 0404947..6e98eab 100755
--- a/man/rmultireg.Rd
+++ b/man/rmultireg.Rd
@@ -35,7 +35,7 @@ rmultireg(Y, X, Bbar, A, nu, V)
   \item{Sigma }{ draw of Sigma }
 }
 \references{ For further discussion, see \emph{Bayesian Statistics and Marketing}
-  by Allenby, McCulloch, and Rossi. \cr
+  by Rossi, Allenby and McCulloch, Chapter 2. \cr
   \url{http://gsbwww.uchicago.edu/fac/peter.rossi/research/bsm.html}
 }
 
diff --git a/man/rmultiregfp.Rd b/man/rmultiregfp.Rd
index 5fc5ea5..f975747 100755
--- a/man/rmultiregfp.Rd
+++ b/man/rmultiregfp.Rd
@@ -31,7 +31,7 @@ rmultiregfp(Y, X, Fparm)
   \item{Sigma }{ draw of \eqn{Sigma} }
 }
 \references{ For further discussion, see \emph{Bayesian Statistics and Marketing}
-  by Allenby, McCulloch, and Rossi. \cr
+  by Rossi, Allenby and McCulloch. \cr
   \url{http://gsbwww.uchicago.edu/fac/peter.rossi/research/bsm.html}
 }
 
diff --git a/man/rmvpGibbs.Rd b/man/rmvpGibbs.Rd
index ceddebd..3218f39 100755
--- a/man/rmvpGibbs.Rd
+++ b/man/rmvpGibbs.Rd
@@ -60,7 +60,7 @@ rmvpGibbs(Data, Prior, Mcmc)
 } 
 
 \references{ For further discussion, see \emph{Bayesian Statistics and Marketing}
-  by Allenby, McCulloch, and Rossi, Chapter 4. \cr
+  by Rossi, Allenby and McCulloch, Chapter 4. \cr
   \url{http://gsbwww.uchicago.edu/fac/peter.rossi/research/bsm.html}
 }
 
diff --git a/man/rmvst.Rd b/man/rmvst.Rd
index ce0b788..9d40e80 100755
--- a/man/rmvst.Rd
+++ b/man/rmvst.Rd
@@ -20,7 +20,7 @@ rmvst(nu, mu, root)
   length(mu) draw vector
 }
 \references{ For further discussion, see \emph{Bayesian Statistics and Marketing}
-  by Allenby, McCulloch, and Rossi. \cr
+  by Rossi, Allenby and McCulloch. \cr
   \url{http://gsbwww.uchicago.edu/fac/peter.rossi/research/bsm.html}
 }
 
diff --git a/man/rnegbinRw.Rd b/man/rnegbinRw.Rd
index 643f4d0..5d0bb0b 100755
--- a/man/rnegbinRw.Rd
+++ b/man/rnegbinRw.Rd
@@ -60,7 +60,7 @@ rnegbinRw(Data, Prior, Mcmc)
 \seealso{ \code{\link{rhierNegbinRw}} }
 
 \references{ For further discussion, see \emph{Bayesian Statistics and Marketing}
-  by Allenby, McCulloch, and Rossi. \cr
+  by Rossi, Allenby and McCulloch. \cr
   \url{http://gsbwww.uchicago.edu/fac/peter.rossi/research/bsm.html}
 }
 
diff --git a/man/rnmixGibbs.Rd b/man/rnmixGibbs.Rd
index d6375c5..4004569 100755
--- a/man/rnmixGibbs.Rd
+++ b/man/rnmixGibbs.Rd
@@ -38,7 +38,7 @@ rnmixGibbs(Data, Prior, Mcmc)
   List arguments contain:
   \itemize{
     \item{y}{ n x k array of data (rows are obs) }
-    \item{Mubar}{ k x 1 array with prior mean of normal comp means (def: 0)}
+    \item{Mubar}{ 1 x k array with prior mean of normal comp means (def: 0)}
     \item{A}{ 1 x 1 precision parameter for prior on mean of normal comp (def: .01)}
     \item{nu}{ d.f. parameter for prior on Sigma (normal comp cov matrix) (def: k+3)}
     \item{V}{ k x k location matrix of IW prior on Sigma (def: nuI)}
@@ -63,7 +63,7 @@ rnmixGibbs(Data, Prior, Mcmc)
 }
 
 \references{ For further discussion, see \emph{Bayesian Statistics and Marketing}
-  by Allenby, McCulloch, and Rossi, Chapter 5. \cr
+  by Rossi, Allenby and McCulloch, Chapter 3. \cr
   \url{http://gsbwww.uchicago.edu/fac/peter.rossi/research/bsm.html}
 }
 
@@ -71,7 +71,9 @@ rnmixGibbs(Data, Prior, Mcmc)
   \email{Peter.Rossi at ChicagoGsb.edu}.
 }
 
-\seealso{ \code{\link{rmixture}}, \code{\link{rmixGibbs}} ,\code{\link{eMixMargDen}}, \code{\link{momMix}}}
+\seealso{ \code{\link{rmixture}}, \code{\link{rmixGibbs}} ,\code{\link{eMixMargDen}}, \code{\link{momMix}},
+ \code{\link{mixDen}}, \code{\link{mixDenBi}}}
+
 \examples{
 ##
 if(nchar(Sys.getenv("LONG_TEST")) != 0) {R=2000} else {R=10}
@@ -90,7 +92,7 @@ dm = rmixture(nobs,pvec,comps)
 
 Data=list(y=dm$x)
 ncomp=9
-Prior=list(ncomp=ncomp,a=c(rep(1,ncomp)))
+Prior=list(ncomp=ncomp)
 Mcmc=list(R=R,keep=1)
 out=rnmixGibbs(Data=Data,Prior=Prior,Mcmc=Mcmc)
 
@@ -110,5 +112,48 @@ rownames(mat)=c("true","post expect")
 cat(" corr and posterior expectation of corr",fill=TRUE)
 print(t(mat))
 
+if(0){
+##
+## plotting examples
+##
+## check true and estimated marginal densities
+grid=NULL
+for (i in 1:dim){
+  rgi=range(dm$x[,i])
+  gr=seq(from=rgi[1],to=rgi[2],length.out=50)
+  grid=cbind(grid,gr)
+}
+
+tmden=mixDen(grid,pvec,comps)
+pmden=eMixMargDen(grid,out$probdraw[begin:end,],out$compdraw[begin:end])
+
+## plot the marginal on third variable
+plot(range(grid[,3]),c(0,1.1*max(tmden[,3],pmden[,3])),type="n",xlab="",ylab="density")
+lines(grid[,3],tmden[,3],col="blue",lwd=2)
+lines(grid[,3],pmden[,3],col="red",lwd=2)
+
+## compute implied bivariate marginal distributions
+i=1
+j=2
+rxi=range(dm$x[,1])
+rxj=range(dm$x[,2])
+xi=seq(from=rxi[1],to=rxi[2],length.out=50)
+xj=seq(from=rxj[1],to=rxj[2],length.out=50)
+den=matrix(0,ncol=length(xi),nrow=length(xj))
+for(ind in as.integer(seq(from=begin,to=end,length.out=100))){
+  den=den+mixDenBi(i,j,xi,xj,out$probdraw[ind,],out$compdraw[[ind]])
+}
+tden=matrix(0,ncol=length(xi),nrow=length(xj))
+tden=mixDenBi(i,j,xi,xj,pvec,comps)
+
+par(mfrow=c(2,1))
+image(xi,xj,tden,col=terrain.colors(100),xlab="",ylab="")
+contour(xi,xj,den,add=TRUE,drawlabels=FALSE)
+title("True Bivariate Marginal")
+image(xi,xj,den,col=terrain.colors(100),xlab="",ylab="")
+contour(xi,xj,den,add=TRUE,drawlabels=FALSE)
+title("Posterior Mean of Bivariate Marginal")
+}
+
 }
 \keyword{ multivariate }
diff --git a/man/rscaleUsage.Rd b/man/rscaleUsage.Rd
index 8abf18f..4106a4c 100755
--- a/man/rscaleUsage.Rd
+++ b/man/rscaleUsage.Rd
@@ -57,7 +57,7 @@ rscaleUsage(Data,Prior, Mcmc)
 }
 
 \references{ For further discussion, see \emph{Bayesian Statistics and Marketing}
-  by Allenby, McCulloch, and Rossi, Case Study on Scale Usage Heterogeneity. \cr
+  by Rossi, Allenby and McCulloch, Case Study on Scale Usage Heterogeneity. \cr
   \url{http://gsbwww.uchicago.edu/fac/peter.rossi/research/bsm.html}
 }
 
@@ -67,12 +67,11 @@ rscaleUsage(Data,Prior, Mcmc)
 
 \examples{
 ##
-if(nchar(Sys.getenv("LONG_TEST")) != 0) 
+if(nchar(Sys.getenv("LONG_TEST")) != 0) {R=1000} else {R=5} 
 {
 data(customerSat)
 surveydat = list(k=10,x=as.matrix(customerSat))
 
-R=1000
 mcmc = list(R=R)
 set.seed(66)
 out=rscaleUsage(Data=surveydat,Mcmc=mcmc)
diff --git a/man/rsurGibbs.Rd b/man/rsurGibbs.Rd
new file mode 100755
index 0000000..cd521f3
--- /dev/null
+++ b/man/rsurGibbs.Rd
@@ -0,0 +1,98 @@
+\name{rsurGibbs}
+\alias{rsurGibbs}
+\concept{bayes}
+\concept{Gibbs Sampler}
+\concept{regression}
+\concept{SUR model}
+\concept{Seemingly Unrelated Regression}
+\concept{MCMC}
+
+\title{ Gibbs Sampler for Seemingly Unrelated Regressions (SUR) }
+\description{
+ \code{rsurGibbs} implements a Gibbs Sampler to draw from the posterior of the Seemingly Unrelated
+  Regression (SUR) Model of Zellner
+}
+\usage{
+rsurGibbs(Data, Prior, Mcmc)
+}
+\arguments{
+  \item{Data}{ list(regdata)}
+  \item{Prior}{ list(betabar,A, nu, V) }
+  \item{Mcmc}{ list(R,keep)}
+}
+\details{
+  Model: \eqn{y_i = X_ibeta_i + e_i}.  i=1,\ldots,m. m regressions. \cr
+  (e(1,k), \ldots, e(m,k)) \eqn{\sim}{~} \eqn{N(0,Sigma)}. k=1, \ldots, nobs. 
+
+  We can also write as the stacked model: \cr
+  \eqn{y = Xbeta + e} where y is a nobs*m long vector and k=length(beta)=sum(length(betai)).
+
+  Note: we must have the same number of observations in each equation but we can have different numbers
+  of X variables 
+
+  Priors: \eqn{beta} \eqn{\sim}{~} \eqn{N(betabar,A^{-1})}.  \eqn{Sigma} \eqn{\sim}{~} \eqn{IW(nu,V)}.
+
+  List arguments contain  
+  \itemize{
+    \item{\code{regdata}}{list of lists, regdata[[i]]=list(y=yi,X=Xi)}
+    \item{\code{betabar}}{k x 1 prior mean (def: 0)}
+    \item{\code{A}}{k x k prior precision matrix (def: .01I)} 
+    \item{\code{nu}}{ d.f. parm for Inverted Wishart prior (def: m+3)}
+    \item{\code{V}}{ scale parm for Inverted Wishart prior (def: nu*I)}
+    \item{\code{R}}{ number of MCMC draws }
+    \item{\code{keep}}{ thinning parameter - keep every keepth draw }
+  }
+}
+\value{
+  list of MCMC draws
+  \item{betadraw }{ R x k array of betadraws }
+  \item{Sigmadraw }{ R x (m*m) array of Sigma draws}
+}
+\references{ For further discussion, see \emph{Bayesian Statistics and Marketing}
+  by Rossi, Allenby and McCulloch, Chapter 3. \cr
+  \url{http://gsbwww.uchicago.edu/fac/peter.rossi/research/bsm.html}
+}
+
+\author{ Peter Rossi, Graduate School of Business, University of Chicago,
+  \email{Peter.Rossi at ChicagoGsb.edu}.
+}
+
+\seealso{ \code{\link{rmultireg}} }
+
+\examples{
+if(nchar(Sys.getenv("LONG_TEST")) != 0) {R=1000} else {R=10}
+##
+## simulate data from SUR
+set.seed(66)
+beta1=c(1,2)
+beta2=c(1,-1,-2)
+nobs=100
+nreg=2
+iota=c(rep(1,nobs))
+X1=cbind(iota,runif(nobs))
+X2=cbind(iota,runif(nobs),runif(nobs))
+Sigma=matrix(c(.5,.2,.2,.5),ncol=2)
+U=chol(Sigma)
+E=matrix(rnorm(2*nobs),ncol=2)%*%U
+y1=X1\%*\%beta1+E[,1]
+y2=X2\%*\%beta2+E[,2]
+##
+## run Gibbs Sampler
+regdata=NULL
+regdata[[1]]=list(y=y1,X=X1)
+regdata[[2]]=list(y=y2,X=X2)
+Mcmc=list(R=R)
+out=rsurGibbs(Data=list(regdata=regdata),Mcmc=Mcmc)
+##
+## summarize GS output
+if(R < 100) {begin=1} else {begin=100}
+end=Mcmc$R
+cat(" betadraws ",fill=TRUE)
+mat=apply(out$betadraw[begin:end,],2,quantile,probs=c(.01,.05,.5,.95,.99))
+mat=rbind(c(beta1,beta2),mat); rownames(mat)[1]="beta"; print(mat)
+cat(" Sigmadraws ",fill=TRUE)
+mat=apply(out$Sigmadraw[begin:end,],2,quantile,probs=c(.01,.05,.5,.95,.99))
+mat=rbind(as.vector(Sigma),mat); rownames(mat)[1]="sigma"; print(mat)
+}
+
+\keyword{ regression}
diff --git a/man/rtrun.Rd b/man/rtrun.Rd
index 4ac24ad..8918219 100755
--- a/man/rtrun.Rd
+++ b/man/rtrun.Rd
@@ -24,7 +24,7 @@ rtrun(mu, sigma, a, b)
   draw (possibly a vector)
 }
 \references{ For further discussion, see \emph{Bayesian Statistics and Marketing}
-  by Allenby, McCulloch, and Rossi, Chapter 2. \cr
+  by Rossi, Allenby and McCulloch, Chapter 2. \cr
   \url{http://gsbwww.uchicago.edu/fac/peter.rossi/research/bsm.html}
 }
 
diff --git a/man/runireg.Rd b/man/runireg.Rd
index a095458..b3168ff 100755
--- a/man/runireg.Rd
+++ b/man/runireg.Rd
@@ -2,36 +2,43 @@
 \alias{runireg}
 \concept{bayes}
 \concept{regression}
-\concept{simulation}
 
-\title{ Draw from Posterior for Univariate Regression }
+\title{ IID Sampler for Univariate Regression }
 \description{
- \code{runireg} draws from posterior for univariate regression with a natural conjugate prior.
+ \code{runireg} implements an iid sampler to draw from the posterior of a univariate regression with a conjugate prior.
 }
 \usage{
-runireg(y, X, betabar, A, nu, ssq)
+runireg(Data, Prior, Mcmc)
 }
 \arguments{
-  \item{y}{ n x 1 dep var  }
-  \item{X}{ n x k Design matrix }
-  \item{betabar}{ prior mean }
-  \item{A}{ k x k pds prior precision matrix }
-  \item{nu}{ d.f. parameter for sigma-sq prior }
-  \item{ssq}{ scale parameter for sigma-sq prior }
+  \item{Data}{ list(y,X)}
+  \item{Prior}{ list(betabar,A, nu, ssq) }
+  \item{Mcmc}{ list(R,keep)}
 }
 \details{
   Model: \eqn{y = Xbeta + e}.  \eqn{e} \eqn{\sim}{~} \eqn{N(0,sigmasq)}. \cr
 
-  Priors: beta given sigmasq \eqn{\sim}{~} \eqn{N(betabar,sigmasq*A^{-1})}.\cr
-  \eqn{sigmasq} \eqn{\sim}{~} eqn{(nu*ssq)}/\eqn{\chi^2_{nu}}.
+  Priors: \eqn{beta} \eqn{\sim}{~} \eqn{N(betabar,sigmasq*A^{-1})}. 
+ \eqn{sigmasq} \eqn{\sim}{~} \eqn{(nu*ssq)/chisq_{nu}}.
+  List arguments contain  
+  \itemize{
+    \item{\code{X}}{n x k Design Matrix}
+    \item{\code{y}}{n x 1 vector of observations} 
+    \item{\code{betabar}}{k x 1 prior mean (def: 0)}
+    \item{\code{A}}{k x k prior precision matrix (def: .01I)} 
+    \item{\code{nu}}{ d.f. parm for Inverted Chi-square prior (def: 3)}
+    \item{\code{ssq}}{ scale parm for Inverted Chi-square prior (def: var(y))}
+    \item{\code{R}}{ number of draws }
+    \item{\code{keep}}{ thinning parameter - keep every keepth draw }
+  }
 }
 \value{
-  a list with one draw from posterior
-  \item{beta }{beta draw}
-  \item{sigmasq }{sigmasq draw}
+  list of iid draws
+  \item{betadraw }{ R x k array of betadraws }
+  \item{sigmasqdraw }{ R vector of sigma-sq draws}
 }
 \references{ For further discussion, see \emph{Bayesian Statistics and Marketing}
-  by Allenby, McCulloch, and Rossi, Chapter 2. \cr 
+  by Rossi, Allenby and McCulloch, Chapter 2. \cr
   \url{http://gsbwww.uchicago.edu/fac/peter.rossi/research/bsm.html}
 }
 
@@ -39,33 +46,20 @@ runireg(y, X, betabar, A, nu, ssq)
   \email{Peter.Rossi at ChicagoGsb.edu}.
 }
 
-\section{Warning}{
-  This routine is a utility routine that does \strong{not} check the
-  input arguments for proper dimensions and type.
-}
-
 \seealso{ \code{\link{runiregGibbs}} }
 \examples{
-if(nchar(Sys.getenv("LONG_TEST")) != 0) {R=1000} else {R=10}
+if(nchar(Sys.getenv("LONG_TEST")) != 0) {R=2000} else {R=10}
 set.seed(66)
-n=100
+n=200
 X=cbind(rep(1,n),runif(n)); beta=c(1,2); sigsq=.25
 y=X\%*\%beta+rnorm(n,sd=sqrt(sigsq))
 
-A=diag(c(.05,.05)); betabar=c(0,0)
-nu=3; ssq=1.0
-
-betadraw=matrix(double(R*2),ncol=2)
-sigsqdraw=double(R)
-for (rep in 1:R) 
-   {out=runireg(y,X,betabar,A,nu,ssq);betadraw[rep,]=out$beta
-    sigsqdraw[rep]=out$sigmasq}
-
-cat(" Betadraws ",fill=TRUE)
-mat=apply(betadraw,2,quantile,probs=c(.01,.05,.5,.95,.99))
+out=runireg(Data=list(y=y,X=X),Mcmc=list(R=R))
+cat(" betadraws ",fill=TRUE)
+mat=apply(out$betadraw,2,quantile,probs=c(.01,.05,.5,.95,.99))
 mat=rbind(beta,mat); rownames(mat)[1]="beta"; print(mat)
 cat(" Sigma-sq draws",fill=TRUE)
 cat(" sigma-sq= ",sigsq,fill=TRUE)
-print(quantile(sigsqdraw,probs=c(.01,.05,.5,.95,.99)))
+print(quantile(out$sigmasqdraw,probs=c(.01,.05,.5,.95,.99)))
 }
 \keyword{ regression }
diff --git a/man/runiregGibbs.Rd b/man/runiregGibbs.Rd
index bbd641e..2f0eb4a 100755
--- a/man/runiregGibbs.Rd
+++ b/man/runiregGibbs.Rd
@@ -7,7 +7,7 @@
 
 \title{ Gibbs Sampler for Univariate Regression }
 \description{
- \code{runiregGibbs} implements a Gibbs Sampler to draw from posterior for univariate regression with a conditionally conjugate prior.
+ \code{runiregGibbs} implements a Gibbs Sampler to draw from posterior of a univariate regression with a conditionally conjugate prior.
 }
 \usage{
 runiregGibbs(Data, Prior, Mcmc)
@@ -23,12 +23,12 @@ runiregGibbs(Data, Prior, Mcmc)
   Priors: \eqn{beta} \eqn{\sim}{~} \eqn{N(betabar,A^{-1})}.  \eqn{sigmasq} \eqn{\sim}{~} \eqn{(nu*ssq)/chisq_{nu}}.
   List arguments contain  
   \itemize{
-    \item{\code{X}}{Design Matrix}
-    \item{\code{y}}{n x 1 vector of observations, (0 or 1)} 
+    \item{\code{X}}{n x k Design Matrix}
+    \item{\code{y}}{n x 1 vector of observations} 
     \item{\code{betabar}}{k x 1 prior mean (def: 0)}
     \item{\code{A}}{k x k prior precision matrix (def: .01I)} 
-    \item{\code{nu}}{ d.f. parm for Inverted Chi-square prior}
-    \item{\code{ssq}}{ scale parm for Inverted Chi-square prior}
+    \item{\code{nu}}{ d.f. parm for Inverted Chi-square prior (def: 3)}
+    \item{\code{ssq}}{ scale parm for Inverted Chi-square prior (def:var(y))}
     \item{\code{R}}{ number of MCMC draws }
     \item{\code{keep}}{ thinning parameter - keep every keepth draw }
   }
@@ -39,7 +39,7 @@ runiregGibbs(Data, Prior, Mcmc)
   \item{sigmasqdraw }{ R vector of sigma-sq draws}
 }
 \references{ For further discussion, see \emph{Bayesian Statistics and Marketing}
-  by Allenby, McCulloch, and Rossi, Chapter 3. \cr
+  by Rossi, Allenby and McCulloch, Chapter 3. \cr
   \url{http://gsbwww.uchicago.edu/fac/peter.rossi/research/bsm.html}
 }
 
@@ -49,7 +49,7 @@ runiregGibbs(Data, Prior, Mcmc)
 
 \seealso{ \code{\link{runireg}} }
 \examples{
-if(nchar(Sys.getenv("LONG_TEST")) != 0) {R=2000} else {R=10}
+if(nchar(Sys.getenv("LONG_TEST")) != 0) {R=1000} else {R=10}
 set.seed(66)
 n=100
 X=cbind(rep(1,n),runif(n)); beta=c(1,2); sigsq=.25
@@ -60,7 +60,7 @@ nu=3; ssq=1.0
 
 Data=list(y=y,X=X); Mcmc=list(R=R,keep=1) ; Prior=list(A=A,betabar=betabar,nu=nu,ssq=ssq)
 out=runiregGibbs(Data=Data,Prior=Prior,Mcmc=Mcmc)
-cat(" Betadraws ",fill=TRUE)
+cat(" betadraws ",fill=TRUE)
 mat=apply(out$betadraw,2,quantile,probs=c(.01,.05,.5,.95,.99))
 mat=rbind(beta,mat); rownames(mat)[1]="beta"; print(mat)
 cat(" Sigma-sq draws",fill=TRUE)
diff --git a/man/rwishart.Rd b/man/rwishart.Rd
index f87b7d0..cb8cfd5 100755
--- a/man/rwishart.Rd
+++ b/man/rwishart.Rd
@@ -29,7 +29,7 @@ rwishart(nu, V)
   \item{CI }{ inv(C), \eqn{W^{-1}} = CICI'}
 }
 \references{ For further discussion, see \emph{Bayesian Statistics and Marketing}
-  by Allenby, McCulloch, and Rossi, Chapter 2. \cr
+  by Rossi, Allenby and McCulloch, Chapter 2. \cr
   \url{http://gsbwww.uchicago.edu/fac/peter.rossi/research/bsm.html}
 }
 
diff --git a/man/simmnl.Rd b/man/simmnl.Rd
index bc6e75e..a3d4ac5 100755
--- a/man/simmnl.Rd
+++ b/man/simmnl.Rd
@@ -25,7 +25,7 @@ simmnl(p, n, beta)
   \item{prob}{n x j array of choice probabilities }
 }
 \references{ For further discussion, see \emph{Bayesian Statistics and Marketing}
-  by Allenby, McCulloch, and Rossi. \cr
+  by Rossi, Allenby and McCulloch. \cr
   \url{http://gsbwww.uchicago.edu/fac/peter.rossi/research/bsm.html}
 }
 
diff --git a/man/simmnlwX.Rd b/man/simmnlwX.Rd
index be71413..06e7c31 100755
--- a/man/simmnlwX.Rd
+++ b/man/simmnlwX.Rd
@@ -20,7 +20,7 @@ simmnlwX(n, X, beta)
   \item{prob}{ n x p array of choice probs }
 }
 \references{ For further discussion, see \emph{Bayesian Statistics and Marketing}
-  by Allenby, McCulloch, and Rossi. \cr
+  by Rossi, Allenby and McCulloch. \cr
   \url{http://gsbwww.uchicago.edu/fac/peter.rossi/research/bsm.html}
 }
 
diff --git a/man/simmnp.Rd b/man/simmnp.Rd
index c7fb500..916e1eb 100755
--- a/man/simmnp.Rd
+++ b/man/simmnp.Rd
@@ -25,7 +25,7 @@ simmnp(X, p, n, beta, sigma)
   \item{sigma}{ covariance matrix }
 }
 \references{ For further discussion, see \emph{Bayesian Statistics and Marketing}
-  by Allenby, McCulloch, and Rossi, Chapter 4. \cr
+  by Rossi, Allenby and McCulloch, Chapter 4. \cr
   \url{http://gsbwww.uchicago.edu/fac/peter.rossi/research/bsm.html}
 }
 
diff --git a/man/simmvp.Rd b/man/simmvp.Rd
index a3d19ed..536e287 100755
--- a/man/simmvp.Rd
+++ b/man/simmvp.Rd
@@ -24,7 +24,7 @@ simmvp(X, p, n, beta, sigma)
   \item{sigma}{ covariance matrix }
 }
 \references{ For further discussion, see \emph{Bayesian Statistics and Marketing}
-  by Allenby, McCulloch, and Rossi. \cr
+  by Rossi, Allenby and McCulloch, Chapter 4. \cr
   \url{http://gsbwww.uchicago.edu/fac/peter.rossi/research/bsm.html}
 }
 
diff --git a/man/simnhlogit.Rd b/man/simnhlogit.Rd
index d0e13eb..3a602db 100755
--- a/man/simnhlogit.Rd
+++ b/man/simnhlogit.Rd
@@ -27,7 +27,7 @@ simnhlogit(theta, lnprices, Xexpend)
 }
 
 \references{ For further discussion, see \emph{Bayesian Statistics and Marketing}
-  by Allenby, McCulloch, and Rossi. \cr
+  by Rossi, Allenby and McCulloch, Chapter 4. \cr
   \url{http://gsbwww.uchicago.edu/fac/peter.rossi/research/bsm.html}
 }
 

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-science/packages/r-cran-bayesm.git



More information about the debian-science-commits mailing list