[r-cran-bayesm] 07/44: Import Upstream version 1.1-0

Andreas Tille tille at debian.org
Thu Sep 7 11:16:20 UTC 2017


This is an automated email from the git hooks/post-receive script.

tille pushed a commit to branch master
in repository r-cran-bayesm.

commit b99bf2db0a576d00a61c1e1275ef1d1658279242
Author: Andreas Tille <tille at debian.org>
Date:   Thu Sep 7 13:08:17 2017 +0200

    Import Upstream version 1.1-0
---
 DESCRIPTION                |   8 +-
 NAMESPACE                  |   2 +-
 R/rhierMnlRwMixture.R      |   6 +-
 R/rhierNegbinRw.R          | 312 +++++++++++++++++++++++++++++++++++++++++++++
 R/rnegbinRw.R              | 210 ++++++++++++++++++++++++++++++
 data/detailing.rda         | Bin 0 -> 687283 bytes
 inst/doc/bayesm-manual.pdf | Bin 369507 -> 390357 bytes
 man/detailing.Rd           | 113 ++++++++++++++++
 man/rhierMnlRwMixture.Rd   |   5 +-
 man/rhierNegbinRw.Rd       | 156 +++++++++++++++++++++++
 man/rmnlIndepMetrop.Rd     |  10 +-
 man/rnegbinRw.Rd           | 116 +++++++++++++++++
 12 files changed, 929 insertions(+), 9 deletions(-)

diff --git a/DESCRIPTION b/DESCRIPTION
index f232098..23ca4a5 100755
--- a/DESCRIPTION
+++ b/DESCRIPTION
@@ -1,6 +1,6 @@
 Package: bayesm
-Version: 1.0-2
-Date: 2005-05-25
+Version: 1.1-0
+Date: 2005-06-10
 Title:Bayesian Inference for Marketing/Micro-econometrics 
 Author: Peter Rossi <peter.rossi at ChicagoGsb.edu>, 
         Rob McCulloch <robert.mcculloch at ChicagoGsb.edu>.
@@ -12,10 +12,12 @@ Description: bayesm covers many important models used
   Bayes Regression (univariate or multivariate dep var),
   Multinomial Logit (MNL) and Multinomial Probit (MNP),
   Multivariate Probit,
+  Negative Binomial Regression,
   Multivariate Mixtures of Normals,
   Hierarchical Linear Models with normal prior and covariates,
   Hierarchical Multinomial Logits with mixture of normals prior
      and covariates,
+  Hierarchical Negative Binomial Regression Models,
   Bayesian analysis of choice-based conjoint data,
   Bayesian treatment of linear instrumental variables models,
   and
@@ -25,4 +27,4 @@ Description: bayesm covers many important models used
   Marketing by Allenby, McCulloch and Rossi. 
 License: GPL (version 2 or later)
 URL: http://gsbwww.uchicago.edu/fac/peter.rossi/research/bsm.html
-Packaged: Wed May 25 09:59:25 2005; per
+Packaged: Fri Jun 10 13:10:33 2005; per
diff --git a/NAMESPACE b/NAMESPACE
index 7e2f779..5ebdf40 100755
--- a/NAMESPACE
+++ b/NAMESPACE
@@ -6,7 +6,7 @@ rmixture,rmultireg,rmultiregfp,rwishart,rmvst,rtrun,rbprobitGibbs,runireg,
 runiregGibbs,simmnl,simmnp,simmvp,simnhlogit,rmnpGibbs,rmixGibbs,rnmixGibbs,
 rmvpGibbs,rhierLinearModel,rhierMnlRwMixture,simmnlwX,rivGibbs,
 rmnlIndepMetrop,rscaleUsage,ghkvec,condMom,logMargDenNR,init.rmultiregfp,
-rhierBinLogit)
+rhierBinLogit,rnegbinRw,rhierNegbinRw)
 
 
 
diff --git a/R/rhierMnlRwMixture.R b/R/rhierMnlRwMixture.R
index 8b90465..f4f4995 100755
--- a/R/rhierMnlRwMixture.R
+++ b/R/rhierMnlRwMixture.R
@@ -245,9 +245,9 @@ cov%*%(xty+Ad%*%deltabar) + t(chol(cov))%*%rnorm(length(deltabar))
 }
 #-------------------------------------------------------------------------------------------------------
 #
-# intialize priors and compute quantities for Metropolis
+# initialize and compute quantities for Metropolis
 #
-cat("initializing priors and Metropolis candidate densities ...",fill=TRUE)
+cat("initializing Metropolis candidate densities for ",nlgt," units ...",fill=TRUE)
 fsh()
 #
 #  now go thru and computed fraction likelihood estimates and hessians
@@ -275,6 +275,8 @@ for (i in 1:nlgt)
      { lgtdata[[i]]=c(lgtdata[[i]],list(converge=0,betafmle=c(rep(0,nvar)),
         hess=diag(nvar))) }
    oldbetas[i,]=lgtdata[[i]]$betafmle
+   if(i%%50 ==0) cat("  completed unit #",i,fill=TRUE)
+   fsh()
 }
 #
 #  initialize values
diff --git a/R/rhierNegbinRw.R b/R/rhierNegbinRw.R
new file mode 100755
index 0000000..6e44821
--- /dev/null
+++ b/R/rhierNegbinRw.R
@@ -0,0 +1,312 @@
+rhierNegbinRw = 
+function(Data, Prior, Mcmc) {
+
+#   Revision History
+#	  Sridhar Narayanan - 05/2005
+#         P. Rossi 6/05
+#
+#   Model
+#       (y_i|lambda_i,alpha) ~ Negative Binomial(Mean = lambda_i, Overdispersion par = alpha)
+#
+#       ln(lambda_i) =  X_i * beta_i
+#
+#       beta_i = Delta'*z_i + nu_i
+#               nu_i~N(0,Vbeta)
+#
+#   Priors
+#       vec(Delta|Vbeta) ~ N(vec(Deltabar), Vbeta (x) (Adelta^-1))
+#       Vbeta ~ Inv Wishart(nu, V)
+#       alpha ~ Gamma(a,b) where mean = a/b and variance = a/(b^2)
+#
+#   Arguments
+#       Data = list of regdata,Z 
+#           regdata is a list of lists each list with members y, X
+#              e.g. regdata[[i]]=list(y=y,X=X)
+#              X has nvar columns including a first column of ones
+#              Z is nreg=length(regdata) x nz with a first column of ones
+#
+#       Prior - list containing the prior parameters
+#           Deltabar, Adelta - mean of Delta prior, inverse of variance covariance of Delta prior
+#           nu, V - parameters of Vbeta prior
+#           a, b - parameters of alpha prior
+#
+#       Mcmc - list containing
+#           R is number of draws
+#           keep is thinning parameter (def = 1)
+#           s_beta - scaling parameter for beta RW (def = 2.93/sqrt(nvar))
+#           s_alpha - scaling parameter for alpha RW (def = 2.93)
+#           c - fractional weighting parameter (def = 2)
+#           Vbeta0, Delta0 - initial guesses for parameters, if not supplied default values are used
+#
+
+
+#
+# Definitions of functions used within rhierNegbinRw
+#
+
+llnegbin = 
+function(par,X,y, nvar, nreg) {
+# Computes the log-likelihood
+    beta = par[1:nvar]
+    alpha = exp(par[nvar+1])
+    ll = sum(log(dnbinom(y,size=alpha,mu=exp(X%*%beta))))
+}
+
+llnegbinFract = 
+function(par,X,y,Xpooled, ypooled, power, nvar, nreg) {
+# Computes the fractional log-likelihood at the unit level
+#    = l_i * l_bar^power
+    par = c(par,mle$par[nvar+1])
+    llnegbin(par,X,y,nvar,nreg) + power*llnegbin(par,Xpooled,ypooled, nvar, nreg) 
+}
+
+lpostbetai = 
+function(beta, alpha, X, y, Delta, Z, Vbetainv) {
+# Computes the unnormalized log posterior for beta at the unit level
+    lambda = exp(X %*% as.vector(beta))
+    p = alpha/(alpha + lambda)
+    residual = as.vector(beta - as.vector(Z%*%Delta))
+    sum(alpha * log(p) + y * log(1-p)) - 0.5*( t(residual)%*%Vbetainv%*%residual)
+}
+
+
+lpostalpha = 
+function(alpha, beta, regdata, ypooled, a, b, nreg) {
+# Computes the unnormalized log posterior for alpha
+    Xbeta=NULL
+    for (i in 1:nreg) {Xbeta = rbind(Xbeta,regdata[[i]]$X%*%beta[i,]) }
+    sum(log(dnbinom(ypooled,size=alpha,mu=exp(Xbeta)))) + (a-1)*log(alpha) - b* alpha
+}
+
+
+#
+# Error Checking
+#
+pandterm=function(message) {stop(message,call.=FALSE)}
+if(missing(Data)) {pandterm("Requires Data argument -- list of regdata and (possibly) Z")}
+
+if(is.null(Data$regdata)) {
+    pandterm("Requires Data element regdata -- list of data for each unit : y and X")
+}
+regdata=Data$regdata
+nreg = length(regdata)
+
+if (is.null(Data$Z)) {
+    cat("Z not specified - using a column of ones instead", fill = TRUE)
+    Z = matrix(rep(1,nreg),ncol=1)
+}
+else {
+    if (nrow(Data$Z) != nreg) {
+        pandterm(paste("Nrow(Z) ", nrow(Z), "ne number units ",nreg))
+    }
+    else {
+        Z = Data$Z
+    }
+}
+nz = ncol(Z)
+
+dimfun = function(l) {
+    c(length(l$y),dim(l$X))
+}
+dims=sapply(regdata,dimfun)
+dims = t(dims)
+nvar = quantile(dims[,3],prob=0.5)
+for (i in 1:nreg) {
+        if (dims[i, 1] != dims[i, 2] || dims[i, 3] != nvar) {
+            pandterm(paste("Bad Data dimensions for unit ", i, 
+                " dims(y,X) =", dims[i, ]))
+        }
+}
+
+ypooled = NULL
+Xpooled = NULL
+for (i in 1:nreg) {
+    ypooled = c(ypooled,regdata[[i]]$y)
+    Xpooled = rbind(Xpooled,regdata[[i]]$X)
+}
+
+nvar=ncol(Xpooled); nobs=length(ypooled)
+#
+# check for prior elements
+#
+if(missing(Prior)) {
+    Deltabar=matrix(rep(0,nvar*nz),nrow=nz) ; Adelta=0.01*diag(nz) ; nu=nvar+3; V=nu*diag(nvar); a=0.5; b=0.1;
+}
+else {
+    if(is.null(Prior$Deltabar)) {Deltabar=matrix(rep(0,nvar*nz),nrow=nz)} else {Deltabar=Prior$Deltabar}
+    if(is.null(Prior$Adelta)) {Adelta=0.01*diag(nz)} else {Adelta=Prior$Adelta}
+    if(is.null(Prior$nu)) {nu=nvar+3} else {nu=Prior$nu}
+    if(is.null(Prior$V)) {V=nu*diag(nvar)} else {V=Prior$V}
+    if(is.null(Prior$a)) {a=0.5} else {a=Prior$a}
+    if(is.null(Prior$b)) {b=0.1} else {b=Prior$b}
+}
+
+if(sum(dim(Deltabar) == c(nz,nvar)) != 2) pandterm("Deltabar is of incorrect dimension")
+if(sum(dim(Adelta)==c(nz,nz)) != 2) pandterm("Adelta is of incorrect dimension")
+if(nu < nvar) pandterm("invalid nu value")
+if(sum(dim(V)==c(nvar,nvar)) != 2) pandterm("V is of incorrect dimension")
+if((length(a) != 1) | (a <=0)) pandterm("a should be a positive number")
+if((length(b) != 1) | (b <=0)) pandterm("b should be a positive number")
+
+#
+# check for Mcmc 
+#
+if(missing(Mcmc)) pandterm("Requires Mcmc argument -- at least R")
+if(is.null(Mcmc$R)) {pandterm("Requires element R of Mcmc")} else {R=Mcmc$R}
+if(is.null(Mcmc$Vbeta0)) {Vbeta0=diag(nvar)} else {Vbeta0=Mcmc$Vbeta0}
+if(sum(dim(Vbeta0) == c(nvar,nvar)) !=2) pandterm("Vbeta0 is not of dimension nvar")
+if(is.null(Mcmc$Delta0)) {Delta0=matrix(rep(0,nz*nvar),nrow=nz)} else {Delta0=Mcmc$Delta0}
+if(sum(dim(Delta0) == c(nz,nvar)) !=2) pandterm("Delta0 is not of dimension nvar by nz")
+if(is.null(Mcmc$keep)) {keep=1} else {keep=Mcmc$keep}
+if(is.null(Mcmc$s_alpha)) {cat("Using default s_alpha = 2.93",fill=TRUE); s_alpha=2.93} 
+    else {s_alpha= Mcmc$s_alpha }
+if(is.null(Mcmc$s_beta)) {cat("Using default s_beta = 2.93/sqrt(nvar)",fill=TRUE); s_beta=2.93/sqrt(nvar)} 
+    else {s_beta=Mcmc$s_beta }
+if(is.null(Mcmc$c)) {cat("Using default c = 2"); c=2} 
+    else {c = Mcmc$c}
+
+#
+# print out problem
+#
+cat(" ",fill=TRUE)
+cat("Starting Random Walk Metropolis Sampler for Hierarchical Negative Binomial Regression",fill=TRUE)
+cat("  ",nobs," obs; ",nvar," covariates (including the intercept); ",fill=TRUE)
+cat("  ",nz," individual characteristics (including the intercept) ",fill=TRUE)
+cat(" ",fill=TRUE)
+cat("Prior Parameters:",fill=TRUE)
+cat("Deltabar",fill=TRUE)
+print(Deltabar)
+cat("Adelta",fill=TRUE)
+print(Adelta)
+cat("nu",fill=TRUE)
+print(nu)
+cat("V",fill=TRUE)
+print(V)
+cat("a",fill=TRUE)
+print(a)
+cat("b",fill=TRUE)
+print(b)
+cat(" ",fill=TRUE)
+cat("MCMC Parameters:",fill=TRUE)
+cat(R," reps; keeping every ",keep,"th draw",fill=TRUE)
+cat("s_alpha = ",s_alpha,fill=TRUE)
+cat("s_beta = ",s_beta,fill=TRUE)
+cat("Fractional Scaling c = ",c,fill=TRUE)
+cat(" ",fill=TRUE)
+
+par = rep(0,(nvar+1))
+cat("initializing Metropolis candidate densities for ",nreg,"units ...",fill=TRUE)
+fsh()
+mle = optim(par,llnegbin, X=Xpooled, y=ypooled, nvar=nvar, nreg=nreg, method="L-BFGS-B", upper=c(Inf,Inf,Inf,log(100000000)), hessian=TRUE, control=list(fnscale=-1))
+fsh()
+beta_mle=mle$par[1:nvar]
+alpha_mle = exp(mle$par[nvar+1])
+varcovinv = -mle$hessian
+Delta = Delta0
+Beta = t(matrix(rep(beta_mle,nreg),ncol=nreg))
+Vbetainv = solve(Vbeta0)
+Vbeta = Vbeta0
+alpha = alpha_mle
+alphacvar = s_alpha/varcovinv[nvar+1,nvar+1]
+alphacroot = sqrt(alphacvar)
+#cat("beta_mle = ",beta_mle,fill=TRUE)
+#cat("alpha_mle = ",alpha_mle, fill = TRUE)
+#fsh()
+
+hess_i=NULL
+# Find the individual candidate hessian
+for (i in 1:nreg) {
+    power = length(regdata[[i]]$y)/(c*nobs)
+    mle2 = optim(mle$par[1:nvar],llnegbinFract, X=regdata[[i]]$X, y=regdata[[i]]$y, Xpooled=Xpooled, ypooled=ypooled, power=power, nvar=nvar, nreg=nreg, method="BFGS", hessian=TRUE, control=list(fnscale=-1, trace=0))
+    if (mle2$convergence==0)
+        hess_i[[i]] = list(hess=-mle2$hessian)
+    else
+        hess_i[[i]] = list(hess=diag(rep(0,nvar)))
+   if(i%%50 ==0) cat("  completed unit #",i,fill=TRUE)	
+   fsh()
+}
+
+oldlpostbeta = rep(0,nreg)
+nacceptbeta = 0
+nacceptalpha = 0
+clpostbeta = rep(0,nreg)
+
+Betadraw = array(double((floor(R/keep)) * nreg * nvar), dim = c(nreg, 
+        nvar, floor(R/keep)))
+
+alphadraw = rep(0,floor(R/keep))
+llike = rep(0,floor(R/keep))
+Vbetadraw=matrix(double(floor(R/keep)*(nvar*nvar)),ncol=(nvar*nvar))
+Deltadraw=matrix(double(floor(R/keep)*(nvar*nz)),ncol=(nvar*nz))
+
+#
+#	start main iteration loop
+#
+itime=proc.time()[3]
+cat(" ",fill=TRUE)
+cat("MCMC Iteration (est time to end - min) ",fill=TRUE)
+fsh()
+
+for (r in 1:R) 
+{
+#   Draw betai
+    for (i in 1:nreg) {
+        betacvar = s_beta*solve(hess_i[[i]]$hess + Vbetainv)
+        betaroot = t(chol(betacvar))
+        betac = as.vector(Beta[i,]) + betaroot%*%rnorm(nvar)
+
+        oldlpostbeta[i] = lpostbetai(as.vector(Beta[i,]), alpha, regdata[[i]]$X, regdata[[i]]$y, Delta, Z[i,],Vbetainv)
+        clpostbeta[i] = lpostbetai(betac, alpha, regdata[[i]]$X, regdata[[i]]$y, Delta, Z[i,],Vbetainv)
+        
+        ldiff=clpostbeta[i]-oldlpostbeta[i]
+        acc=min(1,exp(ldiff))
+        if(acc < 1) {unif=runif(1)} else {unif=0}
+
+        if (unif <= acc) {
+            Beta[i,]=betac
+            nacceptbeta=nacceptbeta+1
+        }
+    }
+
+#   Draw alpha
+    logalphac = rnorm(1,mean=log(alpha), sd=alphacroot)
+    oldlpostalpha = lpostalpha(alpha, Beta, regdata, ypooled,  a, b, nreg)
+    clpostalpha = lpostalpha(exp(logalphac), Beta, regdata, ypooled, a, b, nreg)
+    ldiff=clpostalpha-oldlpostalpha
+    acc=min(1,exp(ldiff))
+    if(acc < 1) {unif=runif(1)} else {unif=0}
+    if (unif <= acc) {
+        alpha=exp(logalphac)
+        nacceptalpha=nacceptalpha+1
+    }
+
+#   Draw Vbeta and Delta using rmultireg (bayesm function)
+    temp = rmultireg(Beta,Z,Deltabar,Adelta,nu,V)
+    Vbeta = matrix(temp$Sigma,nrow=nvar)
+    Vbetainv = solve(Vbeta)
+    Delta = temp$B
+
+
+  if(r%%100 == 0)
+    {ctime=proc.time()[3]
+    timetoend=((ctime-itime)/r)*(R-r)
+    cat(" ",r," (",round(timetoend/60,1),")",fill=TRUE)
+    fsh()}
+
+  if(r%%keep == 0) {
+    mkeep=r/keep
+    Betadraw[, ,mkeep]=Beta
+    alphadraw[mkeep] = alpha
+    Vbetadraw[mkeep,] = as.vector(Vbeta)
+    Deltadraw[mkeep,] = as.vector(Delta)
+    ll=0.0
+    for (i in 1:nreg) {ll=ll+llnegbin(c(Beta[i,],log(alpha)),regdata[[i]]$X,regdata[[i]]$y,nvar)}
+    llike[mkeep]=ll
+  }
+}
+ctime = proc.time()[3]
+    
+cat('  Total Time Elapsed: ',round((ctime-itime)/60,2),'\n')
+return(list(llike=llike,Betadraw=Betadraw,alphadraw=alphadraw, Vbetadraw=Vbetadraw, Deltadraw=Deltadraw,
+     acceptrbeta=nacceptbeta/(R*nreg)*100,acceptralpha=nacceptalpha/R*100))
+}
diff --git a/R/rnegbinRw.R b/R/rnegbinRw.R
new file mode 100755
index 0000000..0b7d36d
--- /dev/null
+++ b/R/rnegbinRw.R
@@ -0,0 +1,210 @@
+rnegbinRw = 
+function(Data, Prior, Mcmc) {
+
+#   Revision History
+#	  Sridhar Narayanan - 05/2005
+#         P. Rossi 6/05
+#
+#   Model
+#       (y|lambda,alpha) ~ Negative Binomial(Mean = lambda, Overdispersion par = alpha)
+#
+#       ln(lambda) =  X * beta
+#               
+#   Priors
+#       beta ~ N(betabar, A^-1)
+#       alpha ~ Gamma(a,b) where mean = a/b and variance = a/(b^2)
+#
+#   Arguments
+#       Data = list of y, X
+#              e.g. Data=list(y=y,X=X)
+#              X has nvar columns including a first column of ones
+#
+#       Prior - list containing the prior parameters
+#           betabar, A - mean of beta prior, inverse of variance covariance of beta prior
+#           a, b - parameters of alpha prior
+#
+#       Mcmc - list containing
+#           R is number of draws
+#           keep is thinning parameter (def = 1)
+#           s_beta - scaling parameter for beta RW (def = 2.93/sqrt(nvar))
+#           s_alpha - scaling parameter for alpha RW (def = 2.93)
+#           beta0 - initial guesses for parameters, if not supplied default values are used
+#
+
+
+#
+# Definitions of functions used within rnegbinRw
+#
+
+llnegbin = 
+function(par,X,y, nvar) {
+# Computes the log-likelihood
+    beta = par[1:nvar]
+    alpha = exp(par[nvar+1])
+    ll = sum(log(dnbinom(y,size=alpha,mu=exp(X%*%beta))))
+}
+
+lpostbetai = 
+function(beta, alpha, X, y, betabar, A) {
+# Computes the unnormalized log posterior for beta
+    lambda = exp(X %*% beta)
+    p = alpha/(alpha + lambda)
+    residual = as.vector(beta - betabar)
+    sum(alpha * log(p) + y * log(1-p)) - 0.5*( t(residual)%*%A%*%residual)
+}
+
+
+lpostalpha = 
+function(alpha, beta, X,y, a, b) {
+# Computes the unnormalized log posterior for alpha
+    sum(log(dnbinom(y,size=alpha,mu=exp(X%*%beta)))) + (a-1)*log(alpha) - b* alpha
+}
+
+
+#
+# Error Checking
+#
+pandterm=function(message) {stop(message,call.=FALSE)}
+if(missing(Data)) {pandterm("Requires Data argument -- list of X and y")}
+if(is.null(Data$X)) {pandterm("Requires Data element X")} else {X=Data$X}
+if(is.null(Data$y)) {pandterm("Requires Data element y")} else {y=Data$y}
+nvar = ncol(X)
+
+if (length(y) != nrow(X)) {pandterm("Mismatch in the number of observations in X and y")}
+
+#
+# check for prior elements
+#
+if(missing(Prior)) {
+    betabar=rep(0,nvar); A=0.01*diag(nvar) ;  a=0.5; b=0.1;
+}
+else {
+    if(is.null(Prior$betabar)) {betabar=rep(0,nvar)} else {betabar=Prior$betabar}
+    if(is.null(Prior$A)) {A=0.01*diag(nvar)} else {A=Prior$A}
+    if(is.null(Prior$a)) {a=0.5} else {a=Prior$a}
+    if(is.null(Prior$b)) {b=0.1} else {b=Prior$b}
+}
+
+if(length(betabar) != nvar) pandterm("betabar is of incorrect dimension")
+if(sum(dim(A)==c(nvar,nvar)) != 2) pandterm("A is of incorrect dimension")
+if((length(a) != 1) | (a <=0)) pandterm("a should be a positive number")
+if((length(b) != 1) | (b <=0)) pandterm("b should be a positive number")
+
+#
+# check for Mcmc 
+#
+if(missing(Mcmc)) pandterm("Requires Mcmc argument -- at least R")
+if(is.null(Mcmc$R)) {pandterm("Requires element R of Mcmc")} else {R=Mcmc$R}
+if(is.null(Mcmc$beta0)) {beta0=rep(0,nvar)} else {beta0=Mcmc$beta0}
+if(length(beta0) !=nvar) pandterm("beta0 is not of dimension nvar")
+if(is.null(Mcmc$keep)) {keep=1} else {keep=Mcmc$keep}
+if(is.null(Mcmc$s_alpha)) {cat("Using default s_alpha = 2.93",fill=TRUE); s_alpha=2.93} 
+    else {s_alpha = Mcmc$s_alpha} 
+if(is.null(Mcmc$s_beta)) {cat("Using default s_beta = 2.93/sqrt(nvar)",fill=TRUE); s_beta=2.93/sqrt(nvar)} 
+    else {s_beta = Mcmc$s_beta}
+
+#
+# print out problem
+#
+cat(" ",fill=TRUE)
+cat("Starting Random Walk Metropolis Sampler for Negative Binomial Regression",fill=TRUE)
+cat("  ",length(y)," obs; ",nvar," covariates (including intercept); ",fill=TRUE)
+cat("Prior Parameters:",fill=TRUE)
+cat("betabar",fill=TRUE)
+print(betabar)
+cat("A",fill=TRUE)
+print(A)
+cat("a",fill=TRUE)
+print(a)
+cat("b",fill=TRUE)
+print(b)
+cat(" ",fill=TRUE)
+cat("MCMC Parms: ",fill=TRUE)
+cat("  ",R," reps; keeping every ",keep,"th draw",fill=TRUE)
+cat("s_alpha = ",s_alpha,fill=TRUE)
+cat("s_beta = ",s_beta,fill=TRUE)
+cat(" ",fill=TRUE)
+
+par = rep(0,(nvar+1))
+cat(" Initializing RW Increment Covariance Matrix...",fill=TRUE)
+fsh()
+mle = optim(par,llnegbin, X=X, y=y, nvar=nvar, method="L-BFGS-B", upper=c(Inf,Inf,Inf,log(100000000)), hessian=TRUE, control=list(fnscale=-1))
+fsh()
+beta_mle=mle$par[1:nvar]
+alpha_mle = exp(mle$par[nvar+1])
+varcovinv = -mle$hessian
+beta = beta0
+betacvar = s_beta*solve(varcovinv[1:nvar,1:nvar])
+betaroot = t(chol(betacvar))
+alpha = alpha_mle
+alphacvar = s_alpha/varcovinv[nvar+1,nvar+1]
+alphacroot = sqrt(alphacvar)
+cat("beta_mle = ",beta_mle,fill=TRUE)
+cat("alpha_mle = ",alpha_mle, fill = TRUE)
+fsh()
+
+oldlpostbeta = 0
+nacceptbeta = 0
+nacceptalpha = 0
+clpostbeta = 0
+
+alphadraw = rep(0,floor(R/keep))
+betadraw=matrix(double(floor(R/keep)*(nvar)),ncol=nvar)
+llike=rep(0,floor(R/keep))
+
+#
+#	start main iteration loop
+#
+itime=proc.time()[3]
+cat(" ",fill=TRUE)
+cat("MCMC Iteration (est time to end - min) ",fill=TRUE)
+fsh()
+
+for (r in 1:R) 
+{
+#   Draw beta
+    betac = beta + betaroot%*%rnorm(nvar)
+    oldlpostbeta = lpostbetai(beta, alpha, X, y, betabar,A)
+    clpostbeta = lpostbetai(betac, alpha, X, y, betabar,A)
+        
+    ldiff=clpostbeta-oldlpostbeta
+    acc=min(1,exp(ldiff))
+    if(acc < 1) {unif=runif(1)} else {unif=0}
+    if (unif <= acc) {
+        beta=betac
+        nacceptbeta=nacceptbeta+1
+    }
+ 
+
+#   Draw alpha
+    logalphac = rnorm(1,mean=log(alpha), sd=alphacroot)
+    oldlpostalpha = lpostalpha(alpha, beta, X, y,  a, b)
+    clpostalpha = lpostalpha(exp(logalphac), beta, X, y, a, b)
+    ldiff=clpostalpha-oldlpostalpha
+    acc=min(1,exp(ldiff))
+    if(acc < 1) {unif=runif(1)} else {unif=0}
+    if (unif <= acc) {
+        alpha=exp(logalphac)
+        nacceptalpha=nacceptalpha+1
+    }
+
+if(r%%100 == 0)
+    {ctime=proc.time()[3]
+    timetoend=((ctime-itime)/r)*(R-r)
+    cat(" ",r," (",round(timetoend/60,1),")",fill=TRUE)
+    fsh()}
+
+if(r%%keep == 0) {
+    mkeep=r/keep
+    betadraw[mkeep,]=beta
+    alphadraw[mkeep] = alpha
+    llike[mkeep]=llnegbin(c(beta,log(alpha)),X,y,nvar)
+    }
+}
+
+ctime = proc.time()[3]
+    
+cat('  Total Time Elapsed: ',round((ctime-itime)/60,2),'\n')
+return(list(llike=llike,betadraw=betadraw,alphadraw=alphadraw,
+     acceptrbeta=nacceptbeta/R*100,acceptralpha=nacceptalpha/R*100))
+}
diff --git a/data/detailing.rda b/data/detailing.rda
new file mode 100755
index 0000000..04bc634
Binary files /dev/null and b/data/detailing.rda differ
diff --git a/inst/doc/bayesm-manual.pdf b/inst/doc/bayesm-manual.pdf
index 061a84a..899bbed 100755
Binary files a/inst/doc/bayesm-manual.pdf and b/inst/doc/bayesm-manual.pdf differ
diff --git a/man/detailing.Rd b/man/detailing.Rd
new file mode 100755
index 0000000..9c0d19c
--- /dev/null
+++ b/man/detailing.Rd
@@ -0,0 +1,113 @@
+\name{detailing}
+\alias{detailing}
+\docType{data}
+\title{ Physician Detailing Data from Manchanda et al (2004)}
+\description{
+  Monthly data on detailing (sales calls) on 1000 physicians. 23 mos of data
+  for each Physician. Includes physician covariates. Dependent Variable is the
+  number of new prescriptions ordered by the physician for the drug detailed.
+}
+\usage{data(detailing)}
+\format{
+ This R object is a list of two data frames, list(counts,demo).
+ 
+ List of 2:
+
+ \$ counts:`data.frame':	23000 obs. of  4 variables:\cr
+  \ldots\$ id        : int [1:23000] 1 1 1 1 1 1 1 1 1 1 \cr
+  \ldots\$ scripts       : int [1:23000] 3 12 3 6 5 2 5 1 5 3 \cr
+  \ldots\$ detailing     : int [1:23000] 1 1 1 2 1 0 2 2 1 1 \cr
+  \ldots\$ lagged\_scripts: int [1:23000] 4 3 12 3 6 5 2 5 1 5 
+
+ \$ demo  :`data.frame':	1000 obs. of  4 variables:\cr
+  \ldots\$ id          : int [1:1000] 1 2 3 4 5 6 7 8 9 10 \cr
+  \ldots\$ generalphys : int [1:1000] 1 0 1 1 0 1 1 1 1 1 \cr
+  \ldots\$ specialist: int [1:1000] 0 1 0 0 1 0 0 0 0 0  \cr
+  \ldots\$ mean\_samples: num [1:1000] 0.722 0.491 0.339 3.196 0.348 
+}
+\details{
+  generalphys is a dummy for whether the doctor is a "general practitioner," specialist is a dummy
+  for whether the physician is a specialist in the therapeutic class for which the drug is
+  intended, mean\_samples is the mean number of free drug samples given to the doctor
+  over the sample.
+}
+\source{
+  Manchanda, P., P. K. Chintagunta and P. E. Rossi (2004), "Response Modeling with Non-Random
+  Marketing Mix Variables," \emph{Journal of Marketing Research} 41, 467-478.
+}
+\examples{
+data(detailing)
+cat(" table of Counts Dep Var", fill=TRUE)
+print(table(detailing$counts[,2]))
+cat(" means of Demographic Variables",fill=TRUE)
+mat=apply(as.matrix(detailing$demo[,2:4]),2,mean)
+print(mat)
+
+##
+## example of processing for use with rhierNegbinRw
+##
+if(nchar(Sys.getenv("LONG_TEST")) != 0)
+{
+
+data(detailing)
+counts = detailing$counts
+Z = detailing$demo
+
+# Construct the Z matrix
+Z[,1] = 1
+Z[,2]=Z[,2]-mean(Z[,2])
+Z[,3]=Z[,3]-mean(Z[,3])
+Z[,4]=Z[,4]-mean(Z[,4])
+Z=as.matrix(Z)
+id=levels(factor(counts$id))
+nreg=length(id)
+nobs = length(counts$id)
+
+regdata=NULL
+for (i in 1:nreg) {
+    X = counts[counts[,1] == id[i],c(3:4)]
+    X = cbind(rep(1,nrow(X)),X)
+    y = counts[counts[,1] == id[i],2]
+    X = as.matrix(X)
+    regdata[[i]]=list(X=X, y=y)
+}
+nvar=ncol(X)            # Number of X variables
+nz=ncol(Z)              # Number of Z variables
+rm(detailing,counts)              
+cat("Finished Reading data",fill=TRUE)
+fsh()
+
+Data = list(regdata=regdata, Z=Z)
+Deltabar = matrix(rep(0,nvar*nz),nrow=nz)
+Adelta = 0.01 * diag(nz)
+nu = nvar+3
+V = 0.01*diag(nvar)
+a = 0.5
+b = 0.1
+Prior = list(Deltabar=Deltabar, Adelta=Adelta, nu=nu, V=V, a=a, b=b)
+
+R = 10000
+keep =1
+s_beta=2.93/sqrt(nvar)
+s_alpha=2.93
+c=2
+Mcmc = list(R=R, keep = keep, s_beta=s_beta, s_alpha=s_alpha, c=c)
+out = rhierNegbinRw(Data, Prior, Mcmc)
+
+# Unit level mean beta parameters
+Mbeta = matrix(rep(0,nreg*nvar),nrow=nreg)
+ndraws = length(out$alphadraw)
+for (i in 1:nreg) { Mbeta[i,] = rowSums(out$Betadraw[i, , ])/ndraws }
+
+cat(" Deltadraws ",fill=TRUE)
+mat=apply(out$Deltadraw,2,quantile,probs=c(.01,.05,.5,.95,.99))
+print(mat)
+cat(" Vbetadraws ",fill=TRUE)
+mat=apply(out$Vbetadraw,2,quantile,probs=c(.01,.05,.5,.95,.99))
+print(mat)
+cat(" alphadraws ",fill=TRUE)
+mat=apply(matrix(out$alphadraw),2,quantile,probs=c(.01,.05,.5,.95,.99))
+print(mat)
+}
+}
+\keyword{datasets}
diff --git a/man/rhierMnlRwMixture.Rd b/man/rhierMnlRwMixture.Rd
index 80310e1..7a2c414 100755
--- a/man/rhierMnlRwMixture.Rd
+++ b/man/rhierMnlRwMixture.Rd
@@ -93,7 +93,8 @@ rhierMnlRwMixture(Data, Prior, Mcmc)
 \seealso{ \code{\link{rmnlIndepMetrop}} }
 \examples{
 ##
-if(nchar(Sys.getenv("LONG_TEST")) != 0) {R=10000} else {R=10}
+if(nchar(Sys.getenv("LONG_TEST")) != 0) 
+{
 
 set.seed(66)
 p=3                                # num of choice alterns
@@ -141,6 +142,7 @@ deltabar=rep(0,ncoef*nz)
 Amu=matrix(.01,ncol=1)
 a=rep(5,ncoef)
 
+R=10000
 keep=5
 c=2
 s=2.93/sqrt(ncoef)
@@ -205,6 +207,7 @@ tden=mixDen(grid,pvec,comps)
 for(i in 1:ncoef)
 {plot(grid[,i],tden[,i],type="l")}
 }
+}
 
 }
 \keyword{ models }
diff --git a/man/rhierNegbinRw.Rd b/man/rhierNegbinRw.Rd
new file mode 100755
index 0000000..1776eb5
--- /dev/null
+++ b/man/rhierNegbinRw.Rd
@@ -0,0 +1,156 @@
+\name{rhierNegbinRw}
+\alias{rhierNegbinRw}
+\concept{MCMC}
+\concept{hierarchical NBD regression}
+\concept{Negative Binomial regression}
+\concept{Poisson regression}
+\concept{Metropolis algorithm}
+\concept{bayes}
+\title{ MCMC Algorithm for Hierarchical Negative Binomial Regression }
+\description{
+ \code{rhierNegbinRw} implements an MCMC strategy for the hierarchical Negative 
+ Binomial (NBD) regression model. Metropolis steps for each unit level set of 
+ regression parameters are automatically tuned by optimization. Over-dispersion
+ parameter (alpha) is common across units.
+}
+\usage{
+rhierNegbinRw(Data, Prior, Mcmc)
+}
+\arguments{
+  \item{Data}{ list(regdata,Z) }
+  \item{Prior}{ list(Deltabar,Adelta,nu,V,a,b) }
+  \item{Mcmc}{ list(R,keep,s\_beta,s\_alpha,c,Vbeta0,Delta0) }
+}
+\details{
+  Model:   \eqn{y_i} \eqn{\sim}{~} NBD(mean=lambda, over-dispersion=alpha).  \cr
+           \eqn{lambda=exp(X_ibeta_i)}
+
+  Prior:   \eqn{beta_i} \eqn{\sim}{~} \eqn{N(Delta'z_i,Vbeta)}.
+
+           \eqn{vec(Delta|Vbeta)} \eqn{\sim}{~} \eqn{N(vec(Deltabar),Vbeta (x) Adelta)}. \cr
+           \eqn{Vbeta} \eqn{\sim}{~} \eqn{IW(nu,V)}. \cr
+           \eqn{alpha} \eqn{\sim}{~} \eqn{Gamma(a,b)}. \cr
+            note: prior mean of \eqn{alpha = a/b}, \eqn{variance = a/(b^2)}
+
+  list arguments contain:
+  \itemize{
+    \item{\code{regdata}}{ list of lists with data on each of nreg units}
+    \item{\code{regdata[[i]]$X}}{ nobs\_i x nvar matrix of X variables}
+    \item{\code{regdata[[i]]$y}}{ nobs\_i x 1 vector of count responses}
+    \item{\code{Z}}{nreg x nz mat of unit chars (def: vector of ones)}
+    \item{\code{Deltabar}}{ nz x nvar prior mean matrix (def: 0)}
+    \item{\code{Adelta}}{ nz x nz pds prior prec matrix (def: .01I)}
+    \item{\code{nu}}{ d.f. parm for IWishart (def: nvar+3)}
+    \item{\code{V}}{location matrix of IWishart prior (def: nuI)}
+    \item{\code{a}}{ Gamma prior parm (def: .5)}
+    \item{\code{b}}{ Gamma prior parm (def: .1)}
+    \item{\code{R}}{ number of MCMC draws}
+    \item{\code{keep}}{ MCMC thinning parm: keep every keepth draw (def: 1)}
+    \item{\code{s\_beta}}{ scaling for beta| alpha RW inc cov (def: 2.93/sqrt(nvar))}
+    \item{\code{s\_alpha}}{ scaling for alpha | beta RW inc cov (def: 2.93)}
+    \item{\code{c}}{ fractional likelihood weighting parm (def:2)}
+    \item{\code{Vbeta0}}{ starting value for Vbeta (def: I)}
+    \item{\code{Delta0}}{ starting value for Delta (def: 0)}
+  }
+}
+\value{
+  a list containing: 
+  \item{llike}{R/keep vector of values of log-likelihood}
+  \item{Betadraw}{nreg x nvar x R/keep array of beta draws}
+  \item{alphadraw}{R/keep vector of alpha draws}
+  \item{acceptrbeta}{acceptance rate of the beta draws}
+  \item{acceptralpha}{acceptance rate of the alpha draws}
+}
+\note{
+  The NBD regression encompasses Poisson regression in the sense that as alpha goes to
+  infinity the NBD distribution tends to the Poisson.\cr
+  For "small" values of alpha, the dependent variable can be extremely variable so that 
+  a large number of observations may be required to obtain precise inferences. 
+
+  For ease of interpretation, we recommend demeaning Z variables.
+}
+
+\seealso{ \code{\link{rnegbinRw}} }
+\references{ For further discussion, see \emph{Bayesian Statistics and Marketing}
+  by Allenby, McCulloch, and Rossi, Chapter 5. \cr
+  \url{http://gsbwww.uchicago.edu/fac/peter.rossi/research/bsm.html}
+}
+
+\author{ Sridhar Narayanan & Peter Rossi, Graduate School of Business, University of Chicago,
+  \email{Peter.Rossi at ChicagoGsb.edu}.
+}
+\examples{
+##
+if(nchar(Sys.getenv("LONG_TEST")) != 0) 
+{
+##
+set.seed(66)
+simnegbin = 
+function(X, beta, alpha) {
+#   Simulate from the Negative Binomial Regression
+lambda = exp(X \%*\% beta)
+y=NULL
+for (j in 1:length(lambda))
+    y = c(y,rnbinom(1,mu = lambda[j],size = alpha))
+return(y)
+}
+
+nreg = 100        # Number of cross sectional units
+T = 50            # Number of observations per unit
+nobs = nreg*T
+nvar=2            # Number of X variables
+nz=2              # Number of Z variables
+              
+# Construct the Z matrix
+Z = cbind(rep(1,nreg),rnorm(nreg,mean=1,sd=0.125))
+
+Delta = cbind(c(0.4,0.2), c(0.1,0.05))
+alpha = 5
+Vbeta = rbind(c(0.1,0),c(0,0.1))
+
+# Construct the regdata (containing X)
+simnegbindata = NULL
+for (i in 1:nreg) {
+    betai = as.vector(Z[i,]\%*\%Delta) + chol(Vbeta)\%*\%rnorm(nvar)
+    X = cbind(rep(1,T),rnorm(T,mean=2,sd=0.25))
+    simnegbindata[[i]] = list(y=simnegbin(X,betai,alpha), X=X,beta=betai)
+}
+
+Beta = NULL
+for (i in 1:nreg) {Beta=rbind(Beta,matrix(simnegbindata[[i]]$beta,nrow=1))}
+    
+Data = list(regdata=simnegbindata, Z=Z)
+Deltabar = matrix(rep(0,nvar*nz),nrow=nz)
+Adelta = 0.01 * diag(nz)
+nu = nvar+3
+V = 0.01*diag(nvar)
+a = 0.5
+b = 0.1
+Prior = list(Deltabar=Deltabar, Adelta=Adelta, nu=nu, V=V, a=a, b=b)
+
+R=10000
+keep =1
+s_beta=2.93/sqrt(nvar)
+s_alpha=2.93
+c=2
+Mcmc = list(R=R, keep = keep, s_beta=s_beta, s_alpha=s_alpha, c=c)
+out = rhierNegbinRw(Data, Prior, Mcmc)
+
+# Unit level mean beta parameters
+Mbeta = matrix(rep(0,nreg*nvar),nrow=nreg)
+ndraws = length(out$alphadraw)
+for (i in 1:nreg) { Mbeta[i,] = rowSums(out$Betadraw[i, , ])/ndraws }
+
+cat(" Deltadraws ",fill=TRUE)
+mat=apply(out$Deltadraw,2,quantile,probs=c(.01,.05,.5,.95,.99))
+mat=rbind(as.vector(Delta),mat); rownames(mat)[1]="Delta"; print(mat)
+cat(" Vbetadraws ",fill=TRUE)
+mat=apply(out$Vbetadraw,2,quantile,probs=c(.01,.05,.5,.95,.99))
+mat=rbind(as.vector(Vbeta),mat); rownames(mat)[1]="Vbeta"; print(mat)
+cat(" alphadraws ",fill=TRUE)
+mat=apply(matrix(out$alphadraw),2,quantile,probs=c(.01,.05,.5,.95,.99))
+mat=rbind(as.vector(alpha),mat); rownames(mat)[1]="alpha"; print(mat)    
+}
+}
+
+\keyword{models}
diff --git a/man/rmnlIndepMetrop.Rd b/man/rmnlIndepMetrop.Rd
index 6a47880..70161a1 100755
--- a/man/rmnlIndepMetrop.Rd
+++ b/man/rmnlIndepMetrop.Rd
@@ -17,7 +17,7 @@ rmnlIndepMetrop(Data, Prior, Mcmc)
   \item{Mcmc}{ list(R,keep,nu) }
 }
 \details{
-  Model:   y \eqn{\sim} {~} MNL(X,beta). \eqn{Pr(y=j) = exp(x_j'beta)/\sum_k{e^{x_k'beta}}}. \cr
+  Model:   y \eqn{\sim}{~} MNL(X,beta). \eqn{Pr(y=j) = exp(x_j'beta)/\sum_k{e^{x_k'beta}}}. \cr
 
   Prior:   \eqn{beta} \eqn{\sim}{~} \eqn{N(betabar,A^{-1})} \cr
 
@@ -38,7 +38,13 @@ rmnlIndepMetrop(Data, Prior, Mcmc)
   \item{betadraw}{R/keep x nvar array of beta draws}
   \item{acceptr}{acceptance rate of Metropolis draws}
 }
-
+\references{ For further discussion, see \emph{Bayesian Statistics and Marketing}
+  by Allenby, McCulloch, and Rossi, Chapter 5. \cr
+  \url{http://gsbwww.uchicago.edu/fac/peter.rossi/research/bsm.html}
+}
+\author{ Peter Rossi, Graduate School of Business, University of Chicago,
+  \email{Peter.Rossi at ChicagoGsb.edu}.
+}
 \seealso{ \code{\link{rhierMnlRwMixture}} }
 \examples{
 ##
diff --git a/man/rnegbinRw.Rd b/man/rnegbinRw.Rd
new file mode 100755
index 0000000..643f4d0
--- /dev/null
+++ b/man/rnegbinRw.Rd
@@ -0,0 +1,116 @@
+\name{rnegbinRw}
+\alias{rnegbinRw}
+\concept{MCMC}
+\concept{NBD regression}
+\concept{Negative Binomial regression}
+\concept{Poisson regression}
+\concept{Metropolis algorithm}
+\concept{bayes}
+\title{ MCMC Algorithm for Negative Binomial Regression }
+\description{
+ \code{rnegbinRw} implements a Random Walk Metropolis Algorithm for the Negative 
+ Binomial (NBD) regression model. beta | alpha and alpha | beta are drawn with two
+ different random walks.
+}
+\usage{
+rnegbinRw(Data, Prior, Mcmc)
+}
+\arguments{
+  \item{Data}{ list(y,X) }
+  \item{Prior}{ list(betabar,A,a,b) }
+  \item{Mcmc}{ list(R,keep,s\_beta,s\_alpha,beta0) }
+}
+\details{
+  Model:   \eqn{y} \eqn{\sim}{~} \eqn{NBD(mean=lambda, over-dispersion=alpha)}.  \cr
+           \eqn{lambda=exp(x'beta)}
+
+  Prior:   \eqn{beta} \eqn{\sim}{~} \eqn{N(betabar,A^{-1})} \cr
+           \eqn{alpha} \eqn{\sim}{~} \eqn{Gamma(a,b)}. \cr
+            note: prior mean of \eqn{alpha = a/b}, \eqn{variance = a/(b^2)}
+
+  list arguments contain:
+  \itemize{
+    \item{\code{y}}{ nobs vector of counts (0,1,2,\ldots)}
+    \item{\code{X}}{nobs x nvar matrix}
+    \item{\code{betabar}}{ nvar x 1 prior mean (def: 0)}
+    \item{\code{A}}{ nvar x nvar pds prior prec matrix (def: .01I)}
+    \item{\code{a}}{ Gamma prior parm (def: .5)}
+    \item{\code{b}}{ Gamma prior parm (def: .1)}
+    \item{\code{R}}{ number of MCMC draws}
+    \item{\code{keep}}{ MCMC thinning parm: keep every keepth draw (def: 1)}
+    \item{\code{s\_beta}}{ scaling for beta | alpha RW inc cov matrix (def: 2.93/sqrt(nvar))}
+    \item{\code{s\_alpha}}{ scaling for alpha | beta RW inc cov matrix (def: 2.93)}
+  }
+}
+\value{
+  a list containing: 
+  \item{betadraw}{R/keep x nvar array of beta draws}
+  \item{alphadraw}{R/keep vector of alpha draws}
+  \item{llike}{R/keep vector of log-likelihood values evaluated at each draw}
+  \item{acceptrbeta}{acceptance rate of the beta draws}
+  \item{acceptralpha}{acceptance rate of the alpha draws}
+}
+\note{
+  The NBD regression encompasses Poisson regression in the sense that as alpha goes to
+  infinity the NBD distribution tends toward the Poisson.\cr
+  For "small" values of alpha, the dependent variable can be extremely variable so that 
+  a large number of observations may be required to obtain precise inferences.
+}
+
+\seealso{ \code{\link{rhierNegbinRw}} }
+
+\references{ For further discussion, see \emph{Bayesian Statistics and Marketing}
+  by Allenby, McCulloch, and Rossi. \cr
+  \url{http://gsbwww.uchicago.edu/fac/peter.rossi/research/bsm.html}
+}
+
+\author{ Sridhar Narayanam & Peter Rossi, Graduate School of Business, University of Chicago,
+  \email{Peter.Rossi at ChicagoGsb.edu}.
+}
+
+\examples{
+##
+if(nchar(Sys.getenv("LONG_TEST")) != 0)  {R=1000} else {R=10}
+
+set.seed(66)
+simnegbin = 
+function(X, beta, alpha) {
+#   Simulate from the Negative Binomial Regression
+lambda = exp(X \%*\% beta)
+y=NULL
+for (j in 1:length(lambda))
+    y = c(y,rnbinom(1,mu = lambda[j],size = alpha))
+return(y)
+}
+
+nobs = 500
+nvar=2            # Number of X variables
+alpha = 5
+Vbeta = diag(nvar)*0.01
+
+# Construct the regdata (containing X)
+simnegbindata = NULL
+beta = c(0.6,0.2)
+X = cbind(rep(1,nobs),rnorm(nobs,mean=2,sd=0.5))
+simnegbindata = list(y=simnegbin(X,beta,alpha), X=X, beta=beta)
+Data = simnegbindata
+betabar = rep(0,nvar)
+A = 0.01 * diag(nvar)
+a = 0.5; b = 0.1
+Prior = list(betabar=betabar, A=A, a=a, b=b)
+
+keep =1
+s_beta=2.93/sqrt(nvar); s_alpha=2.93
+Mcmc = list(R=R, keep = keep, s_beta=s_beta, s_alpha=s_alpha)
+out = rnegbinRw(Data, Prior, Mcmc)
+
+cat(" betadraws ",fill=TRUE)
+mat=apply(out$betadraw,2,quantile,probs=c(.01,.05,.5,.95,.99))
+mat=rbind(beta,mat); rownames(mat)[1]="beta"; print(mat)
+cat(" alphadraws ",fill=TRUE)
+mat=apply(matrix(out$alphadraw),2,quantile,probs=c(.01,.05,.5,.95,.99))
+mat=rbind(as.vector(alpha),mat); rownames(mat)[1]="alpha"; print(mat)    
+}
+
+\keyword{ models }
+

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-science/packages/r-cran-bayesm.git



More information about the debian-science-commits mailing list