[med-svn] [beast-mcmc] 02/09: Imported Upstream version 1.8.3+dfsg

Andreas Tille tille at debian.org
Thu Jun 2 19:01:51 UTC 2016


This is an automated email from the git hooks/post-receive script.

tille pushed a commit to branch master
in repository beast-mcmc.

commit 8de73ae8407298057a942f86b2f0230832124961
Author: Andreas Tille <tille at debian.org>
Date:   Thu Jun 2 13:48:44 2016 +0200

    Imported Upstream version 1.8.3+dfsg
---
 .idea/libraries/lib.xml                            |    4 +-
 .idea/libraries/lib1.xml                           |   23 -
 .idea/libraries/phylogeography.xml                 |    9 -
 .idea/libraries/quaqua.xml                         |    9 -
 .idea/libraries/tracer.xml                         |    9 -
 .idea/libraries/tracer1.xml                        |    9 -
 README.md                                          |    2 +-
 build.xml                                          |   20 +-
 build_coalsim.xml                                  |    8 +-
 build_pathogen.xml                                 |    4 +
 build_pathogen.xml => build_tempest.xml            |   72 +-
 build_tracer.xml                                   |    1 +
 examples/release/clockModels/testStrictClock.xml   |  634 ++--
 ...stUCRelaxedClockContinuousQuantileLogNormal.xml |  375 ++
 .../clockModels/testUCRelaxedClockGamma.xml        |  377 ++
 .../clockModels/testUCRelaxedClockLogNormal.xml    |  651 ++--
 .../clockModels/testUncorrelatedRelaxedClock.xml   |  418 ---
 release/common/README.txt                          |   22 +-
 release/common/VERSION HISTORY.txt                 |   39 +-
 release_tempest/Linux/icons/pathogen.png           |  Bin 0 -> 34709 bytes
 release_tempest/Linux/scripts/pathogen             |   27 +
 release_tempest/common/README.txt                  |   92 +
 .../branchmodel/BranchAssignmentModel.java         |   19 +
 .../lineagespecific/BeagleBranchLikelihood.java    |    9 +-
 .../lineagespecific/BranchLikelihood.java          |    8 +-
 .../lineagespecific/DirichletProcessOperator.java  |  190 +-
 .../DirichletProcessOperatorParser.java            |   21 +-
 .../NewAbstractLikelihoodOnTree.java               |  640 ++++
 .../NewAbstractSequenceLikelihood.java             |  125 +
 ...ihood.java => NewBeagleSequenceLikelihood.java} |  984 +++--
 .../NewFullyConjugateTraitLikelihood.java          |  105 +
 .../AncestralStateTreeLikelihoodParser.java        |   41 +-
 .../evomodel/parsers/FrequencyModelParser.java     |  354 +-
 .../parsers/MarkovJumpsTreeLikelihoodParser.java   |    4 +
 .../parsers/NewBeagleTreeLikelihoodParser.java     |  232 ++
 .../substmodel/ComplexSubstitutionModel.java       |   28 +-
 .../evomodel/substmodel/EmpiricalCodonModel.java   |    4 -
 .../substmodel/GeneralSubstitutionModel.java       |   34 +
 .../beagle/evomodel/substmodel/MG94CodonModel.java |    8 +-
 .../evomodel/substmodel/MG94HKYCodonModel.java     |    5 +-
 .../beagle/evomodel/substmodel/PCACodonModel.java  |    4 -
 .../substmodel/SVSComplexSubstitutionModel.java    |   27 +-
 .../substmodel/SVSGeneralSubstitutionModel.java    |   24 +
 .../treelikelihood/BeagleTreeLikelihood.java       |   17 +-
 .../EvolutionaryProcessDelegate.java}              |   21 +-
 .../treelikelihood/SubstitutionModelDelegate.java  |    2 +-
 .../MassivelyParallelMDSImpl.java                  |   16 +-
 .../MultiDimensionalScalingCore.java               |    9 +-
 .../MultiDimensionalScalingCoreImpl.java           |  482 +--
 .../MultiDimensionalScalingCoreImpl2.java          |    6 +-
 .../MultiDimensionalScalingLikelihood.java         |  164 +-
 .../NativeMDSSingleton.java                        |   30 +-
 .../app/beagle/tools/BeagleSequenceSimulator.java  |    8 +-
 src/dr/app/beast/BeastMain.java                    |   21 +-
 src/dr/app/beast/BeastVersion.java                 |    6 +-
 src/dr/app/beast/RBeastMain.java                   |   63 +
 src/dr/app/beast/development_parsers.properties    |   47 +-
 src/dr/app/beast/release_parsers.properties        |   20 +-
 src/dr/app/beauti/BeautiFrame.java                 |    9 +-
 .../AncestralStatesOptionsPanel.java               |   86 +-
 .../beauti/clockModelsPanel/ClockModelsPanel.java  |  408 +--
 .../beauti/clockModelsPanel/CloneModelDialog.java  |   94 +
 .../clockModelsPanel/OldClockModelsPanel.java      |    1 +
 .../clockModelsPanel/PartitionClockModelPanel.java |   78 +-
 .../AncestralStatesComponentGenerator.java         |    7 +-
 .../AncestralStatesComponentOptions.java           |   10 +
 .../discrete/DiscreteTraitsComponentGenerator.java |   24 +-
 .../marginalLikelihoodEstimation/MLEGSSDialog.java |   33 +-
 .../MarginalLikelihoodEstimationGenerator.java     |  193 +-
 .../beauti/generator/BaseComponentGenerator.java   |    5 +
 src/dr/app/beauti/generator/BeastGenerator.java    |  272 +-
 .../generator/BranchRatesModelGenerator.java       |  155 +-
 .../app/beauti/generator/ComponentGenerator.java   |    8 +
 src/dr/app/beauti/generator/Generator.java         |    6 +
 src/dr/app/beauti/generator/LogGenerator.java      |   33 +-
 .../app/beauti/generator/OperatorsGenerator.java   |   22 +-
 .../beauti/generator/ParameterPriorGenerator.java  |   13 +
 .../generator/SubstitutionModelGenerator.java      |    2 -
 .../beauti/generator/TreeLikelihoodGenerator.java  |   33 +-
 .../app/beauti/generator/TreePriorGenerator.java   |    6 +-
 src/dr/app/beauti/mcmcpanel/MCMCPanel.java         |    2 +
 .../app/beauti/operatorspanel/OperatorsPanel.java  |   27 +
 src/dr/app/beauti/options/BeautiOptions.java       |    7 +-
 src/dr/app/beauti/options/DateGuesser.java         |   80 +-
 src/dr/app/beauti/options/ModelOptions.java        |   24 +-
 src/dr/app/beauti/options/Parameter.java           |    9 +
 src/dr/app/beauti/options/PartitionClockModel.java |   61 +-
 .../options/PartitionClockModelTreeModelLink.java  |   31 +-
 src/dr/app/beauti/options/PartitionOptions.java    |    4 -
 .../beauti/options/PartitionSubstitutionModel.java |   87 +-
 src/dr/app/beauti/options/PartitionTreeModel.java  |   62 +-
 .../siteModelsPanel/PartitionModelPanel.java       |    8 +-
 .../app/beauti/tipdatepanel/GuessDatesDialog.java  |  121 +-
 src/dr/app/beauti/tipdatepanel/TipDatesPanel.java  |  166 +-
 .../beauti/treespanel/PartitionTreePriorPanel.java |    6 +-
 src/dr/app/beauti/types/ClockType.java             |    2 +-
 .../types/OperatorSetType.java}                    |   24 +-
 src/dr/app/beauti/types/OperatorType.java          |    2 +
 src/dr/app/beauti/types/PriorType.java             |    6 +-
 src/dr/app/beauti/types/TreePriorType.java         |    4 +-
 src/dr/app/bss/BeagleSequenceSimulatorApp.java     |   21 +-
 src/dr/app/bss/Changelog                           |    4 +-
 src/dr/app/bss/PartitionData.java                  |    9 +-
 src/dr/app/bss/README.textile                      |    5 +-
 src/dr/app/bss/Utils.java                          |   12 +-
 src/dr/app/bss/test/BeagleSeqSimTest.java          |    4 +-
 src/dr/app/gui/chart/JChart.java                   |    3 +-
 src/dr/app/oldbeauti/BeastGenerator.java           | 3007 ----------------
 src/dr/app/oldbeauti/BeautiApp.java                |  136 -
 src/dr/app/oldbeauti/BeautiFrame.java              |  656 ----
 src/dr/app/oldbeauti/BeautiOptions.java            | 1933 ----------
 src/dr/app/oldbeauti/BeautiTester.java             |  417 ---
 src/dr/app/oldbeauti/CommandLineBeauti.java        |  261 --
 src/dr/app/oldbeauti/DataPanel.java                |  699 ----
 src/dr/app/oldbeauti/DiscretePriorDialog.java      |  169 -
 src/dr/app/oldbeauti/MCMCPanel.java                |  219 --
 src/dr/app/oldbeauti/ModelPanel.java               |  484 ---
 src/dr/app/oldbeauti/NexusApplicationImporter.java |  416 ---
 src/dr/app/oldbeauti/OperatorsPanel.java           |  285 --
 src/dr/app/oldbeauti/PriorDialog.java              |  419 ---
 src/dr/app/oldbeauti/PriorType.java                |  140 -
 src/dr/app/oldbeauti/PriorsPanel.java              |  509 ---
 src/dr/app/oldbeauti/TaxaPanel.java                |  816 -----
 src/dr/app/oldbeauti/XMLWriter.java                |  138 -
 src/dr/app/pathogen/TemporalRooting.java           |   15 +-
 src/dr/app/tempest/ParentPlot.java                 |  129 +
 src/dr/app/tempest/RootToTip.java                  |  328 ++
 src/dr/app/tempest/SamplesPanel.java               |  461 +++
 src/dr/app/tempest/TempEstApp.java                 |  141 +
 .../TempestDefaultFileMenuFactory.java}            |   58 +-
 src/dr/app/tempest/TempestFrame.java               |  371 ++
 .../TempestMacFileMenuFactory.java}                |  112 +-
 .../TempestMenuBarFactory.java}                    |   15 +-
 src/dr/app/tempest/TempestPanel.java               |  907 +++++
 .../app/{pathogen => tempest}/TemporalRooting.java |   19 +-
 src/dr/app/tempest/TemporalStress.java             |  149 +
 src/dr/app/{oldbeauti => tempest}/TreeUtils.java   |    8 +-
 .../app/{oldbeauti => tempest}/images/beauti.png   |  Bin
 src/dr/app/tempest/images/coloursTool.png          |  Bin 0 -> 1915 bytes
 .../app/{oldbeauti => tempest}/images/exclude.png  |  Bin
 src/dr/app/{oldbeauti => tempest}/images/gear.png  |  Bin
 .../app/{oldbeauti => tempest}/images/include.png  |  Bin
 src/dr/app/tempest/images/tempest.png              |  Bin 0 -> 34709 bytes
 src/dr/app/tools/TreeAnnotator.java                |   62 +-
 .../application/TracerMacFileMenuFactory.java      |   41 +-
 .../tracer/application/TracerMenuBarFactory.java   |    2 +-
 src/dr/evolution/alignment/Alignment.java          |    8 +-
 src/dr/evolution/alignment/PairedSitePatterns.java |    5 +
 src/dr/evolution/alignment/PatternList.java        |    6 +
 src/dr/evolution/alignment/Patterns.java           |    5 +
 src/dr/evolution/alignment/ResamplePatterns.java   |    7 +-
 src/dr/evolution/alignment/SimpleAlignment.java    |    5 +
 src/dr/evolution/alignment/SimpleSiteList.java     |    7 +-
 src/dr/evolution/alignment/SitePatterns.java       |    5 +
 .../coalescent/CataclysmicDemographic.java         |    4 +-
 .../coalescent/MultiEpochExponential.java          |  195 +
 src/dr/evolution/datatype/Microsatellite.java      |    7 +-
 src/dr/evolution/io/NexusImporter.java             |   38 +-
 src/dr/evomodel/antigenic/AntigenicLikelihood.java |    1 -
 .../evomodel/antigenic/NPAntigenicLikelihood.java  |    4 -
 .../Mu1ScaleActiveScaledMu1IntactOperator.java     |  277 ++
 .../Mu2ScaleActiveScaledMu2IntactOperator.java     |  278 ++
 .../ProbGenericSiteGibbsOperator.java              |  193 +
 .../MCMCOperators/ProbSitesGibbsOperator.java      |  184 +
 .../MCMCOperators/RandomWalkOnActiveMu.java        |  247 ++
 .../TreeClusterAlgorithmOperator.java              | 3801 ++++++++++++++++++++
 .../muMeanTranslateInactiveMu1Operator.java        |  219 ++
 .../muPrecisionInactiveMuOperator.java             |  224 ++
 .../MCMCOperators/randomWalkSerumDriftAndMu.java   |  232 ++
 .../serumDriftActiveScaledMu1Operator.java         |  289 ++
 .../serumPrecisionSerumLocOperator.java            |  199 +
 .../TreeClusteringVirusesPrior.java                | 1778 +++++++++
 .../Tree_Clustering_Shared_Routines.java           |  224 ++
 .../misc/mergeAdjacencyMatrixPlots.java            |  276 ++
 .../misc/obsolete/AGLikelihoodCluster.java}        |  287 +-
 .../misc/obsolete/AGLikelihoodTreeCluster.java}    |  782 +++-
 .../misc/obsolete/ClusterAlgorithmOperator.java    |  619 ++++
 .../misc/obsolete/ClusterComparison.java           |    8 +
 .../misc/obsolete/ClusterOperator.java             |  443 +++
 .../misc/obsolete/ClusterViruses.java              |  619 ++++
 .../misc/obsolete/ClusterWalkOperator.java         |  225 ++
 .../misc/obsolete/ClusterWalkOperatorParser.java   |   92 +
 .../phyloClustering/misc/obsolete/OrderDouble.java |   52 +
 .../misc/obsolete/TiterImporter.java               |  344 ++
 .../misc/obsolete/TreeClusterGibbsOperator.java    | 1090 ++++++
 .../obsolete/TreeClusterSequentialSampling.java    | 1482 ++++++++
 .../misc/obsolete/serumDriftScalingMuOperator.java |  214 ++
 .../phyloClustering/misc/simulateClusters.java     |  689 ++++
 .../statistics/ActiveIndicatorsStatistic.java      |  125 +
 .../AnnotateLocationParameterTreeTrait.java        |  214 ++
 .../statistics/AnnotateMuTreeTrait.java            |  215 ++
 .../statistics/CausalMutationsLogger.java          |  133 +
 .../statistics/ClusterLabelsVirusesStatistic.java  |  345 ++
 .../statistics/ClusterLabelsVirusesTreeTrait.java  |  158 +
 .../statistics/DriftedMuStatistic.java             |  168 +
 .../DriftedTreeClusterLocationsStatistic.java      |  158 +
 .../statistics/DriverCountStatistic.java           |  122 +
 .../phyloClustering/statistics/KStatistic.java     |   98 +
 .../statistics/MutationsTreeTrait.java             |  275 ++
 .../statistics/NodeNumberTreeTrait.java            |  126 +
 .../phyloClustering/statistics/PathStatistic.java  |  288 ++
 .../statistics/indicatorsStatistic.java            |   94 +
 .../phyloClustering/statistics/muStatistic.java    |  118 +
 src/dr/evomodel/arg/ARGRelaxedClock.java           |    2 -
 .../branchratemodel/ContinuousBranchRates.java     |  180 +-
 .../branchratemodel/DecayingRateModel.java         |    1 -
 .../DiscreteTraitBranchRateModel.java              |    1 -
 .../branchratemodel/DiscretizedBranchRates.java    |   21 +-
 .../branchratemodel/RateEpochBranchRateModel.java  |    1 -
 .../branchratemodel/ScaledTreeLengthRateModel.java |    1 -
 .../branchratemodel/StrictClockBranchRates.java    |    1 -
 .../branchratemodel/TipBranchRateModel.java        |    1 -
 src/dr/evomodel/clock/RateEvolutionLikelihood.java |    2 -
 src/dr/evomodel/clock/UniversalClock.java          |    2 -
 .../coalescent/AbstractCoalescentLikelihood.java   |   26 +
 .../evomodel/coalescent/AsymptoticGrowthModel.java |    1 -
 .../coalescent/CataclysmicDemographicModel.java    |   24 +-
 .../coalescent/CoalescentEventsStatistic.java      |  198 +-
 .../coalescent/CoalescentIntervalProvider.java     |   14 +-
 .../coalescent/CoalescentIntervalStatistic.java    |   16 +-
 .../evomodel/coalescent/CoalescentLikelihood.java  |    2 +-
 .../evomodel/coalescent/EmergingEpidemicModel.java |    1 -
 .../coalescent/ExponentialConstantModel.java       |    2 -
 .../coalescent/ExponentialExponentialModel.java    |    3 -
 ...ExponentialProductPosteriorMeansLikelihood.java |    2 +-
 .../coalescent/ExponentialSkythingLikelihood.java  |    2 -
 .../evomodel/coalescent/GMRFDensityComponent.java  |    9 +
 .../GMRFMultilocusSkyrideLikelihood.java           |   57 +-
 .../evomodel/coalescent/GMRFSkyrideLikelihood.java |   12 +-
 ...aussianProcessMultilocusSkytrackLikelihood.java |    1 -
 src/dr/evomodel/coalescent/LinearGrowthModel.java  |    2 -
 ...tModel.java => MultiEpochExponentialModel.java} |   71 +-
 .../evomodel/coalescent/PeakAndDeclineModel.java   |    2 -
 .../evomodel/coalescent/PowerLawGrowthModel.java   |    2 -
 .../coalescent/TwoEpochDemographicModel.java       |    4 +-
 .../AbstractMultivariateTraitLikelihood.java       |    2 -
 .../FullyConjugateMultivariateTraitLikelihood.java |  191 +-
 .../continuous/GaussianProcessFromTree.java        |   63 +-
 .../IntegratedMultivariateTraitLikelihood.java     |    2 +-
 src/dr/evomodel/epidemiology/SIRModel.java         |    9 +
 .../epidemiology/casetocase/AbstractOutbreak.java  |    5 +
 .../CaseToCaseTransmissionLikelihood.java          |  603 ++--
 .../casetocase/CaseToCaseTreeLikelihood.java       |  323 +-
 .../epidemiology/casetocase/CategoryOutbreak.java  |   63 +-
 .../casetocase/PartitionedTreeModel.java           |  263 +-
 .../casetocase/WithinCaseCoalescent.java           |  107 +-
 .../operators/InfectionBranchGibbsOperator.java    |  235 --
 .../operators/InfectionBranchMovementOperator.java |   44 +-
 .../operators/TransmissionExchangeOperatorA.java   |   14 +-
 .../operators/TransmissionExchangeOperatorB.java   |    4 -
 .../operators/TransmissionSubtreeSlideA.java       |    5 +-
 .../operators/TransmissionSubtreeSlideB.java       |    7 +-
 .../operators/TransmissionWilsonBaldingA.java      |   15 +-
 .../operators/TransmissionWilsonBaldingB.java      |    4 +-
 ...KnownVarianceNormalPeriodPriorDistribution.java |    1 -
 .../NormalPeriodPriorDistribution.java             |    1 -
 src/dr/evomodel/lineage/LineageSitePatterns.java   |    5 +
 .../operators/LatentFactorHamiltonianMC.java       |  161 +
 .../evomodel/operators/LatentLiabilityGibbs.java   |   47 +-
 .../operators/MicrosatUpDownOperator.java          |  325 +-
 .../MicrosatelliteModelSelectOperator.java         |  141 +-
 .../operators/MsatBitFlipOperator.java             |  315 +-
 ...MsatFullAncestryImportanceSamplingOperator.java |  253 +-
 .../MsatSingleAncestralStateGibbsOperator.java     |  250 +-
 ...andomWalkIntegerNodeHeightWeightedOperator.java |  177 +-
 .../RandomWalkIntegerSetSizeWeightedOperator.java  |  229 +-
 src/dr/evomodel/operators/SubtreeJumpOperator.java |   41 +-
 src/dr/evomodel/operators/SubtreeLeapOperator.java |  321 ++
 .../substmodel/ComplexSubstitutionModel.java       |    7 +
 .../evomodel/substmodel/EmpiricalCodonModel.java   |    2 -
 src/dr/evomodel/substmodel/HKY.java                |    1 -
 src/dr/evomodel/substmodel/PCACodonModel.java      |    3 -
 .../substmodel/SVSGeneralSubstitutionModel.java    |   13 +-
 .../substmodel/SubstitutionEpochModel.java         |    3 -
 src/dr/evomodel/tree/HiddenLinkageModel.java       |   11 +-
 src/dr/evomodel/tree/TreeModel.java                |    2 +-
 src/dr/evomodel/tree/TreeParameterModel.java       |   29 +-
 .../EmpiricalTreeDistributionModelParser.java      |    2 +-
 src/dr/evomodelxml/TreeWorkingPriorParsers.java    |  157 +-
 .../ContinuousBranchRatesParser.java               |   36 +-
 .../DiscretizedBranchRatesParser.java              |    9 +-
 .../CataclysmicDemographicModelParser.java         |   27 +-
 ....java => MultiEpochExponentialModelParser.java} |  190 +-
 .../operators/LatentFactorHamiltonianMCParser.java |   65 +
 .../operators/MicrosatUpDownOperatorParser.java    |  201 +-
 .../MicrosatelliteModelSelectOperatorParser.java   |  144 +-
 .../operators/MsatBitFlipOperatorParser.java       |  170 +-
 ...llAncestryImportanceSamplingOperatorParser.java |  148 +-
 ...satSingleAncestralStateGibbsOperatorParser.java |  144 +-
 ...alkIntegerNodeHeightWeightedOperatorParser.java |  167 +-
 ...omWalkIntegerSetSizeWeightedOperatorParser.java |  172 +-
 .../operators/SubtreeLeapOperatorParser.java       |   83 +
 .../treelikelihood/TreeTraitParserUtilities.java   |    7 +-
 src/dr/evoxml/AttributePatternsParser.java         |    5 +-
 src/dr/evoxml/MicrosatellitePatternParser.java     |   37 +-
 .../inference/distribution/BinomialLikelihood.java |    2 -
 .../distribution/GammaDistributionModel.java       |  106 +-
 .../InverseGammaDistributionModel.java             |    3 -
 .../distribution/LogNormalDistributionModel.java   |    1 -
 .../MultivariateDistributionLikelihood.java        |   13 +-
 .../MultivariateNormalDistributionModel.java       |   18 +-
 .../distribution/NormalDistributionModel.java      |   13 +-
 .../TruncatedNormalDistributionModel.java          |    2 +-
 src/dr/inference/markovchain/MarkovChain.java      |   21 +-
 .../markovjumps/TwoStateOccupancyMarkovReward.java |  133 +-
 src/dr/inference/mcmc/DebugUtils.java              |  102 +-
 src/dr/inference/mcmc/MCMC.java                    |   14 +-
 src/dr/inference/model/AbstractModel.java          |    7 +-
 .../inference/model/AbstractModelLikelihood.java   |    9 +
 .../model/BlockUpperTriangularMatrixParameter.java |   10 +-
 .../model/CompoundFastMatrixParameter.java         |  210 ++
 src/dr/inference/model/CompoundLikelihood.java     |   13 +-
 src/dr/inference/model/CompoundParameter.java      |   20 +-
 src/dr/inference/model/DesignMatrix.java           |    9 +-
 src/dr/inference/model/DiagonalMatrix.java         |    2 +
 src/dr/inference/model/FastMatrixParameter.java    |  276 ++
 src/dr/inference/model/LatentFactorModel.java      |  471 ++-
 src/dr/inference/model/Likelihood.java             |   12 +
 src/dr/inference/model/MatrixParameter.java        |   50 +-
 .../model/MatrixParameterInterface.java}           |   41 +-
 src/dr/inference/model/Model.java                  |    3 +-
 src/dr/inference/model/Parameter.java              |   32 +-
 src/dr/inference/model/PathLikelihood.java         |   10 +
 src/dr/inference/model/SumParameter.java           |  129 +
 .../model/TestThreadedCompoundLikelihood.java      |   13 +
 .../model/ThreadedCompoundLikelihood.java          |   12 +
 .../operators/AbstractHamiltonianMCOperator.java   |   30 +
 ...daptableVarianceMultivariateNormalOperator.java |  156 +-
 .../operators/EllipticalSliceOperator.java         |  152 +-
 .../inference/operators/LoadingsGibbsOperator.java |    3 +-
 src/dr/inference/operators/OperatorSchedule.java   |   22 +-
 .../operators/SimpleOperatorSchedule.java          |   15 +-
 .../CompoundGaussianProcessParser.java             |  126 +
 .../distribution/DistributionLikelihoodParser.java |    1 +
 .../distribution/DistributionModelParser.java      |    1 +
 .../distribution/GammaDistributionModelParser.java |   63 +-
 .../distribution/GeneralizedLinearModelParser.java |   32 +-
 src/dr/inferencexml/distribution/PriorParsers.java |   54 +-
 .../distribution/WorkingPriorParsers.java          |  214 +-
 src/dr/inferencexml/loggers/LoggerParser.java      |    4 +-
 .../model/CompoundLikelihoodParser.java            |    5 +-
 .../model/ImmutableParameterParser.java            |   85 +
 .../model/LatentFactorModelParser.java             |   25 +-
 src/dr/inferencexml/model/SumParameterParser.java  |   79 +
 .../operators/SimpleOperatorScheduleParser.java    |    9 +-
 src/dr/math/MathUtils.java                         |  197 +-
 src/dr/math/MersenneTwisterFast.java               |  447 +--
 .../distributions/CompoundGaussianProcess.java     |   99 +
 .../math/distributions/DirichletDistribution.java  |   73 +-
 src/dr/math/distributions/GammaDistribution.java   |    7 +
 .../distributions/InverseGaussianDistribution.java |    3 +-
 .../distributions/MultivariateKDEDistribution.java |    5 +-
 .../MultivariateNormalDistribution.java            |   43 +-
 .../MultivariatePolyaDistributionLikelihood.java   |    9 +
 src/dr/math/distributions/RandomGenerator.java     |    4 +-
 src/dr/util/NumberFormatter.java                   |    8 +-
 src/dr/util/Transform.java                         |  198 +-
 src/dr/xml/Report.java                             |    2 +-
 src/test/dr/distibutions/InvGammaTest.java         |   26 +-
 .../UncorrelatedRelaxedClockTest.java              |  546 +--
 .../coalescent/MultiEpochExponentialTest.java      |  171 +
 .../TwoStateOccupancyMarkovRewardsTest.java        |  137 +-
 362 files changed, 33853 insertions(+), 18375 deletions(-)

diff --git a/.idea/libraries/lib.xml b/.idea/libraries/lib.xml
index 11c4023..fa8838a 100644
--- a/.idea/libraries/lib.xml
+++ b/.idea/libraries/lib.xml
@@ -1,10 +1,10 @@
 <component name="libraryTable">
   <library name="lib">
     <CLASSES>
-      <root url="jar://$PROJECT_DIR$/release/Mac/lib/quaqua-filechooser-only.jar!/" />
-      <root url="jar://$PROJECT_DIR$/release/Mac/lib/quaqua.jar!/" />
+      <root url="file://$PROJECT_DIR$/lib" />
     </CLASSES>
     <JAVADOC />
     <SOURCES />
+    <jarDirectory url="file://$PROJECT_DIR$/lib" recursive="false" />
   </library>
 </component>
\ No newline at end of file
diff --git a/.idea/libraries/lib1.xml b/.idea/libraries/lib1.xml
deleted file mode 100644
index bd84618..0000000
--- a/.idea/libraries/lib1.xml
+++ /dev/null
@@ -1,23 +0,0 @@
-<component name="libraryTable">
-  <library name="lib1">
-    <CLASSES>
-      <root url="jar://$PROJECT_DIR$/lib/commons-math-2.2.jar!/" />
-      <root url="jar://$PROJECT_DIR$/lib/colt.jar!/" />
-      <root url="jar://$PROJECT_DIR$/lib/itext-1.4.5.jar!/" />
-      <root url="jar://$PROJECT_DIR$/lib/org.boehn.kmlframework_20090320.jar!/" />
-      <root url="jar://$PROJECT_DIR$/lib/JRI.jar!/" />
-      <root url="jar://$PROJECT_DIR$/lib/beagle.jar!/" />
-      <root url="jar://$PROJECT_DIR$/lib/jdom.jar!/" />
-      <root url="jar://$PROJECT_DIR$/lib/options.jar!/" />
-      <root url="jar://$PROJECT_DIR$/lib/figtreepanel.jar!/" />
-      <root url="jar://$PROJECT_DIR$/lib/freemarker.jar!/" />
-      <root url="jar://$PROJECT_DIR$/lib/junit-4.4.jar!/" />
-      <root url="jar://$PROJECT_DIR$/lib/mtj.jar!/" />
-      <root url="jar://$PROJECT_DIR$/lib/mpj.jar!/" />
-      <root url="jar://$PROJECT_DIR$/lib/jebl.jar!/" />
-      <root url="jar://$PROJECT_DIR$/lib/jam.jar!/" />
-    </CLASSES>
-    <JAVADOC />
-    <SOURCES />
-  </library>
-</component>
\ No newline at end of file
diff --git a/.idea/libraries/phylogeography.xml b/.idea/libraries/phylogeography.xml
deleted file mode 100644
index 8bd4c61..0000000
--- a/.idea/libraries/phylogeography.xml
+++ /dev/null
@@ -1,9 +0,0 @@
-<component name="libraryTable">
-  <library name="phylogeography">
-    <CLASSES>
-      <root url="jar://$PROJECT_DIR$/release_phylogeography/common/lib/phylogeography.jar!/" />
-    </CLASSES>
-    <JAVADOC />
-    <SOURCES />
-  </library>
-</component>
\ No newline at end of file
diff --git a/.idea/libraries/quaqua.xml b/.idea/libraries/quaqua.xml
deleted file mode 100644
index a6c2674..0000000
--- a/.idea/libraries/quaqua.xml
+++ /dev/null
@@ -1,9 +0,0 @@
-<component name="libraryTable">
-  <library name="quaqua">
-    <CLASSES>
-      <root url="jar://$PROJECT_DIR$/release_tracer/Mac/lib/quaqua.jar!/" />
-    </CLASSES>
-    <JAVADOC />
-    <SOURCES />
-  </library>
-</component>
\ No newline at end of file
diff --git a/.idea/libraries/tracer.xml b/.idea/libraries/tracer.xml
deleted file mode 100644
index a95172d..0000000
--- a/.idea/libraries/tracer.xml
+++ /dev/null
@@ -1,9 +0,0 @@
-<component name="libraryTable">
-  <library name="tracer">
-    <CLASSES>
-      <root url="jar://$PROJECT_DIR$/release_tracer/Windows/Tracer v1.6pre/lib/tracer.jar!/" />
-    </CLASSES>
-    <JAVADOC />
-    <SOURCES />
-  </library>
-</component>
\ No newline at end of file
diff --git a/.idea/libraries/tracer1.xml b/.idea/libraries/tracer1.xml
deleted file mode 100644
index aa8ed12..0000000
--- a/.idea/libraries/tracer1.xml
+++ /dev/null
@@ -1,9 +0,0 @@
-<component name="libraryTable">
-  <library name="tracer1">
-    <CLASSES>
-      <root url="jar://$PROJECT_DIR$/release_tracer/Windows/Tracer v1.5/lib/tracer.jar!/" />
-    </CLASSES>
-    <JAVADOC />
-    <SOURCES />
-  </library>
-</component>
\ No newline at end of file
diff --git a/README.md b/README.md
index aaa3b8a..9b1e7c9 100644
--- a/README.md
+++ b/README.md
@@ -9,7 +9,7 @@ We also include a program that can convert NEXUS files into this format.
 
 ## Download BEAST
 
-[Download BEAST v1.8.1 binaries for Mac, Windows and UNIX/Linux](http://tree.bio.ed.ac.uk/software/beast/)
+[Download BEAST v1.8.2 binaries for Mac, Windows and UNIX/Linux](http://tree.bio.ed.ac.uk/software/beast/)
 
 [Older BEAST Downloads](https://code.google.com/p/beast-mcmc/downloads)
 
diff --git a/build.xml b/build.xml
index 39e5840..fa496f5 100644
--- a/build.xml
+++ b/build.xml
@@ -290,7 +290,10 @@
         <echo message="JUnit test report finished."/>
     </target>
 
-    <property name="version" value="1.8.3pre"/>
+    <property name="version" value="1.8.3"/>
+    <!--
+    <property name="version" value="1.8.3pre20160105"/>
+    -->
     <property name="version_number" value="1.8.3"/>
     <property name="release_dir" value="release"/>
     <property name="BEAST_name" value="BEAST"/>
@@ -495,12 +498,13 @@
                     name="${BEAST_name} v${version}"
                     mainclass="dr.app.beast.BeastMain"
                     stubfile="${packaging_tools}/mac/universalJavaApplicationStub"
+					useJavaXKey="true"
                     icon="${common_dir}/icons/beast.icns"
                     jvmversion="1.6+"
                     vmoptions="-d64 -Xmx2048M"
                     arguments="-window -working -options"
                     version="${version}"
-                    copyright="${BEAST_name} v${version}, Copyright 2002-2015, Alexei Drummond, Andrew Rambaut and Marc Suchard"
+                    copyright="${BEAST_name} v${version}, Copyright 2002-2016, Alexei Drummond, Andrew Rambaut and Marc Suchard"
                     bundleid="dr.app.beast">
             <javaproperty name="apple.laf.useScreenMenuBar" value="true"/>
             <javaproperty name="java.library.path" value="$JAVAROOT:/usr/local/lib"/>
@@ -517,12 +521,13 @@
                     name="${BEAUti_name} v${version}"
                     mainclass="dr.app.beauti.BeautiApp"
                     stubfile="${packaging_tools}/mac/universalJavaApplicationStub"
+					useJavaXKey="true"
                     icon="${common_dir}/icons/beauti.icns"
                     jvmversion="1.6+"
                     vmoptions="-d64 -Xmx2048M"
                     arguments=""
                     version="${version}"
-                    copyright="${BEAUti_name} v${version}, Copyright 2002-2015, Alexei Drummond, Andrew Rambaut, Marc Suchard and Walter Xie"
+                    copyright="${BEAUti_name} v${version}, Copyright 2002-2016, Alexei Drummond, Andrew Rambaut, Marc Suchard and Walter Xie"
                     bundleid="dr.app.beauti">
             <documenttype name="BEAUti document"
                           extensions="beauti bti"
@@ -545,12 +550,13 @@
                     name="${LogCombiner_name} v${version}"
                     mainclass="dr.app.tools.LogCombiner"
                     stubfile="${packaging_tools}/mac/universalJavaApplicationStub"
+					useJavaXKey="true"
                     icon="${common_dir}/icons/utility.icns"
                     jvmversion="1.6+"
                     vmoptions="-d64 -Xmx2048M"
                     arguments=""
                     version="${version}"
-                    copyright="${LogCombiner_name} v${version}, Copyright 2002-2015, Alexei Drummond, Andrew Rambaut, Marc Suchard and Walter Xie"
+                    copyright="${LogCombiner_name} v${version}, Copyright 2002-2016, Alexei Drummond, Andrew Rambaut, Marc Suchard and Walter Xie"
                     bundleid="dr.app.tools.logcombiner">
             <jarfileset dir="${Mac_package_dir}/lib">
                 <include name="**/beast.jar"/>
@@ -563,12 +569,13 @@
                     name="${TreeAnnotator_name} v${version}"
                     mainclass="dr.app.tools.TreeAnnotator"
                     stubfile="${packaging_tools}/mac/universalJavaApplicationStub"
+					useJavaXKey="true"
                     icon="${common_dir}/icons/utility.icns"
                     jvmversion="1.6+"
                     vmoptions="-d64 -Xmx4096M"
                     arguments=""
                     version="${version}"
-                    copyright="${TreeAnnotator_name} v${version}, Copyright 2002-2015, Alexei Drummond, Andrew Rambaut, Marc Suchard and Walter Xie"
+                    copyright="${TreeAnnotator_name} v${version}, Copyright 2002-2016, Alexei Drummond, Andrew Rambaut, Marc Suchard and Walter Xie"
                     bundleid="dr.app.tools.treeannotator">
             <jarfileset dir="${Mac_package_dir}/lib">
                 <include name="**/beast.jar"/>
@@ -581,12 +588,13 @@
                     name="${TreeStat_name} v${version}"
                     mainclass="dr.app.treestat.TreeStatApp"
                     stubfile="${packaging_tools}/mac/universalJavaApplicationStub"
+					useJavaXKey="true"
                     icon="${common_dir}/icons/TreeStat.icns"
                     jvmversion="1.6+"
                     vmoptions="-d64 -Xmx1024M"
                     arguments=""
                     version="${version}"
-                    copyright="${TreeStat_name} v${version}, Copyright 2002-2015, Alexei Drummond and Andrew Rambaut"
+                    copyright="${TreeStat_name} v${version}, Copyright 2002-2016, Alexei Drummond and Andrew Rambaut"
                     bundleid="dr.app.treestat">
             <jarfileset dir="${Mac_package_dir}/lib">
                 <include name="**/beast.jar"/>
diff --git a/build_coalsim.xml b/build_coalsim.xml
index c576f07..8a66005 100644
--- a/build_coalsim.xml
+++ b/build_coalsim.xml
@@ -8,7 +8,7 @@
     <property name="build" location="build"/>
     <property name="lib" location="lib"/>
     <property name="dist" location="dist"/>
-
+                                                                                2
     <property environment="env"/>
 
     <target name="init">
@@ -72,6 +72,8 @@
             </manifest>
             <fileset dir="${build}">
                 <include name="dr/app/coalgen/**/*.class"/>
+                <include name="dr/app/beauti/**/*.class"/>
+                <include name="dr/app/gui/**/*.class"/>
                 <include name="dr/app/util/**/*.class"/>
                 <include name="dr/evolution/alignment/**/*.class"/>
                 <include name="dr/evolution/coalescent/**/*.class"/>
@@ -100,7 +102,7 @@
             </fileset>
             <zipgroupfileset dir="${lib}" includes="**/jebl.jar"/>
             <zipgroupfileset dir="${lib}" includes="**/jam.jar"/>
-            <zipgroupfileset dir="${lib}" includes="**/commons-math-2.0.jar"/>
+            <zipgroupfileset dir="${lib}" includes="**/commons-math-2.2.jar"/>
         </jar>
     </target>
-</project>
\ No newline at end of file
+</project>
diff --git a/build_pathogen.xml b/build_pathogen.xml
index ba6b048..e4df81c 100644
--- a/build_pathogen.xml
+++ b/build_pathogen.xml
@@ -24,6 +24,10 @@
         <mkdir dir="${dist}"/>
     </target>
 
+    <target name="clean">
+        <delete dir="${build}"/>
+    </target>
+
     <target name="compile" depends="init">
         <!-- Compile the java code from ${src} into ${build} -->
         <javac source="1.6" srcdir="${src}" destdir="${build}" classpathref="classpath">
diff --git a/build_pathogen.xml b/build_tempest.xml
similarity index 78%
copy from build_pathogen.xml
copy to build_tempest.xml
index ba6b048..b31d664 100644
--- a/build_pathogen.xml
+++ b/build_tempest.xml
@@ -1,7 +1,31 @@
-<project name="Pathogen" default="build" basedir=".">
+<!--
+  ~ build_tempest.xml
+  ~
+  ~ Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
+  ~
+  ~ This file is part of BEAST.
+  ~ See the NOTICE file distributed with this work for additional
+  ~ information regarding copyright ownership and licensing.
+  ~
+  ~ BEAST is free software; you can redistribute it and/or modify
+  ~ it under the terms of the GNU Lesser General Public License as
+  ~ published by the Free Software Foundation; either version 2
+  ~ of the License, or (at your option) any later version.
+  ~
+  ~  BEAST is distributed in the hope that it will be useful,
+  ~  but WITHOUT ANY WARRANTY; without even the implied warranty of
+  ~  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+  ~  GNU Lesser General Public License for more details.
+  ~
+  ~ You should have received a copy of the GNU Lesser General Public
+  ~ License along with BEAST; if not, write to the
+  ~ Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+  ~ Boston, MA  02110-1301  USA
+  -->
+
+<project name="TempEst" default="build" basedir=".">
     <description>
-        Build file for PATHOGEN release versions, It is also used by Hudson Pathogen project.
-        $Id: build_pathogen.xml,v 1.5 2006/09/09 18:16:17 rambaut Exp $
+        Build file for TempEst release versions.
     </description>
     <!-- set global properties for this build -->
     <property name="src" location="src"/>
@@ -24,10 +48,14 @@
         <mkdir dir="${dist}"/>
     </target>
 
+    <target name="clean">
+        <delete dir="${build}"/>
+    </target>
+
     <target name="compile" depends="init">
         <!-- Compile the java code from ${src} into ${build} -->
         <javac source="1.6" srcdir="${src}" destdir="${build}" classpathref="classpath">
-            <include name="dr/app/pathogen/**"/>
+            <include name="dr/app/tempest/**"/>
             <include name="dr/app/beauti/**"/>
             <include name="dr/app/tools/**"/>
             <include name="dr/app/util/**"/>
@@ -51,13 +79,13 @@
         <mkdir dir="${dist}"/>
 
         <!-- Put everything in ${build} into the treestat.jar file -->
-        <jar jarfile="${dist}/pathogen.jar">
+        <jar jarfile="${dist}/tempest.jar">
             <manifest>
                 <attribute name="Built-By" value="${user.name}"/>
-                <attribute name="Main-Class" value="dr.app.pathogen.PathogenApp"/>
+                <attribute name="Main-Class" value="dr.app.tempest.TempestApp"/>
             </manifest>
             <fileset dir="${build}">
-                <include name="dr/app/pathogen/**/*.class"/>
+                <include name="dr/app/tempest/**/*.class"/>
                 <include name="dr/app/beauti/**/*.class"/>
                 <include name="dr/app/tools/**/*.class"/>
                 <include name="dr/app/util/**/*.class"/>
@@ -77,7 +105,7 @@
                 <include name="images/*.png"/>
             </fileset>
             <fileset dir="${src}">
-                <include name="dr/app/pathogen/**/*.png"/>
+                <include name="dr/app/tempest/**/*.png"/>
                 <include name="dr/app/gui/**/*.png"/>
                 <include name="org/virion/jam/**/*.png"/>
                 <include name="dr/gui/table/**/*.png"/>
@@ -90,8 +118,8 @@
 
     <property name="version" value="1.5" />
     <property name="packaging_tools" value="packaging_tools" />
-    <property name="release_dir" value="release_pathogen" />
-    <property name="name" value="Path-O-Gen" />
+    <property name="release_dir" value="release_tempest" />
+    <property name="name" value="TempEst" />
 
     <property name="common_dir" value="${release_dir}/common" />
 
@@ -105,7 +133,7 @@
 
     <!-- Need to either install Launch4j under {BEAST workspace}/${release}
                  or install it in the default directory and change the location of launch4j.dir -->
-    <target name="windows" depends="build" description="release Windows version of Pathogen">
+    <target name="windows" depends="build" description="release Windows version of TempEst">
 
         <delete dir="${Windows_package_dir}" />
         <!-- Create the release directory -->
@@ -115,14 +143,14 @@
         <taskdef name="launch4j" classname="net.sf.launch4j.ant.Launch4jTask"
             classpath="${launch4j.dir}/launch4j.jar :${launch4j.dir}/lib/xstream.jar" />
 
-        <copy file="${dist}/pathogen.jar" todir="${Windows_package_dir}/lib"/>
+        <copy file="${dist}/tempest.jar" todir="${Windows_package_dir}/lib"/>
         <copy todir="${Windows_package_dir}/lib">
             <fileset dir="${Windows_dir}/lib"/>
         </copy>
         <copy file="${common_dir}/README.txt" todir="${Windows_package_dir}"/>
 
-        <launch4j configFile="${Windows_dir}/pathogen_launch4j.xml"
-                  jar="${dist}/pathogen.jar"
+        <launch4j configFile="${Windows_dir}/tempest_launch4j.xml"
+                  jar="${dist}/tempest.jar"
                   outfile="${Windows_package_dir}/${name} v${version}.exe"
                   fileVersion="${version}.0.0"
                   txtFileVersion="${version}"
@@ -136,7 +164,7 @@
         <echo message="Windows version release is finished." />
     </target>
 
-    <target name="linux" depends="build" description="release Linux/Unix version of Pathogen">
+    <target name="linux" depends="build" description="release Linux/Unix version of TempEst">
         <delete dir="${Linux_package_dir}" />
         <!-- Create the release directory -->
         <mkdir dir="${Linux_package_dir}" />
@@ -148,7 +176,7 @@
         <chmod dir="${Linux_package_dir}/bin" perm="755" includes="**/**"/>
 
         <copy file="${Linux_dir}/icons/pathogen.png" todir="${Linux_package_dir}/images"/>
-        <copy file="${dist}/pathogen.jar" todir="${Linux_package_dir}/lib"/>
+        <copy file="${dist}/tempest.jar" todir="${Linux_package_dir}/lib"/>
         <copy todir="${Linux_package_dir}/lib">
             <fileset dir="${Linux_dir}/lib"/>
         </copy>
@@ -161,7 +189,7 @@
         <echo message="Linux/Unix version release is finished." />
     </target>
 
-    <target name="mac" depends="build" description="release Mac version of Pathogen">
+    <target name="mac" depends="build" description="release Mac version of Tempest">
         <delete dir="${Mac_package_dir}" />
         <!-- Create the release directory -->
         <mkdir dir="${Mac_package_dir}" />
@@ -173,20 +201,18 @@
         <!-- create a jar bundle for the mac -->
         <jarbundler dir="${Mac_package_dir}"
                         name="${name} v${version}"
-                        mainclass="dr.app.pathogen.PathogenApp"
+                        mainclass="dr.app.tempest.TempestApp"
                         stubfile="${packaging_tools}/mac/universalJavaApplicationStub"
-                        icon="${Mac_dir}/icons/Path-O-Gen.icns"
+                        icon="${Mac_dir}/icons/TempEst.icns"
                         jvmversion="1.6+"
                         arguments=""
+                        vmoptions="-Xmx1024M"
                         version="${version}"
                         copyright="${name} v${version}, Copyright 2006-2015, Andrew Rambaut"
                         bundleid="pathogen" >
             <javaproperty name="apple.laf.useScreenMenuBar" value="true"/>
             <jarfileset dir="${dist}">
-                <include name="**/pathogen.jar"/>
-            </jarfileset>
-            <jarfileset dir="${Mac_dir}/lib">
-                <include name="**/*.jar"/>
+                <include name="**/tempest.jar"/>
             </jarfileset>
         </jarbundler>
 
diff --git a/build_tracer.xml b/build_tracer.xml
index 1cd3902..bb32c2f 100644
--- a/build_tracer.xml
+++ b/build_tracer.xml
@@ -217,6 +217,7 @@
                         name="${name} v${version}"
                         mainclass="dr.app.tracer.application.TracerApp"
                         stubfile="${packaging_tools}/mac/universalJavaApplicationStub"
+						useJavaXKey="true"
                         icon="${Mac_dir}/icons/Tracer.icns"
                         jvmversion="1.6+"
                         vmoptions="-Xmx1024M"
diff --git a/examples/release/clockModels/testStrictClock.xml b/examples/release/clockModels/testStrictClock.xml
index a397827..a916381 100644
--- a/examples/release/clockModels/testStrictClock.xml
+++ b/examples/release/clockModels/testStrictClock.xml
@@ -1,334 +1,338 @@
 <?xml version="1.0" standalone="yes"?>
-<!-- Generated by BEAUTi v1.4                                                -->
-<!--       Alexei Drummond and Andrew Rambaut                                -->
-<!--       Department of Zoology, University of Oxford                       -->
-<!--       http://evolve.zoo.ox.ac.uk/beast/                                 -->
-<beast>
-
-    <!-- The list of taxa analyse (can also include dates/ages).                 -->
-    <!-- ntax=17                                                                 -->
-    <taxa id="taxa">
-        <taxon id="D4Brazi82">
-            <date value="1982.0" direction="forwards" units="years"/>
-        </taxon>
-        <taxon id="D4ElSal83">
-            <date value="1983.0" direction="forwards" units="years"/>
-        </taxon>
-        <taxon id="D4ElSal94">
-            <date value="1994.0" direction="forwards" units="years"/>
-        </taxon>
-        <taxon id="D4Indon76">
-            <date value="1976.0" direction="forwards" units="years"/>
-        </taxon>
-        <taxon id="D4Indon77">
-            <date value="1977.0" direction="forwards" units="years"/>
-        </taxon>
-        <taxon id="D4Mexico84">
-            <date value="1984.0" direction="forwards" units="years"/>
-        </taxon>
-        <taxon id="D4NewCal81">
-            <date value="1981.0" direction="forwards" units="years"/>
-        </taxon>
-        <taxon id="D4Philip64">
-            <date value="1964.0" direction="forwards" units="years"/>
-        </taxon>
-        <taxon id="D4Philip56">
-            <date value="1956.0" direction="forwards" units="years"/>
-        </taxon>
-        <taxon id="D4Philip84">
-            <date value="1984.0" direction="forwards" units="years"/>
-        </taxon>
-        <taxon id="D4PRico86">
-            <date value="1986.0" direction="forwards" units="years"/>
-        </taxon>
-        <taxon id="D4SLanka78">
-            <date value="1978.0" direction="forwards" units="years"/>
-        </taxon>
-        <taxon id="D4Tahiti79">
-            <date value="1979.0" direction="forwards" units="years"/>
-        </taxon>
-        <taxon id="D4Tahiti85">
-            <date value="1985.0" direction="forwards" units="years"/>
-        </taxon>
-        <taxon id="D4Thai63">
-            <date value="1963.0" direction="forwards" units="years"/>
-        </taxon>
-        <taxon id="D4Thai78">
-            <date value="1978.0" direction="forwards" units="years"/>
-        </taxon>
-        <taxon id="D4Thai84">
-            <date value="1984.0" direction="forwards" units="years"/>
-        </taxon>
-    </taxa>
-
-    <!-- The sequence alignment (each sequence refers to a taxon above).         -->
-    <!-- ntax=17 nchar=1485                                                      -->
-    <alignment id="alignment" dataType="nucleotide">
-        <sequence>
-            <taxon idref="D4Brazi82"/>
-            ATGCGATGCGTAGGAGTAGGAAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGACCTGGTGCTAGAACATGGAGGATGCGTCACAACCATGGCCCAGGGAAAACCAACCTTGGATTTTGAACTGACCAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCAATATCAAACATAACTACGGCAACAAGATGTCCAACGCAAGGAGAGCCTTATCTGAAAGAGGAACAGGACCAACAGTACATTTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTGTTTGGAAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGTTCGGGGAAGATAACAGGCAATTTGGTCCAAATTGAGAACCTTGAATACACAGTGGTTGTAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGACACATCCAATCATGGAGTT [...]
-        </sequence>
-        <sequence>
-            <taxon idref="D4ElSal83"/>
-            ATGCGATGCGTAGGAGTAGGAAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGACCTGGTGCTAGAACATGGAGGATGCGTCACAACCATGGCCCAGGGAAAACCAACCTTGGATTTTGAACTGACTAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCAATATCAAACATAACTACGGCAACAAGATGTCCAACGCAAGGAGAGCCTTATCTGAAAGAGGAACAGGACCAACAGTACATTTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTGTTTGGAAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGTTCGGGGAAGATAACAGGCAATTTGGTCCAAATTGAGAACCTTGAATACACAGTGGTTGTAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGACACATCCAATCATGGAGTT [...]
-        </sequence>
-        <sequence>
-            <taxon idref="D4ElSal94"/>
-            ATGCGATGCGTAGGAGTAGGAAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGACCTGGTGCTAGAACATGGAGGATGCGTCACAACCATAGCCCAGGGAAAACCAACCTTGGATTTTGAATTGACTAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCAATATCAAACATAACTACGGCAACAAGATGTCCAACGCAAGGAGAGCCTTATCTGAAAGAGGAACAGGACCAACAGTACATTTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGGAATGGCTGTGGCTTGCTTGGAAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGTTCGGGGAAGATAACAGGCAATTTGGTCCAAATTGAGAACCTTGAATACACAGTGGTTGTAACAGTCCACAATGGAGATACCCATGCAGTAGGAAATGACACATCCAATCATGGAGTT [...]
-        </sequence>
-        <sequence>
-            <taxon idref="D4Indon76"/>
-            ATGCGATGCGTAGGAGTAGGAAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGATCTGGTGCTAGAACATGGAGGATGCGTCACAACCATGGCCCAGGGAAAACCAACCTTGGATTTTGAACTGACTAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCAATATCAAACATAACCACGGCAACAAGATGTCCAACGCAAGGAGAGCCTTATCTAAAAGAGGAACAAGACCAACAGTACATTTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTGTTTGGAAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGTTCGGGGAAGATAACAGGCAATTTGGTCCAAATTGAGAACCTTGAATACACAGTGGTTGTAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGACACATCCAATCATGGAGTT [...]
-        </sequence>
-        <sequence>
-            <taxon idref="D4Indon77"/>
-            ATGCGATGCGTAGGAGTAGGAAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGATCTGGTGCTAGAACATGGAGGATGCGTCACAACCATGGCCCAGGGAAAACCAACCTTGGATTTTGAACTGACTAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCAATATCAAACATAACCACGGCAACAAGATGTCCAACGCAAGGAGAGCCTTATCTAAAAGAGGAACAAGACCAACAGTACATTTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTGTTTGGAAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGTTCGGGGAAGATAACAGGCAATTTGGTCCAAATTGAGAACCTTGAATACACAGTAGTTGTAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGACACATCCAACCATGGAGTT [...]
-        </sequence>
-        <sequence>
-            <taxon idref="D4Mexico84"/>
-            ATGCGATGCGTAGGAGTAGGAAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGACCTAGTGCTAGAACATGGAGGATGCGTCACAACCATGGCCCAGGGAAAACCAACCTTGGATTTTGAACTGACTAAGACAACAGCCAAGGAAGTGGCTCTGCTAAGAACCTATTGCATTGAAGCCTCAATATCAAACATAACTACGGCAACAAGATGTCCAACGCAAGGAGAGCCTTATCTGAAAGAGGAACAGGACCAACAGTACATTTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTGTTTGGAAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGTTCGGGGAAGATAACAGGCAATTTGGTCCAAATTGAGAACCTTGAATACACAGTGGTTGTAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGACACATCCAATCATGGAGTT [...]
-        </sequence>
-        <sequence>
-            <taxon idref="D4NewCal81"/>
-            ATGCGATGCGTAGGAGTAGGAAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGACCTGGTGCTAGAACATGGAGGATGCGTCACAACCATGGCCCAGGGAAAACCAACCTTGGATTTTGAACTGACTAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCAATATCAAACATAACTACGGCAACAAGATGTCCAACGCAAGGAGAGCCTTATCTGAAAGAGGAACAGGACCAACAGTACATTTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTGTTTGGAAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGTTCGGGGAAGATAACAGGCAATTTGGTCCAAATTGAGAACCTTGAATACACAGTGGTTGTAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGACACATCCAATCATGGAGTT [...]
-        </sequence>
-        <sequence>
-            <taxon idref="D4Philip64"/>
-            ATGCGATGCGTGGGAGTGGGGAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGATTTGGTGCTAGAACATGGAGGATGTGTCACAACCATGGCCCAGGGAAAACCAACCTTGGATTTTGAACTGATCAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCGATATCAAACATAACCACGGCAACAAGATGTCCAACGCAAGGAGAACCTTATCTCAAAGAGGAACAAGATCAACAGTACATCTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTGTTTGGGAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGCTCGGGGAAGATAACAGGCAATTTGGTCCAAATTGAGAACCTTGAATACACAGTGGTTGTAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGATACATCCAACCATGGAGTG [...]
-        </sequence>
-        <sequence>
-            <taxon idref="D4Philip56"/>
-            ATGCGATGCGTGGGAGTGGGGAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGATTTGGTGCTAGAACATGGAGGATGTGTCACAACCATGGCCCAGGGAAAACCAACCTTGGATTTTGAACTGATCAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCGATATCAAACATAACCACGGCAACAAGATGTCCAACGCAAGGAGAACCTTATCTCAAAGAGGAACAAGATCAACAGTACATCTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTGCTTGGGAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGCTCGGGGAAGATAACAGGCAATTTGGTCCAAATTGAGAACCTTGAATACACAGTAGTTGTAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGACATATCCAACCATGGAGTG [...]
-        </sequence>
-        <sequence>
-            <taxon idref="D4Philip84"/>
-            ATGCGATGCGTAGGAGTGGGGAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGACTTAGTGCTAGAACATGGAGGATGTGTCACAACCATGGCCCAAGGAAAACCAACCTTGGATTTTGAACTGATCAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCGATATCAAACATAACCACGGCAACAAGATGCCCAACGCAAGGAGAACCTTATCTCAAAGAGGAACAAGATCAACAGTACATTTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTGTTTGGGAAAGGAGGAGTTGTGACATGTGCGAAGTTCTCATGCTCGGGAAAGATAACAGGCAATTTGGTCCAAATTGAGAACCTTGAATATACAGTGGTTGTAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGACACATCCAACCATGGAGTG [...]
-        </sequence>
-        <sequence>
-            <taxon idref="D4PRico86"/>
-            ATGCGATGCGTAGGAGTAGGAAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGACCTGGTGCTAGAACATGGAGGATGCGTCACAACCATGGCCCAGGGAAAACCAACCTTGGATTTTGAACTGACTAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCAATATCAAACATAACTACGGCAACAAGATGTCCAACGCAAGGGGAGCCCTATCTGAAAGAGGAACAGGACCAACAGTACATTTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTGTTTGGAAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGTTCGGGGAAGATAACAGGCAATCTGGTCCAAATTGAGAACCTTGAGTACACAGTGGTTGTAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGACACATCCAATCATGGAGTT [...]
-        </sequence>
-        <sequence>
-            <taxon idref="D4SLanka78"/>
-            ATGCGATGCGTGGGAGTGGGGAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGATCTGGTGCTAGAACATGGAGGATGTGTCACAACCATGGCCCAAGGAAAACCAACCTTGGATTTTGAACTGATCAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCCATATCAAACATAACCACGGCAACAAGATGTCCAACGCAAGGAGAGCCCTGTCTCAAAGAGGAACAGGATCAACAGTACATCTGCCGGAGAGACGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCCTGCTTGGGAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGCTCGGGGAAGATAACAGGCAACTTAGTCCGAATTGAGAACCTTGAATACACAGTGGTTGTAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGACACATCCAACCACGGAGTG [...]
-        </sequence>
-        <sequence>
-            <taxon idref="D4Tahiti79"/>
-            ATGCGATGCGTAGGAGTAGGAAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGATCTGGTGCTAGAACATGGAGGATGCGTCACAACCATGGCCCAGGGAAAACCAACCTTGGATTTTGAACTGACTAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCAATATCAAACATAACTACGGCAACAAGATGTCCAACGCAAGGAGAGCCTTATCTGAAAGAGGAACAGGACCAACAGTACATTTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTGTTTGGAAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGTTCGGGGAAGATAACAGGCAATTTGGTCCAAATTGAGAACCTTGAATACACAGTGGTTGTAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGACACATCCAATCATGGAGTT [...]
-        </sequence>
-        <sequence>
-            <taxon idref="D4Tahiti85"/>
-            ATGCGATGCGTAGGAGTAGGAAACAGAGACTTTGTGGAAGGAGTTTCAGGTGGAGCATGGGTCGATTTGGTGCTAGAACATGGAGGATGCGTCACAACCATGGCCCAGGGAAAACCAACCTTGGATTTTGAACTGACTAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCAATATCAAACATAACTACGGCAACAAGATGTCCAACGCAAGGAGAGCCTTATCTGAAAGAGGAACAGGACCAACAGTACATTTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTGTTTGGAAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGTTCGGGGAAGATAACAGGCAATCTGGTCCAAATTGAGAACCTTGAATACACAGTGGTCATAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGACACATCCAATCATGGAGTT [...]
-        </sequence>
-        <sequence>
-            <taxon idref="D4Thai63"/>
-            ATGCGATGCGTAGGAGTGGGGAACAGGGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGATCTGGTGCTAGAACATGGAGGATGTGTCACAACCATGGCTCAAGGAAAACCAACCTTGGATTTTGAACTGATCAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCGATATCAAACATAACCACGGCGACAAGATGTCCAACGCAAGGAGAGCCTTATCTCAAAGAGGAACAAGATCAACAGTACATCTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTACTTGGAAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGCTCGGGGAAGATAACAGGCAACTTGGTTCGAATTGAGAACCTTGAATACACAGTGGTTGTGACAGTCCACAACGGAGACACCCATGCAGTAGGAAATGACATATCCAACCATGGAGTG [...]
-        </sequence>
-        <sequence>
-            <taxon idref="D4Thai78"/>
-            ATGCGATGCGTAGGAGTGGGGAACAGAGACTTTGTAGAAGGAGTCTCAGGTGGAGCATGGGTCGATCTGGTGCTAGAACATGGAGGATGTGTCACAACCATGGCCCAGGGAAAACCAACCTTGGATTTTGAACTGATCAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCCATATCAAACATAACCACGGCAACAAGATGTCCAACGCAAGGAGAGCCTTACCTCAAAGAGGAACAAGATCAACAGTACATCTGCCGGAGAGACGTGGTAGATAGAGGGTGGGGCAACGGCTGTGGCTTGCTTGGGAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGCTCGGGGAAGATAACAGGCAACTTAGTCCAAATTGAGAACCTTGAATACACAGTGGTTGTAACAGTCCACAATGGAGACACCCATGCTGTAGGAAATGATACATCCAACCACGGAGTG [...]
-        </sequence>
-        <sequence>
-            <taxon idref="D4Thai84"/>
-            ATGCGATGCGTAGGAGTAGGGAACAGAGACTTTGTAGAAGGAGTCTCAGGTGGAGCATGGGTCGATCTGGTGCTAGAACATGGAGGATGTGTCACAACCATGGCCCAGGGAAAACCAACCCTGGATTTTGAACTGATCAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCCATATCAAACATAACCACGGCAACAAGATGTCCAACGCAAGGAGAGCCTTACCTCAAAGAGGAACAAGATCAACAGTACATCTGCCGGAGAGACGTGGTAGACAGAGGGTGGGGCAACGGCTGTGGCTTGTTTGGGAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGCTCGGGGAAGATAACGGGCAACTTAGTCCAAATTGAGAACCTTGAATACACAGTGGTTGTAACAGTCCACAATGGAGACACCCATGCTGTAGGAAATGATACATCCAACCACGGAGTG [...]
-        </sequence>
-    </alignment>
-
-    <!-- The unique patterns for all positions                                   -->
-    <!-- npatterns=138                                                           -->
-    <patterns id="patterns" from="1">
-        <alignment idref="alignment"/>
-    </patterns>
 
-    <!-- A prior assumption that the population size has remained constant       -->
-    <!-- throughout the time spanned by the genealogy.                           -->
-    <constantSize id="constant" units="years">
-        <populationSize>
-            <parameter id="constant.popSize" value="380.0" lower="0.0" upper="38000.0"/>
-        </populationSize>
-    </constantSize>
-
-    <!-- Generate a random starting tree under the coalescent process            -->
-    <coalescentTree id="startingTree">
-        <taxa idref="taxa"/>
-        <constantSize idref="constant"/>
-    </coalescentTree>
-
-    <treeModel id="treeModel">
-        <coalescentTree idref="startingTree"/>
-        <rootHeight>
-            <parameter id="treeModel.rootHeight"/>
-        </rootHeight>
-        <nodeHeights internalNodes="true">
-            <parameter id="treeModel.internalNodeHeights"/>
-        </nodeHeights>
-        <nodeHeights internalNodes="true" rootNode="true">
-            <parameter id="treeModel.allInternalNodeHeights"/>
-        </nodeHeights>
-    </treeModel>
-
-    <coalescentLikelihood id="coalescent">
-        <model>
-            <constantSize idref="constant"/>
-        </model>
-        <populationTree>
-            <treeModel idref="treeModel"/>
-        </populationTree>
-    </coalescentLikelihood>
+<!-- Generated by BEAUTi v1.8.3 Prerelease r20150808                         -->
+<!--       by Alexei J. Drummond, Andrew Rambaut and Marc A. Suchard         -->
+<!--       Department of Computer Science, University of Auckland and        -->
+<!--       Institute of Evolutionary Biology, University of Edinburgh        -->
+<!--       David Geffen School of Medicine, University of California, Los Angeles-->
+<!--       http://beast.bio.ed.ac.uk/                                        -->
+<beast>
 
-    <!-- The strict clock (Uniform rates across branches)                        -->
-    <strictClockBranchRates id="branchRates">
-        <rate>
-            <parameter id="clock.rate" value="2.3E-5" lower="0.0" upper="100.0"/>
-        </rate>
-    </strictClockBranchRates>
+	<!-- The list of taxa to be analysed (can also include dates/ages).          -->
+	<!-- ntax=17                                                                 -->
+	<taxa id="taxa">
+		<taxon id="D4Brazi82">
+			<date value="1982.0" direction="forwards" units="years"/>
+		</taxon>
+		<taxon id="D4ElSal83">
+			<date value="1983.0" direction="forwards" units="years"/>
+		</taxon>
+		<taxon id="D4ElSal94">
+			<date value="1994.0" direction="forwards" units="years"/>
+		</taxon>
+		<taxon id="D4Indon76">
+			<date value="1976.0" direction="forwards" units="years"/>
+		</taxon>
+		<taxon id="D4Indon77">
+			<date value="1977.0" direction="forwards" units="years"/>
+		</taxon>
+		<taxon id="D4Mexico84">
+			<date value="1984.0" direction="forwards" units="years"/>
+		</taxon>
+		<taxon id="D4NewCal81">
+			<date value="1981.0" direction="forwards" units="years"/>
+		</taxon>
+		<taxon id="D4Philip64">
+			<date value="1964.0" direction="forwards" units="years"/>
+		</taxon>
+		<taxon id="D4Philip56">
+			<date value="1956.0" direction="forwards" units="years"/>
+		</taxon>
+		<taxon id="D4Philip84">
+			<date value="1984.0" direction="forwards" units="years"/>
+		</taxon>
+		<taxon id="D4PRico86">
+			<date value="1986.0" direction="forwards" units="years"/>
+		</taxon>
+		<taxon id="D4SLanka78">
+			<date value="1978.0" direction="forwards" units="years"/>
+		</taxon>
+		<taxon id="D4Tahiti79">
+			<date value="1979.0" direction="forwards" units="years"/>
+		</taxon>
+		<taxon id="D4Tahiti85">
+			<date value="1985.0" direction="forwards" units="years"/>
+		</taxon>
+		<taxon id="D4Thai63">
+			<date value="1963.0" direction="forwards" units="years"/>
+		</taxon>
+		<taxon id="D4Thai78">
+			<date value="1978.0" direction="forwards" units="years"/>
+		</taxon>
+		<taxon id="D4Thai84">
+			<date value="1984.0" direction="forwards" units="years"/>
+		</taxon>
+	</taxa>
 
-    <!-- The HKY substitution model (Hasegawa, Kishino & Yano, 1985)             -->
-    <hkyModel id="hky">
-        <frequencies>
-            <frequencyModel dataType="nucleotide">
-                <alignment idref="alignment"/>
-                <frequencies>
-                    <parameter id="hky.frequencies" dimension="4"/>
-                </frequencies>
-            </frequencyModel>
-        </frequencies>
-        <kappa>
-            <parameter id="hky.kappa" value="1.0" lower="0.0" upper="100.0"/>
-        </kappa>
-    </hkyModel>
+	<!-- The sequence alignment (each sequence refers to a taxon above).         -->
+	<!-- ntax=17 nchar=1485                                                      -->
+	<alignment id="alignment" dataType="nucleotide">
+		<sequence>
+			<taxon idref="D4Brazi82"/>
+			ATGCGATGCGTAGGAGTAGGAAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGACCTGGTGCTAGAACATGGAGGATGCGTCACAACCATGGCCCAGGGAAAACCAACCTTGGATTTTGAACTGACCAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCAATATCAAACATAACTACGGCAACAAGATGTCCAACGCAAGGAGAGCCTTATCTGAAAGAGGAACAGGACCAACAGTACATTTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTGTTTGGAAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGTTCGGGGAAGATAACAGGCAATTTGGTCCAAATTGAGAACCTTGAATACACAGTGGTTGTAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGACACATCCAATCATGGAGTTACAGCCATG [...]
+		</sequence>
+		<sequence>
+			<taxon idref="D4ElSal83"/>
+			ATGCGATGCGTAGGAGTAGGAAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGACCTGGTGCTAGAACATGGAGGATGCGTCACAACCATGGCCCAGGGAAAACCAACCTTGGATTTTGAACTGACTAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCAATATCAAACATAACTACGGCAACAAGATGTCCAACGCAAGGAGAGCCTTATCTGAAAGAGGAACAGGACCAACAGTACATTTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTGTTTGGAAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGTTCGGGGAAGATAACAGGCAATTTGGTCCAAATTGAGAACCTTGAATACACAGTGGTTGTAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGACACATCCAATCATGGAGTTACAGCCATG [...]
+		</sequence>
+		<sequence>
+			<taxon idref="D4ElSal94"/>
+			ATGCGATGCGTAGGAGTAGGAAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGACCTGGTGCTAGAACATGGAGGATGCGTCACAACCATAGCCCAGGGAAAACCAACCTTGGATTTTGAATTGACTAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCAATATCAAACATAACTACGGCAACAAGATGTCCAACGCAAGGAGAGCCTTATCTGAAAGAGGAACAGGACCAACAGTACATTTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGGAATGGCTGTGGCTTGCTTGGAAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGTTCGGGGAAGATAACAGGCAATTTGGTCCAAATTGAGAACCTTGAATACACAGTGGTTGTAACAGTCCACAATGGAGATACCCATGCAGTAGGAAATGACACATCCAATCATGGAGTTACAGCCACG [...]
+		</sequence>
+		<sequence>
+			<taxon idref="D4Indon76"/>
+			ATGCGATGCGTAGGAGTAGGAAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGATCTGGTGCTAGAACATGGAGGATGCGTCACAACCATGGCCCAGGGAAAACCAACCTTGGATTTTGAACTGACTAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCAATATCAAACATAACCACGGCAACAAGATGTCCAACGCAAGGAGAGCCTTATCTAAAAGAGGAACAAGACCAACAGTACATTTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTGTTTGGAAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGTTCGGGGAAGATAACAGGCAATTTGGTCCAAATTGAGAACCTTGAATACACAGTGGTTGTAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGACACATCCAATCATGGAGTTACAGCCACG [...]
+		</sequence>
+		<sequence>
+			<taxon idref="D4Indon77"/>
+			ATGCGATGCGTAGGAGTAGGAAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGATCTGGTGCTAGAACATGGAGGATGCGTCACAACCATGGCCCAGGGAAAACCAACCTTGGATTTTGAACTGACTAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCAATATCAAACATAACCACGGCAACAAGATGTCCAACGCAAGGAGAGCCTTATCTAAAAGAGGAACAAGACCAACAGTACATTTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTGTTTGGAAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGTTCGGGGAAGATAACAGGCAATTTGGTCCAAATTGAGAACCTTGAATACACAGTAGTTGTAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGACACATCCAACCATGGAGTTACAGCCACG [...]
+		</sequence>
+		<sequence>
+			<taxon idref="D4Mexico84"/>
+			ATGCGATGCGTAGGAGTAGGAAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGACCTAGTGCTAGAACATGGAGGATGCGTCACAACCATGGCCCAGGGAAAACCAACCTTGGATTTTGAACTGACTAAGACAACAGCCAAGGAAGTGGCTCTGCTAAGAACCTATTGCATTGAAGCCTCAATATCAAACATAACTACGGCAACAAGATGTCCAACGCAAGGAGAGCCTTATCTGAAAGAGGAACAGGACCAACAGTACATTTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTGTTTGGAAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGTTCGGGGAAGATAACAGGCAATTTGGTCCAAATTGAGAACCTTGAATACACAGTGGTTGTAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGACACATCCAATCATGGAGTTACAGCCATG [...]
+		</sequence>
+		<sequence>
+			<taxon idref="D4NewCal81"/>
+			ATGCGATGCGTAGGAGTAGGAAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGACCTGGTGCTAGAACATGGAGGATGCGTCACAACCATGGCCCAGGGAAAACCAACCTTGGATTTTGAACTGACTAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCAATATCAAACATAACTACGGCAACAAGATGTCCAACGCAAGGAGAGCCTTATCTGAAAGAGGAACAGGACCAACAGTACATTTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTGTTTGGAAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGTTCGGGGAAGATAACAGGCAATTTGGTCCAAATTGAGAACCTTGAATACACAGTGGTTGTAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGACACATCCAATCATGGAGTTACAGCCATG [...]
+		</sequence>
+		<sequence>
+			<taxon idref="D4Philip64"/>
+			ATGCGATGCGTGGGAGTGGGGAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGATTTGGTGCTAGAACATGGAGGATGTGTCACAACCATGGCCCAGGGAAAACCAACCTTGGATTTTGAACTGATCAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCGATATCAAACATAACCACGGCAACAAGATGTCCAACGCAAGGAGAACCTTATCTCAAAGAGGAACAAGATCAACAGTACATCTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTGTTTGGGAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGCTCGGGGAAGATAACAGGCAATTTGGTCCAAATTGAGAACCTTGAATACACAGTGGTTGTAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGATACATCCAACCATGGAGTGACAGCCACG [...]
+		</sequence>
+		<sequence>
+			<taxon idref="D4Philip56"/>
+			ATGCGATGCGTGGGAGTGGGGAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGATTTGGTGCTAGAACATGGAGGATGTGTCACAACCATGGCCCAGGGAAAACCAACCTTGGATTTTGAACTGATCAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCGATATCAAACATAACCACGGCAACAAGATGTCCAACGCAAGGAGAACCTTATCTCAAAGAGGAACAAGATCAACAGTACATCTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTGCTTGGGAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGCTCGGGGAAGATAACAGGCAATTTGGTCCAAATTGAGAACCTTGAATACACAGTAGTTGTAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGACATATCCAACCATGGAGTGACAGCCACG [...]
+		</sequence>
+		<sequence>
+			<taxon idref="D4Philip84"/>
+			ATGCGATGCGTAGGAGTGGGGAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGACTTAGTGCTAGAACATGGAGGATGTGTCACAACCATGGCCCAAGGAAAACCAACCTTGGATTTTGAACTGATCAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCGATATCAAACATAACCACGGCAACAAGATGCCCAACGCAAGGAGAACCTTATCTCAAAGAGGAACAAGATCAACAGTACATTTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTGTTTGGGAAAGGAGGAGTTGTGACATGTGCGAAGTTCTCATGCTCGGGAAAGATAACAGGCAATTTGGTCCAAATTGAGAACCTTGAATATACAGTGGTTGTAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGACACATCCAACCATGGAGTGACAGCCACG [...]
+		</sequence>
+		<sequence>
+			<taxon idref="D4PRico86"/>
+			ATGCGATGCGTAGGAGTAGGAAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGACCTGGTGCTAGAACATGGAGGATGCGTCACAACCATGGCCCAGGGAAAACCAACCTTGGATTTTGAACTGACTAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCAATATCAAACATAACTACGGCAACAAGATGTCCAACGCAAGGGGAGCCCTATCTGAAAGAGGAACAGGACCAACAGTACATTTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTGTTTGGAAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGTTCGGGGAAGATAACAGGCAATCTGGTCCAAATTGAGAACCTTGAGTACACAGTGGTTGTAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGACACATCCAATCATGGAGTTACAGCCACG [...]
+		</sequence>
+		<sequence>
+			<taxon idref="D4SLanka78"/>
+			ATGCGATGCGTGGGAGTGGGGAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGATCTGGTGCTAGAACATGGAGGATGTGTCACAACCATGGCCCAAGGAAAACCAACCTTGGATTTTGAACTGATCAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCCATATCAAACATAACCACGGCAACAAGATGTCCAACGCAAGGAGAGCCCTGTCTCAAAGAGGAACAGGATCAACAGTACATCTGCCGGAGAGACGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCCTGCTTGGGAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGCTCGGGGAAGATAACAGGCAACTTAGTCCGAATTGAGAACCTTGAATACACAGTGGTTGTAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGACACATCCAACCACGGAGTGACAGCCACG [...]
+		</sequence>
+		<sequence>
+			<taxon idref="D4Tahiti79"/>
+			ATGCGATGCGTAGGAGTAGGAAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGATCTGGTGCTAGAACATGGAGGATGCGTCACAACCATGGCCCAGGGAAAACCAACCTTGGATTTTGAACTGACTAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCAATATCAAACATAACTACGGCAACAAGATGTCCAACGCAAGGAGAGCCTTATCTGAAAGAGGAACAGGACCAACAGTACATTTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTGTTTGGAAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGTTCGGGGAAGATAACAGGCAATTTGGTCCAAATTGAGAACCTTGAATACACAGTGGTTGTAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGACACATCCAATCATGGAGTTACAGCCACG [...]
+		</sequence>
+		<sequence>
+			<taxon idref="D4Tahiti85"/>
+			ATGCGATGCGTAGGAGTAGGAAACAGAGACTTTGTGGAAGGAGTTTCAGGTGGAGCATGGGTCGATTTGGTGCTAGAACATGGAGGATGCGTCACAACCATGGCCCAGGGAAAACCAACCTTGGATTTTGAACTGACTAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCAATATCAAACATAACTACGGCAACAAGATGTCCAACGCAAGGAGAGCCTTATCTGAAAGAGGAACAGGACCAACAGTACATTTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTGTTTGGAAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGTTCGGGGAAGATAACAGGCAATCTGGTCCAAATTGAGAACCTTGAATACACAGTGGTCATAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGACACATCCAATCATGGAGTTACAGCCACG [...]
+		</sequence>
+		<sequence>
+			<taxon idref="D4Thai63"/>
+			ATGCGATGCGTAGGAGTGGGGAACAGGGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGATCTGGTGCTAGAACATGGAGGATGTGTCACAACCATGGCTCAAGGAAAACCAACCTTGGATTTTGAACTGATCAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCGATATCAAACATAACCACGGCGACAAGATGTCCAACGCAAGGAGAGCCTTATCTCAAAGAGGAACAAGATCAACAGTACATCTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTACTTGGAAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGCTCGGGGAAGATAACAGGCAACTTGGTTCGAATTGAGAACCTTGAATACACAGTGGTTGTGACAGTCCACAACGGAGACACCCATGCAGTAGGAAATGACATATCCAACCATGGAGTGACAGCCACG [...]
+		</sequence>
+		<sequence>
+			<taxon idref="D4Thai78"/>
+			ATGCGATGCGTAGGAGTGGGGAACAGAGACTTTGTAGAAGGAGTCTCAGGTGGAGCATGGGTCGATCTGGTGCTAGAACATGGAGGATGTGTCACAACCATGGCCCAGGGAAAACCAACCTTGGATTTTGAACTGATCAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCCATATCAAACATAACCACGGCAACAAGATGTCCAACGCAAGGAGAGCCTTACCTCAAAGAGGAACAAGATCAACAGTACATCTGCCGGAGAGACGTGGTAGATAGAGGGTGGGGCAACGGCTGTGGCTTGCTTGGGAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGCTCGGGGAAGATAACAGGCAACTTAGTCCAAATTGAGAACCTTGAATACACAGTGGTTGTAACAGTCCACAATGGAGACACCCATGCTGTAGGAAATGATACATCCAACCACGGAGTGACAGCCACG [...]
+		</sequence>
+		<sequence>
+			<taxon idref="D4Thai84"/>
+			ATGCGATGCGTAGGAGTAGGGAACAGAGACTTTGTAGAAGGAGTCTCAGGTGGAGCATGGGTCGATCTGGTGCTAGAACATGGAGGATGTGTCACAACCATGGCCCAGGGAAAACCAACCCTGGATTTTGAACTGATCAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCCATATCAAACATAACCACGGCAACAAGATGTCCAACGCAAGGAGAGCCTTACCTCAAAGAGGAACAAGATCAACAGTACATCTGCCGGAGAGACGTGGTAGACAGAGGGTGGGGCAACGGCTGTGGCTTGTTTGGGAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGCTCGGGGAAGATAACGGGCAACTTAGTCCAAATTGAGAACCTTGAATACACAGTGGTTGTAACAGTCCACAATGGAGACACCCATGCTGTAGGAAATGATACATCCAACCACGGAGTGACAGCCACG [...]
+		</sequence>
+	</alignment>
 
-    <!-- site model                                                              -->
-    <siteModel id="siteModel">
-        <substitutionModel>
-            <hkyModel idref="hky"/>
-        </substitutionModel>
-    </siteModel>
+	<!-- The unique patterns from 1 to end                                       -->
+	<!-- npatterns=138                                                           -->
+	<patterns id="patterns" from="1" strip="false">
+		<alignment idref="alignment"/>
+	</patterns>
 
-    <treeLikelihood id="treeLikelihood" useAmbiguities="true">
-        <patterns idref="patterns"/>
-        <treeModel idref="treeModel"/>
-        <siteModel idref="siteModel"/>
-        <strictClockBranchRates idref="branchRates"/>
-    </treeLikelihood>
+	<!-- A prior assumption that the population size has remained constant       -->
+	<!-- throughout the time spanned by the genealogy.                           -->
+	<constantSize id="constant" units="years">
+		<populationSize>
+			<parameter id="constant.popSize" value="380.0" lower="0.0"/>
+		</populationSize>
+	</constantSize>
 
-    <operators id="operators">
-        <scaleOperator scaleFactor="0.75" weight="1">
-            <parameter idref="hky.kappa"/>
-        </scaleOperator>
-        <scaleOperator scaleFactor="0.75" weight="3">
-            <parameter idref="clock.rate"/>
-        </scaleOperator>
-        <upDownOperator scaleFactor="0.75" weight="3">
-            <up>
-                <parameter idref="clock.rate"/>
-            </up>
-            <down>
-                <parameter idref="treeModel.allInternalNodeHeights"/>
-            </down>
-        </upDownOperator>
-        <scaleOperator scaleFactor="0.75" weight="3">
-            <parameter idref="constant.popSize"/>
-        </scaleOperator>
-        <scaleOperator scaleFactor="0.75" weight="3">
-            <parameter idref="treeModel.rootHeight"/>
-        </scaleOperator>
-        <uniformOperator weight="30">
-            <parameter idref="treeModel.internalNodeHeights"/>
-        </uniformOperator>
-        <subtreeSlide size="1.0" gaussian="true" weight="15">
-            <treeModel idref="treeModel"/>
-        </subtreeSlide>
-        <narrowExchange weight="15">
-            <treeModel idref="treeModel"/>
-        </narrowExchange>
-        <wideExchange weight="3">
-            <treeModel idref="treeModel"/>
-        </wideExchange>
-        <wilsonBalding weight="3">
-            <treeModel idref="treeModel"/>
-        </wilsonBalding>
-    </operators>
+	<!-- Generate a random starting tree under the coalescent process            -->
+	<coalescentSimulator id="startingTree">
+		<taxa idref="taxa"/>
+		<constantSize idref="constant"/>
+	</coalescentSimulator>
 
-    <mcmc id="mcmc" chainLength="1000000" autoOptimize="true">
-        <posterior id="posterior">
-            <prior id="prior">
-                <coalescentLikelihood idref="coalescent"/>
-            </prior>
-            <likelihood id="likelihood">
-                <treeLikelihood idref="treeLikelihood"/>
-            </likelihood>
-        </posterior>
-        <operators idref="operators"/>
-        <log id="screenLog" logEvery="10000">
-            <column label="Posterior" dp="4" width="12">
-                <posterior idref="posterior"/>
-            </column>
-            <column label="Root Height" sf="6" width="12">
-                <parameter idref="treeModel.rootHeight"/>
-            </column>
-            <column label="L(tree)" dp="4" width="12">
-                <treeLikelihood idref="treeLikelihood"/>
-            </column>
-            <column label="L(coalecent)" dp="4" width="12">
-                <coalescentLikelihood idref="coalescent"/>
-            </column>
-        </log>
-        <log id="fileLog" logEvery="500" fileName="testStrictClock.log">
-            <posterior idref="posterior"/>
-            <parameter idref="clock.rate"/>
-            <parameter idref="constant.popSize"/>
-            <parameter idref="hky.kappa"/>
-            <parameter idref="treeModel.rootHeight"/>
-            <treeLikelihood idref="treeLikelihood"/>
-            <coalescentLikelihood idref="coalescent"/>
-        </log>
-        <logTree id="treeFileLog" logEvery="500" nexusFormat="true" fileName="testStrictClock.trees">
-            <treeModel idref="treeModel"/>
-            <posterior idref="posterior"/>
-        </logTree>
-    </mcmc>
+	<!-- Generate a tree model                                                   -->
+	<treeModel id="treeModel">
+		<coalescentTree idref="startingTree"/>
+		<rootHeight>
+			<parameter id="treeModel.rootHeight"/>
+		</rootHeight>
+		<nodeHeights internalNodes="true">
+			<parameter id="treeModel.internalNodeHeights"/>
+		</nodeHeights>
+		<nodeHeights internalNodes="true" rootNode="true">
+			<parameter id="treeModel.allInternalNodeHeights"/>
+		</nodeHeights>
+	</treeModel>
 
-    <report>
-        <property name="timer">
-            <object idref="mcmc"/>
-        </property>
-    </report>
+	<!-- Generate a coalescent likelihood                                        -->
+	<coalescentLikelihood id="coalescent">
+		<model>
+			<constantSize idref="constant"/>
+		</model>
+		<populationTree>
+			<treeModel idref="treeModel"/>
+		</populationTree>
+	</coalescentLikelihood>
 
-    <!--
-     To check that the program is working, the results are compared to previously
-     run results. The following are for a run of 10,000,000 steps:
+	<!-- The strict clock (Uniform rates across branches)                        -->
+	<strictClockBranchRates id="branchRates">
+		<rate>
+			<parameter id="clock.rate" value="1.0" lower="0.0"/>
+		</rate>
+	</strictClockBranchRates>
 
-     18.146116666666668 minutes
+	<!-- The HKY substitution model (Hasegawa, Kishino & Yano, 1985)             -->
+	<HKYModel id="hky">
+		<frequencies>
+			<frequencyModel dataType="nucleotide">
+				<frequencies>
+					<parameter id="frequencies" value="0.25 0.25 0.25 0.25"/>
+				</frequencies>
+			</frequencyModel>
+		</frequencies>
+		<kappa>
+			<parameter id="kappa" value="2.0" lower="0.0"/>
+		</kappa>
+	</HKYModel>
 
-     burnIn=1000000
-     maxState=10000000
+	<!-- site model                                                              -->
+	<siteModel id="siteModel">
+		<substitutionModel>
+			<HKYModel idref="hky"/>
+		</substitutionModel>
+	</siteModel>
 
-     statistic                mean          hpdLower      hpdUpper      ESS
-     posterior                -3928.71      -3936.77      -3921.22      6007.89
-     clock.rate               8.04835E-4    6.14761E-4    9.9785E-4     4548.69
-     constant.popSize         37.3762       17.5853       60.2203       6696.56
-     hky.kappa                18.2782       11.9770       25.5550       8090.05
-     treeModel.rootHeight     69.0580       57.6331       81.9754       5044.06
-     treeLikelihood           -3856.59      -3863.13      -3850.89      8249.32
-     coalescent               -72.1285      -77.1410      -67.2787      4393.94
-     -->
+	<!-- Likelihood for tree given sequence data                                 -->
+	<treeLikelihood id="treeLikelihood" useAmbiguities="false">
+		<patterns idref="patterns"/>
+		<treeModel idref="treeModel"/>
+		<siteModel idref="siteModel"/>
+		<strictClockBranchRates idref="branchRates"/>
+	</treeLikelihood>
 
-    <traceAnalysis fileName="testStrictClock.log">
-        <expectation name="posterior" value="-3928.71"/>
-        <expectation name="clock.rate" value="8.04835E-4"/>
-        <expectation name="constant.popSize" value="37.3762"/>
-        <expectation name="hky.kappa" value="18.2782"/>
-        <expectation name="treeModel.rootHeight" value="69.0580"/>
-        <expectation name="treeLikelihood" value="-3856.59"/>
-        <expectation name="coalescent" value="-72.1285"/>
-    </traceAnalysis>
+	<!-- Define operators                                                        -->
+	<operators id="operators" optimizationSchedule="default">
+		<scaleOperator scaleFactor="0.75" weight="1">
+			<parameter idref="kappa"/>
+		</scaleOperator>
+		<deltaExchange delta="0.01" weight="1">
+			<parameter idref="frequencies"/>
+		</deltaExchange>
+		<scaleOperator scaleFactor="0.75" weight="30">
+			<parameter idref="clock.rate"/>
+		</scaleOperator>
+		<subtreeSlide size="38.0" gaussian="true" weight="150">
+			<treeModel idref="treeModel"/>
+		</subtreeSlide>
+		<narrowExchange weight="150">
+			<treeModel idref="treeModel"/>
+		</narrowExchange>
+		<wideExchange weight="30">
+			<treeModel idref="treeModel"/>
+		</wideExchange>
+		<wilsonBalding weight="30">
+			<treeModel idref="treeModel"/>
+		</wilsonBalding>
+		<scaleOperator scaleFactor="0.75" weight="30">
+			<parameter idref="treeModel.rootHeight"/>
+		</scaleOperator>
+		<uniformOperator weight="300">
+			<parameter idref="treeModel.internalNodeHeights"/>
+		</uniformOperator>
+		<scaleOperator scaleFactor="0.75" weight="30">
+			<parameter idref="constant.popSize"/>
+		</scaleOperator>
+		<upDownOperator scaleFactor="0.75" weight="30">
+			<up>
+				<parameter idref="clock.rate"/>
+			</up>
+			<down>
+				<parameter idref="treeModel.allInternalNodeHeights"/>
+			</down>
+		</upDownOperator>
+	</operators>
 
-    <treeTraceAnalysis fileName="testStrictClock.trees"/>
+	<!-- Define MCMC                                                             -->
+	<mcmc id="mcmc" chainLength="10000000" autoOptimize="true" operatorAnalysis="testStrictClock.ops">
+		<posterior id="posterior">
+			<prior id="prior">
+				<logNormalPrior mean="1.0" stdev="1.25" offset="0.0" meanInRealSpace="false">
+					<parameter idref="kappa"/>
+				</logNormalPrior>
+				<uniformPrior lower="0.0" upper="1.0">
+					<parameter idref="frequencies"/>
+				</uniformPrior>
+				<ctmcScalePrior>
+					<ctmcScale>
+						<parameter idref="clock.rate"/>
+					</ctmcScale>
+					<treeModel idref="treeModel"/>
+				</ctmcScalePrior>
+				<oneOnXPrior>
+					<parameter idref="constant.popSize"/>
+				</oneOnXPrior>
+				<coalescentLikelihood idref="coalescent"/>
+			</prior>
+			<likelihood id="likelihood">
+				<treeLikelihood idref="treeLikelihood"/>
+				<strictClockBranchRates idref="branchRates"/>
+			</likelihood>
+		</posterior>
+		<operators idref="operators"/>
 
-    <marginalLikelihoodAnalysis fileName="testStrictClock.log">
-        <likelihoodColumn name="treeLikelihood" harmonicOnly="false" bootstrap="true"/>
-    </marginalLikelihoodAnalysis>
+		<!-- write log to screen                                                     -->
+		<log id="screenLog" logEvery="1000">
+			<column label="Posterior" dp="4" width="12">
+				<posterior idref="posterior"/>
+			</column>
+			<column label="Prior" dp="4" width="12">
+				<prior idref="prior"/>
+			</column>
+			<column label="Likelihood" dp="4" width="12">
+				<likelihood idref="likelihood"/>
+			</column>
+			<column label="rootHeight" sf="6" width="12">
+				<parameter idref="treeModel.rootHeight"/>
+			</column>
+			<column label="clock.rate" sf="6" width="12">
+				<parameter idref="clock.rate"/>
+			</column>
+		</log>
 
+		<!-- write log to file                                                       -->
+		<log id="fileLog" logEvery="1000" fileName="testStrictClock.log" overwrite="false">
+			<posterior idref="posterior"/>
+			<prior idref="prior"/>
+			<likelihood idref="likelihood"/>
+			<parameter idref="treeModel.rootHeight"/>
+			<parameter idref="constant.popSize"/>
+			<parameter idref="kappa"/>
+			<parameter idref="frequencies"/>
+			<parameter idref="clock.rate"/>
+			<treeLikelihood idref="treeLikelihood"/>
+			<strictClockBranchRates idref="branchRates"/>
+			<coalescentLikelihood idref="coalescent"/>
+		</log>
 
+		<!-- write tree log to file                                                  -->
+		<logTree id="treeFileLog" logEvery="1000" nexusFormat="true" fileName="testStrictClock.trees" sortTranslationTable="true">
+			<treeModel idref="treeModel"/>
+			<trait name="rate" tag="rate">
+				<strictClockBranchRates idref="branchRates"/>
+			</trait>
+			<posterior idref="posterior"/>
+		</logTree>
+	</mcmc>
+	<report>
+		<property name="timer">
+			<mcmc idref="mcmc"/>
+		</property>
+	</report>
 </beast>
diff --git a/examples/release/clockModels/testUCRelaxedClockContinuousQuantileLogNormal.xml b/examples/release/clockModels/testUCRelaxedClockContinuousQuantileLogNormal.xml
new file mode 100644
index 0000000..006810f
--- /dev/null
+++ b/examples/release/clockModels/testUCRelaxedClockContinuousQuantileLogNormal.xml
@@ -0,0 +1,375 @@
+<?xml version="1.0" standalone="yes"?>
+
+<!-- Generated by BEAUTi v1.8.3 Prerelease r20150808                         -->
+<!--       by Alexei J. Drummond, Andrew Rambaut and Marc A. Suchard         -->
+<!--       Department of Computer Science, University of Auckland and        -->
+<!--       Institute of Evolutionary Biology, University of Edinburgh        -->
+<!--       David Geffen School of Medicine, University of California, Los Angeles-->
+<!--       http://beast.bio.ed.ac.uk/                                        -->
+<beast>
+
+	<!-- The list of taxa to be analysed (can also include dates/ages).          -->
+	<!-- ntax=17                                                                 -->
+	<taxa id="taxa">
+		<taxon id="D4Brazi82">
+			<date value="1982.0" direction="forwards" units="years"/>
+		</taxon>
+		<taxon id="D4ElSal83">
+			<date value="1983.0" direction="forwards" units="years"/>
+		</taxon>
+		<taxon id="D4ElSal94">
+			<date value="1994.0" direction="forwards" units="years"/>
+		</taxon>
+		<taxon id="D4Indon76">
+			<date value="1976.0" direction="forwards" units="years"/>
+		</taxon>
+		<taxon id="D4Indon77">
+			<date value="1977.0" direction="forwards" units="years"/>
+		</taxon>
+		<taxon id="D4Mexico84">
+			<date value="1984.0" direction="forwards" units="years"/>
+		</taxon>
+		<taxon id="D4NewCal81">
+			<date value="1981.0" direction="forwards" units="years"/>
+		</taxon>
+		<taxon id="D4Philip64">
+			<date value="1964.0" direction="forwards" units="years"/>
+		</taxon>
+		<taxon id="D4Philip56">
+			<date value="1956.0" direction="forwards" units="years"/>
+		</taxon>
+		<taxon id="D4Philip84">
+			<date value="1984.0" direction="forwards" units="years"/>
+		</taxon>
+		<taxon id="D4PRico86">
+			<date value="1986.0" direction="forwards" units="years"/>
+		</taxon>
+		<taxon id="D4SLanka78">
+			<date value="1978.0" direction="forwards" units="years"/>
+		</taxon>
+		<taxon id="D4Tahiti79">
+			<date value="1979.0" direction="forwards" units="years"/>
+		</taxon>
+		<taxon id="D4Tahiti85">
+			<date value="1985.0" direction="forwards" units="years"/>
+		</taxon>
+		<taxon id="D4Thai63">
+			<date value="1963.0" direction="forwards" units="years"/>
+		</taxon>
+		<taxon id="D4Thai78">
+			<date value="1978.0" direction="forwards" units="years"/>
+		</taxon>
+		<taxon id="D4Thai84">
+			<date value="1984.0" direction="forwards" units="years"/>
+		</taxon>
+	</taxa>
+
+	<!-- The sequence alignment (each sequence refers to a taxon above).         -->
+	<!-- ntax=17 nchar=1485                                                      -->
+	<alignment id="alignment" dataType="nucleotide">
+		<sequence>
+			<taxon idref="D4Brazi82"/>
+			ATGCGATGCGTAGGAGTAGGAAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGACCTGGTGCTAGAACATGGAGGATGCGTCACAACCATGGCCCAGGGAAAACCAACCTTGGATTTTGAACTGACCAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCAATATCAAACATAACTACGGCAACAAGATGTCCAACGCAAGGAGAGCCTTATCTGAAAGAGGAACAGGACCAACAGTACATTTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTGTTTGGAAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGTTCGGGGAAGATAACAGGCAATTTGGTCCAAATTGAGAACCTTGAATACACAGTGGTTGTAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGACACATCCAATCATGGAGTTACAGCCATG [...]
+		</sequence>
+		<sequence>
+			<taxon idref="D4ElSal83"/>
+			ATGCGATGCGTAGGAGTAGGAAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGACCTGGTGCTAGAACATGGAGGATGCGTCACAACCATGGCCCAGGGAAAACCAACCTTGGATTTTGAACTGACTAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCAATATCAAACATAACTACGGCAACAAGATGTCCAACGCAAGGAGAGCCTTATCTGAAAGAGGAACAGGACCAACAGTACATTTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTGTTTGGAAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGTTCGGGGAAGATAACAGGCAATTTGGTCCAAATTGAGAACCTTGAATACACAGTGGTTGTAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGACACATCCAATCATGGAGTTACAGCCATG [...]
+		</sequence>
+		<sequence>
+			<taxon idref="D4ElSal94"/>
+			ATGCGATGCGTAGGAGTAGGAAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGACCTGGTGCTAGAACATGGAGGATGCGTCACAACCATAGCCCAGGGAAAACCAACCTTGGATTTTGAATTGACTAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCAATATCAAACATAACTACGGCAACAAGATGTCCAACGCAAGGAGAGCCTTATCTGAAAGAGGAACAGGACCAACAGTACATTTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGGAATGGCTGTGGCTTGCTTGGAAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGTTCGGGGAAGATAACAGGCAATTTGGTCCAAATTGAGAACCTTGAATACACAGTGGTTGTAACAGTCCACAATGGAGATACCCATGCAGTAGGAAATGACACATCCAATCATGGAGTTACAGCCACG [...]
+		</sequence>
+		<sequence>
+			<taxon idref="D4Indon76"/>
+			ATGCGATGCGTAGGAGTAGGAAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGATCTGGTGCTAGAACATGGAGGATGCGTCACAACCATGGCCCAGGGAAAACCAACCTTGGATTTTGAACTGACTAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCAATATCAAACATAACCACGGCAACAAGATGTCCAACGCAAGGAGAGCCTTATCTAAAAGAGGAACAAGACCAACAGTACATTTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTGTTTGGAAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGTTCGGGGAAGATAACAGGCAATTTGGTCCAAATTGAGAACCTTGAATACACAGTGGTTGTAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGACACATCCAATCATGGAGTTACAGCCACG [...]
+		</sequence>
+		<sequence>
+			<taxon idref="D4Indon77"/>
+			ATGCGATGCGTAGGAGTAGGAAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGATCTGGTGCTAGAACATGGAGGATGCGTCACAACCATGGCCCAGGGAAAACCAACCTTGGATTTTGAACTGACTAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCAATATCAAACATAACCACGGCAACAAGATGTCCAACGCAAGGAGAGCCTTATCTAAAAGAGGAACAAGACCAACAGTACATTTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTGTTTGGAAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGTTCGGGGAAGATAACAGGCAATTTGGTCCAAATTGAGAACCTTGAATACACAGTAGTTGTAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGACACATCCAACCATGGAGTTACAGCCACG [...]
+		</sequence>
+		<sequence>
+			<taxon idref="D4Mexico84"/>
+			ATGCGATGCGTAGGAGTAGGAAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGACCTAGTGCTAGAACATGGAGGATGCGTCACAACCATGGCCCAGGGAAAACCAACCTTGGATTTTGAACTGACTAAGACAACAGCCAAGGAAGTGGCTCTGCTAAGAACCTATTGCATTGAAGCCTCAATATCAAACATAACTACGGCAACAAGATGTCCAACGCAAGGAGAGCCTTATCTGAAAGAGGAACAGGACCAACAGTACATTTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTGTTTGGAAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGTTCGGGGAAGATAACAGGCAATTTGGTCCAAATTGAGAACCTTGAATACACAGTGGTTGTAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGACACATCCAATCATGGAGTTACAGCCATG [...]
+		</sequence>
+		<sequence>
+			<taxon idref="D4NewCal81"/>
+			ATGCGATGCGTAGGAGTAGGAAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGACCTGGTGCTAGAACATGGAGGATGCGTCACAACCATGGCCCAGGGAAAACCAACCTTGGATTTTGAACTGACTAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCAATATCAAACATAACTACGGCAACAAGATGTCCAACGCAAGGAGAGCCTTATCTGAAAGAGGAACAGGACCAACAGTACATTTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTGTTTGGAAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGTTCGGGGAAGATAACAGGCAATTTGGTCCAAATTGAGAACCTTGAATACACAGTGGTTGTAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGACACATCCAATCATGGAGTTACAGCCATG [...]
+		</sequence>
+		<sequence>
+			<taxon idref="D4Philip64"/>
+			ATGCGATGCGTGGGAGTGGGGAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGATTTGGTGCTAGAACATGGAGGATGTGTCACAACCATGGCCCAGGGAAAACCAACCTTGGATTTTGAACTGATCAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCGATATCAAACATAACCACGGCAACAAGATGTCCAACGCAAGGAGAACCTTATCTCAAAGAGGAACAAGATCAACAGTACATCTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTGTTTGGGAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGCTCGGGGAAGATAACAGGCAATTTGGTCCAAATTGAGAACCTTGAATACACAGTGGTTGTAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGATACATCCAACCATGGAGTGACAGCCACG [...]
+		</sequence>
+		<sequence>
+			<taxon idref="D4Philip56"/>
+			ATGCGATGCGTGGGAGTGGGGAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGATTTGGTGCTAGAACATGGAGGATGTGTCACAACCATGGCCCAGGGAAAACCAACCTTGGATTTTGAACTGATCAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCGATATCAAACATAACCACGGCAACAAGATGTCCAACGCAAGGAGAACCTTATCTCAAAGAGGAACAAGATCAACAGTACATCTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTGCTTGGGAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGCTCGGGGAAGATAACAGGCAATTTGGTCCAAATTGAGAACCTTGAATACACAGTAGTTGTAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGACATATCCAACCATGGAGTGACAGCCACG [...]
+		</sequence>
+		<sequence>
+			<taxon idref="D4Philip84"/>
+			ATGCGATGCGTAGGAGTGGGGAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGACTTAGTGCTAGAACATGGAGGATGTGTCACAACCATGGCCCAAGGAAAACCAACCTTGGATTTTGAACTGATCAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCGATATCAAACATAACCACGGCAACAAGATGCCCAACGCAAGGAGAACCTTATCTCAAAGAGGAACAAGATCAACAGTACATTTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTGTTTGGGAAAGGAGGAGTTGTGACATGTGCGAAGTTCTCATGCTCGGGAAAGATAACAGGCAATTTGGTCCAAATTGAGAACCTTGAATATACAGTGGTTGTAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGACACATCCAACCATGGAGTGACAGCCACG [...]
+		</sequence>
+		<sequence>
+			<taxon idref="D4PRico86"/>
+			ATGCGATGCGTAGGAGTAGGAAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGACCTGGTGCTAGAACATGGAGGATGCGTCACAACCATGGCCCAGGGAAAACCAACCTTGGATTTTGAACTGACTAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCAATATCAAACATAACTACGGCAACAAGATGTCCAACGCAAGGGGAGCCCTATCTGAAAGAGGAACAGGACCAACAGTACATTTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTGTTTGGAAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGTTCGGGGAAGATAACAGGCAATCTGGTCCAAATTGAGAACCTTGAGTACACAGTGGTTGTAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGACACATCCAATCATGGAGTTACAGCCACG [...]
+		</sequence>
+		<sequence>
+			<taxon idref="D4SLanka78"/>
+			ATGCGATGCGTGGGAGTGGGGAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGATCTGGTGCTAGAACATGGAGGATGTGTCACAACCATGGCCCAAGGAAAACCAACCTTGGATTTTGAACTGATCAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCCATATCAAACATAACCACGGCAACAAGATGTCCAACGCAAGGAGAGCCCTGTCTCAAAGAGGAACAGGATCAACAGTACATCTGCCGGAGAGACGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCCTGCTTGGGAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGCTCGGGGAAGATAACAGGCAACTTAGTCCGAATTGAGAACCTTGAATACACAGTGGTTGTAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGACACATCCAACCACGGAGTGACAGCCACG [...]
+		</sequence>
+		<sequence>
+			<taxon idref="D4Tahiti79"/>
+			ATGCGATGCGTAGGAGTAGGAAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGATCTGGTGCTAGAACATGGAGGATGCGTCACAACCATGGCCCAGGGAAAACCAACCTTGGATTTTGAACTGACTAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCAATATCAAACATAACTACGGCAACAAGATGTCCAACGCAAGGAGAGCCTTATCTGAAAGAGGAACAGGACCAACAGTACATTTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTGTTTGGAAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGTTCGGGGAAGATAACAGGCAATTTGGTCCAAATTGAGAACCTTGAATACACAGTGGTTGTAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGACACATCCAATCATGGAGTTACAGCCACG [...]
+		</sequence>
+		<sequence>
+			<taxon idref="D4Tahiti85"/>
+			ATGCGATGCGTAGGAGTAGGAAACAGAGACTTTGTGGAAGGAGTTTCAGGTGGAGCATGGGTCGATTTGGTGCTAGAACATGGAGGATGCGTCACAACCATGGCCCAGGGAAAACCAACCTTGGATTTTGAACTGACTAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCAATATCAAACATAACTACGGCAACAAGATGTCCAACGCAAGGAGAGCCTTATCTGAAAGAGGAACAGGACCAACAGTACATTTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTGTTTGGAAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGTTCGGGGAAGATAACAGGCAATCTGGTCCAAATTGAGAACCTTGAATACACAGTGGTCATAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGACACATCCAATCATGGAGTTACAGCCACG [...]
+		</sequence>
+		<sequence>
+			<taxon idref="D4Thai63"/>
+			ATGCGATGCGTAGGAGTGGGGAACAGGGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGATCTGGTGCTAGAACATGGAGGATGTGTCACAACCATGGCTCAAGGAAAACCAACCTTGGATTTTGAACTGATCAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCGATATCAAACATAACCACGGCGACAAGATGTCCAACGCAAGGAGAGCCTTATCTCAAAGAGGAACAAGATCAACAGTACATCTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTACTTGGAAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGCTCGGGGAAGATAACAGGCAACTTGGTTCGAATTGAGAACCTTGAATACACAGTGGTTGTGACAGTCCACAACGGAGACACCCATGCAGTAGGAAATGACATATCCAACCATGGAGTGACAGCCACG [...]
+		</sequence>
+		<sequence>
+			<taxon idref="D4Thai78"/>
+			ATGCGATGCGTAGGAGTGGGGAACAGAGACTTTGTAGAAGGAGTCTCAGGTGGAGCATGGGTCGATCTGGTGCTAGAACATGGAGGATGTGTCACAACCATGGCCCAGGGAAAACCAACCTTGGATTTTGAACTGATCAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCCATATCAAACATAACCACGGCAACAAGATGTCCAACGCAAGGAGAGCCTTACCTCAAAGAGGAACAAGATCAACAGTACATCTGCCGGAGAGACGTGGTAGATAGAGGGTGGGGCAACGGCTGTGGCTTGCTTGGGAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGCTCGGGGAAGATAACAGGCAACTTAGTCCAAATTGAGAACCTTGAATACACAGTGGTTGTAACAGTCCACAATGGAGACACCCATGCTGTAGGAAATGATACATCCAACCACGGAGTGACAGCCACG [...]
+		</sequence>
+		<sequence>
+			<taxon idref="D4Thai84"/>
+			ATGCGATGCGTAGGAGTAGGGAACAGAGACTTTGTAGAAGGAGTCTCAGGTGGAGCATGGGTCGATCTGGTGCTAGAACATGGAGGATGTGTCACAACCATGGCCCAGGGAAAACCAACCCTGGATTTTGAACTGATCAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCCATATCAAACATAACCACGGCAACAAGATGTCCAACGCAAGGAGAGCCTTACCTCAAAGAGGAACAAGATCAACAGTACATCTGCCGGAGAGACGTGGTAGACAGAGGGTGGGGCAACGGCTGTGGCTTGTTTGGGAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGCTCGGGGAAGATAACGGGCAACTTAGTCCAAATTGAGAACCTTGAATACACAGTGGTTGTAACAGTCCACAATGGAGACACCCATGCTGTAGGAAATGATACATCCAACCACGGAGTGACAGCCACG [...]
+		</sequence>
+	</alignment>
+
+	<!-- The unique patterns from 1 to end                                       -->
+	<!-- npatterns=138                                                           -->
+	<patterns id="patterns" from="1" strip="false">
+		<alignment idref="alignment"/>
+	</patterns>
+
+	<!-- A prior assumption that the population size has remained constant       -->
+	<!-- throughout the time spanned by the genealogy.                           -->
+	<constantSize id="constant" units="years">
+		<populationSize>
+			<parameter id="constant.popSize" value="380.0" lower="0.0"/>
+		</populationSize>
+	</constantSize>
+
+	<!-- Generate a random starting tree under the coalescent process            -->
+	<coalescentSimulator id="startingTree">
+		<taxa idref="taxa"/>
+		<constantSize idref="constant"/>
+	</coalescentSimulator>
+
+	<!-- Generate a tree model                                                   -->
+	<treeModel id="treeModel">
+		<coalescentTree idref="startingTree"/>
+		<rootHeight>
+			<parameter id="treeModel.rootHeight"/>
+		</rootHeight>
+		<nodeHeights internalNodes="true">
+			<parameter id="treeModel.internalNodeHeights"/>
+		</nodeHeights>
+		<nodeHeights internalNodes="true" rootNode="true">
+			<parameter id="treeModel.allInternalNodeHeights"/>
+		</nodeHeights>
+	</treeModel>
+
+	<!-- Generate a coalescent likelihood                                        -->
+	<coalescentLikelihood id="coalescent">
+		<model>
+			<constantSize idref="constant"/>
+		</model>
+		<populationTree>
+			<treeModel idref="treeModel"/>
+		</populationTree>
+	</coalescentLikelihood>
+
+	<!-- The uncorrelated relaxed clock (Drummond, Ho, Phillips & Rambaut (2006) PLoS Biology 4, e88 )-->
+	<continuousBranchRates id="branchRates">
+		<treeModel idref="treeModel"/>
+		<distribution>
+			<logNormalDistributionModel meanInRealSpace="true">
+				<mean>
+					<parameter id="ucld.mean" value="1.0" lower="0.0"/>
+				</mean>
+				<stdev>
+					<parameter id="ucld.stdev" value="0.3333333333333333" lower="0.0"/>
+				</stdev>
+			</logNormalDistributionModel>
+		</distribution>
+		<rateCategoryQuantiles>
+			<parameter id="branchRates.quantiles" value="0.5" lower="0.0" upper="1.0"/>
+		</rateCategoryQuantiles>
+	</continuousBranchRates>
+	
+	<rateStatistic id="meanRate" name="meanRate" mode="mean" internal="true" external="true">
+		<treeModel idref="treeModel"/>
+		<continuousBranchRates idref="branchRates"/>
+	</rateStatistic>
+	<rateStatistic id="coefficientOfVariation" name="coefficientOfVariation" mode="coefficientOfVariation" internal="true" external="true">
+		<treeModel idref="treeModel"/>
+		<continuousBranchRates idref="branchRates"/>
+	</rateStatistic>
+	<rateCovarianceStatistic id="covariance" name="covariance">
+		<treeModel idref="treeModel"/>
+		<continuousBranchRates idref="branchRates"/>
+	</rateCovarianceStatistic>
+
+	<!-- The HKY substitution model (Hasegawa, Kishino & Yano, 1985)             -->
+	<HKYModel id="hky">
+		<frequencies>
+			<frequencyModel dataType="nucleotide">
+				<frequencies>
+					<parameter id="frequencies" value="0.25 0.25 0.25 0.25"/>
+				</frequencies>
+			</frequencyModel>
+		</frequencies>
+		<kappa>
+			<parameter id="kappa" value="2.0" lower="0.0"/>
+		</kappa>
+	</HKYModel>
+
+	<!-- site model                                                              -->
+	<siteModel id="siteModel">
+		<substitutionModel>
+			<HKYModel idref="hky"/>
+		</substitutionModel>
+	</siteModel>
+
+	<!-- Likelihood for tree given sequence data                                 -->
+	<treeLikelihood id="treeLikelihood" useAmbiguities="false">
+		<patterns idref="patterns"/>
+		<treeModel idref="treeModel"/>
+		<siteModel idref="siteModel"/>
+		<discretizedBranchRates idref="branchRates"/>
+	</treeLikelihood>
+
+	<!-- Define operators                                                        -->
+	<operators id="operators" optimizationSchedule="default">
+		<scaleOperator scaleFactor="0.75" weight="1">
+			<parameter idref="kappa"/>
+		</scaleOperator>
+		<deltaExchange delta="0.01" weight="1">
+			<parameter idref="frequencies"/>
+		</deltaExchange>
+		<scaleOperator scaleFactor="0.75" weight="30">
+			<parameter idref="ucld.mean"/>
+		</scaleOperator>
+		<scaleOperator scaleFactor="0.75" weight="30">
+			<parameter idref="ucld.stdev"/>
+		</scaleOperator>
+		<subtreeSlide size="38.0" gaussian="true" weight="150">
+			<treeModel idref="treeModel"/>
+		</subtreeSlide>
+		<narrowExchange weight="150">
+			<treeModel idref="treeModel"/>
+		</narrowExchange>
+		<wideExchange weight="30">
+			<treeModel idref="treeModel"/>
+		</wideExchange>
+		<wilsonBalding weight="30">
+			<treeModel idref="treeModel"/>
+		</wilsonBalding>
+		<scaleOperator scaleFactor="0.75" weight="30">
+			<parameter idref="treeModel.rootHeight"/>
+		</scaleOperator>
+		<uniformOperator weight="300">
+			<parameter idref="treeModel.internalNodeHeights"/>
+		</uniformOperator>
+		<scaleOperator scaleFactor="0.75" weight="30">
+			<parameter idref="constant.popSize"/>
+		</scaleOperator>
+		<upDownOperator scaleFactor="0.75" weight="30">
+			<up>
+				<parameter idref="ucld.mean"/>
+			</up>
+			<down>
+				<parameter idref="treeModel.allInternalNodeHeights"/>
+			</down>
+		</upDownOperator>
+		<uniformOperator weight="300" lower="0.0" upper="1.0">
+			<parameter idref="branchRates.quantiles"/>
+		</uniformOperator>
+	</operators>
+
+	<!-- Define MCMC                                                             -->
+	<mcmc id="mcmc" chainLength="10000000" autoOptimize="true" operatorAnalysis="testUCRelaxedClockContinuousQuantileLogNormal.ops">
+		<posterior id="posterior">
+			<prior id="prior">
+				<logNormalPrior mean="1.0" stdev="1.25" offset="0.0" meanInRealSpace="false">
+					<parameter idref="kappa"/>
+				</logNormalPrior>
+				<uniformPrior lower="0.0" upper="1.0">
+					<parameter idref="frequencies"/>
+				</uniformPrior>
+				<exponentialPrior mean="0.3333333333333333" offset="0.0">
+					<parameter idref="ucld.stdev"/>
+				</exponentialPrior>
+				<ctmcScalePrior>
+					<ctmcScale>
+						<parameter idref="ucld.mean"/>
+					</ctmcScale>
+					<treeModel idref="treeModel"/>
+				</ctmcScalePrior>
+				<oneOnXPrior>
+					<parameter idref="constant.popSize"/>
+				</oneOnXPrior>
+				<coalescentLikelihood idref="coalescent"/>
+			</prior>
+			<likelihood id="likelihood">
+				<treeLikelihood idref="treeLikelihood"/>
+				<continuousBranchRates idref="branchRates"/>
+			</likelihood>
+		</posterior>
+		<operators idref="operators"/>
+
+		<!-- write log to screen                                                     -->
+		<log id="screenLog" logEvery="1000">
+			<column label="Posterior" dp="4" width="12">
+				<posterior idref="posterior"/>
+			</column>
+			<column label="Prior" dp="4" width="12">
+				<prior idref="prior"/>
+			</column>
+			<column label="Likelihood" dp="4" width="12">
+				<likelihood idref="likelihood"/>
+			</column>
+			<column label="rootHeight" sf="6" width="12">
+				<parameter idref="treeModel.rootHeight"/>
+			</column>
+			<column label="ucld.mean" sf="6" width="12">
+				<parameter idref="ucld.mean"/>
+			</column>
+		</log>
+
+		<!-- write log to file                                                       -->
+		<log id="fileLog" logEvery="1000" fileName="testUCRelaxedClockContinuousQuantileLogNormal.log" overwrite="false">
+			<posterior idref="posterior"/>
+			<prior idref="prior"/>
+			<likelihood idref="likelihood"/>
+			<parameter idref="treeModel.rootHeight"/>
+			<parameter idref="constant.popSize"/>
+			<parameter idref="kappa"/>
+			<parameter idref="frequencies"/>
+			<parameter idref="ucld.mean"/>
+			<parameter idref="ucld.stdev"/>
+			<rateStatistic idref="meanRate"/>
+			<rateStatistic idref="coefficientOfVariation"/>
+			<rateCovarianceStatistic idref="covariance"/>
+			<treeLikelihood idref="treeLikelihood"/>
+			<continuousBranchRates idref="branchRates"/>
+			<coalescentLikelihood idref="coalescent"/>
+		</log>
+
+		<!-- write tree log to file                                                  -->
+		<logTree id="treeFileLog" logEvery="1000" nexusFormat="true" fileName="testUCRelaxedClockContinuousQuantileLogNormal.trees" sortTranslationTable="true">
+			<treeModel idref="treeModel"/>
+			<trait name="rate" tag="rate">
+				<continuousBranchRates idref="branchRates"/>
+			</trait>
+			<posterior idref="posterior"/>
+		</logTree>
+	</mcmc>
+	<report>
+		<property name="timer">
+			<mcmc idref="mcmc"/>
+		</property>
+	</report>
+</beast>
diff --git a/examples/release/clockModels/testUCRelaxedClockGamma.xml b/examples/release/clockModels/testUCRelaxedClockGamma.xml
new file mode 100644
index 0000000..6ea3e89
--- /dev/null
+++ b/examples/release/clockModels/testUCRelaxedClockGamma.xml
@@ -0,0 +1,377 @@
+<?xml version="1.0" standalone="yes"?>
+
+<!-- Generated by BEAUTi v1.8.3 Prerelease r20150808                         -->
+<!--       by Alexei J. Drummond, Andrew Rambaut and Marc A. Suchard         -->
+<!--       Department of Computer Science, University of Auckland and        -->
+<!--       Institute of Evolutionary Biology, University of Edinburgh        -->
+<!--       David Geffen School of Medicine, University of California, Los Angeles-->
+<!--       http://beast.bio.ed.ac.uk/                                        -->
+<beast>
+
+	<!-- The list of taxa to be analysed (can also include dates/ages).          -->
+	<!-- ntax=17                                                                 -->
+	<taxa id="taxa">
+		<taxon id="D4Brazi82">
+			<date value="1982.0" direction="forwards" units="years"/>
+		</taxon>
+		<taxon id="D4ElSal83">
+			<date value="1983.0" direction="forwards" units="years"/>
+		</taxon>
+		<taxon id="D4ElSal94">
+			<date value="1994.0" direction="forwards" units="years"/>
+		</taxon>
+		<taxon id="D4Indon76">
+			<date value="1976.0" direction="forwards" units="years"/>
+		</taxon>
+		<taxon id="D4Indon77">
+			<date value="1977.0" direction="forwards" units="years"/>
+		</taxon>
+		<taxon id="D4Mexico84">
+			<date value="1984.0" direction="forwards" units="years"/>
+		</taxon>
+		<taxon id="D4NewCal81">
+			<date value="1981.0" direction="forwards" units="years"/>
+		</taxon>
+		<taxon id="D4Philip64">
+			<date value="1964.0" direction="forwards" units="years"/>
+		</taxon>
+		<taxon id="D4Philip56">
+			<date value="1956.0" direction="forwards" units="years"/>
+		</taxon>
+		<taxon id="D4Philip84">
+			<date value="1984.0" direction="forwards" units="years"/>
+		</taxon>
+		<taxon id="D4PRico86">
+			<date value="1986.0" direction="forwards" units="years"/>
+		</taxon>
+		<taxon id="D4SLanka78">
+			<date value="1978.0" direction="forwards" units="years"/>
+		</taxon>
+		<taxon id="D4Tahiti79">
+			<date value="1979.0" direction="forwards" units="years"/>
+		</taxon>
+		<taxon id="D4Tahiti85">
+			<date value="1985.0" direction="forwards" units="years"/>
+		</taxon>
+		<taxon id="D4Thai63">
+			<date value="1963.0" direction="forwards" units="years"/>
+		</taxon>
+		<taxon id="D4Thai78">
+			<date value="1978.0" direction="forwards" units="years"/>
+		</taxon>
+		<taxon id="D4Thai84">
+			<date value="1984.0" direction="forwards" units="years"/>
+		</taxon>
+	</taxa>
+
+	<!-- The sequence alignment (each sequence refers to a taxon above).         -->
+	<!-- ntax=17 nchar=1485                                                      -->
+	<alignment id="alignment" dataType="nucleotide">
+		<sequence>
+			<taxon idref="D4Brazi82"/>
+			ATGCGATGCGTAGGAGTAGGAAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGACCTGGTGCTAGAACATGGAGGATGCGTCACAACCATGGCCCAGGGAAAACCAACCTTGGATTTTGAACTGACCAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCAATATCAAACATAACTACGGCAACAAGATGTCCAACGCAAGGAGAGCCTTATCTGAAAGAGGAACAGGACCAACAGTACATTTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTGTTTGGAAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGTTCGGGGAAGATAACAGGCAATTTGGTCCAAATTGAGAACCTTGAATACACAGTGGTTGTAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGACACATCCAATCATGGAGTTACAGCCATG [...]
+		</sequence>
+		<sequence>
+			<taxon idref="D4ElSal83"/>
+			ATGCGATGCGTAGGAGTAGGAAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGACCTGGTGCTAGAACATGGAGGATGCGTCACAACCATGGCCCAGGGAAAACCAACCTTGGATTTTGAACTGACTAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCAATATCAAACATAACTACGGCAACAAGATGTCCAACGCAAGGAGAGCCTTATCTGAAAGAGGAACAGGACCAACAGTACATTTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTGTTTGGAAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGTTCGGGGAAGATAACAGGCAATTTGGTCCAAATTGAGAACCTTGAATACACAGTGGTTGTAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGACACATCCAATCATGGAGTTACAGCCATG [...]
+		</sequence>
+		<sequence>
+			<taxon idref="D4ElSal94"/>
+			ATGCGATGCGTAGGAGTAGGAAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGACCTGGTGCTAGAACATGGAGGATGCGTCACAACCATAGCCCAGGGAAAACCAACCTTGGATTTTGAATTGACTAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCAATATCAAACATAACTACGGCAACAAGATGTCCAACGCAAGGAGAGCCTTATCTGAAAGAGGAACAGGACCAACAGTACATTTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGGAATGGCTGTGGCTTGCTTGGAAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGTTCGGGGAAGATAACAGGCAATTTGGTCCAAATTGAGAACCTTGAATACACAGTGGTTGTAACAGTCCACAATGGAGATACCCATGCAGTAGGAAATGACACATCCAATCATGGAGTTACAGCCACG [...]
+		</sequence>
+		<sequence>
+			<taxon idref="D4Indon76"/>
+			ATGCGATGCGTAGGAGTAGGAAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGATCTGGTGCTAGAACATGGAGGATGCGTCACAACCATGGCCCAGGGAAAACCAACCTTGGATTTTGAACTGACTAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCAATATCAAACATAACCACGGCAACAAGATGTCCAACGCAAGGAGAGCCTTATCTAAAAGAGGAACAAGACCAACAGTACATTTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTGTTTGGAAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGTTCGGGGAAGATAACAGGCAATTTGGTCCAAATTGAGAACCTTGAATACACAGTGGTTGTAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGACACATCCAATCATGGAGTTACAGCCACG [...]
+		</sequence>
+		<sequence>
+			<taxon idref="D4Indon77"/>
+			ATGCGATGCGTAGGAGTAGGAAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGATCTGGTGCTAGAACATGGAGGATGCGTCACAACCATGGCCCAGGGAAAACCAACCTTGGATTTTGAACTGACTAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCAATATCAAACATAACCACGGCAACAAGATGTCCAACGCAAGGAGAGCCTTATCTAAAAGAGGAACAAGACCAACAGTACATTTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTGTTTGGAAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGTTCGGGGAAGATAACAGGCAATTTGGTCCAAATTGAGAACCTTGAATACACAGTAGTTGTAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGACACATCCAACCATGGAGTTACAGCCACG [...]
+		</sequence>
+		<sequence>
+			<taxon idref="D4Mexico84"/>
+			ATGCGATGCGTAGGAGTAGGAAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGACCTAGTGCTAGAACATGGAGGATGCGTCACAACCATGGCCCAGGGAAAACCAACCTTGGATTTTGAACTGACTAAGACAACAGCCAAGGAAGTGGCTCTGCTAAGAACCTATTGCATTGAAGCCTCAATATCAAACATAACTACGGCAACAAGATGTCCAACGCAAGGAGAGCCTTATCTGAAAGAGGAACAGGACCAACAGTACATTTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTGTTTGGAAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGTTCGGGGAAGATAACAGGCAATTTGGTCCAAATTGAGAACCTTGAATACACAGTGGTTGTAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGACACATCCAATCATGGAGTTACAGCCATG [...]
+		</sequence>
+		<sequence>
+			<taxon idref="D4NewCal81"/>
+			ATGCGATGCGTAGGAGTAGGAAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGACCTGGTGCTAGAACATGGAGGATGCGTCACAACCATGGCCCAGGGAAAACCAACCTTGGATTTTGAACTGACTAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCAATATCAAACATAACTACGGCAACAAGATGTCCAACGCAAGGAGAGCCTTATCTGAAAGAGGAACAGGACCAACAGTACATTTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTGTTTGGAAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGTTCGGGGAAGATAACAGGCAATTTGGTCCAAATTGAGAACCTTGAATACACAGTGGTTGTAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGACACATCCAATCATGGAGTTACAGCCATG [...]
+		</sequence>
+		<sequence>
+			<taxon idref="D4Philip64"/>
+			ATGCGATGCGTGGGAGTGGGGAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGATTTGGTGCTAGAACATGGAGGATGTGTCACAACCATGGCCCAGGGAAAACCAACCTTGGATTTTGAACTGATCAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCGATATCAAACATAACCACGGCAACAAGATGTCCAACGCAAGGAGAACCTTATCTCAAAGAGGAACAAGATCAACAGTACATCTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTGTTTGGGAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGCTCGGGGAAGATAACAGGCAATTTGGTCCAAATTGAGAACCTTGAATACACAGTGGTTGTAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGATACATCCAACCATGGAGTGACAGCCACG [...]
+		</sequence>
+		<sequence>
+			<taxon idref="D4Philip56"/>
+			ATGCGATGCGTGGGAGTGGGGAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGATTTGGTGCTAGAACATGGAGGATGTGTCACAACCATGGCCCAGGGAAAACCAACCTTGGATTTTGAACTGATCAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCGATATCAAACATAACCACGGCAACAAGATGTCCAACGCAAGGAGAACCTTATCTCAAAGAGGAACAAGATCAACAGTACATCTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTGCTTGGGAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGCTCGGGGAAGATAACAGGCAATTTGGTCCAAATTGAGAACCTTGAATACACAGTAGTTGTAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGACATATCCAACCATGGAGTGACAGCCACG [...]
+		</sequence>
+		<sequence>
+			<taxon idref="D4Philip84"/>
+			ATGCGATGCGTAGGAGTGGGGAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGACTTAGTGCTAGAACATGGAGGATGTGTCACAACCATGGCCCAAGGAAAACCAACCTTGGATTTTGAACTGATCAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCGATATCAAACATAACCACGGCAACAAGATGCCCAACGCAAGGAGAACCTTATCTCAAAGAGGAACAAGATCAACAGTACATTTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTGTTTGGGAAAGGAGGAGTTGTGACATGTGCGAAGTTCTCATGCTCGGGAAAGATAACAGGCAATTTGGTCCAAATTGAGAACCTTGAATATACAGTGGTTGTAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGACACATCCAACCATGGAGTGACAGCCACG [...]
+		</sequence>
+		<sequence>
+			<taxon idref="D4PRico86"/>
+			ATGCGATGCGTAGGAGTAGGAAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGACCTGGTGCTAGAACATGGAGGATGCGTCACAACCATGGCCCAGGGAAAACCAACCTTGGATTTTGAACTGACTAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCAATATCAAACATAACTACGGCAACAAGATGTCCAACGCAAGGGGAGCCCTATCTGAAAGAGGAACAGGACCAACAGTACATTTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTGTTTGGAAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGTTCGGGGAAGATAACAGGCAATCTGGTCCAAATTGAGAACCTTGAGTACACAGTGGTTGTAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGACACATCCAATCATGGAGTTACAGCCACG [...]
+		</sequence>
+		<sequence>
+			<taxon idref="D4SLanka78"/>
+			ATGCGATGCGTGGGAGTGGGGAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGATCTGGTGCTAGAACATGGAGGATGTGTCACAACCATGGCCCAAGGAAAACCAACCTTGGATTTTGAACTGATCAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCCATATCAAACATAACCACGGCAACAAGATGTCCAACGCAAGGAGAGCCCTGTCTCAAAGAGGAACAGGATCAACAGTACATCTGCCGGAGAGACGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCCTGCTTGGGAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGCTCGGGGAAGATAACAGGCAACTTAGTCCGAATTGAGAACCTTGAATACACAGTGGTTGTAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGACACATCCAACCACGGAGTGACAGCCACG [...]
+		</sequence>
+		<sequence>
+			<taxon idref="D4Tahiti79"/>
+			ATGCGATGCGTAGGAGTAGGAAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGATCTGGTGCTAGAACATGGAGGATGCGTCACAACCATGGCCCAGGGAAAACCAACCTTGGATTTTGAACTGACTAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCAATATCAAACATAACTACGGCAACAAGATGTCCAACGCAAGGAGAGCCTTATCTGAAAGAGGAACAGGACCAACAGTACATTTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTGTTTGGAAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGTTCGGGGAAGATAACAGGCAATTTGGTCCAAATTGAGAACCTTGAATACACAGTGGTTGTAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGACACATCCAATCATGGAGTTACAGCCACG [...]
+		</sequence>
+		<sequence>
+			<taxon idref="D4Tahiti85"/>
+			ATGCGATGCGTAGGAGTAGGAAACAGAGACTTTGTGGAAGGAGTTTCAGGTGGAGCATGGGTCGATTTGGTGCTAGAACATGGAGGATGCGTCACAACCATGGCCCAGGGAAAACCAACCTTGGATTTTGAACTGACTAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCAATATCAAACATAACTACGGCAACAAGATGTCCAACGCAAGGAGAGCCTTATCTGAAAGAGGAACAGGACCAACAGTACATTTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTGTTTGGAAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGTTCGGGGAAGATAACAGGCAATCTGGTCCAAATTGAGAACCTTGAATACACAGTGGTCATAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGACACATCCAATCATGGAGTTACAGCCACG [...]
+		</sequence>
+		<sequence>
+			<taxon idref="D4Thai63"/>
+			ATGCGATGCGTAGGAGTGGGGAACAGGGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGATCTGGTGCTAGAACATGGAGGATGTGTCACAACCATGGCTCAAGGAAAACCAACCTTGGATTTTGAACTGATCAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCGATATCAAACATAACCACGGCGACAAGATGTCCAACGCAAGGAGAGCCTTATCTCAAAGAGGAACAAGATCAACAGTACATCTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTACTTGGAAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGCTCGGGGAAGATAACAGGCAACTTGGTTCGAATTGAGAACCTTGAATACACAGTGGTTGTGACAGTCCACAACGGAGACACCCATGCAGTAGGAAATGACATATCCAACCATGGAGTGACAGCCACG [...]
+		</sequence>
+		<sequence>
+			<taxon idref="D4Thai78"/>
+			ATGCGATGCGTAGGAGTGGGGAACAGAGACTTTGTAGAAGGAGTCTCAGGTGGAGCATGGGTCGATCTGGTGCTAGAACATGGAGGATGTGTCACAACCATGGCCCAGGGAAAACCAACCTTGGATTTTGAACTGATCAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCCATATCAAACATAACCACGGCAACAAGATGTCCAACGCAAGGAGAGCCTTACCTCAAAGAGGAACAAGATCAACAGTACATCTGCCGGAGAGACGTGGTAGATAGAGGGTGGGGCAACGGCTGTGGCTTGCTTGGGAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGCTCGGGGAAGATAACAGGCAACTTAGTCCAAATTGAGAACCTTGAATACACAGTGGTTGTAACAGTCCACAATGGAGACACCCATGCTGTAGGAAATGATACATCCAACCACGGAGTGACAGCCACG [...]
+		</sequence>
+		<sequence>
+			<taxon idref="D4Thai84"/>
+			ATGCGATGCGTAGGAGTAGGGAACAGAGACTTTGTAGAAGGAGTCTCAGGTGGAGCATGGGTCGATCTGGTGCTAGAACATGGAGGATGTGTCACAACCATGGCCCAGGGAAAACCAACCCTGGATTTTGAACTGATCAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCCATATCAAACATAACCACGGCAACAAGATGTCCAACGCAAGGAGAGCCTTACCTCAAAGAGGAACAAGATCAACAGTACATCTGCCGGAGAGACGTGGTAGACAGAGGGTGGGGCAACGGCTGTGGCTTGTTTGGGAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGCTCGGGGAAGATAACGGGCAACTTAGTCCAAATTGAGAACCTTGAATACACAGTGGTTGTAACAGTCCACAATGGAGACACCCATGCTGTAGGAAATGATACATCCAACCACGGAGTGACAGCCACG [...]
+		</sequence>
+	</alignment>
+
+	<!-- The unique patterns from 1 to end                                       -->
+	<!-- npatterns=138                                                           -->
+	<patterns id="patterns" from="1" strip="false">
+		<alignment idref="alignment"/>
+	</patterns>
+
+	<!-- A prior assumption that the population size has remained constant       -->
+	<!-- throughout the time spanned by the genealogy.                           -->
+	<constantSize id="constant" units="years">
+		<populationSize>
+			<parameter id="constant.popSize" value="380.0" lower="0.0"/>
+		</populationSize>
+	</constantSize>
+
+	<!-- Generate a random starting tree under the coalescent process            -->
+	<coalescentSimulator id="startingTree">
+		<taxa idref="taxa"/>
+		<constantSize idref="constant"/>
+	</coalescentSimulator>
+
+	<!-- Generate a tree model                                                   -->
+	<treeModel id="treeModel">
+		<coalescentTree idref="startingTree"/>
+		<rootHeight>
+			<parameter id="treeModel.rootHeight"/>
+		</rootHeight>
+		<nodeHeights internalNodes="true">
+			<parameter id="treeModel.internalNodeHeights"/>
+		</nodeHeights>
+		<nodeHeights internalNodes="true" rootNode="true">
+			<parameter id="treeModel.allInternalNodeHeights"/>
+		</nodeHeights>
+	</treeModel>
+
+	<!-- Generate a coalescent likelihood                                        -->
+	<coalescentLikelihood id="coalescent">
+		<model>
+			<constantSize idref="constant"/>
+		</model>
+		<populationTree>
+			<treeModel idref="treeModel"/>
+		</populationTree>
+	</coalescentLikelihood>
+
+	<!-- The uncorrelated relaxed clock (Drummond, Ho, Phillips & Rambaut (2006) PLoS Biology 4, e88 )-->
+	<discretizedBranchRates id="branchRates">
+		<treeModel idref="treeModel"/>
+		<distribution>
+			<gammaDistributionModel>
+				<mean>
+					<parameter id="ucgd.mean" value="0.001" lower="0.0"/>
+				</mean>
+				<shape>
+					<parameter id="ucgd.shape" value="0.3333333333333333" lower="0.0"/>
+				</shape>
+			</gammaDistributionModel>
+		</distribution>
+		<rateCategories>
+			<parameter id="branchRates.categories"/>
+		</rateCategories>
+	</discretizedBranchRates>
+	<rateStatistic id="meanRate" name="meanRate" mode="mean" internal="true" external="true">
+		<treeModel idref="treeModel"/>
+		<discretizedBranchRates idref="branchRates"/>
+	</rateStatistic>
+	<rateStatistic id="coefficientOfVariation" name="coefficientOfVariation" mode="coefficientOfVariation" internal="true" external="true">
+		<treeModel idref="treeModel"/>
+		<discretizedBranchRates idref="branchRates"/>
+	</rateStatistic>
+	<rateCovarianceStatistic id="covariance" name="covariance">
+		<treeModel idref="treeModel"/>
+		<discretizedBranchRates idref="branchRates"/>
+	</rateCovarianceStatistic>
+
+	<!-- The HKY substitution model (Hasegawa, Kishino & Yano, 1985)             -->
+	<HKYModel id="hky">
+		<frequencies>
+			<frequencyModel dataType="nucleotide">
+				<frequencies>
+					<parameter id="frequencies" value="0.25 0.25 0.25 0.25"/>
+				</frequencies>
+			</frequencyModel>
+		</frequencies>
+		<kappa>
+			<parameter id="kappa" value="2.0" lower="0.0"/>
+		</kappa>
+	</HKYModel>
+
+	<!-- site model                                                              -->
+	<siteModel id="siteModel">
+		<substitutionModel>
+			<HKYModel idref="hky"/>
+		</substitutionModel>
+	</siteModel>
+
+	<!-- Likelihood for tree given sequence data                                 -->
+	<treeLikelihood id="treeLikelihood" useAmbiguities="false">
+		<patterns idref="patterns"/>
+		<treeModel idref="treeModel"/>
+		<siteModel idref="siteModel"/>
+		<discretizedBranchRates idref="branchRates"/>
+	</treeLikelihood>
+
+	<!-- Define operators                                                        -->
+	<operators id="operators" optimizationSchedule="default">
+		<scaleOperator scaleFactor="0.75" weight="1">
+			<parameter idref="kappa"/>
+		</scaleOperator>
+		<deltaExchange delta="0.01" weight="1">
+			<parameter idref="frequencies"/>
+		</deltaExchange>
+		<scaleOperator scaleFactor="0.75" weight="30">
+			<parameter idref="ucgd.mean"/>
+		</scaleOperator>
+		<scaleOperator scaleFactor="0.75" weight="30">
+			<parameter idref="ucgd.shape"/>
+		</scaleOperator>
+		<subtreeSlide size="38.0" gaussian="true" weight="150">
+			<treeModel idref="treeModel"/>
+		</subtreeSlide>
+		<narrowExchange weight="150">
+			<treeModel idref="treeModel"/>
+		</narrowExchange>
+		<wideExchange weight="30">
+			<treeModel idref="treeModel"/>
+		</wideExchange>
+		<wilsonBalding weight="30">
+			<treeModel idref="treeModel"/>
+		</wilsonBalding>
+		<scaleOperator scaleFactor="0.75" weight="30">
+			<parameter idref="treeModel.rootHeight"/>
+		</scaleOperator>
+		<uniformOperator weight="300">
+			<parameter idref="treeModel.internalNodeHeights"/>
+		</uniformOperator>
+		<scaleOperator scaleFactor="0.75" weight="30">
+			<parameter idref="constant.popSize"/>
+		</scaleOperator>
+		<upDownOperator scaleFactor="0.75" weight="30">
+			<up>
+				<parameter idref="ucgd.mean"/>
+			</up>
+			<down>
+				<parameter idref="treeModel.allInternalNodeHeights"/>
+			</down>
+		</upDownOperator>
+		<swapOperator size="1" weight="100" autoOptimize="false">
+			<parameter idref="branchRates.categories"/>
+		</swapOperator>
+		<uniformIntegerOperator weight="100">
+			<parameter idref="branchRates.categories"/>
+		</uniformIntegerOperator>
+	</operators>
+
+	<!-- Define MCMC                                                             -->
+	<mcmc id="mcmc" chainLength="10000000" autoOptimize="true" operatorAnalysis="testUCRelaxedClockGamma.ops">
+		<posterior id="posterior">
+			<prior id="prior">
+				<logNormalPrior mean="1.0" stdev="1.25" offset="0.0" meanInRealSpace="false">
+					<parameter idref="kappa"/>
+				</logNormalPrior>
+				<uniformPrior lower="0.0" upper="1.0">
+					<parameter idref="frequencies"/>
+				</uniformPrior>
+				<inverseGammaPrior shape="1.0" scale="5.0">
+					<parameter idref="ucgd.shape"/>
+				</inverseGammaPrior>
+				<ctmcScalePrior>
+					<ctmcScale>
+						<parameter idref="ucgd.mean"/>
+					</ctmcScale>
+					<treeModel idref="treeModel"/>
+				</ctmcScalePrior>
+				<oneOnXPrior>
+					<parameter idref="constant.popSize"/>
+				</oneOnXPrior>
+				<coalescentLikelihood idref="coalescent"/>
+			</prior>
+			<likelihood id="likelihood">
+				<treeLikelihood idref="treeLikelihood"/>
+				<discretizedBranchRates idref="branchRates"/>
+			</likelihood>
+		</posterior>
+		<operators idref="operators"/>
+
+		<!-- write log to screen                                                     -->
+		<log id="screenLog" logEvery="1000">
+			<column label="Posterior" dp="4" width="12">
+				<posterior idref="posterior"/>
+			</column>
+			<column label="Prior" dp="4" width="12">
+				<prior idref="prior"/>
+			</column>
+			<column label="Likelihood" dp="4" width="12">
+				<likelihood idref="likelihood"/>
+			</column>
+			<column label="rootHeight" sf="6" width="12">
+				<parameter idref="treeModel.rootHeight"/>
+			</column>
+			<column label="ucgd.mean" sf="6" width="12">
+				<parameter idref="ucgd.mean"/>
+			</column>
+		</log>
+
+		<!-- write log to file                                                       -->
+		<log id="fileLog" logEvery="1000" fileName="testUCRelaxedClockGamma.log" overwrite="false">
+			<posterior idref="posterior"/>
+			<prior idref="prior"/>
+			<likelihood idref="likelihood"/>
+			<parameter idref="treeModel.rootHeight"/>
+			<parameter idref="constant.popSize"/>
+			<parameter idref="kappa"/>
+			<parameter idref="frequencies"/>
+			<parameter idref="ucgd.mean"/>
+			<parameter idref="ucgd.shape"/>
+			<rateStatistic idref="meanRate"/>
+			<rateStatistic idref="coefficientOfVariation"/>
+			<rateCovarianceStatistic idref="covariance"/>
+			<treeLikelihood idref="treeLikelihood"/>
+			<discretizedBranchRates idref="branchRates"/>
+			<coalescentLikelihood idref="coalescent"/>
+		</log>
+
+		<!-- write tree log to file                                                  -->
+		<logTree id="treeFileLog" logEvery="1000" nexusFormat="true" fileName="testUCRelaxedClockGamma.trees" sortTranslationTable="true">
+			<treeModel idref="treeModel"/>
+			<trait name="rate" tag="rate">
+				<discretizedBranchRates idref="branchRates"/>
+			</trait>
+			<posterior idref="posterior"/>
+		</logTree>
+	</mcmc>
+	<report>
+		<property name="timer">
+			<mcmc idref="mcmc"/>
+		</property>
+	</report>
+</beast>
diff --git a/examples/release/clockModels/testUCRelaxedClockLogNormal.xml b/examples/release/clockModels/testUCRelaxedClockLogNormal.xml
index 6c6af39..4587aef 100644
--- a/examples/release/clockModels/testUCRelaxedClockLogNormal.xml
+++ b/examples/release/clockModels/testUCRelaxedClockLogNormal.xml
@@ -1,274 +1,377 @@
-<?xml version="1.0" standalone="yes"?>
-
-<!-- Generated by BEAUTi v1.7.0 Prerelease r3910                             -->
-<!--       by Alexei J. Drummond and Andrew Rambaut                          -->
-<!--       Department of Computer Science, University of Auckland and        -->
-<!--       Institute of Evolutionary Biology, University of Edinburgh        -->
-<!--       http://beast.bio.ed.ac.uk/                                        -->
-<beast>
-
-	<!-- The list of taxa to be analysed (can also include dates/ages).          -->
-	<!-- ntax=6                                                                  -->
-	<taxa id="taxa">
-		<taxon id="human"/>
-		<taxon id="chimp"/>
-		<taxon id="bonobo"/>
-		<taxon id="gorilla"/>
-		<taxon id="orangutan"/>
-		<taxon id="siamang"/>
-	</taxa>
-
-	<!-- The sequence alignment (each sequence refers to a taxon above).         -->
-	<!-- ntax=6 nchar=768                                                        -->
-	<alignment id="alignment" dataType="nucleotide">
-		<sequence>
-			<taxon idref="human"/>
-			AGAAATATGTCTGATAAAAGAGTTACTTTGATAGAGTAAATAATAGGAGCTTAAACCCCCTTATTTCTACTAGGACTATGAGAATCGAACCCATCCCTGAGAATCCAAAATTCTCCGTGCCACCTATCACACCCCATCCTAAGTAAGGTCAGCTAAATAAGCTATCGGGCCCATACCCCGAAAATGTTGGTTATACCCTTCCCGTACTAAGAAATTTAGGTTAAATACAGACCAAGAGCCTTCAAAGCCCTCAGTAAGTTG-CAATACTTAATTTCTGTAAGGACTGCAAAACCCCACTCTGCATCAACTGAACGCAAATCAGCCACTTTAATTAAGCTAAGCCCTTCTAGACCAATGGGACTTAAACCCACAAACACTTAGTTAACAGCTAAGCACCCTAATCAAC-TGGCTTCAATCTAAAGCCCCGGCAGG-TTTGAAGCTGCTTCTTCGAATTTGCAATTCAATATGAAAA-TCACCTCGGAGCT [...]
-		</sequence>
-		<sequence>
-			<taxon idref="chimp"/>
-			AGAAATATGTCTGATAAAAGAATTACTTTGATAGAGTAAATAATAGGAGTTCAAATCCCCTTATTTCTACTAGGACTATAAGAATCGAACTCATCCCTGAGAATCCAAAATTCTCCGTGCCACCTATCACACCCCATCCTAAGTAAGGTCAGCTAAATAAGCTATCGGGCCCATACCCCGAAAATGTTGGTTACACCCTTCCCGTACTAAGAAATTTAGGTTAAGCACAGACCAAGAGCCTTCAAAGCCCTCAGCAAGTTA-CAATACTTAATTTCTGTAAGGACTGCAAAACCCCACTCTGCATCAACTGAACGCAAATCAGCCACTTTAATTAAGCTAAGCCCTTCTAGATTAATGGGACTTAAACCCACAAACATTTAGTTAACAGCTAAACACCCTAATCAAC-TGGCTTCAATCTAAAGCCCCGGCAGG-TTTGAAGCTGCTTCTTCGAATTTGCAATTCAATATGAAAA-TCACCTCAGAGCT [...]
-		</sequence>
-		<sequence>
-			<taxon idref="bonobo"/>
-			AGAAATATGTCTGATAAAAGAATTACTTTGATAGAGTAAATAATAGGAGTTTAAATCCCCTTATTTCTACTAGGACTATGAGAGTCGAACCCATCCCTGAGAATCCAAAATTCTCCGTGCCACCTATCACACCCCATCCTAAGTAAGGTCAGCTAAATAAGCTATCGGGCCCATACCCCGAAAATGTTGGTTATACCCTTCCCGTACTAAGAAATTTAGGTTAAACACAGACCAAGAGCCTTCAAAGCTCTCAGTAAGTTA-CAATACTTAATTTCTGTAAGGACTGCAAAACCCCACTCTGCATCAACTGAACGCAAATCAGCCACTTTAATTAAGCTAAGCCCTTCTAGATTAATGGGACTTAAACCCACAAACATTTAGTTAACAGCTAAACACCCTAATCAGC-TGGCTTCAATCTAAAGCCCCGGCAGG-TTTGAAGCTGCTTCTTTGAATTTGCAATTCAATATGAAAA-TCACCTCAGAGCT [...]
-		</sequence>
-		<sequence>
-			<taxon idref="gorilla"/>
-			AGAAATATGTCTGATAAAAGAGTTACTTTGATAGAGTAAATAATAGAGGTTTAAACCCCCTTATTTCTACTAGGACTATGAGAATTGAACCCATCCCTGAGAATCCAAAATTCTCCGTGCCACCTGTCACACCCCATCCTAAGTAAGGTCAGCTAAATAAGCTATCGGGCCCATACCCCGAAAATGTTGGTCACATCCTTCCCGTACTAAGAAATTTAGGTTAAACATAGACCAAGAGCCTTCAAAGCCCTTAGTAAGTTA-CAACACTTAATTTCTGTAAGGACTGCAAAACCCTACTCTGCATCAACTGAACGCAAATCAGCCACTTTAATTAAGCTAAGCCCTTCTAGATCAATGGGACTCAAACCCACAAACATTTAGTTAACAGCTAAACACCCTAGTCAAC-TGGCTTCAATCTAAAGCCCCGGCAGG-TTTGAAGCTGCTTCTTCGAATTTGCAATTCAATATGAAAT-TCACCTCGGAGCT [...]
-		</sequence>
-		<sequence>
-			<taxon idref="orangutan"/>
-			AGAAATATGTCTGACAAAAGAGTTACTTTGATAGAGTAAAAAATAGAGGTCTAAATCCCCTTATTTCTACTAGGACTATGGGAATTGAACCCACCCCTGAGAATCCAAAATTCTCCGTGCCACCCATCACACCCCATCCTAAGTAAGGTCAGCTAAATAAGCTATCGGGCCCATACCCCGAAAATGTTGGTTACACCCTTCCCGTACTAAGAAATTTAGGTTA--CACAGACCAAGAGCCTTCAAAGCCCTCAGCAAGTCA-CAGCACTTAATTTCTGTAAGGACTGCAAAACCCCACTTTGCATCAACTGAGCGCAAATCAGCCACTTTAATTAAGCTAAGCCCTCCTAGACCGATGGGACTTAAACCCACAAACATTTAGTTAACAGCTAAACACCCTAGTCAAT-TGGCTTCAGTCCAAAGCCCCGGCAGGCCTTAAAGCTGCTCCTTCGAATTTGCAATTCAACATGACAA-TCACCTCAGGGCT [...]
-		</sequence>
-		<sequence>
-			<taxon idref="siamang"/>
-			AGAAATACGTCTGACGAAAGAGTTACTTTGATAGAGTAAATAACAGGGGTTTAAATCCCCTTATTTCTACTAGAACCATAGGAGTCGAACCCATCCTTGAGAATCCAAAACTCTCCGTGCCACCCGTCGCACCCTGTTCTAAGTAAGGTCAGCTAAATAAGCTATCGGGCCCATACCCCGAAAATGTTGGTTATACCCTTCCCATACTAAGAAATTTAGGTTAAACACAGACCAAGAGCCTTCAAAGCCCTCAGTAAGTTAACAAAACTTAATTTCTGCAAGGGCTGCAAAACCCTACTTTGCATCAACCGAACGCAAATCAGCCACTTTAATTAAGCTAAGCCCTTCTAGATCGATGGGACTTAAACCCATAAAAATTTAGTTAACAGCTAAACACCCTAAACAACCTGGCTTCAATCTAAAGCCCCGGCAGA-GTTGAAGCTGCTTCTTTGAACTTGCAATTCAACGTGAAAAATCACTTCGGAGCT [...]
-		</sequence>
-	</alignment>
-
-	<!-- The unique patterns from 1 to end                                       -->
-	<!-- npatterns=69                                                            -->
-	<patterns id="patterns" from="1">
-		<alignment idref="alignment"/>
-	</patterns>
-
-	<!-- A prior assumption that the population size has remained constant       -->
-	<!-- throughout the time spanned by the genealogy.                           -->
-	<constantSize id="constant" units="substitutions">
-		<populationSize>
-			<parameter id="popSize" value="0.077" lower="0.0" upper="Infinity"/>
-		</populationSize>
-	</constantSize>
-
-	<newick id="startingTree">
-        (siamang:0.06318470579939774,((gorilla:0.031056173315146828,((chimp:0.00982869388112981,bonobo:0.00982869388112981)
-        :0.01030402958978773,human:0.02013272347091754):0.010923449844229289):0.022256750048834654,orangutan:0.05331292336398148)
-        :0.009871782435416254);
-    </newick>
-
-	<!-- Generate a tree model                                                   -->
-	<treeModel id="treeModel">
-		<coalescentTree idref="startingTree"/>
-		<rootHeight>
-			<parameter id="tree.height"/>
-		</rootHeight>
-		<nodeHeights internalNodes="true">
-			<parameter id="treeModel.internalNodeHeights"/>
-		</nodeHeights>
-		<nodeHeights internalNodes="true" rootNode="true">
-			<parameter id="treeModel.allInternalNodeHeights"/>
-		</nodeHeights>
-	</treeModel>
-
-	<!-- Generate a coalescent likelihood                                        -->
-	<coalescentLikelihood id="coalescent">
-		<model>
-			<constantSize idref="constant"/>
-		</model>
-		<populationTree>
-			<treeModel idref="treeModel"/>
-		</populationTree>
-	</coalescentLikelihood>
-
-	<!-- The uncorrelated relaxed clock (Drummond, Ho, Phillips & Rambaut (2006) PLoS Biology 4, e88 )-->
-	<discretizedBranchRates id="branchRates">
-		<treeModel idref="treeModel"/>
-		<distribution>
-			<logNormalDistributionModel meanInRealSpace="true">
-				<mean>
-					<parameter id="ucld.mean" value="1.0" lower="0.0" upper="Infinity"/>
-				</mean>
-				<stdev>
-					<parameter id="ucld.stdev" value="0.3333333333333333" lower="0.0" upper="Infinity"/>
-				</stdev>
-			</logNormalDistributionModel>
-		</distribution>
-		<rateCategories>
-			<parameter id="branchRates.categories" dimension="10"/>
-		</rateCategories>
-	</discretizedBranchRates>
-	<rateStatistic id="meanRate" name="meanRate" mode="mean" internal="true" external="true">
-		<treeModel idref="treeModel"/>
-		<discretizedBranchRates idref="branchRates"/>
-	</rateStatistic>
-	<rateStatistic id="coefficientOfVariation" name="coefficientOfVariation" mode="coefficientOfVariation" internal="true" external="true">
-		<treeModel idref="treeModel"/>
-		<discretizedBranchRates idref="branchRates"/>
-	</rateStatistic>
-	<rateCovarianceStatistic id="covariance" name="covariance">
-		<treeModel idref="treeModel"/>
-		<discretizedBranchRates idref="branchRates"/>
-	</rateCovarianceStatistic> 
-
-	<!-- The HKY substitution model (Hasegawa, Kishino & Yano, 1985)             -->
-	<HKYModel id="hky">
-		<frequencies>
-			<frequencyModel dataType="nucleotide">
-				<frequencies>
-					<parameter id="hky.frequencies" value="0.25 0.25 0.25 0.25"/>
-				</frequencies>
-			</frequencyModel>
-		</frequencies>
-		<kappa>
-			<parameter id="hky.kappa" value="2.0" lower="0.0" upper="Infinity"/>
-		</kappa>
-	</HKYModel>
-
-	<!-- site model                                                              -->
-	<siteModel id="siteModel">
-		<substitutionModel>
-			<HKYModel idref="hky"/>
-		</substitutionModel>
-	</siteModel>
-
-	<!-- Likelihood for tree given sequence data                                 -->
-	<treeLikelihood id="treeLikelihood" useAmbiguities="false">
-		<patterns idref="patterns"/>
-		<treeModel idref="treeModel"/>
-		<siteModel idref="siteModel"/>
-		<discretizedBranchRates idref="branchRates"/> 
-	</treeLikelihood>
-
-	<!-- Define operators                                                        -->
-	<operators id="operators">
-		<scaleOperator scaleFactor="0.75" weight="0.1">
-			<parameter idref="hky.kappa"/>
-		</scaleOperator>
-		<deltaExchange delta="0.01" weight="0.1">
-			<parameter idref="hky.frequencies"/>
-		</deltaExchange>
-		<subtreeSlide size="0.0077" gaussian="true" weight="15">
-			<treeModel idref="treeModel"/>
-		</subtreeSlide>
-		<narrowExchange weight="15">
-			<treeModel idref="treeModel"/>
-		</narrowExchange>
-		<wideExchange weight="3">
-			<treeModel idref="treeModel"/>
-		</wideExchange>
-		<wilsonBalding weight="3">
-			<treeModel idref="treeModel"/>
-		</wilsonBalding>
-		<scaleOperator scaleFactor="0.75" weight="3">
-			<parameter idref="tree.height"/>
-		</scaleOperator>
-		<uniformOperator weight="30">
-			<parameter idref="treeModel.internalNodeHeights"/>
-		</uniformOperator>
-		<scaleOperator scaleFactor="0.75" weight="3">
-			<parameter idref="popSize"/>
-		</scaleOperator>
-		<upDownOperator scaleFactor="0.75" weight="3">
-			<up>
-			</up>
-			<down>
-				<parameter idref="treeModel.allInternalNodeHeights"/>
-			</down>
-		</upDownOperator>
-		<scaleOperator scaleFactor="0.75" weight="3">
-			<parameter idref="ucld.stdev"/>
-		</scaleOperator> 
-		<uniformIntegerOperator weight="10">
-			<parameter idref="branchRates.categories"/>
-		</uniformIntegerOperator>
-		<swapOperator size="1" weight="10" autoOptimize="false">
-            <parameter idref="branchRates.categories"/>
-        </swapOperator>
-        <randomWalkIntegerOperator windowSize="1.0" weight="10">
-            <parameter idref="branchRates.categories"/>
-        </randomWalkIntegerOperator>		 
-	</operators>
-
-	<!-- Define MCMC                                                             -->
-	<mcmc id="mcmc" chainLength="10000000" autoOptimize="true">
-		<posterior id="posterior">
-			<prior id="prior">
-				<logNormalPrior mean="1.0" stdev="1.25" offset="0.0" meanInRealSpace="false">
-					<parameter idref="hky.kappa"/>
-				</logNormalPrior>
-				<uniformPrior lower="0.0" upper="1.0">
-					<parameter idref="hky.frequencies"/>
-				</uniformPrior>
-				<exponentialPrior mean="0.3333333333333333" offset="0.0">
-					<parameter idref="ucld.stdev"/>
-				</exponentialPrior>  
-				<oneOnXPrior>
-					<parameter idref="popSize"/>
-				</oneOnXPrior> 
-				<coalescentLikelihood idref="coalescent"/>
-			</prior>
-			<likelihood id="likelihood">
-				<treeLikelihood idref="treeLikelihood"/>
-			</likelihood>
-		</posterior>
-		<operators idref="operators"/>
-
-		<!-- write log to screen                                                     -->
-		<log id="screenLog" logEvery="100000">
-			<column label="Posterior" dp="4" width="12">
-				<posterior idref="posterior"/>
-			</column>
-			<column label="Prior" dp="4" width="12">
-				<prior idref="prior"/>
-			</column>
-			<column label="Likelihood" dp="4" width="12">
-				<likelihood idref="likelihood"/>
-			</column>
-			<column label="rootHeight" sf="6" width="12">
-				<parameter idref="tree.height"/>
-			</column>
-			
-		</log>
-
-		<!-- write log to file                                                       -->
-		<log id="fileLog" logEvery="10000" fileName="testUCRelaxedClockLogNormal.log" overwrite="false">
-			<posterior idref="posterior"/>
-			<prior idref="prior"/>
-			<treeLikelihood idref="treeLikelihood"/>
-            <coalescentLikelihood idref="coalescent"/>
-            <parameter idref="popSize"/>
-            <parameter idref="tree.height"/>
-			<parameter idref="hky.kappa"/>
-			<parameter idref="hky.frequencies"/>
-			<parameter idref="ucld.mean"/>
-			<parameter idref="ucld.stdev"/>
-			<rateStatistic idref="meanRate"/>
-			<rateStatistic idref="coefficientOfVariation"/>
-			<rateCovarianceStatistic idref="covariance"/>
-			<parameter idref="branchRates.categories"/> 
-		</log>
-
-		<!-- write tree log to file
-		<logTree id="treeFileLog" logEvery="10000" nexusFormat="true" fileName="testUCRelaxedClockLogNormal.trees" sortTranslationTable="true">
-			<treeModel idref="treeModel"/>
-			<discretizedBranchRates idref="branchRates"/> 
-			<posterior idref="posterior"/>
-		</logTree>  -->
-	</mcmc>
-	<report>
-		<property name="timer">
-			<mcmc idref="mcmc"/>
-		</property>
-	</report>
-</beast>
+<?xml version="1.0" standalone="yes"?>
+
+<!-- Generated by BEAUTi v1.8.3 Prerelease r20150808                         -->
+<!--       by Alexei J. Drummond, Andrew Rambaut and Marc A. Suchard         -->
+<!--       Department of Computer Science, University of Auckland and        -->
+<!--       Institute of Evolutionary Biology, University of Edinburgh        -->
+<!--       David Geffen School of Medicine, University of California, Los Angeles-->
+<!--       http://beast.bio.ed.ac.uk/                                        -->
+<beast>
+
+	<!-- The list of taxa to be analysed (can also include dates/ages).          -->
+	<!-- ntax=17                                                                 -->
+	<taxa id="taxa">
+		<taxon id="D4Brazi82">
+			<date value="1982.0" direction="forwards" units="years"/>
+		</taxon>
+		<taxon id="D4ElSal83">
+			<date value="1983.0" direction="forwards" units="years"/>
+		</taxon>
+		<taxon id="D4ElSal94">
+			<date value="1994.0" direction="forwards" units="years"/>
+		</taxon>
+		<taxon id="D4Indon76">
+			<date value="1976.0" direction="forwards" units="years"/>
+		</taxon>
+		<taxon id="D4Indon77">
+			<date value="1977.0" direction="forwards" units="years"/>
+		</taxon>
+		<taxon id="D4Mexico84">
+			<date value="1984.0" direction="forwards" units="years"/>
+		</taxon>
+		<taxon id="D4NewCal81">
+			<date value="1981.0" direction="forwards" units="years"/>
+		</taxon>
+		<taxon id="D4Philip64">
+			<date value="1964.0" direction="forwards" units="years"/>
+		</taxon>
+		<taxon id="D4Philip56">
+			<date value="1956.0" direction="forwards" units="years"/>
+		</taxon>
+		<taxon id="D4Philip84">
+			<date value="1984.0" direction="forwards" units="years"/>
+		</taxon>
+		<taxon id="D4PRico86">
+			<date value="1986.0" direction="forwards" units="years"/>
+		</taxon>
+		<taxon id="D4SLanka78">
+			<date value="1978.0" direction="forwards" units="years"/>
+		</taxon>
+		<taxon id="D4Tahiti79">
+			<date value="1979.0" direction="forwards" units="years"/>
+		</taxon>
+		<taxon id="D4Tahiti85">
+			<date value="1985.0" direction="forwards" units="years"/>
+		</taxon>
+		<taxon id="D4Thai63">
+			<date value="1963.0" direction="forwards" units="years"/>
+		</taxon>
+		<taxon id="D4Thai78">
+			<date value="1978.0" direction="forwards" units="years"/>
+		</taxon>
+		<taxon id="D4Thai84">
+			<date value="1984.0" direction="forwards" units="years"/>
+		</taxon>
+	</taxa>
+
+	<!-- The sequence alignment (each sequence refers to a taxon above).         -->
+	<!-- ntax=17 nchar=1485                                                      -->
+	<alignment id="alignment" dataType="nucleotide">
+		<sequence>
+			<taxon idref="D4Brazi82"/>
+			ATGCGATGCGTAGGAGTAGGAAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGACCTGGTGCTAGAACATGGAGGATGCGTCACAACCATGGCCCAGGGAAAACCAACCTTGGATTTTGAACTGACCAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCAATATCAAACATAACTACGGCAACAAGATGTCCAACGCAAGGAGAGCCTTATCTGAAAGAGGAACAGGACCAACAGTACATTTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTGTTTGGAAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGTTCGGGGAAGATAACAGGCAATTTGGTCCAAATTGAGAACCTTGAATACACAGTGGTTGTAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGACACATCCAATCATGGAGTTACAGCCATG [...]
+		</sequence>
+		<sequence>
+			<taxon idref="D4ElSal83"/>
+			ATGCGATGCGTAGGAGTAGGAAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGACCTGGTGCTAGAACATGGAGGATGCGTCACAACCATGGCCCAGGGAAAACCAACCTTGGATTTTGAACTGACTAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCAATATCAAACATAACTACGGCAACAAGATGTCCAACGCAAGGAGAGCCTTATCTGAAAGAGGAACAGGACCAACAGTACATTTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTGTTTGGAAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGTTCGGGGAAGATAACAGGCAATTTGGTCCAAATTGAGAACCTTGAATACACAGTGGTTGTAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGACACATCCAATCATGGAGTTACAGCCATG [...]
+		</sequence>
+		<sequence>
+			<taxon idref="D4ElSal94"/>
+			ATGCGATGCGTAGGAGTAGGAAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGACCTGGTGCTAGAACATGGAGGATGCGTCACAACCATAGCCCAGGGAAAACCAACCTTGGATTTTGAATTGACTAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCAATATCAAACATAACTACGGCAACAAGATGTCCAACGCAAGGAGAGCCTTATCTGAAAGAGGAACAGGACCAACAGTACATTTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGGAATGGCTGTGGCTTGCTTGGAAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGTTCGGGGAAGATAACAGGCAATTTGGTCCAAATTGAGAACCTTGAATACACAGTGGTTGTAACAGTCCACAATGGAGATACCCATGCAGTAGGAAATGACACATCCAATCATGGAGTTACAGCCACG [...]
+		</sequence>
+		<sequence>
+			<taxon idref="D4Indon76"/>
+			ATGCGATGCGTAGGAGTAGGAAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGATCTGGTGCTAGAACATGGAGGATGCGTCACAACCATGGCCCAGGGAAAACCAACCTTGGATTTTGAACTGACTAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCAATATCAAACATAACCACGGCAACAAGATGTCCAACGCAAGGAGAGCCTTATCTAAAAGAGGAACAAGACCAACAGTACATTTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTGTTTGGAAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGTTCGGGGAAGATAACAGGCAATTTGGTCCAAATTGAGAACCTTGAATACACAGTGGTTGTAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGACACATCCAATCATGGAGTTACAGCCACG [...]
+		</sequence>
+		<sequence>
+			<taxon idref="D4Indon77"/>
+			ATGCGATGCGTAGGAGTAGGAAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGATCTGGTGCTAGAACATGGAGGATGCGTCACAACCATGGCCCAGGGAAAACCAACCTTGGATTTTGAACTGACTAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCAATATCAAACATAACCACGGCAACAAGATGTCCAACGCAAGGAGAGCCTTATCTAAAAGAGGAACAAGACCAACAGTACATTTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTGTTTGGAAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGTTCGGGGAAGATAACAGGCAATTTGGTCCAAATTGAGAACCTTGAATACACAGTAGTTGTAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGACACATCCAACCATGGAGTTACAGCCACG [...]
+		</sequence>
+		<sequence>
+			<taxon idref="D4Mexico84"/>
+			ATGCGATGCGTAGGAGTAGGAAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGACCTAGTGCTAGAACATGGAGGATGCGTCACAACCATGGCCCAGGGAAAACCAACCTTGGATTTTGAACTGACTAAGACAACAGCCAAGGAAGTGGCTCTGCTAAGAACCTATTGCATTGAAGCCTCAATATCAAACATAACTACGGCAACAAGATGTCCAACGCAAGGAGAGCCTTATCTGAAAGAGGAACAGGACCAACAGTACATTTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTGTTTGGAAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGTTCGGGGAAGATAACAGGCAATTTGGTCCAAATTGAGAACCTTGAATACACAGTGGTTGTAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGACACATCCAATCATGGAGTTACAGCCATG [...]
+		</sequence>
+		<sequence>
+			<taxon idref="D4NewCal81"/>
+			ATGCGATGCGTAGGAGTAGGAAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGACCTGGTGCTAGAACATGGAGGATGCGTCACAACCATGGCCCAGGGAAAACCAACCTTGGATTTTGAACTGACTAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCAATATCAAACATAACTACGGCAACAAGATGTCCAACGCAAGGAGAGCCTTATCTGAAAGAGGAACAGGACCAACAGTACATTTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTGTTTGGAAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGTTCGGGGAAGATAACAGGCAATTTGGTCCAAATTGAGAACCTTGAATACACAGTGGTTGTAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGACACATCCAATCATGGAGTTACAGCCATG [...]
+		</sequence>
+		<sequence>
+			<taxon idref="D4Philip64"/>
+			ATGCGATGCGTGGGAGTGGGGAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGATTTGGTGCTAGAACATGGAGGATGTGTCACAACCATGGCCCAGGGAAAACCAACCTTGGATTTTGAACTGATCAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCGATATCAAACATAACCACGGCAACAAGATGTCCAACGCAAGGAGAACCTTATCTCAAAGAGGAACAAGATCAACAGTACATCTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTGTTTGGGAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGCTCGGGGAAGATAACAGGCAATTTGGTCCAAATTGAGAACCTTGAATACACAGTGGTTGTAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGATACATCCAACCATGGAGTGACAGCCACG [...]
+		</sequence>
+		<sequence>
+			<taxon idref="D4Philip56"/>
+			ATGCGATGCGTGGGAGTGGGGAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGATTTGGTGCTAGAACATGGAGGATGTGTCACAACCATGGCCCAGGGAAAACCAACCTTGGATTTTGAACTGATCAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCGATATCAAACATAACCACGGCAACAAGATGTCCAACGCAAGGAGAACCTTATCTCAAAGAGGAACAAGATCAACAGTACATCTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTGCTTGGGAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGCTCGGGGAAGATAACAGGCAATTTGGTCCAAATTGAGAACCTTGAATACACAGTAGTTGTAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGACATATCCAACCATGGAGTGACAGCCACG [...]
+		</sequence>
+		<sequence>
+			<taxon idref="D4Philip84"/>
+			ATGCGATGCGTAGGAGTGGGGAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGACTTAGTGCTAGAACATGGAGGATGTGTCACAACCATGGCCCAAGGAAAACCAACCTTGGATTTTGAACTGATCAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCGATATCAAACATAACCACGGCAACAAGATGCCCAACGCAAGGAGAACCTTATCTCAAAGAGGAACAAGATCAACAGTACATTTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTGTTTGGGAAAGGAGGAGTTGTGACATGTGCGAAGTTCTCATGCTCGGGAAAGATAACAGGCAATTTGGTCCAAATTGAGAACCTTGAATATACAGTGGTTGTAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGACACATCCAACCATGGAGTGACAGCCACG [...]
+		</sequence>
+		<sequence>
+			<taxon idref="D4PRico86"/>
+			ATGCGATGCGTAGGAGTAGGAAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGACCTGGTGCTAGAACATGGAGGATGCGTCACAACCATGGCCCAGGGAAAACCAACCTTGGATTTTGAACTGACTAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCAATATCAAACATAACTACGGCAACAAGATGTCCAACGCAAGGGGAGCCCTATCTGAAAGAGGAACAGGACCAACAGTACATTTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTGTTTGGAAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGTTCGGGGAAGATAACAGGCAATCTGGTCCAAATTGAGAACCTTGAGTACACAGTGGTTGTAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGACACATCCAATCATGGAGTTACAGCCACG [...]
+		</sequence>
+		<sequence>
+			<taxon idref="D4SLanka78"/>
+			ATGCGATGCGTGGGAGTGGGGAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGATCTGGTGCTAGAACATGGAGGATGTGTCACAACCATGGCCCAAGGAAAACCAACCTTGGATTTTGAACTGATCAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCCATATCAAACATAACCACGGCAACAAGATGTCCAACGCAAGGAGAGCCCTGTCTCAAAGAGGAACAGGATCAACAGTACATCTGCCGGAGAGACGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCCTGCTTGGGAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGCTCGGGGAAGATAACAGGCAACTTAGTCCGAATTGAGAACCTTGAATACACAGTGGTTGTAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGACACATCCAACCACGGAGTGACAGCCACG [...]
+		</sequence>
+		<sequence>
+			<taxon idref="D4Tahiti79"/>
+			ATGCGATGCGTAGGAGTAGGAAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGATCTGGTGCTAGAACATGGAGGATGCGTCACAACCATGGCCCAGGGAAAACCAACCTTGGATTTTGAACTGACTAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCAATATCAAACATAACTACGGCAACAAGATGTCCAACGCAAGGAGAGCCTTATCTGAAAGAGGAACAGGACCAACAGTACATTTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTGTTTGGAAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGTTCGGGGAAGATAACAGGCAATTTGGTCCAAATTGAGAACCTTGAATACACAGTGGTTGTAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGACACATCCAATCATGGAGTTACAGCCACG [...]
+		</sequence>
+		<sequence>
+			<taxon idref="D4Tahiti85"/>
+			ATGCGATGCGTAGGAGTAGGAAACAGAGACTTTGTGGAAGGAGTTTCAGGTGGAGCATGGGTCGATTTGGTGCTAGAACATGGAGGATGCGTCACAACCATGGCCCAGGGAAAACCAACCTTGGATTTTGAACTGACTAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCAATATCAAACATAACTACGGCAACAAGATGTCCAACGCAAGGAGAGCCTTATCTGAAAGAGGAACAGGACCAACAGTACATTTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTGTTTGGAAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGTTCGGGGAAGATAACAGGCAATCTGGTCCAAATTGAGAACCTTGAATACACAGTGGTCATAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGACACATCCAATCATGGAGTTACAGCCACG [...]
+		</sequence>
+		<sequence>
+			<taxon idref="D4Thai63"/>
+			ATGCGATGCGTAGGAGTGGGGAACAGGGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGATCTGGTGCTAGAACATGGAGGATGTGTCACAACCATGGCTCAAGGAAAACCAACCTTGGATTTTGAACTGATCAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCGATATCAAACATAACCACGGCGACAAGATGTCCAACGCAAGGAGAGCCTTATCTCAAAGAGGAACAAGATCAACAGTACATCTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTACTTGGAAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGCTCGGGGAAGATAACAGGCAACTTGGTTCGAATTGAGAACCTTGAATACACAGTGGTTGTGACAGTCCACAACGGAGACACCCATGCAGTAGGAAATGACATATCCAACCATGGAGTGACAGCCACG [...]
+		</sequence>
+		<sequence>
+			<taxon idref="D4Thai78"/>
+			ATGCGATGCGTAGGAGTGGGGAACAGAGACTTTGTAGAAGGAGTCTCAGGTGGAGCATGGGTCGATCTGGTGCTAGAACATGGAGGATGTGTCACAACCATGGCCCAGGGAAAACCAACCTTGGATTTTGAACTGATCAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCCATATCAAACATAACCACGGCAACAAGATGTCCAACGCAAGGAGAGCCTTACCTCAAAGAGGAACAAGATCAACAGTACATCTGCCGGAGAGACGTGGTAGATAGAGGGTGGGGCAACGGCTGTGGCTTGCTTGGGAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGCTCGGGGAAGATAACAGGCAACTTAGTCCAAATTGAGAACCTTGAATACACAGTGGTTGTAACAGTCCACAATGGAGACACCCATGCTGTAGGAAATGATACATCCAACCACGGAGTGACAGCCACG [...]
+		</sequence>
+		<sequence>
+			<taxon idref="D4Thai84"/>
+			ATGCGATGCGTAGGAGTAGGGAACAGAGACTTTGTAGAAGGAGTCTCAGGTGGAGCATGGGTCGATCTGGTGCTAGAACATGGAGGATGTGTCACAACCATGGCCCAGGGAAAACCAACCCTGGATTTTGAACTGATCAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCCATATCAAACATAACCACGGCAACAAGATGTCCAACGCAAGGAGAGCCTTACCTCAAAGAGGAACAAGATCAACAGTACATCTGCCGGAGAGACGTGGTAGACAGAGGGTGGGGCAACGGCTGTGGCTTGTTTGGGAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGCTCGGGGAAGATAACGGGCAACTTAGTCCAAATTGAGAACCTTGAATACACAGTGGTTGTAACAGTCCACAATGGAGACACCCATGCTGTAGGAAATGATACATCCAACCACGGAGTGACAGCCACG [...]
+		</sequence>
+	</alignment>
+
+	<!-- The unique patterns from 1 to end                                       -->
+	<!-- npatterns=138                                                           -->
+	<patterns id="patterns" from="1" strip="false">
+		<alignment idref="alignment"/>
+	</patterns>
+
+	<!-- A prior assumption that the population size has remained constant       -->
+	<!-- throughout the time spanned by the genealogy.                           -->
+	<constantSize id="constant" units="years">
+		<populationSize>
+			<parameter id="constant.popSize" value="380.0" lower="0.0"/>
+		</populationSize>
+	</constantSize>
+
+	<!-- Generate a random starting tree under the coalescent process            -->
+	<coalescentSimulator id="startingTree">
+		<taxa idref="taxa"/>
+		<constantSize idref="constant"/>
+	</coalescentSimulator>
+
+	<!-- Generate a tree model                                                   -->
+	<treeModel id="treeModel">
+		<coalescentTree idref="startingTree"/>
+		<rootHeight>
+			<parameter id="treeModel.rootHeight"/>
+		</rootHeight>
+		<nodeHeights internalNodes="true">
+			<parameter id="treeModel.internalNodeHeights"/>
+		</nodeHeights>
+		<nodeHeights internalNodes="true" rootNode="true">
+			<parameter id="treeModel.allInternalNodeHeights"/>
+		</nodeHeights>
+	</treeModel>
+
+	<!-- Generate a coalescent likelihood                                        -->
+	<coalescentLikelihood id="coalescent">
+		<model>
+			<constantSize idref="constant"/>
+		</model>
+		<populationTree>
+			<treeModel idref="treeModel"/>
+		</populationTree>
+	</coalescentLikelihood>
+
+	<!-- The uncorrelated relaxed clock (Drummond, Ho, Phillips & Rambaut (2006) PLoS Biology 4, e88 )-->
+	<discretizedBranchRates id="branchRates">
+		<treeModel idref="treeModel"/>
+		<distribution>
+			<logNormalDistributionModel meanInRealSpace="true">
+				<mean>
+					<parameter id="ucld.mean" value="1.0" lower="0.0"/>
+				</mean>
+				<stdev>
+					<parameter id="ucld.stdev" value="0.3333333333333333" lower="0.0"/>
+				</stdev>
+			</logNormalDistributionModel>
+		</distribution>
+		<rateCategories>
+			<parameter id="branchRates.categories"/>
+		</rateCategories>
+	</discretizedBranchRates>
+	<rateStatistic id="meanRate" name="meanRate" mode="mean" internal="true" external="true">
+		<treeModel idref="treeModel"/>
+		<discretizedBranchRates idref="branchRates"/>
+	</rateStatistic>
+	<rateStatistic id="coefficientOfVariation" name="coefficientOfVariation" mode="coefficientOfVariation" internal="true" external="true">
+		<treeModel idref="treeModel"/>
+		<discretizedBranchRates idref="branchRates"/>
+	</rateStatistic>
+	<rateCovarianceStatistic id="covariance" name="covariance">
+		<treeModel idref="treeModel"/>
+		<discretizedBranchRates idref="branchRates"/>
+	</rateCovarianceStatistic>
+
+	<!-- The HKY substitution model (Hasegawa, Kishino & Yano, 1985)             -->
+	<HKYModel id="hky">
+		<frequencies>
+			<frequencyModel dataType="nucleotide">
+				<frequencies>
+					<parameter id="frequencies" value="0.25 0.25 0.25 0.25"/>
+				</frequencies>
+			</frequencyModel>
+		</frequencies>
+		<kappa>
+			<parameter id="kappa" value="2.0" lower="0.0"/>
+		</kappa>
+	</HKYModel>
+
+	<!-- site model                                                              -->
+	<siteModel id="siteModel">
+		<substitutionModel>
+			<HKYModel idref="hky"/>
+		</substitutionModel>
+	</siteModel>
+
+	<!-- Likelihood for tree given sequence data                                 -->
+	<treeLikelihood id="treeLikelihood" useAmbiguities="false">
+		<patterns idref="patterns"/>
+		<treeModel idref="treeModel"/>
+		<siteModel idref="siteModel"/>
+		<discretizedBranchRates idref="branchRates"/>
+	</treeLikelihood>
+
+	<!-- Define operators                                                        -->
+	<operators id="operators" optimizationSchedule="default">
+		<scaleOperator scaleFactor="0.75" weight="1">
+			<parameter idref="kappa"/>
+		</scaleOperator>
+		<deltaExchange delta="0.01" weight="1">
+			<parameter idref="frequencies"/>
+		</deltaExchange>
+		<scaleOperator scaleFactor="0.75" weight="30">
+			<parameter idref="ucld.mean"/>
+		</scaleOperator>
+		<scaleOperator scaleFactor="0.75" weight="30">
+			<parameter idref="ucld.stdev"/>
+		</scaleOperator>
+		<subtreeSlide size="38.0" gaussian="true" weight="150">
+			<treeModel idref="treeModel"/>
+		</subtreeSlide>
+		<narrowExchange weight="150">
+			<treeModel idref="treeModel"/>
+		</narrowExchange>
+		<wideExchange weight="30">
+			<treeModel idref="treeModel"/>
+		</wideExchange>
+		<wilsonBalding weight="30">
+			<treeModel idref="treeModel"/>
+		</wilsonBalding>
+		<scaleOperator scaleFactor="0.75" weight="30">
+			<parameter idref="treeModel.rootHeight"/>
+		</scaleOperator>
+		<uniformOperator weight="300">
+			<parameter idref="treeModel.internalNodeHeights"/>
+		</uniformOperator>
+		<scaleOperator scaleFactor="0.75" weight="30">
+			<parameter idref="constant.popSize"/>
+		</scaleOperator>
+		<upDownOperator scaleFactor="0.75" weight="30">
+			<up>
+				<parameter idref="ucld.mean"/>
+			</up>
+			<down>
+				<parameter idref="treeModel.allInternalNodeHeights"/>
+			</down>
+		</upDownOperator>
+		<swapOperator size="1" weight="100" autoOptimize="false">
+			<parameter idref="branchRates.categories"/>
+		</swapOperator>
+		<uniformIntegerOperator weight="100">
+			<parameter idref="branchRates.categories"/>
+		</uniformIntegerOperator>
+	</operators>
+
+	<!-- Define MCMC                                                             -->
+	<mcmc id="mcmc" chainLength="10000000" autoOptimize="true" operatorAnalysis="testUCRelaxedClockLogNormal.ops">
+		<posterior id="posterior">
+			<prior id="prior">
+				<logNormalPrior mean="1.0" stdev="1.25" offset="0.0" meanInRealSpace="false">
+					<parameter idref="kappa"/>
+				</logNormalPrior>
+				<uniformPrior lower="0.0" upper="1.0">
+					<parameter idref="frequencies"/>
+				</uniformPrior>
+				<exponentialPrior mean="0.3333333333333333" offset="0.0">
+					<parameter idref="ucld.stdev"/>
+				</exponentialPrior>
+				<ctmcScalePrior>
+					<ctmcScale>
+						<parameter idref="ucld.mean"/>
+					</ctmcScale>
+					<treeModel idref="treeModel"/>
+				</ctmcScalePrior>
+				<oneOnXPrior>
+					<parameter idref="constant.popSize"/>
+				</oneOnXPrior>
+				<coalescentLikelihood idref="coalescent"/>
+			</prior>
+			<likelihood id="likelihood">
+				<treeLikelihood idref="treeLikelihood"/>
+				<discretizedBranchRates idref="branchRates"/>
+			</likelihood>
+		</posterior>
+		<operators idref="operators"/>
+
+		<!-- write log to screen                                                     -->
+		<log id="screenLog" logEvery="1000">
+			<column label="Posterior" dp="4" width="12">
+				<posterior idref="posterior"/>
+			</column>
+			<column label="Prior" dp="4" width="12">
+				<prior idref="prior"/>
+			</column>
+			<column label="Likelihood" dp="4" width="12">
+				<likelihood idref="likelihood"/>
+			</column>
+			<column label="rootHeight" sf="6" width="12">
+				<parameter idref="treeModel.rootHeight"/>
+			</column>
+			<column label="ucld.mean" sf="6" width="12">
+				<parameter idref="ucld.mean"/>
+			</column>
+		</log>
+
+		<!-- write log to file                                                       -->
+		<log id="fileLog" logEvery="1000" fileName="testUCRelaxedClockLogNormal.log" overwrite="false">
+			<posterior idref="posterior"/>
+			<prior idref="prior"/>
+			<likelihood idref="likelihood"/>
+			<parameter idref="treeModel.rootHeight"/>
+			<parameter idref="constant.popSize"/>
+			<parameter idref="kappa"/>
+			<parameter idref="frequencies"/>
+			<parameter idref="ucld.mean"/>
+			<parameter idref="ucld.stdev"/>
+			<rateStatistic idref="meanRate"/>
+			<rateStatistic idref="coefficientOfVariation"/>
+			<rateCovarianceStatistic idref="covariance"/>
+			<treeLikelihood idref="treeLikelihood"/>
+			<discretizedBranchRates idref="branchRates"/>
+			<coalescentLikelihood idref="coalescent"/>
+		</log>
+
+		<!-- write tree log to file                                                  -->
+		<logTree id="treeFileLog" logEvery="1000" nexusFormat="true" fileName="testUCRelaxedClockLogNormal.trees" sortTranslationTable="true">
+			<treeModel idref="treeModel"/>
+			<trait name="rate" tag="rate">
+				<discretizedBranchRates idref="branchRates"/>
+			</trait>
+			<posterior idref="posterior"/>
+		</logTree>
+	</mcmc>
+	<report>
+		<property name="timer">
+			<mcmc idref="mcmc"/>
+		</property>
+	</report>
+</beast>
diff --git a/examples/release/clockModels/testUncorrelatedRelaxedClock.xml b/examples/release/clockModels/testUncorrelatedRelaxedClock.xml
deleted file mode 100644
index c9fb0e4..0000000
--- a/examples/release/clockModels/testUncorrelatedRelaxedClock.xml
+++ /dev/null
@@ -1,418 +0,0 @@
-<?xml version="1.0" standalone="yes"?>
-<!--
-  ~ testUncorrelatedRelaxedClock.xml
-  ~
-  ~ Copyright (C) 2002-2009 Alexei Drummond and Andrew Rambaut
-  ~
-  ~ This file is part of BEAST.
-  ~ See the NOTICE file distributed with this work for additional
-  ~ information regarding copyright ownership and licensing.
-  ~
-  ~ BEAST is free software; you can redistribute it and/or modify
-  ~ it under the terms of the GNU Lesser General Public License as
-  ~ published by the Free Software Foundation; either version 2
-  ~ of the License, or (at your option) any later version.
-  ~
-  ~ BEAST is distributed in the hope that it will be useful,
-  ~ but WITHOUT ANY WARRANTY; without even the implied warranty of
-  ~ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-  ~ GNU Lesser General Public License for more details.
-  ~
-  ~ You should have received a copy of the GNU Lesser General Public
-  ~ License along with BEAST; if not, write to the
-  ~ Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
-  ~ Boston, MA  02110-1301  USA
-  -->
-
-<!-- Generated by BEAUTi v1.4.8                                              -->
-<!--       by Alexei J. Drummond and Andrew Rambaut                          -->
-<!--       Department of Computer Science, University of Auckland and        -->
-<!--       Institute of Evolutionary Biology, University of Edinburgh        -->
-<!--       http://beast.bio.ed.ac.uk/                                        -->
-<beast>
-
-    <!-- The list of taxa analyse (can also include dates/ages).                 -->
-    <!-- ntax=17                                                                 -->
-    <taxa id="taxa">
-        <taxon id="D4Brazi82">
-            <date value="1982.0" direction="forwards" units="years"/>
-        </taxon>
-        <taxon id="D4ElSal83">
-            <date value="1983.0" direction="forwards" units="years"/>
-        </taxon>
-        <taxon id="D4ElSal94">
-            <date value="1994.0" direction="forwards" units="years"/>
-        </taxon>
-        <taxon id="D4Indon76">
-            <date value="1976.0" direction="forwards" units="years"/>
-        </taxon>
-        <taxon id="D4Indon77">
-            <date value="1977.0" direction="forwards" units="years"/>
-        </taxon>
-        <taxon id="D4Mexico84">
-            <date value="1984.0" direction="forwards" units="years"/>
-        </taxon>
-        <taxon id="D4NewCal81">
-            <date value="1981.0" direction="forwards" units="years"/>
-        </taxon>
-        <taxon id="D4Philip64">
-            <date value="1964.0" direction="forwards" units="years"/>
-        </taxon>
-        <taxon id="D4Philip56">
-            <date value="1956.0" direction="forwards" units="years"/>
-        </taxon>
-        <taxon id="D4Philip84">
-            <date value="1984.0" direction="forwards" units="years"/>
-        </taxon>
-        <taxon id="D4PRico86">
-            <date value="1986.0" direction="forwards" units="years"/>
-        </taxon>
-        <taxon id="D4SLanka78">
-            <date value="1978.0" direction="forwards" units="years"/>
-        </taxon>
-        <taxon id="D4Tahiti79">
-            <date value="1979.0" direction="forwards" units="years"/>
-        </taxon>
-        <taxon id="D4Tahiti85">
-            <date value="1985.0" direction="forwards" units="years"/>
-        </taxon>
-        <taxon id="D4Thai63">
-            <date value="1963.0" direction="forwards" units="years"/>
-        </taxon>
-        <taxon id="D4Thai78">
-            <date value="1978.0" direction="forwards" units="years"/>
-        </taxon>
-        <taxon id="D4Thai84">
-            <date value="1984.0" direction="forwards" units="years"/>
-        </taxon>
-    </taxa>
-
-    <!-- The sequence alignment (each sequence refers to a taxon above).         -->
-    <!-- ntax=17 nchar=1485                                                      -->
-    <alignment id="alignment" dataType="nucleotide">
-        <sequence>
-            <taxon idref="D4Brazi82"/>
-            ATGCGATGCGTAGGAGTAGGAAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGACCTGGTGCTAGAACATGGAGGATGCGTCACAACCATGGCCCAGGGAAAACCAACCTTGGATTTTGAACTGACCAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCAATATCAAACATAACTACGGCAACAAGATGTCCAACGCAAGGAGAGCCTTATCTGAAAGAGGAACAGGACCAACAGTACATTTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTGTTTGGAAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGTTCGGGGAAGATAACAGGCAATTTGGTCCAAATTGAGAACCTTGAATACACAGTGGTTGTAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGACACATCCAATCATGGAGTT [...]
-        </sequence>
-        <sequence>
-            <taxon idref="D4ElSal83"/>
-            ATGCGATGCGTAGGAGTAGGAAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGACCTGGTGCTAGAACATGGAGGATGCGTCACAACCATGGCCCAGGGAAAACCAACCTTGGATTTTGAACTGACTAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCAATATCAAACATAACTACGGCAACAAGATGTCCAACGCAAGGAGAGCCTTATCTGAAAGAGGAACAGGACCAACAGTACATTTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTGTTTGGAAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGTTCGGGGAAGATAACAGGCAATTTGGTCCAAATTGAGAACCTTGAATACACAGTGGTTGTAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGACACATCCAATCATGGAGTT [...]
-        </sequence>
-        <sequence>
-            <taxon idref="D4ElSal94"/>
-            ATGCGATGCGTAGGAGTAGGAAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGACCTGGTGCTAGAACATGGAGGATGCGTCACAACCATAGCCCAGGGAAAACCAACCTTGGATTTTGAATTGACTAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCAATATCAAACATAACTACGGCAACAAGATGTCCAACGCAAGGAGAGCCTTATCTGAAAGAGGAACAGGACCAACAGTACATTTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGGAATGGCTGTGGCTTGCTTGGAAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGTTCGGGGAAGATAACAGGCAATTTGGTCCAAATTGAGAACCTTGAATACACAGTGGTTGTAACAGTCCACAATGGAGATACCCATGCAGTAGGAAATGACACATCCAATCATGGAGTT [...]
-        </sequence>
-        <sequence>
-            <taxon idref="D4Indon76"/>
-            ATGCGATGCGTAGGAGTAGGAAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGATCTGGTGCTAGAACATGGAGGATGCGTCACAACCATGGCCCAGGGAAAACCAACCTTGGATTTTGAACTGACTAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCAATATCAAACATAACCACGGCAACAAGATGTCCAACGCAAGGAGAGCCTTATCTAAAAGAGGAACAAGACCAACAGTACATTTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTGTTTGGAAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGTTCGGGGAAGATAACAGGCAATTTGGTCCAAATTGAGAACCTTGAATACACAGTGGTTGTAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGACACATCCAATCATGGAGTT [...]
-        </sequence>
-        <sequence>
-            <taxon idref="D4Indon77"/>
-            ATGCGATGCGTAGGAGTAGGAAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGATCTGGTGCTAGAACATGGAGGATGCGTCACAACCATGGCCCAGGGAAAACCAACCTTGGATTTTGAACTGACTAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCAATATCAAACATAACCACGGCAACAAGATGTCCAACGCAAGGAGAGCCTTATCTAAAAGAGGAACAAGACCAACAGTACATTTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTGTTTGGAAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGTTCGGGGAAGATAACAGGCAATTTGGTCCAAATTGAGAACCTTGAATACACAGTAGTTGTAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGACACATCCAACCATGGAGTT [...]
-        </sequence>
-        <sequence>
-            <taxon idref="D4Mexico84"/>
-            ATGCGATGCGTAGGAGTAGGAAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGACCTAGTGCTAGAACATGGAGGATGCGTCACAACCATGGCCCAGGGAAAACCAACCTTGGATTTTGAACTGACTAAGACAACAGCCAAGGAAGTGGCTCTGCTAAGAACCTATTGCATTGAAGCCTCAATATCAAACATAACTACGGCAACAAGATGTCCAACGCAAGGAGAGCCTTATCTGAAAGAGGAACAGGACCAACAGTACATTTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTGTTTGGAAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGTTCGGGGAAGATAACAGGCAATTTGGTCCAAATTGAGAACCTTGAATACACAGTGGTTGTAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGACACATCCAATCATGGAGTT [...]
-        </sequence>
-        <sequence>
-            <taxon idref="D4NewCal81"/>
-            ATGCGATGCGTAGGAGTAGGAAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGACCTGGTGCTAGAACATGGAGGATGCGTCACAACCATGGCCCAGGGAAAACCAACCTTGGATTTTGAACTGACTAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCAATATCAAACATAACTACGGCAACAAGATGTCCAACGCAAGGAGAGCCTTATCTGAAAGAGGAACAGGACCAACAGTACATTTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTGTTTGGAAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGTTCGGGGAAGATAACAGGCAATTTGGTCCAAATTGAGAACCTTGAATACACAGTGGTTGTAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGACACATCCAATCATGGAGTT [...]
-        </sequence>
-        <sequence>
-            <taxon idref="D4Philip64"/>
-            ATGCGATGCGTGGGAGTGGGGAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGATTTGGTGCTAGAACATGGAGGATGTGTCACAACCATGGCCCAGGGAAAACCAACCTTGGATTTTGAACTGATCAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCGATATCAAACATAACCACGGCAACAAGATGTCCAACGCAAGGAGAACCTTATCTCAAAGAGGAACAAGATCAACAGTACATCTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTGTTTGGGAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGCTCGGGGAAGATAACAGGCAATTTGGTCCAAATTGAGAACCTTGAATACACAGTGGTTGTAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGATACATCCAACCATGGAGTG [...]
-        </sequence>
-        <sequence>
-            <taxon idref="D4Philip56"/>
-            ATGCGATGCGTGGGAGTGGGGAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGATTTGGTGCTAGAACATGGAGGATGTGTCACAACCATGGCCCAGGGAAAACCAACCTTGGATTTTGAACTGATCAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCGATATCAAACATAACCACGGCAACAAGATGTCCAACGCAAGGAGAACCTTATCTCAAAGAGGAACAAGATCAACAGTACATCTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTGCTTGGGAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGCTCGGGGAAGATAACAGGCAATTTGGTCCAAATTGAGAACCTTGAATACACAGTAGTTGTAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGACATATCCAACCATGGAGTG [...]
-        </sequence>
-        <sequence>
-            <taxon idref="D4Philip84"/>
-            ATGCGATGCGTAGGAGTGGGGAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGACTTAGTGCTAGAACATGGAGGATGTGTCACAACCATGGCCCAAGGAAAACCAACCTTGGATTTTGAACTGATCAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCGATATCAAACATAACCACGGCAACAAGATGCCCAACGCAAGGAGAACCTTATCTCAAAGAGGAACAAGATCAACAGTACATTTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTGTTTGGGAAAGGAGGAGTTGTGACATGTGCGAAGTTCTCATGCTCGGGAAAGATAACAGGCAATTTGGTCCAAATTGAGAACCTTGAATATACAGTGGTTGTAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGACACATCCAACCATGGAGTG [...]
-        </sequence>
-        <sequence>
-            <taxon idref="D4PRico86"/>
-            ATGCGATGCGTAGGAGTAGGAAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGACCTGGTGCTAGAACATGGAGGATGCGTCACAACCATGGCCCAGGGAAAACCAACCTTGGATTTTGAACTGACTAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCAATATCAAACATAACTACGGCAACAAGATGTCCAACGCAAGGGGAGCCCTATCTGAAAGAGGAACAGGACCAACAGTACATTTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTGTTTGGAAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGTTCGGGGAAGATAACAGGCAATCTGGTCCAAATTGAGAACCTTGAGTACACAGTGGTTGTAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGACACATCCAATCATGGAGTT [...]
-        </sequence>
-        <sequence>
-            <taxon idref="D4SLanka78"/>
-            ATGCGATGCGTGGGAGTGGGGAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGATCTGGTGCTAGAACATGGAGGATGTGTCACAACCATGGCCCAAGGAAAACCAACCTTGGATTTTGAACTGATCAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCCATATCAAACATAACCACGGCAACAAGATGTCCAACGCAAGGAGAGCCCTGTCTCAAAGAGGAACAGGATCAACAGTACATCTGCCGGAGAGACGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCCTGCTTGGGAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGCTCGGGGAAGATAACAGGCAACTTAGTCCGAATTGAGAACCTTGAATACACAGTGGTTGTAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGACACATCCAACCACGGAGTG [...]
-        </sequence>
-        <sequence>
-            <taxon idref="D4Tahiti79"/>
-            ATGCGATGCGTAGGAGTAGGAAACAGAGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGATCTGGTGCTAGAACATGGAGGATGCGTCACAACCATGGCCCAGGGAAAACCAACCTTGGATTTTGAACTGACTAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCAATATCAAACATAACTACGGCAACAAGATGTCCAACGCAAGGAGAGCCTTATCTGAAAGAGGAACAGGACCAACAGTACATTTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTGTTTGGAAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGTTCGGGGAAGATAACAGGCAATTTGGTCCAAATTGAGAACCTTGAATACACAGTGGTTGTAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGACACATCCAATCATGGAGTT [...]
-        </sequence>
-        <sequence>
-            <taxon idref="D4Tahiti85"/>
-            ATGCGATGCGTAGGAGTAGGAAACAGAGACTTTGTGGAAGGAGTTTCAGGTGGAGCATGGGTCGATTTGGTGCTAGAACATGGAGGATGCGTCACAACCATGGCCCAGGGAAAACCAACCTTGGATTTTGAACTGACTAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCAATATCAAACATAACTACGGCAACAAGATGTCCAACGCAAGGAGAGCCTTATCTGAAAGAGGAACAGGACCAACAGTACATTTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTGTTTGGAAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGTTCGGGGAAGATAACAGGCAATCTGGTCCAAATTGAGAACCTTGAATACACAGTGGTCATAACAGTCCACAATGGAGACACCCATGCAGTAGGAAATGACACATCCAATCATGGAGTT [...]
-        </sequence>
-        <sequence>
-            <taxon idref="D4Thai63"/>
-            ATGCGATGCGTAGGAGTGGGGAACAGGGACTTTGTGGAAGGAGTCTCAGGTGGAGCATGGGTCGATCTGGTGCTAGAACATGGAGGATGTGTCACAACCATGGCTCAAGGAAAACCAACCTTGGATTTTGAACTGATCAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCGATATCAAACATAACCACGGCGACAAGATGTCCAACGCAAGGAGAGCCTTATCTCAAAGAGGAACAAGATCAACAGTACATCTGCCGGAGAGATGTGGTAGACAGAGGGTGGGGCAATGGCTGTGGCTTACTTGGAAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGCTCGGGGAAGATAACAGGCAACTTGGTTCGAATTGAGAACCTTGAATACACAGTGGTTGTGACAGTCCACAACGGAGACACCCATGCAGTAGGAAATGACATATCCAACCATGGAGTG [...]
-        </sequence>
-        <sequence>
-            <taxon idref="D4Thai78"/>
-            ATGCGATGCGTAGGAGTGGGGAACAGAGACTTTGTAGAAGGAGTCTCAGGTGGAGCATGGGTCGATCTGGTGCTAGAACATGGAGGATGTGTCACAACCATGGCCCAGGGAAAACCAACCTTGGATTTTGAACTGATCAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCCATATCAAACATAACCACGGCAACAAGATGTCCAACGCAAGGAGAGCCTTACCTCAAAGAGGAACAAGATCAACAGTACATCTGCCGGAGAGACGTGGTAGATAGAGGGTGGGGCAACGGCTGTGGCTTGCTTGGGAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGCTCGGGGAAGATAACAGGCAACTTAGTCCAAATTGAGAACCTTGAATACACAGTGGTTGTAACAGTCCACAATGGAGACACCCATGCTGTAGGAAATGATACATCCAACCACGGAGTG [...]
-        </sequence>
-        <sequence>
-            <taxon idref="D4Thai84"/>
-            ATGCGATGCGTAGGAGTAGGGAACAGAGACTTTGTAGAAGGAGTCTCAGGTGGAGCATGGGTCGATCTGGTGCTAGAACATGGAGGATGTGTCACAACCATGGCCCAGGGAAAACCAACCCTGGATTTTGAACTGATCAAGACAACAGCCAAGGAAGTGGCTCTGTTAAGAACCTATTGCATTGAAGCCTCCATATCAAACATAACCACGGCAACAAGATGTCCAACGCAAGGAGAGCCTTACCTCAAAGAGGAACAAGATCAACAGTACATCTGCCGGAGAGACGTGGTAGACAGAGGGTGGGGCAACGGCTGTGGCTTGTTTGGGAAAGGAGGAGTTGTGACATGTGCGAAGTTTTCATGCTCGGGGAAGATAACGGGCAACTTAGTCCAAATTGAGAACCTTGAATACACAGTGGTTGTAACAGTCCACAATGGAGACACCCATGCTGTAGGAAATGATACATCCAACCACGGAGTG [...]
-        </sequence>
-    </alignment>
-
-    <!-- The unique patterns for all positions                                   -->
-    <!-- npatterns=138                                                           -->
-    <patterns id="patterns" from="1">
-        <alignment idref="alignment"/>
-    </patterns>
-
-    <!-- A prior assumption that the population size has remained constant       -->
-    <!-- throughout the time spanned by the genealogy.                           -->
-    <constantSize id="constant" units="years">
-        <populationSize>
-            <parameter id="constant.popSize" value="380.0" lower="0.0" upper="38000.0"/>
-        </populationSize>
-    </constantSize>
-
-    <!-- Generate a random starting tree under the coalescent process            -->
-    <coalescentTree id="startingTree">
-        <taxa idref="taxa"/>
-        <constantSize idref="constant"/>
-    </coalescentTree>
-
-    <treeModel id="treeModel">
-        <coalescentTree idref="startingTree"/>
-        <rootHeight>
-            <parameter id="treeModel.rootHeight"/>
-        </rootHeight>
-        <nodeHeights internalNodes="true">
-            <parameter id="treeModel.internalNodeHeights"/>
-        </nodeHeights>
-        <nodeHeights internalNodes="true" rootNode="true">
-            <parameter id="treeModel.allInternalNodeHeights"/>
-        </nodeHeights>
-    </treeModel>
-
-    <coalescentLikelihood id="coalescent">
-        <model>
-            <constantSize idref="constant"/>
-        </model>
-        <populationTree>
-            <treeModel idref="treeModel"/>
-        </populationTree>
-    </coalescentLikelihood>
-
-    <!-- The uncorrelated relaxed clock (Drummond, Ho, Phillips & Rambaut, 2006) -->
-    <discretizedBranchRates id="branchRates">
-        <treeModel idref="treeModel"/>
-        <distribution>
-            <logNormalDistributionModel meanInRealSpace="true">
-                <mean>
-                    <parameter id="ucld.mean" value="2.3E-5" lower="0.0" upper="100.0"/>
-                </mean>
-                <stdev>
-                    <parameter id="ucld.stdev" value="0.1" lower="0.0" upper="10.0"/>
-                </stdev>
-            </logNormalDistributionModel>
-        </distribution>
-        <rateCategories>
-            <parameter id="branchRates.categories" dimension="32"/>
-        </rateCategories>
-    </discretizedBranchRates>
-
-    <rateStatistic id="meanRate" name="meanRate" mode="mean" internal="true" external="true">
-        <treeModel idref="treeModel"/>
-        <discretizedBranchRates idref="branchRates"/>
-    </rateStatistic>
-
-    <rateStatistic id="coefficientOfVariation" name="coefficientOfVariation" mode="coefficientOfVariation"
-                   internal="true" external="true">
-        <treeModel idref="treeModel"/>
-        <discretizedBranchRates idref="branchRates"/>
-    </rateStatistic>
-
-    <rateCovarianceStatistic id="covariance" name="covariance">
-        <treeModel idref="treeModel"/>
-        <discretizedBranchRates idref="branchRates"/>
-    </rateCovarianceStatistic>
-
-    <!-- The HKY substitution model (Hasegawa, Kishino & Yano, 1985)             -->
-    <hkyModel id="hky">
-        <frequencies>
-            <frequencyModel dataType="nucleotide">
-                <alignment idref="alignment"/>
-                <frequencies>
-                    <parameter id="hky.frequencies" dimension="4"/>
-                </frequencies>
-            </frequencyModel>
-        </frequencies>
-        <kappa>
-            <parameter id="hky.kappa" value="1.0" lower="0.0" upper="100.0"/>
-        </kappa>
-    </hkyModel>
-
-    <!-- site model                                                              -->
-    <siteModel id="siteModel">
-        <substitutionModel>
-            <hkyModel idref="hky"/>
-        </substitutionModel>
-
-    </siteModel>
-
-    <treeLikelihood id="treeLikelihood">
-        <patterns idref="patterns"/>
-        <treeModel idref="treeModel"/>
-        <siteModel idref="siteModel"/>
-        <discretizedBranchRates idref="branchRates"/>
-    </treeLikelihood>
-
-    <operators id="operators">
-        <scaleOperator scaleFactor="0.75" weight="1">
-            <parameter idref="hky.kappa"/>
-        </scaleOperator>
-        <scaleOperator scaleFactor="0.75" weight="3">
-            <parameter idref="ucld.mean"/>
-        </scaleOperator>
-        <scaleOperator scaleFactor="0.75" weight="3">
-            <parameter idref="ucld.stdev"/>
-        </scaleOperator>
-        <upDownOperator scaleFactor="0.75" weight="3">
-            <up>
-                <parameter idref="ucld.mean"/>
-            </up>
-            <down>
-                <parameter idref="treeModel.allInternalNodeHeights"/>
-            </down>
-        </upDownOperator>
-        <swapOperator size="1" weight="10" autoOptimize="false">
-            <parameter idref="branchRates.categories"/>
-        </swapOperator>
-        <randomWalkIntegerOperator windowSize="1.0" weight="10">
-            <parameter idref="branchRates.categories"/>
-        </randomWalkIntegerOperator>
-        <uniformIntegerOperator weight="10">
-            <parameter idref="branchRates.categories"/>
-        </uniformIntegerOperator>
-        <scaleOperator scaleFactor="0.75" weight="3">
-            <parameter idref="constant.popSize"/>
-        </scaleOperator>
-        <scaleOperator scaleFactor="0.75" weight="3">
-            <parameter idref="treeModel.rootHeight"/>
-        </scaleOperator>
-        <uniformOperator weight="30">
-            <parameter idref="treeModel.internalNodeHeights"/>
-        </uniformOperator>
-        <subtreeSlide size="38.0" gaussian="true" weight="15">
-            <treeModel idref="treeModel"/>
-        </subtreeSlide>
-        <narrowExchange weight="15">
-            <treeModel idref="treeModel"/>
-        </narrowExchange>
-        <wideExchange weight="3">
-            <treeModel idref="treeModel"/>
-        </wideExchange>
-        <wilsonBalding weight="3">
-            <treeModel idref="treeModel"/>
-        </wilsonBalding>
-    </operators>
-
-    <mcmc id="mcmc" chainLength="10000000" autoOptimize="true">
-        <posterior id="posterior">
-            <prior id="prior">
-                <coalescentLikelihood idref="coalescent"/>
-            </prior>
-            <likelihood id="likelihood">
-                <treeLikelihood idref="treeLikelihood"/>
-            </likelihood>
-        </posterior>
-        <operators idref="operators"/>
-        <log id="screenLog" logEvery="10000">
-            <column label="Posterior" dp="4" width="12">
-                <posterior idref="posterior"/>
-            </column>
-            <column label="Prior" dp="4" width="12">
-                <prior idref="prior"/>
-            </column>
-            <column label="Likelihood" dp="4" width="12">
-                <likelihood idref="likelihood"/>
-            </column>
-            <column label="Root Height" sf="6" width="12">
-                <parameter idref="treeModel.rootHeight"/>
-            </column>
-            <column label="Rate" sf="6" width="12">
-                <rateStatistic idref="meanRate"/>
-            </column>
-        </log>
-        <log id="fileLog" logEvery="10000" fileName="testUncorrelatedRelaxedClock.log">
-            <posterior idref="posterior"/>
-            <prior idref="prior"/>
-            <likelihood idref="likelihood"/>
-            <rateStatistic idref="meanRate"/>
-            <parameter idref="treeModel.rootHeight"/>
-            <parameter idref="constant.popSize"/>
-            <parameter idref="hky.kappa"/>
-            <parameter idref="ucld.mean"/>
-            <parameter idref="ucld.stdev"/>
-            <rateStatistic idref="coefficientOfVariation"/>
-            <rateCovarianceStatistic idref="covariance"/>
-            <treeLikelihood idref="treeLikelihood"/>
-            <coalescentLikelihood idref="coalescent"/>
-        </log>
-        <logTree id="treeFileLog" logEvery="10000" nexusFormat="true" fileName="testUncorrelatedRelaxedClock.trees"
-                 sortTranslationTable="true">
-            <treeModel idref="treeModel"/>
-            <discretizedBranchRates idref="branchRates"/>
-            <posterior idref="posterior"/>
-        </logTree>
-    </mcmc>
-
-    <report>
-        <property name="timer">
-            <object idref="mcmc"/>
-        </property>
-    </report>
-
-    <!--
-     To check that the program is working, the results are compared to previously
-     run results. The following are for a run of 10,000,000 steps:
-
-     27.523033333333334 minutes
-
-     burnIn=1000000
-     maxState=10000000
-
-     statistic                mean          hpdLower      hpdUpper      ESS
-     posterior                -3927.81      -3936.95      -3919.67      4037.42
-     ucld.mean                8.28472E-4    5.95169E-4    1.06184E-3    2306.96
-     ucld.stdev               0.17435       1.06719E-5    0.41554       2874.44
-     meanRate                 8.09909E-4    6.015E-4      1.03819E-3    2343.34
-     coefficientOfVariation   0.15982       9.94436E-6    0.37834       2880.80
-     covariance               -3.81803E-2   -0.36405      0.30941       8617.52
-     constant.popSize         37.3524       16.7320       61.1342       4073.34
-     hky.kappa                18.3053       11.7010       25.1850       7171.51
-     treeModel.rootHeight     69.2953       54.2928       85.8209       2560.98
-     treeLikelihood           -3855.78      -3862.64      -3849.29      7889.16
-     coalescent               -72.0313      -77.6206      -66.4644      2383.69
-     -->
-
-    <traceAnalysis fileName="testUncorrelatedRelaxedClock.log">
-        <expectation name="posterior" value="-3927.81"/>
-        <expectation name="ucld.mean" value="8.28472E-4"/>
-        <expectation name="ucld.stdev" value="0.17435"/>
-        <expectation name="meanRate" value="8.09909E-4"/>
-        <expectation name="coefficientOfVariation" value="0.15982"/>
-        <expectation name="covariance" value="-3.81803E-2"/>
-        <expectation name="constant.popSize" value="37.3524"/>
-        <expectation name="hky.kappa" value="18.3053"/>
-        <expectation name="treeModel.rootHeight" value="69.2953"/>
-        <expectation name="treeLikelihood" value="-3855.78"/>
-        <expectation name="skyline" value="-72.0313"/>
-    </traceAnalysis>
-
-    <treeTraceAnalysis fileName="testUncorrelatedRelaxedClock.trees"/>
-
-    <marginalLikelihoodAnalysis fileName="testUncorrelatedRelaxedClock.log">
-        <likelihoodColumn name="treeLikelihood" harmonicOnly="false" bootstrap="true"/>
-    </marginalLikelihoodAnalysis>
-
-</beast>
diff --git a/release/common/README.txt b/release/common/README.txt
index a9687a3..3a84394 100644
--- a/release/common/README.txt
+++ b/release/common/README.txt
@@ -1,4 +1,4 @@
-                    BEAST v1.8.2 2002-2015
+                    BEAST v1.8.3 2002-2016
         Bayesian Evolutionary Analysis Sampling Trees
                               by
       Alexei J. Drummond, Andrew Rambaut & Marc Suchard
@@ -16,7 +16,7 @@
                       msuchard at ucla.edu
 
 
-Last updated: a.rambaut at ed.ac.uk - 5th March 2015
+Last updated: a.rambaut at ed.ac.uk - 13th February 2016
 
 Contents:
 1) INTRODUCTION
@@ -129,6 +129,7 @@ BEAST arguments:
      -errors         "Specify maximum number of numerical errors before stopping"
      -threads        "The number of computational threads to use (default auto)"
      -java           "Use Java only, no native implementations"
+     -threshold      "Full evaluation test threshold (default 0.1)"
      -beagle         "Use BEAGLE library if available (default on)"
      -beagle_off     "Don't use BEAGLE library"
      -beagle_info          "BEAGLE: show information on available resources"
@@ -137,10 +138,23 @@ BEAST arguments:
      -beagle_CPU           "BEAGLE: use CPU instance"
      -beagle_GPU           "BEAGLE: use GPU instance if available"
      -beagle_SSE           "BEAGLE: use SSE extensions if available"
+     -beagle_SSE_off       "BEAGLE: turn off use of SSE extensions"
+     -beagle_cuda          "BEAGLE: use CUDA parallization if available"
+     -beagle_opencl        "BEAGLE: use OpenCL parallization if available"
      -beagle_single        "BEAGLE: use single precision if available"
      -beagle_double        "BEAGLE: use double precision if available"
+     -beagle_async         "BEAGLE: use asynchronous kernels if available"
      -beagle_scaling       "BEAGLE: specify scaling scheme to use"
-     -help"          "Print this information and stop"
+     -beagle_rescale       "BEAGLE: frequency of rescaling (dynamic scaling only)"
+     -mc3_chains    "number of chains"
+     -mc3_delta     "temperature increment parameter"
+     -mc3_temperatures     "a comma-separated list of the hot chain temperatures"
+     -mc3_swap      "frequency at which chains temperatures will be swapped"
+     -load_dump     "Specify a filename to load a dumped state from"
+     -dump_state    "Specify a state at which to write a dump file"
+     -dump_every    "Specify a frequency to write a dump file"
+     -version       "Print the version and credits and stop"
+     -help"         "Print this information and stop"
 
 For example:
 
@@ -206,7 +220,7 @@ The website for beast is here:
 
 Source code distributed under the GNU Lesser General Public License:
 
-<http://code.google.com/p/beast-mcmc/>
+<https://github.com/beast-dev/beast-mcmc/>
 
 ___________________________________________________________________________
 8) ACKNOWLEDGMENTS
diff --git a/release/common/VERSION HISTORY.txt b/release/common/VERSION HISTORY.txt
index 4c89ce8..b2214b3 100644
--- a/release/common/VERSION HISTORY.txt	
+++ b/release/common/VERSION HISTORY.txt	
@@ -1,4 +1,4 @@
-                    BEAST v1.8.2 2002-2015
+                    BEAST v1.8.3 2002-2016
         Bayesian Evolutionary Analysis Sampling Trees
                               by
       Alexei J. Drummond, Andrew Rambaut & Marc A. Suchard
@@ -17,8 +17,39 @@
 
 
 Version History
-Last updated: a.rambaut at ed.ac.uk - 5th March 2015
-All issues can be viewed at http://code.google.com/p/beast-mcmc/issues/list
+Last updated: a.rambaut at ed.ac.uk - 13th February 2016
+All issues can be viewed at https://github.com/beast-dev/beast-mcmc/issues
+
+================================================================================
+
+Version 1.8.3 released 13th February 2016
+
+    New Features:
+        Generalized Stepping Stone sampling for Marginal Likelihood Estimation.
+		Continuous quantile version of uncorrelated relaxed clock as option in
+		BEAUti [from Li & Drummond 2012, MBE].
+		Option in BEAUti to log complete histories in Markov Jump Counting.
+        Jukes-Cantor added as an option for BEAUti nucleotide substition model.
+        New tree transition kernel (SubTreeLeap operator) implementd in BEAST.
+        Defaulting to randomizing rate categories for DiscretizedBranchRates.
+        BEAUti 'guess dates' options are now persistent from run to run (and
+        shared with Path-O-Gen/Tempest).
+        Transmission tree model of Hall et al (2016) implemented.
+        Fast, general multidimensional scaling (MDS) implemented.
+        Clock panel in BEAUti simplified.
+        Parameter linking available in Priors panel in BEAUti.
+        Command line is logged into log file headers for reference.
+
+	Bug Fixes:
+	    Issue 779:  Ancestral sequence reconstruction broken with
+	                <mergePatterns> elements.
+        Issue 772:  BEAUTi should include BranchRatesModels in prior density.
+        Issue 768:  BEAUti creates a subtree slide operator with size zero when
+                    data is invariable.
+        Issue 758:  Full evaluation error.
+        Issue 304:  Label individual transition rate dimensions for CTM models
+                    in log files.
+
 
 ================================================================================
 
@@ -67,7 +98,7 @@ Version 1.8.1 released 20th September 2014
 					traits.	
 		Issue 722:	Complex charsets in nexus files don't parse.
 		Issue 714:	sets block not recognized in 1.8.
-		Issue 709:	MCMC tab in Beautie v1.8.0 problem with multiple unlinked 
+		Issue 709:	MCMC tab in Beautie v1.8.0 problem with multiple unlinked
 					loci.
 
 ================================================================================
diff --git a/release_tempest/Linux/icons/pathogen.png b/release_tempest/Linux/icons/pathogen.png
new file mode 100644
index 0000000..90d8e42
Binary files /dev/null and b/release_tempest/Linux/icons/pathogen.png differ
diff --git a/release_tempest/Linux/scripts/pathogen b/release_tempest/Linux/scripts/pathogen
new file mode 100755
index 0000000..31e64b7
--- /dev/null
+++ b/release_tempest/Linux/scripts/pathogen
@@ -0,0 +1,27 @@
+#!/bin/sh
+
+if [ -z "$PATHOGEN" ]; then
+	## resolve links - $0 may be a link to application
+	PRG="$0"
+
+	# need this for relative symlinks
+	while [ -h "$PRG" ] ; do
+	    ls=`ls -ld "$PRG"`
+	    link=`expr "$ls" : '.*-> \(.*\)$'`
+	    if expr "$link" : '/.*' > /dev/null; then
+		PRG="$link"
+	    else
+		PRG="`dirname "$PRG"`/$link"
+	    fi
+	done
+
+	# make it fully qualified
+	saveddir=`pwd`
+	PATHOGEN0=`dirname "$PRG"`
+	PATHOGEN=`cd "$PATHOGEN0" && pwd`
+	cd "$saveddir"
+fi
+
+PATHOGEN_LIB=$PATHOGEN/lib
+java -Xms64m -Xmx128m -jar $PATHOGEN_LIB/pathogen.jar $*
+
diff --git a/release_tempest/common/README.txt b/release_tempest/common/README.txt
new file mode 100644
index 0000000..7bed074
--- /dev/null
+++ b/release_tempest/common/README.txt
@@ -0,0 +1,92 @@
+                      Path-O-Gen v1.2 2009
+                Temporal Signal Investigation Tool
+                              by
+                       Andrew Rambaut
+
+              Institute of Evolutionary Biology
+                    University of Edinburgh
+                      a.rambaut at ed.ac.uk
+
+UNIX / Mac OS X / Linux / Windows README 
+a.rambaut at ed.ac.uk - 27 November 2009
+
+Contents:
+1) INTRODUCTION
+2) INSTALLING AND RUNNING PATH-O-GEN
+3) ANALYSING TREES
+4) VERSION HISTORY
+5) SUPPORT & LINKS
+6) ACKNOWLEDGMENTS
+
+___________________________________________________________________________
+1) INTRODUCTION
+
+Path-O-Gen is a tool for investigating the temporal signal and 'clocklikeness' of molecular phylogenies. It can read and analyse contemporaneous trees (where all sequences have been collected at the same time) and dated-tip trees (where sequences have been collected at different dates). It is designed for analysing trees that have not been inferred under a molecular-clock assumption to see how valid this assumption may be. It can also root the tree at the position that is likely to be th [...]
+
+___________________________________________________________________________
+2) INSTALLING AND RUNNING PATH-O-GEN
+
+Mac OS X: To install Path-O-Gen, simply drag the program file to where you normally put applications. Then double click to run.
+
+Windows: To install Path-O-Gen, simply drag the program file to where you normally put applications. Then double click to run.
+
+Linux / UNIX: Copy or move the folder to where you normally put applications and then double click the "pathogen.jar" file (in the lib/ directory) to run or type "./pathogen" at the command-line. 
+
+___________________________________________________________________________
+3) ANALYSING TREES
+
+Once Path-O-Gen is running it will ask for a tree file to load. This should be in NEXUS format and should have been constructed using a phylogenetic method that does not assume a molecular clock (such as Neighbor-Joining or Maximum Likelihood or Bayesian methods with the molecular clock option off. It is also important that the trees contain branch lengths as genetic distance (substitutions per site). 
+
+When the tree is loaded you will see a table containg all the taxa (sequence labels). If the sequences are contemporaneous (i.e., not sampled through time) then you can leave this as it is. If the sequences have dates associated with them you can enter them into this table. If the taxon labels have the dates encoded in them, you can use the "Guess Dates" button to try and extract them. The final thing you need to set here is whether the dates are "Since some time in the past" - which the [...]
+
+You can now select the "Trees" tab at the top of the window and you will see your tree, along with a table of statistics on the left. The nature of the statistics will depend on whether the tree has contemporaneous tips or dated tips. If it is a contemporaneous tree then the statistics will include the mean and variance of the root-to-tip distances for all tips. If it has dated tips then the table will contain various details of a regression of root-to-tip distances against dates of samp [...]
+
+Selecting the "Best-fitting root" button at the top left of the window will attempt to find the root of the tree that gives the best fit to the hypothesis that the data have a roughly constant rate of evolution. For contemporaneous trees this will find the root which minimizes the variance of root-to-tip variance. For dated tips this will be the root which maximizes the correlation of root-to-tip distance to sampling date. You can also select the "Root-to-tip" tab which will show you a c [...]
+
+Finally, you can export the tree (rooted as displayed) using the "Export Tree..." option in the file menu and export the raw root to tip data using the "Export Data..." option. To obtain a graphic of the displayed tree or chart, you can use the Print option and then "Save as PDF..." or similar option depending on the operating system being used.
+
+___________________________________________________________________________
+4) VERSION HISTORY
+
+---Version 1.2 27 November 2009---
+
+* Added the ability to select points in the plots and the equivalent taxa will be highlighted in the tree (and vice-versa).
+
+* Added a residual plot for time-sampled trees. This shows the distribution of residual from the regression line to look for outliers.
+
+---Version 1.1 23 February 2009---
+
+* Added a more flexible tree viewing component (based on FigTree)
+
+* Tips of a dated tip tree are now shown coloured by their residual from the root to tip regression line (blue: above, red: below, black on the regression).
+
+---Version 1.0 12 February 2009---
+
+* First released verson
+
+___________________________________________________________________________
+5) SUPPORT & LINKS
+
+Please email me to discuss any problems:
+
+a.rambaut at ed.ac.uk
+
+___________________________________________________________________________
+6) ACKNOWLEDGMENTS
+
+Thanks to the following for supplying code or assisting with the creation or testing of BEAST and its associated software:
+
+	Alexander Alekseyenko
+	Erik Bloomquist
+	Roald Forsberg
+	Joseph Heled
+	Simon Ho
+	Philippe Lemey
+	Gerton Lunter
+	Sidney Markowitz
+	Tulio de Oliveira
+	Oliver Pybus
+	Beth Shapiro
+	Korbinian Strimmer
+	Marc Suchard
+	+ numerous other users who have kindly helped make BEAST better.
diff --git a/src/dr/app/beagle/evomodel/branchmodel/BranchAssignmentModel.java b/src/dr/app/beagle/evomodel/branchmodel/BranchAssignmentModel.java
index 67b4d76..f0fac83 100644
--- a/src/dr/app/beagle/evomodel/branchmodel/BranchAssignmentModel.java
+++ b/src/dr/app/beagle/evomodel/branchmodel/BranchAssignmentModel.java
@@ -1,5 +1,8 @@
 package dr.app.beagle.evomodel.branchmodel;
 
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileReader;
 import java.util.LinkedHashMap;
 import java.util.LinkedList;
 import java.util.List;
@@ -7,7 +10,9 @@ import java.util.List;
 import dr.app.beagle.evomodel.parsers.BranchAssignmentModelParser;
 import dr.app.beagle.evomodel.substmodel.FrequencyModel;
 import dr.app.beagle.evomodel.substmodel.SubstitutionModel;
+import dr.evolution.io.NexusImporter;
 import dr.evolution.tree.NodeRef;
+import dr.evolution.tree.Tree;
 import dr.evomodel.tree.TreeModel;
 import dr.inference.model.AbstractModel;
 import dr.inference.model.Model;
@@ -57,6 +62,20 @@ public class BranchAssignmentModel extends AbstractModel implements BranchModel
 		// substitutionModels.add(null);
 		// }
 		
+//	try {	
+//		
+//		File file = new File("/home/filip/Dropbox/BeagleSequenceSimulator/branchSpecificSimulations/annotated_tree.nexus");
+//		BufferedReader reader;
+//		
+//		reader = new BufferedReader(new FileReader(file));
+//		NexusImporter importer = new NexusImporter(reader);
+//		Tree tree = importer.importTree(null);
+//		this.treeModel = new TreeModel(tree);
+//		
+//	} catch ( Exception e) {
+//		e.printStackTrace();
+//	} 
+		
 		for (NodeRef node : this.treeModel.getNodes()) {
 			if (!treeModel.isRoot(node)) {
 
diff --git a/src/dr/app/beagle/evomodel/branchmodel/lineagespecific/BeagleBranchLikelihood.java b/src/dr/app/beagle/evomodel/branchmodel/lineagespecific/BeagleBranchLikelihood.java
index 414bbd7..7aa2f1c 100644
--- a/src/dr/app/beagle/evomodel/branchmodel/lineagespecific/BeagleBranchLikelihood.java
+++ b/src/dr/app/beagle/evomodel/branchmodel/lineagespecific/BeagleBranchLikelihood.java
@@ -25,9 +25,7 @@
 
 package dr.app.beagle.evomodel.branchmodel.lineagespecific;
 
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
+import java.util.*;
 
 import beagle.Beagle;
 import beagle.BeagleFactory;
@@ -409,6 +407,11 @@ public class BeagleBranchLikelihood implements Likelihood {
 	}
 
 	@Override
+	public Set<Likelihood> getLikelihoodSet() {
+		return new HashSet<Likelihood>(Arrays.asList(this));
+	}
+
+	@Override
 	public boolean isUsed() {
 		return used;
 	}
diff --git a/src/dr/app/beagle/evomodel/branchmodel/lineagespecific/BranchLikelihood.java b/src/dr/app/beagle/evomodel/branchmodel/lineagespecific/BranchLikelihood.java
index 7658659..9b955d7 100644
--- a/src/dr/app/beagle/evomodel/branchmodel/lineagespecific/BranchLikelihood.java
+++ b/src/dr/app/beagle/evomodel/branchmodel/lineagespecific/BranchLikelihood.java
@@ -25,8 +25,7 @@
 
 package dr.app.beagle.evomodel.branchmodel.lineagespecific;
 
-import java.util.LinkedList;
-import java.util.List;
+import java.util.*;
 
 import dr.app.beagle.evomodel.branchmodel.BranchModel;
 import dr.app.beagle.evomodel.sitemodel.SiteRateModel;
@@ -171,6 +170,11 @@ public class BranchLikelihood implements Likelihood {
 	}// END: prettyName
 
 	@Override
+	public Set<Likelihood> getLikelihoodSet() {
+		return new HashSet<Likelihood>(Arrays.asList(this));
+	}
+
+	@Override
 	public boolean isUsed() {
 		return true;
 	}// END: isUsed
diff --git a/src/dr/app/beagle/evomodel/branchmodel/lineagespecific/DirichletProcessOperator.java b/src/dr/app/beagle/evomodel/branchmodel/lineagespecific/DirichletProcessOperator.java
index ac920c8..5e22a3c 100644
--- a/src/dr/app/beagle/evomodel/branchmodel/lineagespecific/DirichletProcessOperator.java
+++ b/src/dr/app/beagle/evomodel/branchmodel/lineagespecific/DirichletProcessOperator.java
@@ -51,17 +51,17 @@ public class DirichletProcessOperator extends SimpleMCMCOperator implements
 	private double intensity;
 	private int mhSteps;
 
-	private Parameter zParameter;
-	// private CountableRealizationsParameter countableRealizationsParameter;
-	private Parameter parameter;
+	private Parameter categoriesParameter;
+	 private CountableRealizationsParameter allParameters;
+	private Parameter uniqueParameters;
 
 	private CompoundLikelihood likelihood;
 
 	public DirichletProcessOperator(DirichletProcessPrior dpp, //
-			Parameter zParameter, //
-			// CountableRealizationsParameter countableRealizationsParameter,
-			Parameter parameter, //
-			CompoundLikelihood likelihood, //
+			Parameter categoriesParameter, //
+			Parameter uniqueParameters, //
+			 CountableRealizationsParameter allParameters,
+			Likelihood likelihood, //
 			int mhSteps, //
 			double weight//
 	) {
@@ -69,24 +69,26 @@ public class DirichletProcessOperator extends SimpleMCMCOperator implements
 		this.dpp = dpp;
 		this.intensity = dpp.getGamma();
 		this.uniqueRealizationCount = dpp.getCategoryCount();
-		this.realizationCount = zParameter.getDimension();
-
-		this.zParameter = zParameter;
-		// this.countableRealizationsParameter = countableRealizationsParameter;
-		this.parameter = parameter;
-		this.likelihood = likelihood;
+		this.realizationCount = categoriesParameter.getDimension();
 
+		this.categoriesParameter = categoriesParameter;
+		 this.allParameters = allParameters;
+		this.uniqueParameters = uniqueParameters;
+		this.likelihood = (CompoundLikelihood) likelihood;
+//		this.likelihood =  likelihood;
+		
 		this.mhSteps = mhSteps;
+		
 		setWeight(weight);
 
 	}// END: Constructor
 
 	public Parameter getParameter() {
-		return zParameter;
+		return categoriesParameter;
 	}// END: getParameter
 
 	public Variable getVariable() {
-		return zParameter;
+		return categoriesParameter;
 	}// END: getVariable
 
 	@Override
@@ -111,49 +113,116 @@ public class DirichletProcessOperator extends SimpleMCMCOperator implements
 			int[] occupancy = new int[uniqueRealizationCount];
 			for (int i = 0; i < realizationCount; i++) {
 				if (i != index) {
-					int j = (int) zParameter.getParameterValue(i);
+					int j = (int) categoriesParameter.getParameterValue(i);
 					occupancy[j]++;
 				}// END: i check
 			}// END: i loop
-			
-			
 
-			// TODO: set parameters at index values 
-			int category = (int) zParameter.getParameterValue(index);
 			
-			for (int i = 0; i < uniqueRealizationCount; i++) {
+	        double[] existingValues = new double[uniqueRealizationCount];
+	        int counter = 0;
+	        int singletonIndex = -1;
+	        for(int i = 0; i < uniqueRealizationCount;i++){
+	            if(occupancy[i] > 0) {
+	            	
+	                occupancy[counter] = occupancy[i];
+	                existingValues[counter++] = dpp.getUniqueParameter(i) .getParameterValue(0);
+
+	            } else {
+	            
+	            	singletonIndex = i;
+
+	            }//END: occupancy check
+	            
+	        }//END: i loop
 			
-				double candidate =0;
-				if (occupancy[i] == 0) {// draw new
-
-					// draw from base model
-
-					 candidate = dpp.baseModel.nextRandom()[0];
-
-				} else {// draw existing
-
-					// likelihood for component x_index
-
-					 candidate = dpp.getUniqueParameter(i)
-							.getParameterValue(0);
-
-
-				}// END: occupancy check
-				
-				parameter.setParameterValue(category, candidate);
+			
+			// Propose new value(s)
+			double[] baseProposals = new double[realizationCount];
+			for (int i = 0; i < baseProposals.length; i++) {
 				
+				baseProposals[i] = dpp.baseModel.nextRandom()[0];
 				
-			}// END: i loop
-			
-			double loglike = likelihood.getLogLikelihood();
-			System.out.println(loglike);
-			System.exit(-1);
+			}
 			
+	        // If a singleton
+            if(singletonIndex > -1) {
+            	
+                baseProposals[0] = uniqueParameters.getParameterValue(singletonIndex);
+
+            }
+
+			double[] logClusterProbs = new double[uniqueRealizationCount];
+            
+            // draw existing
+            int i;
+            for(i = 0; i < counter; i++) {
+            
+            	  logClusterProbs[i] = Math.log(occupancy[i] / (realizationCount - 1 + intensity));
+            	  
+            	  double value =  allParameters.getParameterValue(index);
+            	  double candidate = existingValues[i];
+            	  allParameters.setParameterValue(index, candidate);
+				  likelihood.makeDirty();
+            	  
+            	  logClusterProbs[i] = logClusterProbs[i] + likelihood.getLikelihood(index) .getLogLikelihood();
+//				  logClusterProbs[i] = logClusterProbs[i] + likelihood .getLogLikelihood();
+				  
+//            	  System.out.println(likelihood.getLikelihood(index) .getLogLikelihood() + " " + likelihood .getLogLikelihood());
+            	  
+            	  allParameters.setParameterValue(index, value);
+            	  likelihood.makeDirty();
+            	  
+            }
+            
+            // draw new
+            for(; i < logClusterProbs.length; i++){
+
+            	logClusterProbs[i] = Math.log((intensity) / (realizationCount - 1 + intensity)); 
+//            	logClusterProbs[i] = Math.log(intensity / uniqueRealizationCount / (realizationCount - 1 + intensity));
+            	
+            	  double value =  allParameters.getParameterValue(index);
+            	 double candidate = baseProposals[i - counter];
+            	 allParameters.setParameterValue(index, candidate);
+ 				 likelihood.makeDirty();
+ 				 
+            	 logClusterProbs[i] = logClusterProbs[i] + likelihood.getLikelihood(index).getLogLikelihood();
+// 				 logClusterProbs[i] = logClusterProbs[i] + likelihood.getLogLikelihood();
+ 				 
+//           	  System.out.println(likelihood.getLikelihood(index) .getLogLikelihood() + " " + likelihood .getLogLikelihood());
+            	 
+            	 allParameters.setParameterValue(index, value);
+            	 likelihood.makeDirty();
+            	 
+            }
+            
+            double smallestVal = logClusterProbs[0];
+            for(i = 1; i < uniqueRealizationCount; i++){
+                
+            	if(smallestVal > logClusterProbs[i]) {
+                    smallestVal = logClusterProbs[i];
+                }
+            
+            }
+            
+            
+            double[] clusterProbs = new double[uniqueRealizationCount];
+            for(i = 0; i < clusterProbs.length;i++) {
+                    clusterProbs[i] = Math.exp(logClusterProbs[i]-smallestVal);
+            
+            }
+
+//            dr.app.bss.Utils.printArray(clusterProbs);
+//         	System.exit(-1);
+            
+			// sample
+			int sampledCluster = MathUtils.randomChoicePDF(clusterProbs);
+			categoriesParameter.setParameterValue(index, sampledCluster);
+            
+            
 		}//END: index loop
 		
 		
-		
-		
 	}//END: doOp
 	
 	
@@ -167,7 +236,7 @@ public class DirichletProcessOperator extends SimpleMCMCOperator implements
 
 				if (i != index) {
 
-					int j = (int) zParameter.getParameterValue(i);
+					int j = (int) categoriesParameter.getParameterValue(i);
 					occupancy[j]++;
 
 				}// END: i check
@@ -182,8 +251,8 @@ public class DirichletProcessOperator extends SimpleMCMCOperator implements
 			Likelihood clusterLikelihood = (Likelihood) likelihood.getLikelihood(index);
 //			Likelihood clusterLikelihood = likelihood;
 			
-			int category = (int) zParameter.getParameterValue(index);
-			double value = parameter.getParameterValue(category);
+			int category = (int) categoriesParameter.getParameterValue(index);
+			double value = uniqueParameters.getParameterValue(category);
 			
 			double[] clusterProbs = new double[uniqueRealizationCount];
 			
@@ -196,13 +265,11 @@ public class DirichletProcessOperator extends SimpleMCMCOperator implements
 
 					double candidate = dpp.baseModel.nextRandom()[0];
 
-					parameter.setParameterValue(category, candidate);
+					uniqueParameters.setParameterValue(category, candidate);
 					double loglike = clusterLikelihood.getLogLikelihood();
-					parameter.setParameterValue(category, value);
+					uniqueParameters.setParameterValue(category, value);
 
-					logprob = Math.log((intensity)
-							/ (realizationCount - 1 + intensity))
-							+ loglike;
+					logprob = Math.log((intensity) / (realizationCount - 1 + intensity)) + loglike;
 
 				} else {// draw existing
 
@@ -211,26 +278,19 @@ public class DirichletProcessOperator extends SimpleMCMCOperator implements
 					double candidate = dpp.getUniqueParameter(i)
 							.getParameterValue(0);
 
-					parameter.setParameterValue(category, candidate);
+					uniqueParameters.setParameterValue(category, candidate);
 					double loglike = clusterLikelihood.getLogLikelihood();
-					parameter.setParameterValue(category, value);
+					uniqueParameters.setParameterValue(category, value);
 
-					logprob = Math.log(occupancy[i])
-							/ (realizationCount - 1 + intensity) + loglike;
+					logprob = Math.log(occupancy[i]) / (realizationCount - 1 + intensity) + loglike;
 
 				}// END: occupancy check
 
 				clusterProbs[i] = logprob;
 			}// END: i loop
 
-			
-			//////////////////////////////////////
-			
 			dr.app.bss.Utils.exponentiate(clusterProbs);
 
-//			dr.app.bss.Utils.printArray(clusterProbs);
-//			System.exit(-1);
-
 			if (DEBUG) {
 				System.out.println("P(z[index] | z[-index]): ");
 				dr.app.bss.Utils.printArray(clusterProbs);
@@ -238,7 +298,7 @@ public class DirichletProcessOperator extends SimpleMCMCOperator implements
 
 			// sample
 			int sampledCluster = MathUtils.randomChoicePDF(clusterProbs);
-			zParameter.setParameterValue(index, sampledCluster);
+			categoriesParameter.setParameterValue(index, sampledCluster);
 
 			if (DEBUG) {
 				System.out
diff --git a/src/dr/app/beagle/evomodel/branchmodel/lineagespecific/DirichletProcessOperatorParser.java b/src/dr/app/beagle/evomodel/branchmodel/lineagespecific/DirichletProcessOperatorParser.java
index 3e634a0..3ad4b96 100644
--- a/src/dr/app/beagle/evomodel/branchmodel/lineagespecific/DirichletProcessOperatorParser.java
+++ b/src/dr/app/beagle/evomodel/branchmodel/lineagespecific/DirichletProcessOperatorParser.java
@@ -27,6 +27,7 @@ package dr.app.beagle.evomodel.branchmodel.lineagespecific;
 
 import dr.inference.model.CompoundLikelihood;
 import dr.inference.model.CompoundParameter;
+import dr.inference.model.Likelihood;
 import dr.inference.model.Parameter;
 import dr.inference.operators.MCMCOperator;
 import dr.xml.AbstractXMLObjectParser;
@@ -46,20 +47,24 @@ public class DirichletProcessOperatorParser extends AbstractXMLObjectParser {
 	public Object parseXMLObject(XMLObject xo) throws XMLParseException {
 
 		DirichletProcessPrior dpp = (DirichletProcessPrior) xo.getChild(DirichletProcessPrior.class);
-		CompoundLikelihood likelihood = (CompoundLikelihood) xo .getElementFirstChild(DATA_LOG_LIKELIHOOD);
+		Likelihood likelihood = (Likelihood) xo .getElementFirstChild(DATA_LOG_LIKELIHOOD);
 		Parameter categoriesParameter = (Parameter) xo.getElementFirstChild(  DirichletProcessPriorParser.CATEGORIES);
 		
-//		CountableRealizationsParameter allParameters = (CountableRealizationsParameter) xo.getChild(CountableRealizationsParameter.class);
+		CountableRealizationsParameter allParameters = (CountableRealizationsParameter) xo.getChild(CountableRealizationsParameter.class);
 		CompoundParameter uniquelyRealizedParameters = (CompoundParameter) xo.getChild(CompoundParameter.class);
 		
 		int M = xo.getIntegerAttribute(MH_STEPS);
 		final double weight = xo.getDoubleAttribute(MCMCOperator.WEIGHT);
 
-		return 
-		new DirichletProcessOperator(dpp, categoriesParameter, 
-				uniquelyRealizedParameters, 
-//				allParameters,
-				likelihood, M, weight);
+		return new DirichletProcessOperator(dpp, // 
+				categoriesParameter, //
+				uniquelyRealizedParameters, //
+				allParameters, //
+				likelihood, //
+				M, //
+				weight //
+				);
+		
 	}// END: parseXMLObject
 
 	@Override
@@ -67,7 +72,7 @@ public class DirichletProcessOperatorParser extends AbstractXMLObjectParser {
 		return new XMLSyntaxRule[] {
 		new ElementRule(DirichletProcessPrior.class, false),
 		new ElementRule(CompoundParameter.class, false), //
-//		new ElementRule(CountableRealizationsParameter.class, false), //
+		new ElementRule(CountableRealizationsParameter.class, false), //
 		AttributeRule.newDoubleRule(MCMCOperator.WEIGHT) //
 		};
 
diff --git a/src/dr/app/beagle/evomodel/newtreelikelihood/NewAbstractLikelihoodOnTree.java b/src/dr/app/beagle/evomodel/newtreelikelihood/NewAbstractLikelihoodOnTree.java
new file mode 100644
index 0000000..db73f99
--- /dev/null
+++ b/src/dr/app/beagle/evomodel/newtreelikelihood/NewAbstractLikelihoodOnTree.java
@@ -0,0 +1,640 @@
+/*
+ * NewAbstractTreeLikelihood.java
+ *
+ * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
+ *
+ * This file is part of BEAST.
+ * See the NOTICE file distributed with this work for additional
+ * information regarding copyright ownership and licensing.
+ *
+ * BEAST is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+ *
+ *  BEAST is distributed in the hope that it will be useful,
+ *  but WITHOUT ANY WARRANTY; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ *  GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with BEAST; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA  02110-1301  USA
+ */
+
+package dr.app.beagle.evomodel.newtreelikelihood;
+
+import beagle.Beagle;
+import dr.app.beagle.evomodel.treelikelihood.BufferIndexHelper;
+import dr.app.beagle.evomodel.treelikelihood.EvolutionaryProcessDelegate;
+import dr.evolution.tree.NodeRef;
+import dr.evolution.tree.Tree;
+import dr.evomodel.tree.TreeModel;
+import dr.inference.model.*;
+import dr.xml.Reportable;
+
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * AbstractTreeLikelihood - a base class for likelihood calculators of sites on a tree.
+ *
+ * @author Andrew Rambaut
+ * @author Marc Suchard
+ * @version $Id: AbstractTreeLikelihood.java,v 1.16 2005/06/07 16:27:39 alexei Exp $
+ */
+
+public abstract class NewAbstractLikelihoodOnTree extends AbstractModelLikelihood implements Reportable {
+
+    protected static final boolean COUNT_TOTAL_OPERATIONS = false;
+
+    public NewAbstractLikelihoodOnTree(String name, TreeModel treeModel, Map<Set<String>, Parameter> partialsRestrictions) {
+
+        super(name);
+
+        this.treeModel = treeModel;
+        addModel(treeModel);
+
+        nodeCount = treeModel.getNodeCount();
+        tipCount = treeModel.getExternalNodeCount();
+        internalNodeCount = nodeCount - tipCount;
+
+        // one partials buffer for each tip and two for each internal node (for store restore)
+        partialBufferHelper = new BufferIndexHelper(nodeCount, tipCount);
+
+        updateNode = new boolean[nodeCount];
+        for (int i = 0; i < nodeCount; i++) {
+            updateNode[i] = true;
+        }
+
+        likelihoodKnown = false;
+
+        // Partials restrictions
+        this.partialsRestrictions = partialsRestrictions;
+//            hasRestrictedPartials = (partialsRestrictions != null);
+        if (hasRestrictedPartials) {
+            numRestrictedPartials = partialsRestrictions.size();
+            updateRestrictedNodePartials = true;
+            partialsMap = new Parameter[treeModel.getNodeCount()];
+//            partials = new double[stateCount * patternCount * categoryCount];
+        } else {
+            numRestrictedPartials = 0;
+            updateRestrictedNodePartials = false;
+        }
+    }
+
+    public void getPartials(int number, double[] partials) {
+        int cumulativeBufferIndex = Beagle.NONE;
+        /* No need to rescale partials */
+        beagle.getPartials(partialBufferHelper.getOffsetIndex(number), cumulativeBufferIndex, partials);
+    }
+
+    /**
+     * Set update flag for a node and its children
+     */
+    protected void updateNode(NodeRef node) {
+
+        updateNode[node.getNumber()] = true;
+        likelihoodKnown = false;
+    }
+
+    /**
+     * Set update flag for a node and its direct children
+     */
+    protected void updateNodeAndChildren(NodeRef node) {
+        updateNode[node.getNumber()] = true;
+
+        for (int i = 0; i < treeModel.getChildCount(node); i++) {
+            NodeRef child = treeModel.getChild(node, i);
+            updateNode[child.getNumber()] = true;
+        }
+        likelihoodKnown = false;
+    }
+
+    /**
+     * Set update flag for a node and all its descendents
+     */
+    protected void updateNodeAndDescendents(NodeRef node) {
+        updateNode[node.getNumber()] = true;
+
+        for (int i = 0; i < treeModel.getChildCount(node); i++) {
+            NodeRef child = treeModel.getChild(node, i);
+            updateNodeAndDescendents(child);
+        }
+
+        likelihoodKnown = false;
+    }
+
+    /**
+     * Set update flag for all nodes
+     */
+    protected void updateAllNodes() {
+        for (int i = 0; i < nodeCount; i++) {
+            updateNode[i] = true;
+        }
+        likelihoodKnown = false;
+    }
+
+//    protected abstract boolean hasBranchSpecificEvolutionaryProcess();
+
+    protected boolean updateBranchSpecificEvolutionaryProcess(final Tree tree, final int nodeNum, final NodeRef parent,
+                                                              final NodeRef node, final boolean flip) {
+        return false;
+    }
+
+    protected void handlePartialsScaling(final int[] operations, final int nodeNum, final int x) {
+        // Do nothing
+    }
+
+//    protected void handleEvolutionaryProcess(final int[] operations, final NodeRef child1, final NodeRef child2, final int x) {
+//        // Do nothing
+//    }
+
+    protected void handleRestrictedPartials(final int nodeNum) {
+        // Do nothing
+    }
+
+    protected void setEvolutionaryProcessDelegate(final EvolutionaryProcessDelegate delegate) {
+        this.evolutionaryProcessDelegate = delegate;
+    }
+
+    protected void prepareStorage() {
+        if (branchUpdateIndices == null) {
+            branchUpdateIndices = new int[nodeCount];
+            branchLengths = new double[nodeCount];
+//            scaleBufferIndices = new int[internalNodeCount];
+//            storedScaleBufferIndices = new int[internalNodeCount];
+        }
+
+        if (operations == null) {
+            operations = new int[numRestrictedPartials + 1][internalNodeCount * Beagle.OPERATION_TUPLE_SIZE];
+            operationCount = new int[numRestrictedPartials + 1];
+        }
+    }
+
+    protected void prepareScaling() {
+        // Do nothing
+    }
+
+    protected void prepareTips() {
+        // Do nothing
+    }
+
+    protected void updateRootInformation() {
+        // Do nothing
+    }
+
+    protected int accumulateScaleFactors() {
+        return Beagle.NONE;
+    }
+
+    protected void updateSiteModelAction() {
+        // Do nothing
+    }
+
+    protected double computedAscertainedLogLikelihood() {
+        throw new RuntimeException("Not an ascertained model");
+    }
+
+    protected boolean doRescalingNow(boolean firstAttempt) {
+        return false;
+    }
+
+    /**
+      * Calculate the log likelihood of the current state.
+      *
+      * @return the log likelihood.
+      */
+    protected double calculateLogLikelihood() {
+
+        prepareStorage();
+
+        prepareScaling();
+
+        prepareTips();
+
+        branchUpdateCount = 0;
+        operationListCount = 0;
+
+        if (hasRestrictedPartials) {
+            for (int i = 0; i <= numRestrictedPartials; i++) {
+                operationCount[i] = 0;
+            }
+        } else {
+            operationCount[0] = 0;
+        }
+
+        final NodeRef root = treeModel.getRoot();
+        postOrderTraverse(treeModel, root, null, true);
+
+        if (updateSubstitutionModel) { // TODO More efficient to update only the substitution model that changed, instead of all
+            evolutionaryProcessDelegate.updateSubstitutionModels(beagle);
+
+            // we are currently assuming a no-category model...
+        }
+
+        if (updateSiteModel) {
+            updateSiteModelAction();
+        }
+
+        if (branchUpdateCount > 0) {
+            evolutionaryProcessDelegate.updateTransitionMatrices(
+                    beagle,
+                    branchUpdateIndices,
+                    branchLengths,
+                    branchUpdateCount);
+        }
+
+        if (COUNT_TOTAL_OPERATIONS) {
+            totalMatrixUpdateCount += branchUpdateCount;
+
+            for (int i = 0; i <= numRestrictedPartials; i++) {
+                totalOperationCount += operationCount[i];
+            }
+        }
+
+        double logL;
+        boolean done;
+        boolean firstRescaleAttempt = true;
+
+        do {
+
+            if (hasRestrictedPartials) {
+                for (int i = 0; i <= numRestrictedPartials; i++) {
+                    beagle.updatePartials(operations[i], operationCount[i], Beagle.NONE);
+                    if (i < numRestrictedPartials) {
+                        //                        restrictNodePartials(restrictedIndices[i]);
+                    }
+                }
+            } else {
+                beagle.updatePartials(operations[0], operationCount[0], Beagle.NONE);
+            }
+
+            int rootIndex = partialBufferHelper.getOffsetIndex(root.getNumber());
+
+            int cumulateScaleBufferIndex = accumulateScaleFactors();
+
+            updateRootInformation();
+
+            double[] sumLogLikelihoods = new double[1];
+
+            beagle.calculateRootLogLikelihoods(new int[]{rootIndex}, new int[]{0}, new int[]{0},
+                    new int[]{cumulateScaleBufferIndex}, 1, sumLogLikelihoods);
+
+            logL = sumLogLikelihoods[0];
+
+            if (ascertainedSitePatterns) {
+                logL = computedAscertainedLogLikelihood();
+            }
+
+            if (Double.isNaN(logL) || Double.isInfinite(logL)) {
+                everUnderflowed = true;
+                logL = Double.NEGATIVE_INFINITY;
+
+                if (doRescalingNow(firstRescaleAttempt)) {
+
+                    branchUpdateCount = 0;
+
+                    if (hasRestrictedPartials) {
+                        for (int i = 0; i <= numRestrictedPartials; i++) {
+                            operationCount[i] = 0;
+                        }
+                    } else {
+                        operationCount[0] = 0;
+                    }
+
+                    // traverse again but without flipping partials indices as we
+                    // just want to overwrite the last attempt. We will flip the
+                    // scale buffer indices though as we are recomputing them.
+                    postOrderTraverse(treeModel, root, null, false);
+
+                    done = false; // Run through do-while loop again
+                    firstRescaleAttempt = false; // Only try to rescale once
+                } else {
+                    // we have already tried a rescale, not rescaling or always rescaling
+                    // so just return the likelihood...
+                    done = true;
+                }
+            } else {
+                done = true; // No under-/over-flow, then done
+            }
+
+        } while (!done);
+
+        // If these are needed...
+        //beagle.getSiteLogLikelihoods(patternLogLikelihoods);
+
+        //********************************************************************
+        // after traverse all nodes and patterns have been updated --
+        //so change flags to reflect this.
+        for (int i = 0; i < nodeCount; i++) {
+            updateNode[i] = false;
+        }
+
+        updateSubstitutionModel = false;
+        updateSiteModel = false;
+        //********************************************************************
+
+        return logL;
+    }
+
+    /**
+     * Traverse the tree calculating partial likelihoods.
+     *
+     * @param tree           tree
+     * @param node           node
+     * @param operatorNumber operatorNumber
+     * @param flip           flip
+     * @return boolean
+     */
+    final protected boolean postOrderTraverse(final Tree tree, final NodeRef node, final int[] operatorNumber, final boolean flip) {
+
+        boolean update = false;
+
+        final int nodeNum = node.getNumber();
+
+        final NodeRef parent = tree.getParent(node);
+
+        if (operatorNumber != null) {
+            operatorNumber[0] = -1;
+        }
+
+        // First update the evolutionary process for this branch
+        if (parent != null && updateNode[nodeNum]) {
+            update = updateBranchSpecificEvolutionaryProcess(tree, nodeNum, parent, node, flip);
+        }
+
+        // If the node is internal, update the partial likelihoods.
+        if (!tree.isExternal(node)) {
+
+            // Traverse down the two child nodes
+            final NodeRef child1 = tree.getChild(node, 0);
+            final int[] op1 = {-1};
+            final boolean update1 = postOrderTraverse(tree, child1, op1, flip);
+
+            final NodeRef child2 = tree.getChild(node, 1);
+            final int[] op2 = {-1};
+            final boolean update2 = postOrderTraverse(tree, child2, op2, flip);
+
+            // If either child node was updated then update this node too
+            if (update1 || update2) {
+
+                final int x = operationCount[operationListCount] * Beagle.OPERATION_TUPLE_SIZE;
+                final int[] operations = this.operations[operationListCount];
+
+                if (flip) {
+                    // first flip the partialBufferHelper
+                    partialBufferHelper.flipOffset(nodeNum);
+                }
+
+                operations[x] = partialBufferHelper.getOffsetIndex(nodeNum);
+
+                handlePartialsScaling(operations, nodeNum, x);
+
+                operations[x + 3] = partialBufferHelper.getOffsetIndex(child1.getNumber()); // source node 1
+                operations[x + 4] = evolutionaryProcessDelegate.getMatrixIndex(child1.getNumber()); // source matrix 1
+                operations[x + 5] = partialBufferHelper.getOffsetIndex(child2.getNumber()); // source node 2
+                operations[x + 6] = evolutionaryProcessDelegate.getMatrixIndex(child2.getNumber()); // source matrix 2
+
+                handleRestrictedPartials(nodeNum);
+
+                ++operationCount[operationListCount];
+                update = true;
+            }
+        }
+
+        return update;
+    }
+
+    // **************************************************************
+    // VariableListener IMPLEMENTATION
+    // **************************************************************
+
+    protected void handleVariableChangedEvent(Variable variable, int index, Parameter.ChangeType type) {
+        // do nothing
+    }
+
+    // **************************************************************
+    // Model IMPLEMENTATION
+    // **************************************************************
+    protected void handleModelChangedEvent(Model model, Object object, int index) {
+
+        fireModelChanged();
+
+        if (model == treeModel) {
+            if (object instanceof TreeModel.TreeChangedEvent) {
+
+                if (((TreeModel.TreeChangedEvent) object).isNodeChanged()) {
+                    // If a node event occurs the node and its two child nodes
+                    // are flagged for updating (this will result in everything
+                    // above being updated as well. Node events occur when a node
+                    // is added to a branch, removed from a branch or its height or
+                    // rate changes.
+                    updateNodeAndChildren(((TreeModel.TreeChangedEvent) object).getNode());
+                    updateRestrictedNodePartials = true;
+
+                } else if (((TreeModel.TreeChangedEvent) object).isTreeChanged()) {
+                    // Full tree events result in a complete updating of the tree likelihood
+                    // This event type is now used for EmpiricalTreeDistributions.
+                    //                    System.err.println("Full tree update event - these events currently aren't used\n" +
+                    //                            "so either this is in error or a new feature is using them so remove this message.");
+                    updateAllNodes();
+                    updateRestrictedNodePartials = true;
+                } else {
+                    // Other event types are ignored (probably trait changes).
+                    //System.err.println("Another tree event has occured (possibly a trait change).");
+                }
+            }
+        }
+
+        if (COUNT_TOTAL_OPERATIONS) {
+            totalModelChangedCount++;
+        }
+
+        likelihoodKnown = false;
+    }
+
+    /**
+     * Stores the additional state other than model components
+     */
+    protected void storeState() {
+        partialBufferHelper.storeState();
+        evolutionaryProcessDelegate.storeState();
+
+        storedLikelihoodKnown = likelihoodKnown;
+        storedLogLikelihood = logLikelihood;
+    }
+
+    /**
+     * Restore the additional stored state
+     */
+    protected void restoreState() {
+
+        updateSiteModel = true; // this is required to upload the categoryRates to BEAGLE after the restore
+
+        partialBufferHelper.restoreState();
+        evolutionaryProcessDelegate.restoreState();
+
+        updateRestrictedNodePartials = true;
+
+        likelihoodKnown = storedLikelihoodKnown;
+        logLikelihood = storedLogLikelihood;
+    }
+
+    protected void acceptState() {
+    } // nothing to do
+
+    // **************************************************************
+    // Likelihood IMPLEMENTATION
+    // **************************************************************
+
+    public final Model getModel() {
+        return this;
+    }
+
+    public final double getLogLikelihood() {
+        if (COUNT_TOTAL_OPERATIONS) {
+            totalGetLogLikelihoodCount++;
+        }
+
+        if (CompoundLikelihood.DEBUG_PARALLEL_EVALUATION) {
+            System.err.println((likelihoodKnown ? "lazy" : "evaluate"));
+        }
+
+        if (!likelihoodKnown) {
+            if (COUNT_TOTAL_OPERATIONS)
+                totalCalculateLikelihoodCount++;
+            logLikelihood = calculateLogLikelihood();
+            likelihoodKnown = true;
+        }
+
+        return logLikelihood;
+    }
+
+    /**
+     * Forces a complete recalculation of the likelihood next time getLikelihood is called
+     */
+    public void makeDirty() {
+        if (COUNT_TOTAL_OPERATIONS) {
+            totalMakeDirtyCount++;
+        }
+
+        likelihoodKnown = false;
+        updateAllNodes();
+
+        updateSiteModel = true;
+        updateSubstitutionModel = true;
+        updateRestrictedNodePartials = true;
+    }
+    
+    public boolean isLikelihoodKnown() {
+    	return likelihoodKnown;
+    }
+
+//    protected abstract double calculateLogLikelihood();
+
+    public String getReport() {
+        if (hasInitialized) {
+            String rtnValue =  getClass().getName() + "(" + getLogLikelihood() + ")";
+            if (COUNT_TOTAL_OPERATIONS)
+             rtnValue += " total operations = " + totalOperationCount +
+                         " matrix updates = " + totalMatrixUpdateCount + " model changes = " + totalModelChangedCount +
+                         " make dirties = " + totalMakeDirtyCount +
+                         " calculate likelihoods = " + totalCalculateLikelihoodCount +
+                         " get likelihoods = " + totalGetLogLikelihoodCount +
+                         " all rate updates = " + totalRateUpdateAllCount +
+                         " partial rate updates = " + totalRateUpdateSingleCount;
+            return rtnValue;
+        } else {
+            return getClass().getName() + "(uninitialized)";
+        }
+    }
+
+    // **************************************************************
+    // INSTANCE VARIABLES
+    // **************************************************************
+
+    /**
+     * the tree
+     */
+    protected TreeModel treeModel = null;
+
+    /**
+     * the number of nodes in the tree
+     */
+    protected final int nodeCount;
+
+    protected final int tipCount;
+    protected final int internalNodeCount;
+
+
+    protected int[][] operations;
+    protected int operationListCount;
+    protected int[] operationCount;
+
+    protected int[] branchUpdateIndices;
+    protected double[] branchLengths;
+    protected int branchUpdateCount;
+
+    protected static final boolean hasRestrictedPartials = false;
+    protected final int numRestrictedPartials;
+    protected final Map<Set<String>, Parameter> partialsRestrictions;
+    protected Parameter[] partialsMap;
+    protected double[] partials;
+    protected boolean updateRestrictedNodePartials;
+
+    /**
+     * Flags to specify which nodes are to be updated
+     */
+    protected boolean[] updateNode;
+
+    protected final BufferIndexHelper partialBufferHelper;
+    protected EvolutionaryProcessDelegate evolutionaryProcessDelegate;
+
+    private double logLikelihood;
+    private double storedLogLikelihood;
+    protected boolean likelihoodKnown = false;
+    private boolean storedLikelihoodKnown = false;
+
+    // Scale factor info
+    protected boolean everUnderflowed = false;
+//    protected boolean implementsScaling = false;
+//    protected boolean recomputeScaleFactors = false;
+
+
+    /**
+     * the BEAGLE library instance
+     */
+    protected Beagle beagle;
+
+    /**
+     * Flag to specify that the substitution model has changed
+     */
+    protected boolean updateSubstitutionModel;
+
+    /**
+     * Flag to specify that the site model has changed
+     */
+    protected boolean updateSiteModel;
+
+    /**
+     * Flag to specify if site patterns are ascertained
+     */
+    protected boolean ascertainedSitePatterns = false;
+
+
+
+
+    protected boolean hasInitialized = false;
+
+    protected int totalOperationCount = 0;
+    protected int totalMatrixUpdateCount = 0;
+    protected int totalGetLogLikelihoodCount = 0;
+    protected int totalModelChangedCount = 0;
+    protected int totalMakeDirtyCount = 0;
+    protected int totalCalculateLikelihoodCount = 0;
+    protected int totalRateUpdateAllCount = 0;
+    protected int totalRateUpdateSingleCount = 0;
+
+}
\ No newline at end of file
diff --git a/src/dr/app/beagle/evomodel/newtreelikelihood/NewAbstractSequenceLikelihood.java b/src/dr/app/beagle/evomodel/newtreelikelihood/NewAbstractSequenceLikelihood.java
new file mode 100644
index 0000000..eb401e1
--- /dev/null
+++ b/src/dr/app/beagle/evomodel/newtreelikelihood/NewAbstractSequenceLikelihood.java
@@ -0,0 +1,125 @@
+/*
+ * NewAbstractSinglePartitionTreeLikelihood.java
+ *
+ * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
+ *
+ * This file is part of BEAST.
+ * See the NOTICE file distributed with this work for additional
+ * information regarding copyright ownership and licensing.
+ *
+ * BEAST is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+ *
+ *  BEAST is distributed in the hope that it will be useful,
+ *  but WITHOUT ANY WARRANTY; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ *  GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with BEAST; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA  02110-1301  USA
+ */
+
+package dr.app.beagle.evomodel.newtreelikelihood;
+/**
+ * ${CLASS_NAME}
+ *
+ * @author Andrew Rambaut
+ * @author Marc A. Suchard
+ * @version $Id$
+ *
+ * $HeadURL$
+ *
+ * $LastChangedBy$
+ * $LastChangedDate$
+ * $LastChangedRevision$
+ */
+
+import dr.evolution.alignment.PatternList;
+import dr.evolution.datatype.DataType;
+import dr.evomodel.tree.TreeModel;
+import dr.inference.model.Parameter;
+
+import java.util.Map;
+import java.util.Set;
+
+public abstract class NewAbstractSequenceLikelihood extends NewAbstractLikelihoodOnTree {
+
+    public NewAbstractSequenceLikelihood(String name, PatternList patternList, TreeModel treeModel,
+                                         Map<Set<String>, Parameter> partialsRestrictions) {
+        super(name, treeModel, partialsRestrictions);
+
+        this.patternList = patternList;
+        this.dataType = patternList.getDataType();
+        patternCount = patternList.getPatternCount();
+        stateCount = dataType.getStateCount();
+
+        patternWeights = patternList.getPatternWeights();
+
+    }
+
+    /**
+     * Set update flag for a pattern
+     */
+    protected void updatePattern(int i) {
+        if (updatePattern != null) {
+            updatePattern[i] = true;
+        }
+        likelihoodKnown = false;
+    }
+
+    /**
+     * Set update flag for all patterns
+     */
+    protected void updateAllPatterns() {
+        if (updatePattern != null) {
+            for (int i = 0; i < patternCount; i++) {
+                updatePattern[i] = true;
+            }
+        }
+        likelihoodKnown = false;
+    }
+
+    public final double[] getPatternWeights() {
+        return patternWeights;
+    }
+
+    /**
+     * Forces a complete recalculation of the likelihood next time getLikelihood is called
+     */
+    public void makeDirty() {
+        super.makeDirty();
+        updateAllPatterns();
+    }
+
+    /**
+     * the patternList
+     */
+    protected PatternList patternList = null;
+    protected DataType dataType = null;
+
+    /**
+     * the pattern weights
+     */
+    protected double[] patternWeights;
+
+    /**
+     * the number of patterns
+     */
+    protected int patternCount;
+
+    /**
+     * the number of states in the data
+     */
+    protected int stateCount;
+
+    /**
+     * Flags to specify which patterns are to be updated
+     */
+    protected boolean[] updatePattern = null;
+
+
+}
diff --git a/src/dr/app/beagle/evomodel/newtreelikelihood/NewBeagleTreeLikelihood.java b/src/dr/app/beagle/evomodel/newtreelikelihood/NewBeagleSequenceLikelihood.java
similarity index 55%
rename from src/dr/app/beagle/evomodel/newtreelikelihood/NewBeagleTreeLikelihood.java
rename to src/dr/app/beagle/evomodel/newtreelikelihood/NewBeagleSequenceLikelihood.java
index 138faba..4259e21 100644
--- a/src/dr/app/beagle/evomodel/newtreelikelihood/NewBeagleTreeLikelihood.java
+++ b/src/dr/app/beagle/evomodel/newtreelikelihood/NewBeagleSequenceLikelihood.java
@@ -1,5 +1,5 @@
 /*
- * NewBeagleTreeLikelihood.java
+ * BeagleTreeLikelihood.java
  *
  * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
  *
@@ -35,7 +35,10 @@ import dr.app.beagle.evomodel.sitemodel.SiteRateModel;
 import dr.app.beagle.evomodel.substmodel.FrequencyModel;
 import dr.app.beagle.evomodel.substmodel.HKY;
 import dr.app.beagle.evomodel.substmodel.SubstitutionModel;
-import dr.app.beagle.evomodel.treelikelihood.*;
+import dr.app.beagle.evomodel.treelikelihood.BeagleTreeLikelihood;
+import dr.app.beagle.evomodel.treelikelihood.BufferIndexHelper;
+import dr.app.beagle.evomodel.treelikelihood.PartialsRescalingScheme;
+import dr.app.beagle.evomodel.treelikelihood.SubstitutionModelDelegate;
 import dr.app.beagle.tools.BeagleSequenceSimulator;
 import dr.app.beagle.tools.Partition;
 import dr.evolution.alignment.Alignment;
@@ -54,6 +57,7 @@ import dr.evomodel.tree.TreeModel;
 import dr.evomodel.treelikelihood.TipStatesModel;
 import dr.inference.model.Model;
 import dr.inference.model.Parameter;
+import dr.inference.model.ThreadAwareLikelihood;
 import dr.math.MathUtils;
 
 import java.util.*;
@@ -68,7 +72,7 @@ import java.util.logging.Logger;
  */
 
 @SuppressWarnings("serial")
-public class NewBeagleTreeLikelihood extends AbstractSinglePartitionTreeLikelihood {
+public class NewBeagleSequenceLikelihood extends NewAbstractSequenceLikelihood implements ThreadAwareLikelihood {
 
     // This property is a comma-delimited list of resource numbers (0 == CPU) to
     // allocate each BEAGLE instance to. If less than the number of instances then
@@ -79,6 +83,7 @@ public class NewBeagleTreeLikelihood extends AbstractSinglePartitionTreeLikeliho
     private static final String SCALING_PROPERTY = "beagle.scaling";
     private static final String RESCALE_FREQUENCY_PROPERTY = "beagle.rescale";
     private static final String EXTRA_BUFFER_COUNT_PROPERTY = "beagle.extra.buffer.count";
+    private static final String FORCE_VECTORIZATION = "beagle.force.vectorization";
 
     // Which scheme to use if choice not specified (or 'default' is selected):
     private static final PartialsRescalingScheme DEFAULT_RESCALING_SCHEME = PartialsRescalingScheme.DYNAMIC;
@@ -94,37 +99,37 @@ public class NewBeagleTreeLikelihood extends AbstractSinglePartitionTreeLikeliho
     private static final int RESCALE_FREQUENCY = 100;
     private static final int RESCALE_TIMES = 1;
 
-    public NewBeagleTreeLikelihood(PatternList patternList,
-                                   TreeModel treeModel,
-                                   BranchModel branchModel,
-                                   SiteModel siteModel,
-                                   BranchRateModel branchRateModel,
-                                   TipStatesModel tipStatesModel,
-                                   boolean useAmbiguities,
-                                   PartialsRescalingScheme rescalingScheme) {
+    public NewBeagleSequenceLikelihood(PatternList patternList,
+                                       TreeModel treeModel,
+                                       BranchModel branchModel,
+                                       SiteRateModel siteRateModel,
+                                       BranchRateModel branchRateModel,
+                                       TipStatesModel tipStatesModel,
+                                       boolean useAmbiguities,
+                                       PartialsRescalingScheme rescalingScheme) {
 
-        this(patternList, treeModel, branchModel, siteModel, branchRateModel, tipStatesModel, useAmbiguities, rescalingScheme, null);
+        this(patternList, treeModel, branchModel, siteRateModel, branchRateModel, tipStatesModel, useAmbiguities, rescalingScheme, null);
     }
 
-    public NewBeagleTreeLikelihood(PatternList patternList,
-                                   TreeModel treeModel,
-                                   BranchModel branchModel,
-                                   SiteModel siteModel,
-                                   BranchRateModel branchRateModel,
-                                   TipStatesModel tipStatesModel,
-                                   boolean useAmbiguities,
-                                   PartialsRescalingScheme rescalingScheme,
-                                   Map<Set<String>, Parameter> partialsRestrictions) {
+    public NewBeagleSequenceLikelihood(PatternList patternList,
+                                       TreeModel treeModel,
+                                       BranchModel branchModel,
+                                       SiteRateModel siteRateModel,
+                                       BranchRateModel branchRateModel,
+                                       TipStatesModel tipStatesModel,
+                                       boolean useAmbiguities,
+                                       PartialsRescalingScheme rescalingScheme,
+                                       Map<Set<String>, Parameter> partialsRestrictions) {
 
-        super(BeagleTreeLikelihoodParser.TREE_LIKELIHOOD, patternList, treeModel);
+        super(BeagleTreeLikelihoodParser.TREE_LIKELIHOOD, patternList, treeModel, partialsRestrictions);
 
         try {
             final Logger logger = Logger.getLogger("dr.evomodel");
 
-            logger.info("Using BEAGLE TreeLikelihood");
+            logger.info("Using NEW BEAGLE SequenceLikelihood");
 
-            this.siteModel = siteModel;
-            addModel(this.siteModel);
+            this.siteRateModel = siteRateModel;
+            addModel(this.siteRateModel);
 
             this.branchModel = branchModel;
             addModel(this.branchModel);
@@ -139,11 +144,7 @@ public class NewBeagleTreeLikelihood extends AbstractSinglePartitionTreeLikeliho
 
             this.tipStatesModel = tipStatesModel;
 
-            this.categoryCount = this.siteModel.getCategoryCount();
-
-            this.tipCount = treeModel.getExternalNodeCount();
-
-            internalNodeCount = nodeCount - tipCount;
+            this.categoryCount = this.siteRateModel.getCategoryCount();
 
             int compactPartialsCount = tipCount;
             if (useAmbiguities) {
@@ -151,34 +152,17 @@ public class NewBeagleTreeLikelihood extends AbstractSinglePartitionTreeLikeliho
                 compactPartialsCount = 0;
             }
 
-            // one partials buffer for each tip and two for each internal node (for store restore)
-            partialBufferHelper = new BufferIndexHelper(nodeCount, tipCount);
-
             // one scaling buffer for each internal node plus an extra for the accumulation, then doubled for store/restore
             scaleBufferHelper = new BufferIndexHelper(getScaleBufferCount(), 0);
 
-            // Attempt to get the resource order from the System Property
-            if (resourceOrder == null) {
-                resourceOrder = parseSystemPropertyIntegerArray(RESOURCE_ORDER_PROPERTY);
-            }
-            if (preferredOrder == null) {
-                preferredOrder = parseSystemPropertyIntegerArray(PREFERRED_FLAGS_PROPERTY);
-            }
-            if (requiredOrder == null) {
-                requiredOrder = parseSystemPropertyIntegerArray(REQUIRED_FLAGS_PROPERTY);
-            }
-            if (scalingOrder == null) {
-                scalingOrder = parseSystemPropertyStringArray(SCALING_PROPERTY);
-            }
-            if (extraBufferOrder == null) {
-                extraBufferOrder = parseSystemPropertyIntegerArray(EXTRA_BUFFER_COUNT_PROPERTY);
-            }
+            readEnvironmentProperties();
 
             int extraBufferCount = -1; // default
             if (extraBufferOrder.size() > 0) {
                 extraBufferCount = extraBufferOrder.get(instanceCount % extraBufferOrder.size());
             }
             substitutionModelDelegate = new SubstitutionModelDelegate(treeModel, branchModel, extraBufferCount);
+            super.setEvolutionaryProcessDelegate(substitutionModelDelegate);
 
             // first set the rescaling scheme to use from the parser
             this.rescalingScheme = rescalingScheme;
@@ -238,7 +222,15 @@ public class NewBeagleTreeLikelihood extends AbstractSinglePartitionTreeLikeliho
                     preferenceFlags |= BeagleFlag.PROCESSOR_CPU.getMask();
             }
 
-            if (BeagleFlag.VECTOR_SSE.isSet(preferenceFlags) && stateCount != 4) {
+            boolean forceVectorization = false;
+            String vectorizationString = System.getProperty(FORCE_VECTORIZATION);
+            if (vectorizationString != null) {
+                forceVectorization = true;
+            }
+
+            if (BeagleFlag.VECTOR_SSE.isSet(preferenceFlags) && (stateCount != 4)
+                    && !forceVectorization
+                    ) {
                 // @todo SSE doesn't seem to work for larger state spaces so for now we override the
                 // SSE option.
                 preferenceFlags &= ~BeagleFlag.VECTOR_SSE.getMask();
@@ -249,6 +241,10 @@ public class NewBeagleTreeLikelihood extends AbstractSinglePartitionTreeLikeliho
                 }
             }
 
+            if (this.rescalingScheme == PartialsRescalingScheme.DYNAMIC) {
+                everUnderflowed = false; // If false, BEAST does not rescale until first under-/over-flow.
+            }
+
             if (!BeagleFlag.PRECISION_SINGLE.isSet(preferenceFlags)) {
                 // if single precision not explicitly set then prefer double
                 preferenceFlags |= BeagleFlag.PRECISION_DOUBLE.getMask();
@@ -275,33 +271,8 @@ public class NewBeagleTreeLikelihood extends AbstractSinglePartitionTreeLikeliho
                     requirementFlags
             );
 
-            InstanceDetails instanceDetails = beagle.getDetails();
-            ResourceDetails resourceDetails = null;
-
-            if (instanceDetails != null) {
-                resourceDetails = BeagleFactory.getResourceDetails(instanceDetails.getResourceNumber());
-                if (resourceDetails != null) {
-                    StringBuilder sb = new StringBuilder("  Using BEAGLE resource ");
-                    sb.append(resourceDetails.getNumber()).append(": ");
-                    sb.append(resourceDetails.getName()).append("\n");
-                    if (resourceDetails.getDescription() != null) {
-                        String[] description = resourceDetails.getDescription().split("\\|");
-                        for (String desc : description) {
-                            if (desc.trim().length() > 0) {
-                                sb.append("    ").append(desc.trim()).append("\n");
-                            }
-                        }
-                    }
-                    sb.append("    with instance flags: ").append(instanceDetails.toString());
-                    logger.info(sb.toString());
-                } else {
-                    logger.info("  Error retrieving BEAGLE resource for instance: " + instanceDetails.toString());
-                }
-            } else {
-                logger.info("  No external BEAGLE resources available, or resource list/requirements not met, using Java implementation");
-            }
-            logger.info("  " + (useAmbiguities ? "Using" : "Ignoring") + " ambiguities in tree likelihood.");
-            logger.info("  With " + patternList.getPatternCount() + " unique site patterns.");
+            printInstanceDetails(beagle, logger, useAmbiguities, patternList,
+                    this.rescalingScheme, rescalingFrequency);
 
             if (tipStatesModel != null) {
                 tipStatesModel.setTree(treeModel);
@@ -352,37 +323,8 @@ public class NewBeagleTreeLikelihood extends AbstractSinglePartitionTreeLikeliho
                 ascertainedSitePatterns = true;
             }
 
-            this.partialsRestrictions = partialsRestrictions;
-//            hasRestrictedPartials = (partialsRestrictions != null);
-            if (hasRestrictedPartials) {
-                numRestrictedPartials = partialsRestrictions.size();
-                updateRestrictedNodePartials = true;
-                partialsMap = new Parameter[treeModel.getNodeCount()];
-                partials = new double[stateCount * patternCount * categoryCount];
-            } else {
-                numRestrictedPartials = 0;
-                updateRestrictedNodePartials = false;
-            }
-
             beagle.setPatternWeights(patternWeights);
 
-            String rescaleMessage = "  Using rescaling scheme : " + this.rescalingScheme.getText();
-            if (this.rescalingScheme == PartialsRescalingScheme.AUTO &&
-                    resourceDetails != null &&
-                    (resourceDetails.getFlags() & BeagleFlag.SCALING_AUTO.getMask()) == 0) {
-                // If auto scaling in BEAGLE is not supported then do it here
-                this.rescalingScheme = PartialsRescalingScheme.DYNAMIC;
-                rescaleMessage = "  Auto rescaling not supported in BEAGLE, using : " + this.rescalingScheme.getText();
-            }
-            if (this.rescalingScheme == PartialsRescalingScheme.DYNAMIC) {
-                rescaleMessage += " (rescaling every " + rescalingFrequency + " evaluations)";
-            }
-            logger.info(rescaleMessage);
-
-            if (this.rescalingScheme == PartialsRescalingScheme.DYNAMIC) {
-                everUnderflowed = false; // If false, BEAST does not rescale until first under-/over-flow.
-            }
-
             updateSubstitutionModel = true;
             updateSiteModel = true;
 
@@ -393,6 +335,72 @@ public class NewBeagleTreeLikelihood extends AbstractSinglePartitionTreeLikeliho
         hasInitialized = true;
     }
 
+
+    private static void readEnvironmentProperties() {
+        // Attempt to get the resource order from the System Property
+        if (resourceOrder == null) {
+            resourceOrder = parseSystemPropertyIntegerArray(RESOURCE_ORDER_PROPERTY);
+        }
+        if (preferredOrder == null) {
+            preferredOrder = parseSystemPropertyIntegerArray(PREFERRED_FLAGS_PROPERTY);
+        }
+        if (requiredOrder == null) {
+            requiredOrder = parseSystemPropertyIntegerArray(REQUIRED_FLAGS_PROPERTY);
+        }
+        if (scalingOrder == null) {
+            scalingOrder = parseSystemPropertyStringArray(SCALING_PROPERTY);
+        }
+        if (extraBufferOrder == null) {
+            extraBufferOrder = parseSystemPropertyIntegerArray(EXTRA_BUFFER_COUNT_PROPERTY);
+        }
+    }
+
+    private static void printInstanceDetails(Beagle beagle, Logger logger, boolean useAmbiguities,
+                                             PatternList patternList, PartialsRescalingScheme rescalingScheme,
+                                             int rescalingFrequency) {
+
+        InstanceDetails instanceDetails = beagle.getDetails();
+        ResourceDetails resourceDetails = null;
+
+        if (instanceDetails != null) {
+            resourceDetails = BeagleFactory.getResourceDetails(instanceDetails.getResourceNumber());
+            if (resourceDetails != null) {
+                StringBuilder sb = new StringBuilder("  Using BEAGLE resource ");
+                sb.append(resourceDetails.getNumber()).append(": ");
+                sb.append(resourceDetails.getName()).append("\n");
+                if (resourceDetails.getDescription() != null) {
+                    String[] description = resourceDetails.getDescription().split("\\|");
+                    for (String desc : description) {
+                        if (desc.trim().length() > 0) {
+                            sb.append("    ").append(desc.trim()).append("\n");
+                        }
+                    }
+                }
+                sb.append("    with instance flags: ").append(instanceDetails.toString());
+                logger.info(sb.toString());
+            } else {
+                logger.info("  Error retrieving BEAGLE resource for instance: " + instanceDetails.toString());
+            }
+        } else {
+            logger.info("  No external BEAGLE resources available, or resource list/requirements not met, using Java implementation");
+        }
+        logger.info("  " + (useAmbiguities ? "Using" : "Ignoring") + " ambiguities in tree likelihood.");
+        logger.info("  With " + patternList.getPatternCount() + " unique site patterns.");
+
+        String rescaleMessage = "  Using rescaling scheme : " + rescalingScheme.getText();
+        if (rescalingScheme == PartialsRescalingScheme.AUTO &&
+                resourceDetails != null &&
+                (resourceDetails.getFlags() & BeagleFlag.SCALING_AUTO.getMask()) == 0) {
+            // If auto scaling in BEAGLE is not supported then do it here
+            rescalingScheme = PartialsRescalingScheme.DYNAMIC;
+            rescaleMessage = "  Auto rescaling not supported in BEAGLE, using : " + rescalingScheme.getText();
+        }
+        if (rescalingScheme == PartialsRescalingScheme.DYNAMIC) {
+            rescaleMessage += " (rescaling every " + rescalingFrequency + " evaluations)";
+        }
+        logger.info(rescaleMessage);
+    }
+
     private static List<Integer> parseSystemPropertyIntegerArray(String propertyName) {
         List<Integer> order = new ArrayList<Integer>();
         String r = System.getProperty(propertyName);
@@ -428,41 +436,41 @@ public class NewBeagleTreeLikelihood extends AbstractSinglePartitionTreeLikeliho
         }
         return order;
     }
-    
+
     public TipStatesModel getTipStatesModel() {
-    	return tipStatesModel;
+        return tipStatesModel;
     }
-    
+
     public PatternList getPatternsList() {
-    	return patternList;
+        return patternList;
     }
 
     public TreeModel getTreeModel() {
         return treeModel;
     }
-    
+
     public BranchModel getBranchModel() {
-    	return branchModel;
+        return branchModel;
     }
 
-    public SiteModel getSiteModel() {
-        return siteModel;
+    public SiteRateModel getSiteRateModel() {
+        return siteRateModel;
     }
 
     public BranchRateModel getBranchRateModel() {
         return branchRateModel;
     }
-    
+
     public PartialsRescalingScheme getRescalingScheme() {
-    	return rescalingScheme;
+        return rescalingScheme;
     }
-    
+
     public Map<Set<String>, Parameter> getPartialsRestrictions() {
-    	return partialsRestrictions;
+        return partialsRestrictions;
     }
-    
+
     public boolean useAmbiguities() {
-    	return useAmbiguities;
+        return useAmbiguities;
     }
 
     protected int getScaleBufferCount() {
@@ -562,7 +570,7 @@ public class NewBeagleTreeLikelihood extends AbstractSinglePartitionTreeLikeliho
     }
 
 
-//    public void setStates(int tipIndex, int[] states) {
+    //    public void setStates(int tipIndex, int[] states) {
 //        System.err.println("BTL:setStates");
 //        beagle.setTipStates(tipIndex, states);
 //        makeDirty();
@@ -573,6 +581,12 @@ public class NewBeagleTreeLikelihood extends AbstractSinglePartitionTreeLikeliho
 //        beagle.getTipStates(tipIndex, states);
 //    }
 
+    public final void setPatternWeights(double[] patternWeights) {
+        this.patternWeights = patternWeights;
+        beagle.setPatternWeights(patternWeights);
+    }
+
+
     // **************************************************************
     // ModelListener IMPLEMENTATION
     // **************************************************************
@@ -582,32 +596,33 @@ public class NewBeagleTreeLikelihood extends AbstractSinglePartitionTreeLikeliho
      */
     protected void handleModelChangedEvent(Model model, Object object, int index) {
 
-        fireModelChanged();
+//        fireModelChanged();
+        super.handleModelChangedEvent(model, object, index);
 
         if (model == treeModel) {
-            if (object instanceof TreeModel.TreeChangedEvent) {
-
-                if (((TreeModel.TreeChangedEvent) object).isNodeChanged()) {
-                    // If a node event occurs the node and its two child nodes
-                    // are flagged for updating (this will result in everything
-                    // above being updated as well. Node events occur when a node
-                    // is added to a branch, removed from a branch or its height or
-                    // rate changes.
-                    updateNodeAndChildren(((TreeModel.TreeChangedEvent) object).getNode());
-                    updateRestrictedNodePartials = true;
-
-                } else if (((TreeModel.TreeChangedEvent) object).isTreeChanged()) {
-                    // Full tree events result in a complete updating of the tree likelihood
-                    // This event type is now used for EmpiricalTreeDistributions.
-//                    System.err.println("Full tree update event - these events currently aren't used\n" +
-//                            "so either this is in error or a new feature is using them so remove this message.");
-                    updateAllNodes();
-                    updateRestrictedNodePartials = true;
-                } else {
-                    // Other event types are ignored (probably trait changes).
-                    //System.err.println("Another tree event has occured (possibly a trait change).");
-                }
-            }
+//            if (object instanceof TreeModel.TreeChangedEvent) {
+//
+//                if (((TreeModel.TreeChangedEvent) object).isNodeChanged()) {
+//                    // If a node event occurs the node and its two child nodes
+//                    // are flagged for updating (this will result in everything
+//                    // above being updated as well. Node events occur when a node
+//                    // is added to a branch, removed from a branch or its height or
+//                    // rate changes.
+//                    updateNodeAndChildren(((TreeModel.TreeChangedEvent) object).getNode());
+//                    updateRestrictedNodePartials = true;
+//
+//                } else if (((TreeModel.TreeChangedEvent) object).isTreeChanged()) {
+//                    // Full tree events result in a complete updating of the tree likelihood
+//                    // This event type is now used for EmpiricalTreeDistributions.
+////                    System.err.println("Full tree update event - these events currently aren't used\n" +
+////                            "so either this is in error or a new feature is using them so remove this message.");
+//                    updateAllNodes();
+//                    updateRestrictedNodePartials = true;
+//                } else {
+//                    // Other event types are ignored (probably trait changes).
+//                    //System.err.println("Another tree event has occured (possibly a trait change).");
+//                }
+//            }
 
         } else if (model == branchRateModel) {
             if (index == -1) {
@@ -621,16 +636,15 @@ public class NewBeagleTreeLikelihood extends AbstractSinglePartitionTreeLikeliho
             }
 
         } else if (model == branchModel) {
-//            if (index == -1) {
-//                updateSubstitutionModel = true;
-//                updateAllNodes();
-//            } else {
-//                updateNode(treeModel.getNode(index));
-//            }
-
+            // if (index == -1) {
+            //     updateSubstitutionModel = true;
+            //     updateAllNodes();
+            // } else {
+            //     updateNode(treeModel.getNode(index));
+            // }
             makeDirty();
 
-        } else if (model == siteModel) {
+        } else if (model == siteRateModel) {
 
             updateSiteModel = true;
             updateAllNodes();
@@ -640,23 +654,26 @@ public class NewBeagleTreeLikelihood extends AbstractSinglePartitionTreeLikeliho
                 for (int i = 0; i < treeModel.getNodeCount(); i++)
                     if (treeModel.getNodeTaxon(treeModel.getNode(i)) != null && treeModel.getNodeTaxon(treeModel.getNode(i)).getId().equalsIgnoreCase(((Taxon) object).getId()))
                         updateNode(treeModel.getNode(i));
-            } else
+            } else if (object instanceof Parameter) {
+                // ignore...
+            } else {
                 updateAllNodes();
+            }
         } else {
 
             throw new RuntimeException("Unknown componentChangedEvent");
         }
 
-        super.handleModelChangedEvent(model, object, index);
+//        super.handleModelChangedEvent(model, object, index);
     }
 
-    @Override
-    public void makeDirty() {
-        super.makeDirty();
-        updateSiteModel = true;
-        updateSubstitutionModel = true;
-        updateRestrictedNodePartials = true;
-    }
+//    @Override
+//    public void makeDirty() {
+//        super.makeDirty();
+//        updateSiteModel = true;
+//        updateSubstitutionModel = true;
+//        updateRestrictedNodePartials = true;
+//    }
 // **************************************************************
     // Model IMPLEMENTATION
     // **************************************************************
@@ -665,8 +682,8 @@ public class NewBeagleTreeLikelihood extends AbstractSinglePartitionTreeLikeliho
      * Stores the additional state other than model components
      */
     protected void storeState() {
-        partialBufferHelper.storeState();
-        substitutionModelDelegate.storeState();
+//        partialBufferHelper.storeState();
+//        substitutionModelDelegate.storeState();
 
         if (useScaleFactors || useAutoScaling) { // Only store when actually used
             scaleBufferHelper.storeState();
@@ -682,10 +699,10 @@ public class NewBeagleTreeLikelihood extends AbstractSinglePartitionTreeLikeliho
      * Restore the additional stored state
      */
     protected void restoreState() {
-        updateSiteModel = true; // this is required to upload the categoryRates to BEAGLE after the restore
-
-        partialBufferHelper.restoreState();
-        substitutionModelDelegate.restoreState();
+//        updateSiteModel = true; // this is required to upload the categoryRates to BEAGLE after the restore
+//
+//        partialBufferHelper.restoreState();
+//        substitutionModelDelegate.restoreState();
 
         if (useScaleFactors || useAutoScaling) {
             scaleBufferHelper.restoreState();
@@ -695,239 +712,14 @@ public class NewBeagleTreeLikelihood extends AbstractSinglePartitionTreeLikeliho
 //            rescalingCount = storedRescalingCount;
         }
 
-        updateRestrictedNodePartials = true;
+//        updateRestrictedNodePartials = true;
 
         super.restoreState();
 
     }
 
-//    int marcCount = 0;
-    // **************************************************************
-    // Likelihood IMPLEMENTATION
-    // **************************************************************
-
-    /**
-     * Calculate the log likelihood of the current state.
-     *
-     * @return the log likelihood.
-     */
-    protected double calculateLogLikelihood() {
-
-        if (patternLogLikelihoods == null) {
-            patternLogLikelihoods = new double[patternCount];
-        }
-
-        if (branchUpdateIndices == null) {
-            branchUpdateIndices = new int[nodeCount];
-            branchLengths = new double[nodeCount];
-            scaleBufferIndices = new int[internalNodeCount];
-            storedScaleBufferIndices = new int[internalNodeCount];
-        }
-
-        if (operations == null) {
-            operations = new int[numRestrictedPartials + 1][internalNodeCount * Beagle.OPERATION_TUPLE_SIZE];
-            operationCount = new int[numRestrictedPartials + 1];
-        }
-
-        recomputeScaleFactors = false;
-
-        if (this.rescalingScheme == PartialsRescalingScheme.ALWAYS) {
-            useScaleFactors = true;
-            recomputeScaleFactors = true;
-        } else if (this.rescalingScheme == PartialsRescalingScheme.DYNAMIC && everUnderflowed) {
-            useScaleFactors = true;
-            if (rescalingCountInner < RESCALE_TIMES) {
-                recomputeScaleFactors = true;
-                makeDirty();
-//                System.err.println("Recomputing scale factors");
-            }
-
-            rescalingCountInner++;
-            rescalingCount++;
-            if (rescalingCount > rescalingFrequency) {
-                rescalingCount = 0;
-                rescalingCountInner = 0;
-            }
-        } else if (this.rescalingScheme == PartialsRescalingScheme.DELAYED && everUnderflowed) {
-            useScaleFactors = true;
-            recomputeScaleFactors = true;
-            rescalingCount++;
-        }
-
-        if (tipStatesModel != null) {
-            int tipCount = treeModel.getExternalNodeCount();
-            for (int index = 0; index < tipCount; index++) {
-                if (updateNode[index]) {
-                    if (tipStatesModel.getModelType() == TipStatesModel.Type.PARTIALS) {
-                        tipStatesModel.getTipPartials(index, tipPartials);
-                        beagle.setTipPartials(index, tipPartials);
-                    } else {
-                        tipStatesModel.getTipStates(index, tipStates);
-                        beagle.setTipStates(index, tipStates);
-                    }
-                }
-            }
-        }
-
-        branchUpdateCount = 0;
-        operationListCount = 0;
-
-        if (hasRestrictedPartials) {
-            for (int i = 0; i <= numRestrictedPartials; i++) {
-                operationCount[i] = 0;
-            }
-        } else {
-            operationCount[0] = 0;
-        }
-
-        final NodeRef root = treeModel.getRoot();
-        traverse(treeModel, root, null, true);
-
-        if (updateSubstitutionModel) { // TODO More efficient to update only the substitution model that changed, instead of all
-            substitutionModelDelegate.updateSubstitutionModels(beagle);
-
-            // we are currently assuming a no-category model...
-        }
-
-        if (updateSiteModel) {
-            double[] categoryRates = this.siteModel.getCategoryRates();
-            beagle.setCategoryRates(categoryRates);
-        }
-
-        if (branchUpdateCount > 0) {
-            substitutionModelDelegate.updateTransitionMatrices(
-                    beagle,
-                    branchUpdateIndices,
-                    branchLengths,
-                    branchUpdateCount);
-        }
-
-        if (COUNT_TOTAL_OPERATIONS) {
-            totalMatrixUpdateCount += branchUpdateCount;
-
-            for (int i = 0; i <= numRestrictedPartials; i++) {
-                totalOperationCount += operationCount[i];
-            }
-        }
-
-        double logL;
-        boolean done;
-        boolean firstRescaleAttempt = true;
-
-        do {
-
-            if (hasRestrictedPartials) {
-                for (int i = 0; i <= numRestrictedPartials; i++) {
-                    beagle.updatePartials(operations[i], operationCount[i], Beagle.NONE);
-                    if (i < numRestrictedPartials) {
-//                        restrictNodePartials(restrictedIndices[i]);
-                    }
-                }
-            } else {
-                beagle.updatePartials(operations[0], operationCount[0], Beagle.NONE);
-            }
-
-            int rootIndex = partialBufferHelper.getOffsetIndex(root.getNumber());
-
-            double[] categoryWeights = this.siteModel.getCategoryProportions();
-
-            // This should probably explicitly be the state frequencies for the root node...
-            double[] frequencies = substitutionModelDelegate.getRootStateFrequencies();
-
-            int cumulateScaleBufferIndex = Beagle.NONE;
-            if (useScaleFactors) {
-
-                if (recomputeScaleFactors) {
-                    scaleBufferHelper.flipOffset(internalNodeCount);
-                    cumulateScaleBufferIndex = scaleBufferHelper.getOffsetIndex(internalNodeCount);
-                    beagle.resetScaleFactors(cumulateScaleBufferIndex);
-                    beagle.accumulateScaleFactors(scaleBufferIndices, internalNodeCount, cumulateScaleBufferIndex);
-                } else {
-                    cumulateScaleBufferIndex = scaleBufferHelper.getOffsetIndex(internalNodeCount);
-                }
-            } else if (useAutoScaling) {
-                beagle.accumulateScaleFactors(scaleBufferIndices, internalNodeCount, Beagle.NONE);
-            }
-
-            // these could be set only when they change but store/restore would need to be considered
-            beagle.setCategoryWeights(0, categoryWeights);
-            beagle.setStateFrequencies(0, frequencies);
-
-            double[] sumLogLikelihoods = new double[1];
-
-            beagle.calculateRootLogLikelihoods(new int[]{rootIndex}, new int[]{0}, new int[]{0},
-                    new int[]{cumulateScaleBufferIndex}, 1, sumLogLikelihoods);
-
-            logL = sumLogLikelihoods[0];
-
-            if (ascertainedSitePatterns) {
-                // Need to correct for ascertainedSitePatterns
-                beagle.getSiteLogLikelihoods(patternLogLikelihoods);
-                logL = getAscertainmentCorrectedLogLikelihood((AscertainedSitePatterns) patternList,
-                        patternLogLikelihoods, patternWeights);
-            }
-
-            if (Double.isNaN(logL) || Double.isInfinite(logL)) {
-                everUnderflowed = true;
-                logL = Double.NEGATIVE_INFINITY;
-
-                if (firstRescaleAttempt && (rescalingScheme == PartialsRescalingScheme.DYNAMIC || rescalingScheme == PartialsRescalingScheme.DELAYED)) {
-                    // we have had a potential under/over flow so attempt a rescaling
-                    if (rescalingScheme == PartialsRescalingScheme.DYNAMIC || (rescalingCount == 0)) {
-                        Logger.getLogger("dr.evomodel").info("Underflow calculating likelihood. Attempting a rescaling...");
-                    }
-                    useScaleFactors = true;
-                    recomputeScaleFactors = true;
-
-                    branchUpdateCount = 0;
-
-                    if (hasRestrictedPartials) {
-                        for (int i = 0; i <= numRestrictedPartials; i++) {
-                            operationCount[i] = 0;
-                        }
-                    } else {
-                        operationCount[0] = 0;
-                    }
-
-                    // traverse again but without flipping partials indices as we
-                    // just want to overwrite the last attempt. We will flip the
-                    // scale buffer indices though as we are recomputing them.
-                    traverse(treeModel, root, null, false);
-
-                    done = false; // Run through do-while loop again
-                    firstRescaleAttempt = false; // Only try to rescale once
-                } else {
-                    // we have already tried a rescale, not rescaling or always rescaling
-                    // so just return the likelihood...
-                    done = true;
-                }
-            } else {
-                done = true; // No under-/over-flow, then done
-            }
-
-        } while (!done);
-
-        // If these are needed...
-        //beagle.getSiteLogLikelihoods(patternLogLikelihoods);
-
-        //********************************************************************
-        // after traverse all nodes and patterns have been updated --
-        //so change flags to reflect this.
-        for (int i = 0; i < nodeCount; i++) {
-            updateNode[i] = false;
-        }
-
-        updateSubstitutionModel = false;
-        updateSiteModel = false;
-        //********************************************************************
-
-        return logL;
-    }
-
-    public void getPartials(int number, double[] partials) {
-        int cumulativeBufferIndex = Beagle.NONE;
-        /* No need to rescale partials */
-        beagle.getPartials(partialBufferHelper.getOffsetIndex(number), cumulativeBufferIndex, partials);
+    public boolean arePartialsRescaled() {
+        return useScaleFactors;
     }
 
     protected void setPartials(int number, double[] partials) {
@@ -981,173 +773,216 @@ public class NewBeagleTreeLikelihood extends AbstractSinglePartitionTreeLikeliho
         return logL;
     }
 
-    /**
-     * Traverse the tree calculating partial likelihoods.
-     *
-     * @param tree           tree
-     * @param node           node
-     * @param operatorNumber operatorNumber
-     * @param flip           flip
-     * @return boolean
-     */
-    private boolean traverse(Tree tree, NodeRef node, int[] operatorNumber, boolean flip) {
-
-        boolean update = false;
-
-        int nodeNum = node.getNumber();
-
-        NodeRef parent = tree.getParent(node);
-
-        if (operatorNumber != null) {
-            operatorNumber[0] = -1;
+    @Override
+    protected void prepareStorage() {
+        super.prepareStorage();
+        if (scaleBufferIndices == null) {
+            scaleBufferIndices = new int[internalNodeCount];
+            storedScaleBufferIndices = new int[internalNodeCount];
         }
+    }
+
+    @Override
+    final protected void handlePartialsScaling(final int[] operations, final int nodeNum, final int x) {
+        if (useScaleFactors) {
+            // get the index of this scaling buffer
+            int n = nodeNum - tipCount;
 
-        // First update the transition probability matrix(ices) for this branch
-        if (parent != null && updateNode[nodeNum]) {
+            if (recomputeScaleFactors) {
+                // flip the indicator: can take either n or (internalNodeCount + 1) - n
+                scaleBufferHelper.flipOffset(n);
 
-            final double branchRate = branchRateModel.getBranchRate(tree, node);
+                // store the index
+                scaleBufferIndices[n] = scaleBufferHelper.getOffsetIndex(n);
 
-            final double parentHeight = tree.getNodeHeight(parent);
-            final double nodeHeight = tree.getNodeHeight(node);
+                operations[x + 1] = scaleBufferIndices[n]; // Write new scaleFactor
+                operations[x + 2] = Beagle.NONE;
 
-            // Get the operational time of the branch
-            final double branchLength = branchRate * (parentHeight - nodeHeight);
-            if (branchLength < 0.0) {
-                throw new RuntimeException("Negative branch length: " + branchLength);
+            } else {
+                operations[x + 1] = Beagle.NONE;
+                operations[x + 2] = scaleBufferIndices[n]; // Read existing scaleFactor
             }
+        } else {
 
-            if (flip) {
-                substitutionModelDelegate.flipMatrixBuffer(nodeNum);
+            if (useAutoScaling) {
+                scaleBufferIndices[nodeNum - tipCount] = partialBufferHelper.getOffsetIndex(nodeNum);
             }
-            branchUpdateIndices[branchUpdateCount] = nodeNum;
-            branchLengths[branchUpdateCount] = branchLength;
-            branchUpdateCount++;
-
-            update = true;
+            operations[x + 1] = Beagle.NONE; // Not using scaleFactors
+            operations[x + 2] = Beagle.NONE;
         }
+    }
 
-        // If the node is internal, update the partial likelihoods.
-        if (!tree.isExternal(node)) {
+    @Override
+    final protected int accumulateScaleFactors() {
+        int cumulateScaleBufferIndex = Beagle.NONE;
+        if (useScaleFactors) {
+
+            if (recomputeScaleFactors) {
+                scaleBufferHelper.flipOffset(internalNodeCount);
+                cumulateScaleBufferIndex = scaleBufferHelper.getOffsetIndex(internalNodeCount);
+                beagle.resetScaleFactors(cumulateScaleBufferIndex);
+                beagle.accumulateScaleFactors(scaleBufferIndices, internalNodeCount, cumulateScaleBufferIndex);
+            } else {
+                cumulateScaleBufferIndex = scaleBufferHelper.getOffsetIndex(internalNodeCount);
+            }
+        } else if (useAutoScaling) {
+            beagle.accumulateScaleFactors(scaleBufferIndices, internalNodeCount, Beagle.NONE);
+        }
+        return cumulateScaleBufferIndex;
+    }
 
-            // Traverse down the two child nodes
-            NodeRef child1 = tree.getChild(node, 0);
-            final int[] op1 = {-1};
-            final boolean update1 = traverse(tree, child1, op1, flip);
+    @Override
+    final protected boolean updateBranchSpecificEvolutionaryProcess(final Tree tree, final int nodeNum, final NodeRef parent,
+                                                              final NodeRef node, final boolean flip) {
 
-            NodeRef child2 = tree.getChild(node, 1);
-            final int[] op2 = {-1};
-            final boolean update2 = traverse(tree, child2, op2, flip);
+        final double branchRate;
 
-            // If either child node was updated then update this node too
-            if (update1 || update2) {
+        synchronized (branchRateModel) {
+            branchRate = branchRateModel.getBranchRate(tree, node);
+        }
+        final double parentHeight = tree.getNodeHeight(parent);
+        final double nodeHeight = tree.getNodeHeight(node);
 
-                int x = operationCount[operationListCount] * Beagle.OPERATION_TUPLE_SIZE;
+        // Get the operational time of the branch
+        final double branchLength = branchRate * (parentHeight - nodeHeight);
+        if (branchLength < 0.0) {
+            throw new RuntimeException("Negative branch length: " + branchLength);
+        }
 
-                if (flip) {
-                    // first flip the partialBufferHelper
-                    partialBufferHelper.flipOffset(nodeNum);
-                }
+        if (flip) {
+            substitutionModelDelegate.flipMatrixBuffer(nodeNum);
+        }
 
-                final int[] operations = this.operations[operationListCount];
+        branchUpdateIndices[branchUpdateCount] = nodeNum;
+        branchLengths[branchUpdateCount] = branchLength;
+        branchUpdateCount++;
 
-                operations[x] = partialBufferHelper.getOffsetIndex(nodeNum);
+        return true;
+    }
 
-                if (useScaleFactors) {
-                    // get the index of this scaling buffer
-                    int n = nodeNum - tipCount;
+    @Override
+    final protected void prepareTips() {
+        if (tipStatesModel != null) {
+            int tipCount = treeModel.getExternalNodeCount();
+            for (int index = 0; index < tipCount; index++) {
+                if (updateNode[index]) {
+                    if (tipStatesModel.getModelType() == TipStatesModel.Type.PARTIALS) {
+                        tipStatesModel.getTipPartials(index, tipPartials);
+                        beagle.setTipPartials(index, tipPartials);
+                    } else {
+                        tipStatesModel.getTipStates(index, tipStates);
+                        beagle.setTipStates(index, tipStates);
+                    }
+                }
+            }
+        }
+    }
 
-                    if (recomputeScaleFactors) {
-                        // flip the indicator: can take either n or (internalNodeCount + 1) - n
-                        scaleBufferHelper.flipOffset(n);
+    @Override
+    final protected void prepareScaling() {
 
-                        // store the index
-                        scaleBufferIndices[n] = scaleBufferHelper.getOffsetIndex(n);
+        recomputeScaleFactors = false;
 
-                        operations[x + 1] = scaleBufferIndices[n]; // Write new scaleFactor
-                        operations[x + 2] = Beagle.NONE;
+        if (this.rescalingScheme == PartialsRescalingScheme.ALWAYS) {
+            useScaleFactors = true;
+            recomputeScaleFactors = true;
+        } else if (this.rescalingScheme == PartialsRescalingScheme.DYNAMIC && everUnderflowed) {
+            useScaleFactors = true;
+            if (rescalingCountInner < RESCALE_TIMES) {
+                recomputeScaleFactors = true;
+                makeDirty();
+                //                System.err.println("Recomputing scale factors");
+            }
 
-                    } else {
-                        operations[x + 1] = Beagle.NONE;
-                        operations[x + 2] = scaleBufferIndices[n]; // Read existing scaleFactor
-                    }
+            rescalingCountInner++;
+            rescalingCount++;
+            if (rescalingCount > rescalingFrequency) {
+                rescalingCount = 0;
+                rescalingCountInner = 0;
+            }
+        } else if (this.rescalingScheme == PartialsRescalingScheme.DELAYED && everUnderflowed) {
+            useScaleFactors = true;
+            recomputeScaleFactors = true;
+            rescalingCount++;
+        }
+    }
 
-                } else {
+    @Override
+    final protected void handleRestrictedPartials(final int nodeNum) {
+        if (hasRestrictedPartials) {
+            // Test if this set of partials should be restricted
+            if (updateRestrictedNodePartials) {
+                // Recompute map
+                computeNodeToRestrictionMap();
+                updateRestrictedNodePartials = false;
+            }
+            if (partialsMap[nodeNum] != null) {
 
-                    if (useAutoScaling) {
-                        scaleBufferIndices[nodeNum - tipCount] = partialBufferHelper.getOffsetIndex(nodeNum);
-                    }
-                    operations[x + 1] = Beagle.NONE; // Not using scaleFactors
-                    operations[x + 2] = Beagle.NONE;
-                }
+            }
+        }
+    }
 
-                operations[x + 3] = partialBufferHelper.getOffsetIndex(child1.getNumber()); // source node 1
-                operations[x + 4] = substitutionModelDelegate.getMatrixIndex(child1.getNumber()); // source matrix 1
-                operations[x + 5] = partialBufferHelper.getOffsetIndex(child2.getNumber()); // source node 2
-                operations[x + 6] = substitutionModelDelegate.getMatrixIndex(child2.getNumber()); // source matrix 2
+    @Override
+    final protected void updateRootInformation() {
+        double[] categoryWeights = this.siteRateModel.getCategoryProportions();
 
-                operationCount[operationListCount]++;
+        // This should probably explicitly be the state frequencies for the root node...
+        double[] frequencies = substitutionModelDelegate.getRootStateFrequencies();
 
-                update = true;
+        // these could be set only when they change but store/restore would need to be considered
+        beagle.setCategoryWeights(0, categoryWeights);
+        beagle.setStateFrequencies(0, frequencies);
+    }
 
-                if (hasRestrictedPartials) {
-                    // Test if this set of partials should be restricted
-                    if (updateRestrictedNodePartials) {
-                        // Recompute map
-                        computeNodeToRestrictionMap();
-                        updateRestrictedNodePartials = false;
-                    }
-                    if (partialsMap[nodeNum] != null) {
+    @Override
+    final protected void updateSiteModelAction() {
+        double[] categoryRates = this.siteRateModel.getCategoryRates();
+        beagle.setCategoryRates(categoryRates);
+    }
 
-                    }
-                }
+    @Override
+    final protected double computedAscertainedLogLikelihood() {
+        beagle.getSiteLogLikelihoods(patternLogLikelihoods);
+        double logL = getAscertainmentCorrectedLogLikelihood((AscertainedSitePatterns) patternList,
+                patternLogLikelihoods, patternWeights);
+        return logL;
+    }
 
+    @Override
+    final protected boolean doRescalingNow(boolean firstRescaleAttempt) {
+        if (firstRescaleAttempt && (rescalingScheme == PartialsRescalingScheme.DYNAMIC || rescalingScheme == PartialsRescalingScheme.DELAYED)) {
+            // we have had a potential under/over flow so attempt a rescaling
+            if (rescalingScheme == PartialsRescalingScheme.DYNAMIC || (rescalingCount == 0)) {
+                Logger.getLogger("dr.evomodel").info("Underflow calculating likelihood. Attempting a rescaling...");
             }
-        }
-
-        return update;
+            useScaleFactors = true;
+            recomputeScaleFactors = true;
 
+            return true;
+        } else {
+            return false;
+        }
     }
 
     // **************************************************************
     // INSTANCE VARIABLES
     // **************************************************************
 
-    private int[] branchUpdateIndices;
-    private double[] branchLengths;
-    private int branchUpdateCount;
 
+    // Member variables for rescaling
     private int[] scaleBufferIndices;
     private int[] storedScaleBufferIndices;
 
-    private int[][] operations;
-    private int operationListCount;
-    private int[] operationCount;
-    //    private final boolean hasRestrictedPartials;
-    private static final boolean hasRestrictedPartials = false;
-
-    private final int numRestrictedPartials;
-    private final Map<Set<String>, Parameter> partialsRestrictions;
-    private Parameter[] partialsMap;
-    private double[] partials;
-    private boolean updateRestrictedNodePartials;
-//    private int[] restrictedIndices;
-
-    protected BufferIndexHelper partialBufferHelper;
     protected BufferIndexHelper scaleBufferHelper;
 
-    protected final int tipCount;
-    protected final int internalNodeCount;
-
     private PartialsRescalingScheme rescalingScheme;
     private int rescalingFrequency = RESCALE_FREQUENCY;
 
     protected boolean useScaleFactors = false;
     private boolean useAutoScaling = false;
     private boolean recomputeScaleFactors = false;
-    private boolean everUnderflowed = false;
     private int rescalingCount = 0;
     private int rescalingCountInner = 0;
-//    private int storedRescalingCount;
 
     /**
      * the branch-site model for these sites
@@ -1162,7 +997,7 @@ public class NewBeagleTreeLikelihood extends AbstractSinglePartitionTreeLikeliho
     /**
      * the site model for these sites
      */
-    protected final SiteModel siteModel;
+    protected final SiteRateModel siteRateModel;
 
     /**
      * the branch rate model
@@ -1195,30 +1030,119 @@ public class NewBeagleTreeLikelihood extends AbstractSinglePartitionTreeLikeliho
     protected int[] tipStates;
 
     /**
-     * the BEAGLE library instance
+     * Flag to specify if ambiguity codes are in use
      */
-    protected Beagle beagle;
+    protected final boolean useAmbiguities;
 
-    /**
-     * Flag to specify that the substitution model has changed
-     */
-    protected boolean updateSubstitutionModel;
+    public static void main(String[] args) {
 
-    /**
-     * Flag to specify that the site model has changed
-     */
-    protected boolean updateSiteModel;
+        try {
 
-    /**
-     * Flag to specify if site patterns are acertained
-     */
+            MathUtils.setSeed(666);
 
-    private boolean ascertainedSitePatterns = false;
+            System.out.println("Test case 1: simulateOnePartition");
 
+            int sequenceLength = 1000;
+            ArrayList<Partition> partitionsList = new ArrayList<Partition>();
 
-    /***
-     * Flag to specify if ambiguity codes are in use
-     */
-    protected final boolean useAmbiguities;
+            // create tree
+            NewickImporter importer = new NewickImporter(
+                    "(SimSeq1:73.7468,(SimSeq2:25.256989999999995,SimSeq3:45.256989999999995):18.48981);");
+            Tree tree = importer.importTree(null);
+            TreeModel treeModel = new TreeModel(tree);
+
+            // create Frequency Model
+            Parameter freqs = new Parameter.Default(new double[]{0.25, 0.25,
+                    0.25, 0.25});
+            FrequencyModel freqModel = new FrequencyModel(Nucleotides.INSTANCE,
+                    freqs);
+
+            // create branch model
+            Parameter kappa1 = new Parameter.Default(1, 1);
+            Parameter kappa2 = new Parameter.Default(1, 1);
+
+            HKY hky1 = new HKY(kappa1, freqModel);
+            HKY hky2 = new HKY(kappa2, freqModel);
+
+            HomogeneousBranchModel homogenousBranchSubstitutionModel = new HomogeneousBranchModel(
+                    hky1);
+
+            List<SubstitutionModel> substitutionModels = new ArrayList<SubstitutionModel>();
+            substitutionModels.add(hky1);
+            substitutionModels.add(hky2);
+            List<FrequencyModel> freqModels = new ArrayList<FrequencyModel>();
+            freqModels.add(freqModel);
+
+            Parameter epochTimes = new Parameter.Default(1, 20);
+
+            // create branch rate model
+            Parameter rate = new Parameter.Default(1, 0.001);
+            BranchRateModel branchRateModel = new StrictClockBranchRates(rate);
+
+            // create site model
+            GammaSiteRateModel siteRateModel = new GammaSiteRateModel(
+                    "siteModel");
+
+            BranchModel homogeneousBranchModel = new HomogeneousBranchModel(hky1);
+
+            BranchModel epochBranchModel = new EpochBranchModel(treeModel, substitutionModels, epochTimes);
+
+            // create partition
+            Partition partition1 = new Partition(treeModel, //
+                    homogenousBranchSubstitutionModel,//
+                    siteRateModel, //
+                    branchRateModel, //
+                    freqModel, //
+                    0, // from
+                    sequenceLength - 1, // to
+                    1 // every
+            );
+
+            partitionsList.add(partition1);
+
+            // feed to sequence simulator and generate data
+            BeagleSequenceSimulator simulator = new BeagleSequenceSimulator(partitionsList
+//            		, sequenceLength
+            );
+            Alignment alignment = simulator.simulate(false, false);
+
+            BeagleTreeLikelihood nbtl = new BeagleTreeLikelihood(alignment, treeModel, homogeneousBranchModel, siteRateModel, branchRateModel, null, false, PartialsRescalingScheme.DEFAULT);
+
+            System.out.println("nBTL(homogeneous) = " + nbtl.getLogLikelihood());
+
+            nbtl = new BeagleTreeLikelihood(alignment, treeModel, epochBranchModel, siteRateModel, branchRateModel, null, false, PartialsRescalingScheme.DEFAULT);
+
+            System.out.println("nBTL(epoch) = " + nbtl.getLogLikelihood());
+
+        } catch (Exception e) {
+            e.printStackTrace();
+            System.exit(-1);
+        } // END: try-catch block
+    }
+
+    public Double getUpdateTimer() {
+        return Double.valueOf(substitutionModelDelegate.updateTime);
+    }
+
+    public Double getConvolveTimer() {
+        return Double.valueOf(substitutionModelDelegate.convolveTime);
+    }
+
+    public void getLogScalingFactors(int nodeIndex, double[] buffer) {
+        if (nodeIndex < tipCount) {
+            Arrays.fill(buffer, 0.0);
+        } else {
+//            final int scaleIndex = scaleBufferHelper.getOffsetIndex(nodeIndex - tipCount);
+            final int scaleIndex = scaleBufferIndices[nodeIndex - tipCount];
+            beagle.getLogScaleFactors(scaleIndex, buffer);
+        }
+    }
+
+    public double[] getSiteLogLikelihoods() {
+        getLogLikelihood();
+        double[] siteLogLikelihoods = new double[patternCount];
+        beagle.getSiteLogLikelihoods(siteLogLikelihoods);
+        return siteLogLikelihoods;
+    }
 
 }//END: class
\ No newline at end of file
diff --git a/src/dr/app/beagle/evomodel/newtreelikelihood/NewFullyConjugateTraitLikelihood.java b/src/dr/app/beagle/evomodel/newtreelikelihood/NewFullyConjugateTraitLikelihood.java
new file mode 100644
index 0000000..df1acf5
--- /dev/null
+++ b/src/dr/app/beagle/evomodel/newtreelikelihood/NewFullyConjugateTraitLikelihood.java
@@ -0,0 +1,105 @@
+/*
+ * NewFullyConjugateTraitLikelihood.java
+ *
+ * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
+ *
+ * This file is part of BEAST.
+ * See the NOTICE file distributed with this work for additional
+ * information regarding copyright ownership and licensing.
+ *
+ * BEAST is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+ *
+ *  BEAST is distributed in the hope that it will be useful,
+ *  but WITHOUT ANY WARRANTY; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ *  GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with BEAST; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA  02110-1301  USA
+ */
+
+package dr.app.beagle.evomodel.newtreelikelihood;
+
+import dr.app.beagle.evomodel.parsers.BeagleTreeLikelihoodParser;
+import dr.evomodel.branchratemodel.BranchRateModel;
+import dr.evomodel.continuous.MultivariateDiffusionModel;
+import dr.evomodel.tree.TreeModel;
+import dr.inference.model.CompoundParameter;
+import dr.inference.model.Model;
+import dr.inference.model.Parameter;
+import dr.inference.model.ThreadAwareLikelihood;
+import dr.math.distributions.WishartSufficientStatistics;
+import dr.math.interfaces.ConjugateWishartStatisticsProvider;
+import dr.xml.Reportable;
+
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * @author Marc A. Suchard
+ */
+public class NewFullyConjugateTraitLikelihood extends NewAbstractLikelihoodOnTree implements ThreadAwareLikelihood,
+        ConjugateWishartStatisticsProvider, Reportable {
+
+    public NewFullyConjugateTraitLikelihood(String traitName,
+                                            TreeModel treeModel,
+                                            MultivariateDiffusionModel diffusionModel,
+                                            CompoundParameter traitParameter,
+                                            Parameter deltaParameter,
+                                            List<Integer> missingIndices,
+                                            boolean cacheBranches,
+                                            boolean scaleByTime,
+                                            boolean useTreeLength,
+                                            BranchRateModel diffusionRateModel,
+                                            List<BranchRateModel> driftRateModels,
+                                            BranchRateModel selectionRateModel,
+                                            Model samplingDensity,
+                                            boolean reportAsMultivariate,
+                                            double[] rootPriorMean,
+                                            double rootPriorSampleSize,
+                                            boolean reciprocalRates,
+
+
+                                            String name, Map<Set<String>, Parameter> partialsRestrictions) {
+        super(BeagleTreeLikelihoodParser.TREE_LIKELIHOOD, // TODO Change
+                treeModel, partialsRestrictions);
+
+        this.diffusionModel = diffusionModel;
+        this.traitParameter = traitParameter;
+        this.diffusionRateModel = diffusionRateModel;
+        this.driftRateModels = driftRateModels;
+        this.selectionRateModel = selectionRateModel;
+
+        this.rootPriorMean = rootPriorMean;
+        this.rootPriorSampleSize = rootPriorSampleSize;
+    }
+
+    @Override
+    public WishartSufficientStatistics getWishartStatistics() {
+        computeWishartStatistics = true;
+        calculateLogLikelihood();
+        computeWishartStatistics = false;
+        return wishartStatistics;
+    }
+
+    final protected MultivariateDiffusionModel diffusionModel;
+    final protected CompoundParameter traitParameter;
+    final protected BranchRateModel diffusionRateModel;
+    final protected List<BranchRateModel> driftRateModels;
+    final protected BranchRateModel selectionRateModel;
+
+    protected boolean computeWishartStatistics = false;
+    protected WishartSufficientStatistics wishartStatistics;
+
+    // Fully-conjugate-specific
+    final double[] rootPriorMean;
+    final double rootPriorSampleSize;
+
+
+}
diff --git a/src/dr/app/beagle/evomodel/parsers/AncestralStateTreeLikelihoodParser.java b/src/dr/app/beagle/evomodel/parsers/AncestralStateTreeLikelihoodParser.java
index 8dcf49b..ded410e 100644
--- a/src/dr/app/beagle/evomodel/parsers/AncestralStateTreeLikelihoodParser.java
+++ b/src/dr/app/beagle/evomodel/parsers/AncestralStateTreeLikelihoodParser.java
@@ -33,6 +33,7 @@ import dr.app.beagle.evomodel.treelikelihood.AncestralStateBeagleTreeLikelihood;
 import dr.app.beagle.evomodel.treelikelihood.BeagleTreeLikelihood;
 import dr.app.beagle.evomodel.treelikelihood.PartialsRescalingScheme;
 import dr.evolution.alignment.PatternList;
+import dr.evolution.alignment.SiteList;
 import dr.evolution.datatype.DataType;
 import dr.evolution.util.TaxonList;
 import dr.evomodel.branchratemodel.BranchRateModel;
@@ -62,24 +63,24 @@ public class AncestralStateTreeLikelihoodParser extends BeagleTreeLikelihoodPars
         return RECONSTRUCTING_TREE_LIKELIHOOD;
     }
 
-	protected BeagleTreeLikelihood createTreeLikelihood(
-			PatternList patternList, //
-			TreeModel treeModel, //
-			BranchModel branchModel, //
-			GammaSiteRateModel siteRateModel, //
-			BranchRateModel branchRateModel, //
-			TipStatesModel tipStatesModel, //
-			boolean useAmbiguities, //
-			PartialsRescalingScheme scalingScheme, //
-			Map<Set<String>, //
-			Parameter> partialsRestrictions, //
-			XMLObject xo //
-	) throws XMLParseException {
-
-		
+    protected BeagleTreeLikelihood createTreeLikelihood(
+            PatternList patternList, //
+            TreeModel treeModel, //
+            BranchModel branchModel, //
+            GammaSiteRateModel siteRateModel, //
+            BranchRateModel branchRateModel, //
+            TipStatesModel tipStatesModel, //
+            boolean useAmbiguities, //
+            PartialsRescalingScheme scalingScheme, //
+            Map<Set<String>, //
+                    Parameter> partialsRestrictions, //
+            XMLObject xo //
+    ) throws XMLParseException {
+
+
 //		System.err.println("XML object: " + xo.toString());
-	
-		DataType dataType = branchModel.getRootSubstitutionModel().getDataType();
+
+        DataType dataType = branchModel.getRootSubstitutionModel().getDataType();
 
         // default tag is RECONSTRUCTION_TAG
         String tag = xo.getAttribute(RECONSTRUCTION_TAG_NAME, RECONSTRUCTION_TAG);
@@ -87,6 +88,10 @@ public class AncestralStateTreeLikelihoodParser extends BeagleTreeLikelihoodPars
         boolean useMAP = xo.getAttribute(MAP_RECONSTRUCTION, false);
         boolean useMarginalLogLikelihood = xo.getAttribute(MARGINAL_LIKELIHOOD, true);
 
+        if (patternList.areUnique()) {
+            throw new XMLParseException("Ancestral state reconstruction cannot be used with compressed (unique) patterns.");
+        }
+
         return new AncestralStateBeagleTreeLikelihood(  // Current just returns a OldBeagleTreeLikelihood
                 patternList,
                 treeModel,
@@ -120,7 +125,7 @@ public class AncestralStateTreeLikelihoodParser extends BeagleTreeLikelihoodPars
                         new ElementRule(TaxonList.class),
                         new ElementRule(Parameter.class),
                 }, true),
-            new ElementRule(FrequencyModel.class, true),
+                new ElementRule(FrequencyModel.class, true),
         };
     }
 }
diff --git a/src/dr/app/beagle/evomodel/parsers/FrequencyModelParser.java b/src/dr/app/beagle/evomodel/parsers/FrequencyModelParser.java
index 3fae8b9..54ce9fb 100644
--- a/src/dr/app/beagle/evomodel/parsers/FrequencyModelParser.java
+++ b/src/dr/app/beagle/evomodel/parsers/FrequencyModelParser.java
@@ -26,14 +26,21 @@
 package dr.app.beagle.evomodel.parsers;
 
 import dr.app.beagle.evomodel.substmodel.FrequencyModel;
+import dr.app.bss.Utils;
 import dr.evolution.alignment.PatternList;
+import dr.evolution.datatype.Codons;
 import dr.evolution.datatype.DataType;
+import dr.evolution.datatype.GeneticCode;
 import dr.evolution.datatype.HiddenDataType;
+import dr.evolution.datatype.Nucleotides;
 import dr.evoxml.util.DataTypeUtils;
 import dr.inference.model.Parameter;
 import dr.xml.*;
 
 import java.text.NumberFormat;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
 import java.util.logging.Logger;
 
 /**
@@ -42,110 +49,247 @@ import java.util.logging.Logger;
  */
 public class FrequencyModelParser extends AbstractXMLObjectParser {
 
-    public static final String FREQUENCIES = "frequencies";
-    public static final String FREQUENCY_MODEL = "frequencyModel";
-    public static final String NORMALIZE = "normalize";
-    public static final String COMPRESS = "compress";
-
-    public String getParserName() {
-        return FREQUENCY_MODEL;
-    }
-
-    public Object parseXMLObject(XMLObject xo) throws XMLParseException {
-
-        DataType dataType = DataTypeUtils.getDataType(xo);
-
-        Parameter freqsParam = (Parameter) xo.getElementFirstChild(FREQUENCIES);
-        double[] frequencies = null;
-
-        for (int i = 0; i < xo.getChildCount(); i++) {
-            Object obj = xo.getChild(i);
-            if (obj instanceof PatternList) {
-                PatternList patternList = (PatternList) obj;
-                if (xo.getAttribute(COMPRESS, false) && (patternList.getDataType() instanceof HiddenDataType)) {
-                    double[] hiddenFrequencies = patternList.getStateFrequencies();
-                    int hiddenCount = ((HiddenDataType) patternList.getDataType()).getHiddenClassCount();
-                    int baseStateCount = hiddenFrequencies.length / hiddenCount;
-                    frequencies = new double[baseStateCount];
-                    for (int j = 0; j < baseStateCount; ++j) {
-                        for (int k = 0; k < hiddenCount; ++k) {
-                            frequencies[j] += hiddenFrequencies[j + k * baseStateCount];
-                        }
-                    }
-                } else {
-                    frequencies = patternList.getStateFrequencies();
-                }
-                break;
-            }
-        }
-
-        StringBuilder sb = new StringBuilder("Creating state frequencies model '" + freqsParam.getParameterName() + "': ");
-        if (frequencies != null) {
-            if (freqsParam.getDimension() != frequencies.length) {
-                throw new XMLParseException("dimension of frequency parameter and number of sequence states don't match.");
-            }
-            for (int j = 0; j < frequencies.length; j++) {
-                freqsParam.setParameterValue(j, frequencies[j]);
-            }
-            sb.append("Using empirical frequencies from data ");
-        } else {
-            sb.append("Initial frequencies ");
-        }
-        sb.append("= {");
-
-        if (xo.getAttribute(NORMALIZE, false)) {
-            double sum = 0;
-            for (int j = 0; j < freqsParam.getDimension(); j++)
-                sum += freqsParam.getParameterValue(j);
-            for (int j = 0; j < freqsParam.getDimension(); j++) {
-                if (sum != 0)
-                    freqsParam.setParameterValue(j, freqsParam.getParameterValue(j) / sum);
-                else
-                    freqsParam.setParameterValue(j, 1.0 / freqsParam.getDimension());
-            }
-        }
-
-        NumberFormat format = NumberFormat.getNumberInstance();
-        format.setMaximumFractionDigits(5);
-
-        sb.append(format.format(freqsParam.getParameterValue(0)));
-        for (int j = 1; j < freqsParam.getDimension(); j++) {
-            sb.append(", ");
-            sb.append(format.format(freqsParam.getParameterValue(j)));
-        }
-        sb.append("}");
-        Logger.getLogger("dr.evomodel").info(sb.toString());
-
-        return new FrequencyModel(dataType, freqsParam);
-    }
-
-    public String getParserDescription() {
-        return "A model of equilibrium base frequencies.";
-    }
-
-    public Class getReturnType() {
-        return FrequencyModel.class;
-    }
-
-    public XMLSyntaxRule[] getSyntaxRules() {
-        return rules;
-    }
-
-    private final XMLSyntaxRule[] rules = {
-            AttributeRule.newBooleanRule(NORMALIZE, true),
-            AttributeRule.newBooleanRule(COMPRESS, true),
-
-            new ElementRule(PatternList.class, "Initial value", 0, 1),
-
-            new XORRule(
-                    new StringAttributeRule(DataType.DATA_TYPE, "The type of sequence data",
-                            DataType.getRegisteredDataTypeNames(), false),
-                    new ElementRule(DataType.class)
-            ),
-
-            new ElementRule(FREQUENCIES,
-                    new XMLSyntaxRule[]{new ElementRule(Parameter.class)}),
-
-    };
+	public static final String FREQUENCIES = "frequencies";
+	public static final String FREQUENCY_MODEL = "frequencyModel";
+	public static final String NORMALIZE = "normalize";
+	public static final String COMPRESS = "compress";
+
+	public static final String COMPOSITION = "composition";
+	public static final String FREQ_3x4 = "3x4";
+	public static final String[] COMPOSITION_TYPES = new String[] { FREQ_3x4 };
+
+	public String getParserName() {
+		return FREQUENCY_MODEL;
+	}
+
+	public Object parseXMLObject(XMLObject xo) throws XMLParseException {
+
+		DataType dataType = DataTypeUtils.getDataType(xo);
+
+		Parameter freqsParam = (Parameter) xo.getElementFirstChild(FREQUENCIES);
+		double[] frequencies = null;
+
+		for (int i = 0; i < xo.getChildCount(); i++) {
+			Object obj = xo.getChild(i);
+			if (obj instanceof PatternList) {
+
+				PatternList patternList = (PatternList) obj;
+				if (xo.getAttribute(COMPRESS, false)
+						&& (patternList.getDataType() instanceof HiddenDataType)) {
+
+					double[] hiddenFrequencies = patternList
+							.getStateFrequencies();
+					int hiddenCount = ((HiddenDataType) patternList
+							.getDataType()).getHiddenClassCount();
+					int baseStateCount = hiddenFrequencies.length / hiddenCount;
+					frequencies = new double[baseStateCount];
+					for (int j = 0; j < baseStateCount; ++j) {
+						for (int k = 0; k < hiddenCount; ++k) {
+							frequencies[j] += hiddenFrequencies[j + k
+									* baseStateCount];
+						}
+					}
+
+				} else {
+
+					// TODO
+					if (xo.hasAttribute(COMPOSITION)) {
+
+						String type = xo.getStringAttribute(COMPOSITION);
+						if (type.equalsIgnoreCase(FREQ_3x4)) {
+
+							frequencies = getEmpirical3x4Freqs(patternList);
+
+						}
+
+					} else {
+
+					frequencies = patternList.getStateFrequencies();
+					
+					} // END: composition check
+					
+				}
+				break;
+			}// END: patternList check
+		}
+
+		StringBuilder sb = new StringBuilder(
+				"Creating state frequencies model '"
+						+ freqsParam.getParameterName() + "': ");
+		if (frequencies != null) {
+
+			if (freqsParam.getDimension() != frequencies.length) {
+				throw new XMLParseException(
+						"dimension of frequency parameter and number of sequence states don't match.");
+			}
+
+			for (int j = 0; j < frequencies.length; j++) {
+				freqsParam.setParameterValue(j, frequencies[j]);
+			}
+
+			sb.append("Using empirical frequencies from data ");
+
+		} else {
+			sb.append("Initial frequencies ");
+		}
+		sb.append("= {");
+
+		if (xo.getAttribute(NORMALIZE, false)) {
+			double sum = 0;
+			for (int j = 0; j < freqsParam.getDimension(); j++)
+				sum += freqsParam.getParameterValue(j);
+			for (int j = 0; j < freqsParam.getDimension(); j++) {
+				if (sum != 0)
+					freqsParam.setParameterValue(j,
+							freqsParam.getParameterValue(j) / sum);
+				else
+					freqsParam.setParameterValue(j,
+							1.0 / freqsParam.getDimension());
+			}
+		}
+
+		NumberFormat format = NumberFormat.getNumberInstance();
+		format.setMaximumFractionDigits(5);
+
+		sb.append(format.format(freqsParam.getParameterValue(0)));
+		for (int j = 1; j < freqsParam.getDimension(); j++) {
+			sb.append(", ");
+			sb.append(format.format(freqsParam.getParameterValue(j)));
+		}
+		sb.append("}");
+		Logger.getLogger("dr.evomodel").info(sb.toString());
+
+		return new FrequencyModel(dataType, freqsParam);
+	}// END: parseXMLObject
+
+	private double[] getEmpirical3x4Freqs(PatternList patternList) {
+
+		DataType nucleotideDataType = Nucleotides.INSTANCE;
+		Codons codonDataType = Codons.UNIVERSAL; 
+        List<String> stopCodonsList =  Arrays.asList(Utils.STOP_CODONS);
+		
+		int cStateCount = codonDataType.getStateCount(); 
+		int nStateCount = nucleotideDataType.getStateCount(); 
+		int npos = 3;  
+		
+		double[] stopCodonFreqs = new double[Utils.STOP_CODONS.length];
+		double counts[][] = new double[nStateCount][npos];
+		int countsPos[] = new int[npos];
+		
+		int patternCount = patternList.getPatternCount();
+		for (int i = 0; i < patternCount; i++) {
+			
+			int[] sitePatterns = patternList.getPattern(i);
+			for (int k = 0; k < sitePatterns.length; k++) {
+			
+				int codonState = sitePatterns[k];
+				int[] nucleotideStates = codonDataType.getTripletStates(codonState);
+				
+				String triplet = codonDataType.getTriplet(codonState);
+				
+//				if(triplet.equals("TAA")) {
+//					System.err.println(triplet);
+//				}
+				
+				if(stopCodonsList.contains(triplet)) {
+					
+					int stopCodonIndex = stopCodonsList.indexOf(triplet);
+					stopCodonFreqs[stopCodonIndex]++;
+					
+				}//END: stopCodon check
+				
+				for(int pos = 0; pos < npos; pos++) {
+					
+					int nucleotideState = nucleotideStates[pos];
+					counts[nucleotideState][pos]++;
+					countsPos[pos]++;
+					
+				}//END: nucleotide positions loop
+				
+			}//END: sitePatterns loop
+		}// sites loop
+		
+		int total = 0;
+		for (int pos = 0; pos < npos; pos++) {
+
+			int totalPos = countsPos[pos];
+			for (int s = 0; s < nStateCount; s++) {
+				counts[s][pos] = counts[s][pos] / totalPos;
+			}//END: nucleotide states loop
+			
+			total += totalPos;
+		}//END: nucleotide positions loop
+
+//		Utils.printArray(stopCodonFreqs);
+//		System.out.println(stopCodonFreqs.length);
+		
+		// add stop codon frequencies
+		double pi = 0.0;
+		for(int i=0;i<stopCodonFreqs.length;i++) {
+			double freq = stopCodonFreqs[i] / total;
+			pi += freq;
+		}
+		
+//		System.out.println(pi);
+		
+		double[] freqs = new double[cStateCount];
+		Arrays.fill(freqs, 1.0);
+		for (int codonState = 0; codonState < cStateCount; codonState++) {
+			
+			int[] nucleotideStates = codonDataType.getTripletStates(codonState);
+			for (int pos = 0; pos < npos; pos++) {
+				
+				int nucleotide = nucleotideStates[pos];
+				freqs[codonState] *= counts[nucleotide][pos];
+				
+			}// END: nucleotide positions loop
+			
+			// TODO: stop codons freqs
+			freqs[codonState] = freqs[codonState] / (1-pi);
+			
+		}//END: codon states loop
+		
+		
+		
+		
+		
+		
+		
+		
+		return freqs;
+	}// END: getEmpirical3x4Freqs
+
+
+	public String getParserDescription() {
+		return "A model of equilibrium base frequencies.";
+	}
+
+	public Class getReturnType() {
+		return FrequencyModel.class;
+	}
+
+	public XMLSyntaxRule[] getSyntaxRules() {
+		return rules;
+	}
+
+	private final XMLSyntaxRule[] rules = {
+
+			new StringAttributeRule(COMPOSITION, "Composition type",
+					COMPOSITION_TYPES, true),
+
+			AttributeRule.newBooleanRule(NORMALIZE, true),
+			AttributeRule.newBooleanRule(COMPRESS, true),
+
+			new ElementRule(PatternList.class, "Initial value", 0, 1),
+
+			new XORRule(new StringAttributeRule(DataType.DATA_TYPE,
+					"The type of sequence data",
+					DataType.getRegisteredDataTypeNames(), false),
+					new ElementRule(DataType.class)),
+
+			new ElementRule(FREQUENCIES, new XMLSyntaxRule[] { new ElementRule(
+					Parameter.class) }),
+
+	};
 
 }
\ No newline at end of file
diff --git a/src/dr/app/beagle/evomodel/parsers/MarkovJumpsTreeLikelihoodParser.java b/src/dr/app/beagle/evomodel/parsers/MarkovJumpsTreeLikelihoodParser.java
index 70592f1..d5102c1 100644
--- a/src/dr/app/beagle/evomodel/parsers/MarkovJumpsTreeLikelihoodParser.java
+++ b/src/dr/app/beagle/evomodel/parsers/MarkovJumpsTreeLikelihoodParser.java
@@ -93,6 +93,10 @@ public class MarkovJumpsTreeLikelihoodParser extends AncestralStateTreeLikelihoo
         boolean reportUnconditionedColumns = xo.getAttribute(REPORT_UNCONDITIONED_COLUMNS, false);
         int nSimulants = xo.getAttribute(NUMBER_OF_SIMULANTS, 1);
 
+        if (patternList.areUnique()) {
+            throw new XMLParseException("Markov Jumps reconstruction cannot be used with compressed (unique) patterns.");
+        }
+
         MarkovJumpsBeagleTreeLikelihood treeLikelihood = new MarkovJumpsBeagleTreeLikelihood(
                 patternList,
                 treeModel,
diff --git a/src/dr/app/beagle/evomodel/parsers/NewBeagleTreeLikelihoodParser.java b/src/dr/app/beagle/evomodel/parsers/NewBeagleTreeLikelihoodParser.java
new file mode 100644
index 0000000..c20e86d
--- /dev/null
+++ b/src/dr/app/beagle/evomodel/parsers/NewBeagleTreeLikelihoodParser.java
@@ -0,0 +1,232 @@
+/*
+ * NewBeagleTreeLikelihoodParser.java
+ *
+ * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
+ *
+ * This file is part of BEAST.
+ * See the NOTICE file distributed with this work for additional
+ * information regarding copyright ownership and licensing.
+ *
+ * BEAST is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+ *
+ *  BEAST is distributed in the hope that it will be useful,
+ *  but WITHOUT ANY WARRANTY; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ *  GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with BEAST; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA  02110-1301  USA
+ */
+
+package dr.app.beagle.evomodel.parsers;
+
+import dr.app.beagle.evomodel.branchmodel.BranchModel;
+import dr.app.beagle.evomodel.branchmodel.HomogeneousBranchModel;
+import dr.app.beagle.evomodel.newtreelikelihood.NewBeagleSequenceLikelihood;
+import dr.app.beagle.evomodel.sitemodel.GammaSiteRateModel;
+import dr.app.beagle.evomodel.substmodel.FrequencyModel;
+import dr.app.beagle.evomodel.substmodel.SubstitutionModel;
+import dr.app.beagle.evomodel.treelikelihood.PartialsRescalingScheme;
+import dr.evolution.alignment.PatternList;
+import dr.evolution.alignment.Patterns;
+import dr.evolution.tree.Tree;
+import dr.evolution.util.TaxonList;
+import dr.evomodel.branchratemodel.BranchRateModel;
+import dr.evomodel.tree.TreeModel;
+import dr.evomodel.treelikelihood.TipStatesModel;
+import dr.inference.model.CompoundLikelihood;
+import dr.inference.model.Likelihood;
+import dr.inference.model.Parameter;
+import dr.xml.*;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * @author Marc Suchard
+ * @version $Id$
+ */
+public class NewBeagleTreeLikelihoodParser extends AbstractXMLObjectParser {
+
+    public static final String BEAGLE_INSTANCE_COUNT = "beagle.instance.count";
+
+    public static final String TREE_LIKELIHOOD = "newTreeLikelihood";
+    public static final String USE_AMBIGUITIES = "useAmbiguities";
+    public static final String INSTANCE_COUNT = "instanceCount";
+    public static final String SCALING_SCHEME = "scalingScheme";
+    public static final String PARTIALS_RESTRICTION = "partialsRestriction";
+
+    public String getParserName() {
+        return TREE_LIKELIHOOD;
+    }
+
+    protected NewBeagleSequenceLikelihood createTreeLikelihood(PatternList patternList, TreeModel treeModel,
+                                                        BranchModel branchModel,
+                                                        GammaSiteRateModel siteRateModel,
+                                                        BranchRateModel branchRateModel,
+                                                        TipStatesModel tipStatesModel,
+                                                        boolean useAmbiguities, PartialsRescalingScheme scalingScheme,
+                                                        Map<Set<String>, Parameter> partialsRestrictions,
+                                                        XMLObject xo) throws XMLParseException {
+        return new NewBeagleSequenceLikelihood(
+                patternList,
+                treeModel,
+                branchModel,
+                siteRateModel,
+                branchRateModel,
+                tipStatesModel,
+                useAmbiguities,
+                scalingScheme,
+                partialsRestrictions
+        );
+    }
+
+    public Object parseXMLObject(XMLObject xo) throws XMLParseException {
+
+        boolean useAmbiguities = xo.getAttribute(USE_AMBIGUITIES, false);
+        int instanceCount = xo.getAttribute(INSTANCE_COUNT, 1);
+        if (instanceCount < 1) {
+            instanceCount = 1;
+        }
+
+        String ic = System.getProperty(BEAGLE_INSTANCE_COUNT);
+        if (ic != null && ic.length() > 0) {
+            instanceCount = Integer.parseInt(ic);
+        }
+
+        PatternList patternList = (PatternList) xo.getChild(PatternList.class);
+        TreeModel treeModel = (TreeModel) xo.getChild(TreeModel.class);
+        GammaSiteRateModel siteRateModel = (GammaSiteRateModel) xo.getChild(GammaSiteRateModel.class);
+
+        FrequencyModel rootFreqModel = (FrequencyModel) xo.getChild(FrequencyModel.class);
+
+        BranchModel branchModel = (BranchModel) xo.getChild(BranchModel.class);
+        if (branchModel == null) {
+            SubstitutionModel substitutionModel = (SubstitutionModel) xo.getChild(SubstitutionModel.class);
+            if (substitutionModel == null) {
+                substitutionModel = siteRateModel.getSubstitutionModel();
+            }
+            if (substitutionModel == null) {
+                throw new XMLParseException("No substitution model available for TreeLikelihood: "+xo.getId());
+            }
+            branchModel = new HomogeneousBranchModel(substitutionModel, rootFreqModel);
+        }
+
+        BranchRateModel branchRateModel = (BranchRateModel) xo.getChild(BranchRateModel.class);
+
+        TipStatesModel tipStatesModel = (TipStatesModel) xo.getChild(TipStatesModel.class);
+
+        PartialsRescalingScheme scalingScheme = PartialsRescalingScheme.DEFAULT;
+        if (xo.hasAttribute(SCALING_SCHEME)) {
+            scalingScheme = PartialsRescalingScheme.parseFromString(xo.getStringAttribute(SCALING_SCHEME));
+            if (scalingScheme == null)
+                throw new XMLParseException("Unknown scaling scheme '"+xo.getStringAttribute(SCALING_SCHEME)+"' in "+
+                        "OldBeagleTreeLikelihood object '"+xo.getId());
+
+        }
+
+        Map<Set<String>, Parameter> partialsRestrictions = null;
+
+        if (xo.hasChildNamed(PARTIALS_RESTRICTION)) {
+            XMLObject cxo = xo.getChild(PARTIALS_RESTRICTION);
+            TaxonList taxonList = (TaxonList) cxo.getChild(TaxonList.class);
+//            Parameter parameter = (Parameter) cxo.getChild(Parameter.class);
+            try {
+                Tree.Utils.getLeavesForTaxa(treeModel, taxonList);
+            } catch (Tree.MissingTaxonException e) {
+                throw new XMLParseException("Unable to parse taxon list: " + e.getMessage());
+            }
+            throw new XMLParseException("Restricting internal nodes is not yet implemented.  Contact Marc");
+
+        }
+
+
+        if (instanceCount == 1 || patternList.getPatternCount() < instanceCount) {
+            return createTreeLikelihood(
+                    patternList,
+                    treeModel,
+                    branchModel,
+                    siteRateModel,
+                    branchRateModel,
+                    tipStatesModel,
+                    useAmbiguities,
+                    scalingScheme,
+                    partialsRestrictions,
+                    xo
+            );
+        }
+
+        // using multiple instances of BEAGLE...
+
+//        if (!(patternList instanceof SitePatterns)) {
+//            throw new XMLParseException("BEAGLE_INSTANCES option cannot be used with BEAUti-selected codon partitioning.");
+//        }
+
+        if (tipStatesModel != null) {
+            throw new XMLParseException("BEAGLE_INSTANCES option cannot be used with a TipStateModel (i.e., a sequence error model).");
+        }
+
+        List<Likelihood> likelihoods = new ArrayList<Likelihood>();
+        for (int i = 0; i < instanceCount; i++) {
+
+            Patterns subPatterns = new Patterns(patternList, i, instanceCount);
+
+            NewBeagleSequenceLikelihood treeLikelihood = createTreeLikelihood(
+                    subPatterns,
+                    treeModel,
+                    branchModel,
+                    siteRateModel,
+                    branchRateModel,
+                    null,
+                    useAmbiguities,
+                    scalingScheme,
+                    partialsRestrictions,
+                    xo);
+            treeLikelihood.setId(xo.getId() + "_" + instanceCount);
+            likelihoods.add(treeLikelihood);
+        }
+
+        return new CompoundLikelihood(likelihoods);
+    }
+
+    //************************************************************************
+    // AbstractXMLObjectParser implementation
+    //************************************************************************
+
+    public String getParserDescription() {
+        return "This element represents the likelihood of a patternlist on a tree given the site model.";
+    }
+
+    public Class getReturnType() {
+        return Likelihood.class;
+    }
+
+    public static final XMLSyntaxRule[] rules = {
+            AttributeRule.newBooleanRule(USE_AMBIGUITIES, true),
+            new ElementRule(PatternList.class),
+            new ElementRule(TreeModel.class),
+            new ElementRule(GammaSiteRateModel.class),
+            new ElementRule(BranchModel.class, true),
+            new ElementRule(SubstitutionModel.class, true),
+            new ElementRule(BranchRateModel.class, true),
+            new ElementRule(TipStatesModel.class, true),
+            AttributeRule.newStringRule(SCALING_SCHEME,true),
+            new ElementRule(PARTIALS_RESTRICTION, new XMLSyntaxRule[] {
+                    new ElementRule(TaxonList.class),
+                    new ElementRule(Parameter.class),
+            }, true),
+            new ElementRule(TipStatesModel.class, true),
+            new ElementRule(FrequencyModel.class, true),
+    };
+
+    public XMLSyntaxRule[] getSyntaxRules() {
+        return rules;
+    }
+}
diff --git a/src/dr/app/beagle/evomodel/substmodel/ComplexSubstitutionModel.java b/src/dr/app/beagle/evomodel/substmodel/ComplexSubstitutionModel.java
index 81f1eb4..545c21f 100644
--- a/src/dr/app/beagle/evomodel/substmodel/ComplexSubstitutionModel.java
+++ b/src/dr/app/beagle/evomodel/substmodel/ComplexSubstitutionModel.java
@@ -34,7 +34,7 @@ import dr.inference.model.Model;
 import dr.inference.model.Parameter;
 import dr.math.matrixAlgebra.Vector;
 
-import java.util.Arrays;
+import java.util.*;
 
 /**
  * @author Marc Suchard
@@ -46,6 +46,28 @@ public class ComplexSubstitutionModel extends GeneralSubstitutionModel implement
         probability = new double[stateCount * stateCount];
     }
 
+    @Override
+    protected void setupDimensionNames(int relativeTo) {
+        List<String> rateNames = new ArrayList<String>();
+
+        String ratePrefix = ratesParameter.getParameterName();
+
+        for (int i = 0; i < dataType.getStateCount(); ++i) {
+            for (int j = i + 1; j < dataType.getStateCount(); ++j) {
+                rateNames.add(getDimensionString(i, j, ratePrefix));
+            }
+        }
+
+        for (int j = 0; j < dataType.getStateCount(); ++j) {
+            for (int i = j + 1; i < dataType.getStateCount(); ++i) {
+                rateNames.add(getDimensionString(i, j, ratePrefix));
+            }
+        }
+
+        String[] tmp = new String[0];
+        ratesParameter.setDimensionNames(rateNames.toArray(tmp));
+    }
+
     protected EigenSystem getDefaultEigenSystem(int stateCount) {
         return new ComplexColtEigenSystem(stateCount);
     }
@@ -211,6 +233,10 @@ public class ComplexSubstitutionModel extends GeneralSubstitutionModel implement
         System.err.println(new Vector(probability));
     }
 
+    @Override
+    public Set<Likelihood> getLikelihoodSet() {
+        return new HashSet<Likelihood>(Arrays.asList(this));
+    }
 
     @Override
     public boolean isUsed() {
diff --git a/src/dr/app/beagle/evomodel/substmodel/EmpiricalCodonModel.java b/src/dr/app/beagle/evomodel/substmodel/EmpiricalCodonModel.java
index 533d8bc..12f4cd3 100644
--- a/src/dr/app/beagle/evomodel/substmodel/EmpiricalCodonModel.java
+++ b/src/dr/app/beagle/evomodel/substmodel/EmpiricalCodonModel.java
@@ -31,10 +31,6 @@ import dr.evolution.datatype.AminoAcids;
 import dr.evolution.datatype.Codons;
 import dr.evolution.datatype.GeneticCode;
 import dr.evolution.datatype.Nucleotides;
-import dr.app.beagle.evomodel.substmodel.BaseSubstitutionModel;
-import dr.app.beagle.evomodel.substmodel.DefaultEigenSystem;
-import dr.app.beagle.evomodel.substmodel.EigenSystem;
-import dr.app.beagle.evomodel.substmodel.FrequencyModel;
 import dr.app.beagle.evomodel.parsers.EmpiricalCodonModelParser;
 import dr.inference.model.Parameter;
 
diff --git a/src/dr/app/beagle/evomodel/substmodel/GeneralSubstitutionModel.java b/src/dr/app/beagle/evomodel/substmodel/GeneralSubstitutionModel.java
index ecafcec..cee4c34 100644
--- a/src/dr/app/beagle/evomodel/substmodel/GeneralSubstitutionModel.java
+++ b/src/dr/app/beagle/evomodel/substmodel/GeneralSubstitutionModel.java
@@ -29,6 +29,9 @@ import dr.evolution.datatype.DataType;
 import dr.inference.model.Parameter;
 import dr.inference.model.DuplicatedParameter;
 
+import java.util.ArrayList;
+import java.util.List;
+
 /**
  * <b>A general model of sequence substitution</b>. A general reversible class for any
  * data type.
@@ -66,6 +69,8 @@ public class GeneralSubstitutionModel extends BaseSubstitutionModel {
             if (!(ratesParameter instanceof DuplicatedParameter))
                 ratesParameter.addBounds(new Parameter.DefaultBounds(Double.POSITIVE_INFINITY, 0.0,
                         ratesParameter.getDimension()));
+
+            setupDimensionNames(relativeTo);
         }
         setRatesRelativeTo(relativeTo);
     }
@@ -103,6 +108,35 @@ public class GeneralSubstitutionModel extends BaseSubstitutionModel {
         }
     }
 
+    protected void setupDimensionNames(int relativeTo) {
+        List<String> rateNames = new ArrayList<String>();
+
+        String ratePrefix = ratesParameter.getParameterName();
+
+        int index = 0;
+
+        for (int i = 0; i < dataType.getStateCount(); ++i) {
+            for (int j = i + 1; j < dataType.getStateCount(); ++j) {
+                if (index != relativeTo) {
+                    rateNames.add(getDimensionString(i, j, ratePrefix));
+                }
+            }
+            index++;
+        }
+
+        String[] tmp = new String[0];
+        ratesParameter.setDimensionNames(rateNames.toArray(tmp));
+    }
+
+    protected String getDimensionString(int i, int j, String prefix) {
+        String codes =  dataType.getCode(i) + "." + dataType.getCode(j);
+        if (prefix == null) {
+            return codes;
+        } else {
+            return prefix + "." + codes;
+        }
+    }
+
     /**
      * set which rate the others are relative to
      */
diff --git a/src/dr/app/beagle/evomodel/substmodel/MG94CodonModel.java b/src/dr/app/beagle/evomodel/substmodel/MG94CodonModel.java
index 48061ff..07ebd5d 100644
--- a/src/dr/app/beagle/evomodel/substmodel/MG94CodonModel.java
+++ b/src/dr/app/beagle/evomodel/substmodel/MG94CodonModel.java
@@ -40,8 +40,8 @@ public class MG94CodonModel extends AbstractCodonModel {
     protected Parameter alphaParameter;
     protected Parameter betaParameter;
 
-    private final int numSynTransitions;
-    private final int numNonsynTransitions;
+    protected final int numSynTransitions;
+    protected final int numNonsynTransitions;
 
     public MG94CodonModel(Codons codonDataType, Parameter alphaParameter, Parameter betaParameter,
                           FrequencyModel freqModel) {
@@ -80,11 +80,11 @@ public class MG94CodonModel extends AbstractCodonModel {
         return count;
     }
 
-    private int getNumSynTransitions() {
+    protected int getNumSynTransitions() {
         return 2 * countRates(1, 2);
     }
 
-    private int getNumNonsynTransitions() {
+    protected int getNumNonsynTransitions() {
         return 2 * countRates(3, 4);
     }
 
diff --git a/src/dr/app/beagle/evomodel/substmodel/MG94HKYCodonModel.java b/src/dr/app/beagle/evomodel/substmodel/MG94HKYCodonModel.java
index c59dc69..6f10257 100644
--- a/src/dr/app/beagle/evomodel/substmodel/MG94HKYCodonModel.java
+++ b/src/dr/app/beagle/evomodel/substmodel/MG94HKYCodonModel.java
@@ -77,8 +77,9 @@ public class MG94HKYCodonModel extends MG94CodonModel {
 
     protected void setupRelativeRates(double[] rates) {
 
-        double alpha = getAlpha();
-        double beta = getBeta();
+        double alpha = getAlpha() / numSynTransitions;
+        double beta = getBeta() / numNonsynTransitions;
+
         double kappa = getKappa();
         for (int i = 0; i < rateCount; i++) {
             switch (rateMap[i]) {
diff --git a/src/dr/app/beagle/evomodel/substmodel/PCACodonModel.java b/src/dr/app/beagle/evomodel/substmodel/PCACodonModel.java
index 1971a0e..82a44d5 100644
--- a/src/dr/app/beagle/evomodel/substmodel/PCACodonModel.java
+++ b/src/dr/app/beagle/evomodel/substmodel/PCACodonModel.java
@@ -26,10 +26,6 @@
 package dr.app.beagle.evomodel.substmodel;
 
 import dr.evolution.datatype.Codons;
-import dr.app.beagle.evomodel.substmodel.BaseSubstitutionModel;
-import dr.app.beagle.evomodel.substmodel.DefaultEigenSystem;
-import dr.app.beagle.evomodel.substmodel.EigenSystem;
-import dr.app.beagle.evomodel.substmodel.FrequencyModel;
 import dr.app.beagle.evomodel.parsers.PCACodonModelParser;
 import dr.inference.model.Parameter;
 
diff --git a/src/dr/app/beagle/evomodel/substmodel/SVSComplexSubstitutionModel.java b/src/dr/app/beagle/evomodel/substmodel/SVSComplexSubstitutionModel.java
index e654730..8c8a1c5 100644
--- a/src/dr/app/beagle/evomodel/substmodel/SVSComplexSubstitutionModel.java
+++ b/src/dr/app/beagle/evomodel/substmodel/SVSComplexSubstitutionModel.java
@@ -25,10 +25,12 @@
 
 package dr.app.beagle.evomodel.substmodel;
 
-import dr.evomodel.substmodel.*;
 import dr.inference.model.*;
 import dr.evolution.datatype.DataType;
 
+import java.util.ArrayList;
+import java.util.List;
+
 /**
  * @author Marc Suchard
  */
@@ -45,6 +47,8 @@ public class SVSComplexSubstitutionModel extends ComplexSubstitutionModel implem
             this.indicatorsParameter  = indicatorsParameter;
             addVariable(indicatorsParameter);
         }
+
+        setupIndicatorDimensionNames(-1);
     }
 
     @Override
@@ -54,6 +58,27 @@ public class SVSComplexSubstitutionModel extends ComplexSubstitutionModel implem
         }
     }
 
+    protected void setupIndicatorDimensionNames(int relativeTo) {
+        List<String> indicatorNames = new ArrayList<String>();
+
+        String indicatorPrefix = indicatorsParameter.getParameterName();
+
+        for (int i = 0; i < dataType.getStateCount(); ++i) {
+            for (int j = i + 1; j < dataType.getStateCount(); ++j) {
+                indicatorNames.add(getDimensionString(i, j, indicatorPrefix));
+            }
+        }
+
+        for (int j = 0; j < dataType.getStateCount(); ++j) {
+            for (int i = j + 1; i < dataType.getStateCount(); ++i) {
+                indicatorNames.add(getDimensionString(i, j, indicatorPrefix));
+            }
+        }
+
+        String[] tmp = new String[0];
+        indicatorsParameter.setDimensionNames(indicatorNames.toArray(tmp));
+    }
+
     public Parameter getIndicators() {
         return indicatorsParameter;
     }
diff --git a/src/dr/app/beagle/evomodel/substmodel/SVSGeneralSubstitutionModel.java b/src/dr/app/beagle/evomodel/substmodel/SVSGeneralSubstitutionModel.java
index f058107..8bea2ab 100644
--- a/src/dr/app/beagle/evomodel/substmodel/SVSGeneralSubstitutionModel.java
+++ b/src/dr/app/beagle/evomodel/substmodel/SVSGeneralSubstitutionModel.java
@@ -30,6 +30,8 @@ import dr.inference.loggers.LogColumn;
 import dr.inference.loggers.NumberColumn;
 import dr.evolution.datatype.DataType;
 
+import java.util.*;
+
 /**
  * @author Marc Suchard
  */
@@ -43,11 +45,13 @@ public class SVSGeneralSubstitutionModel extends GeneralSubstitutionModel implem
 
         if (indicatorsParameter == null) {
             this.indicatorsParameter = new Parameter.Default(ratesParameter.getDimension(), 1.0);
+            System.err.println("HERE AA");
         } else {
             this.indicatorsParameter  = indicatorsParameter;
             addVariable(indicatorsParameter);
         }
 
+        setupIndicatorDimensionNames(-1);
     }
 
     @Override
@@ -57,6 +61,21 @@ public class SVSGeneralSubstitutionModel extends GeneralSubstitutionModel implem
         }
     }
 
+    protected void setupIndicatorDimensionNames(int relativeTo) {
+        List<String> indicatorNames = new ArrayList<String>();
+
+        String indicatorPrefix = indicatorsParameter.getParameterName();
+
+        for (int i = 0; i < dataType.getStateCount(); ++i) {
+            for (int j = i + 1; j < dataType.getStateCount(); ++j) {
+                indicatorNames.add(getDimensionString(i, j, indicatorPrefix));
+            }
+        }
+
+        String[] tmp = new String[0];
+        indicatorsParameter.setDimensionNames(indicatorNames.toArray(tmp));
+    }
+
     public Parameter getIndicators() {
         return indicatorsParameter;
     }
@@ -117,6 +136,11 @@ public class SVSGeneralSubstitutionModel extends GeneralSubstitutionModel implem
     }
 
     @Override
+    public Set<Likelihood> getLikelihoodSet() {
+        return new HashSet<Likelihood>(Arrays.asList(this));
+    }
+
+    @Override
     public boolean isUsed() {
         return super.isUsed() && isUsed;
     }
diff --git a/src/dr/app/beagle/evomodel/treelikelihood/BeagleTreeLikelihood.java b/src/dr/app/beagle/evomodel/treelikelihood/BeagleTreeLikelihood.java
index ecbdb4f..d9d516c 100644
--- a/src/dr/app/beagle/evomodel/treelikelihood/BeagleTreeLikelihood.java
+++ b/src/dr/app/beagle/evomodel/treelikelihood/BeagleTreeLikelihood.java
@@ -755,18 +755,21 @@ public class BeagleTreeLikelihood extends AbstractSinglePartitionTreeLikelihood
             recomputeScaleFactors = true;
         } else if (this.rescalingScheme == PartialsRescalingScheme.DYNAMIC && everUnderflowed) {
             useScaleFactors = true;
+
+            if (rescalingCount > rescalingFrequency) {
+                rescalingCount = 0;
+                rescalingCountInner = 0;
+            }
+
             if (rescalingCountInner < RESCALE_TIMES) {
                 recomputeScaleFactors = true;
-                makeDirty();
+                updateAllNodes();
+//                makeDirty();
 //                System.err.println("Recomputing scale factors");
+                rescalingCountInner++;
             }
 
-            rescalingCountInner++;
             rescalingCount++;
-            if (rescalingCount > rescalingFrequency) {
-                rescalingCount = 0;
-                rescalingCountInner = 0;
-            }
         } else if (this.rescalingScheme == PartialsRescalingScheme.DELAYED && everUnderflowed) {
             useScaleFactors = true;
             recomputeScaleFactors = true;
@@ -900,6 +903,8 @@ public class BeagleTreeLikelihood extends AbstractSinglePartitionTreeLikelihood
 
                     branchUpdateCount = 0;
 
+                    updateAllNodes();
+
                     if (hasRestrictedPartials) {
                         for (int i = 0; i <= numRestrictedPartials; i++) {
                             operationCount[i] = 0;
diff --git a/src/dr/evomodel/coalescent/CoalescentIntervalProvider.java b/src/dr/app/beagle/evomodel/treelikelihood/EvolutionaryProcessDelegate.java
similarity index 69%
copy from src/dr/evomodel/coalescent/CoalescentIntervalProvider.java
copy to src/dr/app/beagle/evomodel/treelikelihood/EvolutionaryProcessDelegate.java
index 4de2a8e..8033f8d 100644
--- a/src/dr/evomodel/coalescent/CoalescentIntervalProvider.java
+++ b/src/dr/app/beagle/evomodel/treelikelihood/EvolutionaryProcessDelegate.java
@@ -1,5 +1,5 @@
 /*
- * CoalescentIntervalProvider.java
+ * EvolutionaryProcessDelegate.java
  *
  * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
  *
@@ -23,21 +23,22 @@
  * Boston, MA  02110-1301  USA
  */
 
-package dr.evomodel.coalescent;
+package dr.app.beagle.evomodel.treelikelihood;
 
-import dr.evolution.coalescent.IntervalType;
+import beagle.Beagle;
 
 /**
- * @author Guy Baele
- * @author Marc Suchard
+ * Created by msuchard on 12/23/15.
  */
-public interface CoalescentIntervalProvider {
+public interface EvolutionaryProcessDelegate {
 
-    public int getCoalescentIntervalDimension();
+    int getMatrixIndex(int branchIndex);
 
-    public double getCoalescentInterval(int i);
+    void updateSubstitutionModels(Beagle beagle);
 
-    public int getCoalescentIntervalLineageCount(int i);
+    void updateTransitionMatrices(Beagle beagle, int[] branchIndices, double[] edgeLength, int updateCount);
 
-    public IntervalType getCoalescentIntervalType(int i);
+    void storeState();
+
+    void restoreState();
 }
diff --git a/src/dr/app/beagle/evomodel/treelikelihood/SubstitutionModelDelegate.java b/src/dr/app/beagle/evomodel/treelikelihood/SubstitutionModelDelegate.java
index 7b71686..e7377c8 100644
--- a/src/dr/app/beagle/evomodel/treelikelihood/SubstitutionModelDelegate.java
+++ b/src/dr/app/beagle/evomodel/treelikelihood/SubstitutionModelDelegate.java
@@ -44,7 +44,7 @@ import java.util.List;
  * @author Marc A. Suchard
  * @version $Id$
  */
-public final class SubstitutionModelDelegate implements Serializable {
+public final class SubstitutionModelDelegate implements EvolutionaryProcessDelegate, Serializable {
 
     private static final boolean DEBUG = false;
     private static final boolean RUN_IN_SERIES = false;
diff --git a/src/dr/app/beagle/multidimensionalscaling/MassivelyParallelMDSImpl.java b/src/dr/app/beagle/multidimensionalscaling/MassivelyParallelMDSImpl.java
index 83bc345..350bfe2 100644
--- a/src/dr/app/beagle/multidimensionalscaling/MassivelyParallelMDSImpl.java
+++ b/src/dr/app/beagle/multidimensionalscaling/MassivelyParallelMDSImpl.java
@@ -42,24 +42,18 @@ public class MassivelyParallelMDSImpl implements MultiDimensionalScalingCore {
 
     private NativeMDSSingleton singleton = null;
     private int instance = -1; // Get instance # via initialization
-    private final long flags = 0;
 
-    private static final long LEFT_TRUNCATION = 1 << 5;
+//    private static final long LEFT_TRUNCATION = 1 << 5;
 
     public MassivelyParallelMDSImpl() {
         singleton = NativeMDSSingleton.loadLibrary();
     }
 
     @Override
-    public void initialize(int embeddingDimension, int locationCount, boolean isLeftTruncated) {
-        long flags = this.flags;
-        if (isLeftTruncated) {
-            flags |= LEFT_TRUNCATION;
-        }
-
+    public void initialize(int embeddingDimension, int locationCount, long flags) {
+        this.isLeftTruncated = (flags & LEFT_TRUNCATION) != 0;
         instance = singleton.initialize(embeddingDimension, locationCount, flags);
         this.observationCount = (locationCount * (locationCount - 1)) / 2;
-        this.isLeftTruncated = isLeftTruncated;
     }
 
     @Override
@@ -82,7 +76,9 @@ public class MassivelyParallelMDSImpl implements MultiDimensionalScalingCore {
     public double calculateLogLikelihood() {
         double sumOfSquaredResiduals = singleton.getSumOfSquaredResiduals(instance);
 
-        double logLikelihood = (0.5 * Math.log(precision) * observationCount) -
+        // TODO Missing - n / 2 * log(2 * pi)
+
+        double logLikelihood = 0.5 * (Math.log(precision) - Math.log(2 * Math.PI)) * observationCount -
                         (0.5 * precision * sumOfSquaredResiduals);
 
         if (isLeftTruncated) {
diff --git a/src/dr/app/beagle/multidimensionalscaling/MultiDimensionalScalingCore.java b/src/dr/app/beagle/multidimensionalscaling/MultiDimensionalScalingCore.java
index 80b2903..0a9d09d 100644
--- a/src/dr/app/beagle/multidimensionalscaling/MultiDimensionalScalingCore.java
+++ b/src/dr/app/beagle/multidimensionalscaling/MultiDimensionalScalingCore.java
@@ -35,10 +35,17 @@ package dr.app.beagle.multidimensionalscaling;
 
 public interface MultiDimensionalScalingCore {
 
+    public static final long USE_NATIVE_MDS = 1 << 0;       // 1
+
+    public static final long SINGLE_PRECISION = 1 << 2;     // 4
+    public static final long MULTI_CORE = 1 << 3;           // 8
+    public static final long OPENCL_VECTORIZATION = 1 << 4; // 16
+    public static final long LEFT_TRUNCATION = 1 << 5;      // 32
+
     /**
      * initializes arrays.
      */
-    void initialize(int embeddingDimension, int locationCount, boolean isLeftTruncated);
+    void initialize(int embeddingDimension, int locationCount, long flags);
 
     /**
      * sets the observation data
diff --git a/src/dr/app/beagle/multidimensionalscaling/MultiDimensionalScalingCoreImpl.java b/src/dr/app/beagle/multidimensionalscaling/MultiDimensionalScalingCoreImpl.java
index 505f6d3..0c600aa 100644
--- a/src/dr/app/beagle/multidimensionalscaling/MultiDimensionalScalingCoreImpl.java
+++ b/src/dr/app/beagle/multidimensionalscaling/MultiDimensionalScalingCoreImpl.java
@@ -39,252 +39,252 @@ package dr.app.beagle.multidimensionalscaling;
  * $LastChangedRevision$
  */
 
-public class MultiDimensionalScalingCoreImpl implements MultiDimensionalScalingCore {
-    private static final boolean USE_CACHING = true;
-
-    @Override
-    public void initialize(int embeddingDimension, int locationCount, boolean isLeftTruncated) {
-        this.embeddingDimension = embeddingDimension;
-        this.locationCount = locationCount;
-        this.observationCount = (locationCount * (locationCount - 1)) / 2;
-        this.isLeftTruncated = isLeftTruncated;
-
-        observations = new double[locationCount][locationCount];
-        squaredResiduals = new double[locationCount * locationCount];
-        storedSquaredResiduals = new double[locationCount * locationCount];
-        residualsKnown = false;
-        sumOfSquaredResidualsKnown = false;
-
-        locationUpdated = new boolean[locationCount];
-        for (int i = 0; i < locationUpdated.length; i++) {
-            locationUpdated[i] = true;
-        }
-
-        locations = new double[locationCount][embeddingDimension];
-        storedLocations = new double[locationCount][embeddingDimension];
-    }
-
-    @Override
-    public void setPairwiseData(double[] observations) {
-        if (observations.length != (locationCount * locationCount)) {
-            throw new RuntimeException("Observation data is not the correct dimension");
-        }
-
-        int k = 0;
-        for (int i = 0; i < locationCount; i++) {
-            System.arraycopy(observations, k, this.observations[i], 0, locationCount);
-            k += locationCount;
-        }
-    }
-
-    @Override
-    public void setParameters(double[] parameters) {
-        precision = parameters[0];
-    }
-
-    @Override
-    public void updateLocation(int locationIndex, double[] location) {
-        if (USE_CACHING && locationUpdateCount != -1) {
-            if (locationUpdateCount > 1) {
-                throw new RuntimeException("Cannot change more than one location per step with caching on");
-            }
-            locationUpdateCount += 1;
-        }
-
-        if (location.length != embeddingDimension) {
-            throw new RuntimeException("Location is not the correct dimension");
-        }
-
-        System.arraycopy(location, 0, locations[locationIndex], 0, embeddingDimension);
-        locationUpdated[locationIndex] = true;
-        sumOfSquaredResidualsKnown = false;
-    }
-
-    @Override
-    public double calculateLogLikelihood() {
-        if (!sumOfSquaredResidualsKnown) {
-            if (USE_CACHING) {
-                if (!residualsKnown) {
-                    computeSumOfSquaredResiduals();
-                } else {
-                    updateSumOfSquaredResiduals();
-                }
-            } else {
-                computeSumOfSquaredResiduals();
-            }
-            sumOfSquaredResidualsKnown = true;
-        }
-
-        double logLikelihood = (0.5 * Math.log(precision) * observationCount) -
-                (0.5 * precision * sumOfSquaredResiduals);
-
-        if (isLeftTruncated) {
-            throw new UnsupportedOperationException("Truncations not implemented");
-//                if (!truncationsKnown) {
-//                    calculateTruncations(precision);
-//                }
-//                truncationSum = calculateTruncationSum();
-//                logLikelihood -= truncationSum;
-        }
-
-        for (int i = 0; i < locationUpdated.length; i++) {
-            locationUpdated[i] = false;
-        }
-
-        return logLikelihood;
-    }
-
-    @Override
-    public void storeState() {
-        storedSumOfSquaredResiduals = sumOfSquaredResiduals;
-        for (int i = 0; i < locationCount; i++) {
-            System.arraycopy(locations[i], 0 , storedLocations[i], 0, embeddingDimension);
-        }
-        System.arraycopy(squaredResiduals, 0 , storedSquaredResiduals, 0, locationCount * locationCount);
-
-        storedPrecision = precision;
-
-        locationUpdateCount = 0;
-    }
-
-    @Override
-    public void restoreState() {
-        sumOfSquaredResiduals = storedSumOfSquaredResiduals;
-        sumOfSquaredResidualsKnown = true;
-
-        double[] tmp = storedSquaredResiduals;
-        storedSquaredResiduals = squaredResiduals;
-        squaredResiduals = tmp;
-
-        double[][] tmp1 = storedLocations;
-        storedLocations = locations;
-        locations = tmp1;
-
-        precision = storedPrecision;
-
-        residualsKnown = true;
-    }
-
-    @Override
-    public void makeDirty() {
-        sumOfSquaredResidualsKnown = false;
-        residualsKnown = false;
-
-
-    }
-
-    @Override
-    public void acceptState() {
-        // Do nothing
-    }
-
-    protected void computeSumOfSquaredResiduals() {
-        sumOfSquaredResiduals = 0.0;
-        for (int i = 0; i < locationCount; i++) {
-
-            for (int j = 0; j < locationCount; j++) {
-                double distance = calculateDistance(locations[i], locations[j]);
-                double residual = distance - observations[i][j];
-                double squaredResidual = residual * residual;
-                squaredResiduals[i * locationCount + j] = squaredResidual;
-                squaredResiduals[j * locationCount + i] = squaredResidual;
-                sumOfSquaredResiduals += squaredResidual;
-            }
-        }
-
-        sumOfSquaredResiduals /= 2;
-
-        residualsKnown = true;
-        sumOfSquaredResidualsKnown = true;
-    }
-
-    protected void updateSumOfSquaredResiduals() {
-        double delta = 0.0;
-
-        for (int i = 0; i < locationCount; i++) {
-            if (locationUpdated[i]) {
-
-                // if location i is updated, calculate the residuals to all js
-                // also sum the change in sum residual
-                for (int j = 0; j < locationCount; j++) {
-                    if (i != j) {
-                        double distance = calculateDistance(locations[i], locations[j]);
-                        double residual = distance - observations[i][j];
-                        double squaredResidual = residual * residual;
-
-                        delta += squaredResidual - squaredResiduals[i * locationCount + j];
-
-                        squaredResiduals[i * locationCount + j] = squaredResidual;
-                        squaredResiduals[j * locationCount + i] = squaredResidual;
-                    }
-                }
-            }
-        }
-
-        sumOfSquaredResiduals += delta;
-    }
-
-    protected double calculateDistance(double[] X, double[] Y) {
-        double sum = 0.0;
-        for (int i = 0; i < embeddingDimension; i++) {
-            double difference = X[i] - Y[i];
-            sum += difference * difference;
-        }
-        return Math.sqrt(sum);
-    }
-
-//    protected void calculateTruncations(double precision) {
-//        double sd = 1.0 / Math.sqrt(precision);
-//        for (int i = 0; i < distanceCount; i++) {
-//            if (distanceUpdated[i]) {
-//                truncations[i] = NormalDistribution.cdf(distances[i], 0.0, sd, true);
+//public class MultiDimensionalScalingCoreImpl implements MultiDimensionalScalingCore {
+//    private static final boolean USE_CACHING = true;
+//
+//    @Override
+//    public void initialize(int embeddingDimension, int locationCount, boolean isLeftTruncated) {
+//        this.embeddingDimension = embeddingDimension;
+//        this.locationCount = locationCount;
+//        this.observationCount = (locationCount * (locationCount - 1)) / 2;
+//        this.isLeftTruncated = isLeftTruncated;
+//
+//        observations = new double[locationCount][locationCount];
+//        squaredResiduals = new double[locationCount * locationCount];
+//        storedSquaredResiduals = new double[locationCount * locationCount];
+//        residualsKnown = false;
+//        sumOfSquaredResidualsKnown = false;
+//
+//        locationUpdated = new boolean[locationCount];
+//        for (int i = 0; i < locationUpdated.length; i++) {
+//            locationUpdated[i] = true;
+//        }
+//
+//        locations = new double[locationCount][embeddingDimension];
+//        storedLocations = new double[locationCount][embeddingDimension];
+//    }
+//
+//    @Override
+//    public void setPairwiseData(double[] observations) {
+//        if (observations.length != (locationCount * locationCount)) {
+//            throw new RuntimeException("Observation data is not the correct dimension");
+//        }
+//
+//        int k = 0;
+//        for (int i = 0; i < locationCount; i++) {
+//            System.arraycopy(observations, k, this.observations[i], 0, locationCount);
+//            k += locationCount;
+//        }
+//    }
+//
+//    @Override
+//    public void setParameters(double[] parameters) {
+//        precision = parameters[0];
+//    }
+//
+//    @Override
+//    public void updateLocation(int locationIndex, double[] location) {
+//        if (USE_CACHING && locationUpdateCount != -1) {
+//            if (locationUpdateCount > 1) {
+//                throw new RuntimeException("Cannot change more than one location per step with caching on");
 //            }
+//            locationUpdateCount += 1;
+//        }
+//
+//        if (location.length != embeddingDimension) {
+//            throw new RuntimeException("Location is not the correct dimension");
 //        }
-//        truncationsKnown = true;
+//
+//        System.arraycopy(location, 0, locations[locationIndex], 0, embeddingDimension);
+//        locationUpdated[locationIndex] = true;
+//        sumOfSquaredResidualsKnown = false;
 //    }
 //
-//    protected double calculateTruncationSum() {
-//        double sum = 0.0;
-//        for (int i = 0; i < observationCount; i++) {
-//            int dist = getDistanceIndexForObservation(i);
-//            if (dist != -1) {
-//                sum += truncations[dist];
+//    @Override
+//    public double calculateLogLikelihood() {
+//        if (!sumOfSquaredResidualsKnown) {
+//            if (USE_CACHING) {
+//                if (!residualsKnown) {
+//                    computeSumOfSquaredResiduals();
+//                } else {
+//                    updateSumOfSquaredResiduals();
+//                }
 //            } else {
-//                sum += Math.log(0.5);
+//                computeSumOfSquaredResiduals();
+//            }
+//            sumOfSquaredResidualsKnown = true;
+//        }
+//
+//        double logLikelihood = (0.5 * Math.log(precision) * observationCount) -
+//                (0.5 * precision * sumOfSquaredResiduals);
+//
+//        if (isLeftTruncated) {
+//            throw new UnsupportedOperationException("Truncations not implemented");
+////                if (!truncationsKnown) {
+////                    calculateTruncations(precision);
+////                }
+////                truncationSum = calculateTruncationSum();
+////                logLikelihood -= truncationSum;
+//        }
+//
+//        for (int i = 0; i < locationUpdated.length; i++) {
+//            locationUpdated[i] = false;
+//        }
+//
+//        return logLikelihood;
+//    }
+//
+//    @Override
+//    public void storeState() {
+//        storedSumOfSquaredResiduals = sumOfSquaredResiduals;
+//        for (int i = 0; i < locationCount; i++) {
+//            System.arraycopy(locations[i], 0 , storedLocations[i], 0, embeddingDimension);
+//        }
+//        System.arraycopy(squaredResiduals, 0 , storedSquaredResiduals, 0, locationCount * locationCount);
+//
+//        storedPrecision = precision;
+//
+//        locationUpdateCount = 0;
+//    }
+//
+//    @Override
+//    public void restoreState() {
+//        sumOfSquaredResiduals = storedSumOfSquaredResiduals;
+//        sumOfSquaredResidualsKnown = true;
+//
+//        double[] tmp = storedSquaredResiduals;
+//        storedSquaredResiduals = squaredResiduals;
+//        squaredResiduals = tmp;
+//
+//        double[][] tmp1 = storedLocations;
+//        storedLocations = locations;
+//        locations = tmp1;
+//
+//        precision = storedPrecision;
+//
+//        residualsKnown = true;
+//    }
+//
+//    @Override
+//    public void makeDirty() {
+//        sumOfSquaredResidualsKnown = false;
+//        residualsKnown = false;
+//
+//
+//    }
+//
+//    @Override
+//    public void acceptState() {
+//        // Do nothing
+//    }
+//
+//    protected void computeSumOfSquaredResiduals() {
+//        sumOfSquaredResiduals = 0.0;
+//        for (int i = 0; i < locationCount; i++) {
+//
+//            for (int j = 0; j < locationCount; j++) {
+//                double distance = calculateDistance(locations[i], locations[j]);
+//                double residual = distance - observations[i][j];
+//                double squaredResidual = residual * residual;
+//                squaredResiduals[i * locationCount + j] = squaredResidual;
+//                squaredResiduals[j * locationCount + i] = squaredResidual;
+//                sumOfSquaredResiduals += squaredResidual;
 //            }
 //        }
-//        return sum;
+//
+//        sumOfSquaredResiduals /= 2;
+//
+//        residualsKnown = true;
+//        sumOfSquaredResidualsKnown = true;
 //    }
-
-    private int embeddingDimension;
-    private boolean isLeftTruncated = false;
-    private int locationCount;
-    private int observationCount;
-    private double precision;
-    private double storedPrecision;
-
-    // Prevents more than one location being updated per step. Is initialized
-    // to zero in store().
-    private int locationUpdateCount = -1;
-
-    private double[][] observations;
-    private double[][] locations;
-    private double[][] storedLocations;
-
-    private boolean[] locationUpdated;
-
-    private boolean residualsKnown = false;
-
-    private boolean sumOfSquaredResidualsKnown = false;
-    private double[] squaredResiduals;
-    private double[] storedSquaredResiduals;
-    private double sumOfSquaredResiduals;
-    private double storedSumOfSquaredResiduals;
-
-    private boolean truncationsKnown = false;
-    private double truncationSum;
-    private double storedTruncationSum;
-    private double[] truncations;
-    private double[] storedTruncations;
-
-}
+//
+//    protected void updateSumOfSquaredResiduals() {
+//        double delta = 0.0;
+//
+//        for (int i = 0; i < locationCount; i++) {
+//            if (locationUpdated[i]) {
+//
+//                // if location i is updated, calculate the residuals to all js
+//                // also sum the change in sum residual
+//                for (int j = 0; j < locationCount; j++) {
+//                    if (i != j) {
+//                        double distance = calculateDistance(locations[i], locations[j]);
+//                        double residual = distance - observations[i][j];
+//                        double squaredResidual = residual * residual;
+//
+//                        delta += squaredResidual - squaredResiduals[i * locationCount + j];
+//
+//                        squaredResiduals[i * locationCount + j] = squaredResidual;
+//                        squaredResiduals[j * locationCount + i] = squaredResidual;
+//                    }
+//                }
+//            }
+//        }
+//
+//        sumOfSquaredResiduals += delta;
+//    }
+//
+//    protected double calculateDistance(double[] X, double[] Y) {
+//        double sum = 0.0;
+//        for (int i = 0; i < embeddingDimension; i++) {
+//            double difference = X[i] - Y[i];
+//            sum += difference * difference;
+//        }
+//        return Math.sqrt(sum);
+//    }
+//
+////    protected void calculateTruncations(double precision) {
+////        double sd = 1.0 / Math.sqrt(precision);
+////        for (int i = 0; i < distanceCount; i++) {
+////            if (distanceUpdated[i]) {
+////                truncations[i] = NormalDistribution.cdf(distances[i], 0.0, sd, true);
+////            }
+////        }
+////        truncationsKnown = true;
+////    }
+////
+////    protected double calculateTruncationSum() {
+////        double sum = 0.0;
+////        for (int i = 0; i < observationCount; i++) {
+////            int dist = getDistanceIndexForObservation(i);
+////            if (dist != -1) {
+////                sum += truncations[dist];
+////            } else {
+////                sum += Math.log(0.5);
+////            }
+////        }
+////        return sum;
+////    }
+//
+//    private int embeddingDimension;
+//    private boolean isLeftTruncated = false;
+//    private int locationCount;
+//    private int observationCount;
+//    private double precision;
+//    private double storedPrecision;
+//
+//    // Prevents more than one location being updated per step. Is initialized
+//    // to zero in store().
+//    private int locationUpdateCount = -1;
+//
+//    private double[][] observations;
+//    private double[][] locations;
+//    private double[][] storedLocations;
+//
+//    private boolean[] locationUpdated;
+//
+//    private boolean residualsKnown = false;
+//
+//    private boolean sumOfSquaredResidualsKnown = false;
+//    private double[] squaredResiduals;
+//    private double[] storedSquaredResiduals;
+//    private double sumOfSquaredResiduals;
+//    private double storedSumOfSquaredResiduals;
+//
+//    private boolean truncationsKnown = false;
+//    private double truncationSum;
+//    private double storedTruncationSum;
+//    private double[] truncations;
+//    private double[] storedTruncations;
+//
+//}
diff --git a/src/dr/app/beagle/multidimensionalscaling/MultiDimensionalScalingCoreImpl2.java b/src/dr/app/beagle/multidimensionalscaling/MultiDimensionalScalingCoreImpl2.java
index c995d7a..dfab459 100644
--- a/src/dr/app/beagle/multidimensionalscaling/MultiDimensionalScalingCoreImpl2.java
+++ b/src/dr/app/beagle/multidimensionalscaling/MultiDimensionalScalingCoreImpl2.java
@@ -44,7 +44,7 @@ import dr.math.distributions.NormalDistribution;
 public class MultiDimensionalScalingCoreImpl2 implements MultiDimensionalScalingCore {
 
     @Override
-    public void initialize(int embeddingDimension, int locationCount, boolean isLeftTruncated) {
+    public void initialize(int embeddingDimension, int locationCount, long flags) {
         this.embeddingDimension = embeddingDimension;
         this.locationCount = locationCount;
         this.observationCount = (locationCount * (locationCount - 1)) / 2;
@@ -56,6 +56,8 @@ public class MultiDimensionalScalingCoreImpl2 implements MultiDimensionalScaling
         residualsKnown = false;
         sumOfSquaredResidualsKnown = false;
 
+        isLeftTruncated = (flags & MultiDimensionalScalingCore.LEFT_TRUNCATION) != 0;
+
         if (isLeftTruncated) {
             truncations = new double[locationCount][locationCount];
             storedTruncations = null;
@@ -153,7 +155,7 @@ public class MultiDimensionalScalingCoreImpl2 implements MultiDimensionalScaling
             sumOfSquaredResidualsKnown = true;
         }
 
-        double logLikelihood = (0.5 * Math.log(precision) * observationCount) -
+        double logLikelihood = 0.5 * (Math.log(precision) - Math.log(2 * Math.PI)) * observationCount -
                 (0.5 * precision * sumOfSquaredResiduals);
 
         if (isLeftTruncated) {
diff --git a/src/dr/app/beagle/multidimensionalscaling/MultiDimensionalScalingLikelihood.java b/src/dr/app/beagle/multidimensionalscaling/MultiDimensionalScalingLikelihood.java
index e718e21..fc9506e 100644
--- a/src/dr/app/beagle/multidimensionalscaling/MultiDimensionalScalingLikelihood.java
+++ b/src/dr/app/beagle/multidimensionalscaling/MultiDimensionalScalingLikelihood.java
@@ -32,6 +32,8 @@ import dr.xml.*;
 
 import java.io.FileReader;
 import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
 
 /**
  * @author Andrew Rambaut
@@ -40,7 +42,7 @@ import java.io.IOException;
  */
 public class MultiDimensionalScalingLikelihood extends AbstractModelLikelihood {
 
-    public static final String NATIVE_MDS = "native_mds";
+    public static final String REQUIRED_FLAGS_PROPERTY = "mds.required.flags";
 
     public enum ObservationType {
         POINT,
@@ -51,13 +53,14 @@ public class MultiDimensionalScalingLikelihood extends AbstractModelLikelihood {
 
     public final static String MULTIDIMENSIONAL_SCALING_LIKELIHOOD = "multiDimensionalScalingLikelihood";
 
-    public MultiDimensionalScalingLikelihood(
-            int mdsDimension,
-            Parameter mdsPrecision,
-            MatrixParameter locationsParameter,
-            DataTable<double[]> dataTable) {
-        this(mdsDimension, mdsPrecision, locationsParameter, dataTable, false);
-    }
+//    public MultiDimensionalScalingLikelihood(
+//            int mdsDimension,
+//            Parameter mdsPrecision,
+//            MatrixParameter locationsParameter,
+//            DataTable<double[]> dataTable,
+//            boolean reorderData) {
+//        this(mdsDimension, mdsPrecision, locationsParameter, dataTable, false, reorderData);
+//    }
 
     /**
      * A simple constructor for a fully specified symmetrical data matrix
@@ -65,14 +68,16 @@ public class MultiDimensionalScalingLikelihood extends AbstractModelLikelihood {
      * @param mdsPrecision
      * @param locationsParameter
      * @param dataTable
-     * @param includeTruncation
+     * @param isLeftTruncated
+     * @param reorderData
      */
     public MultiDimensionalScalingLikelihood(
             int mdsDimension,
             Parameter mdsPrecision,
-            MatrixParameter locationsParameter,
+            MatrixParameterInterface locationsParameter,
             DataTable<double[]> dataTable,
-            boolean isLeftTruncated) {
+            boolean isLeftTruncated,
+            boolean reorderData) {
 
         super(MULTIDIMENSIONAL_SCALING_LIKELIHOOD);
 
@@ -80,12 +85,24 @@ public class MultiDimensionalScalingLikelihood extends AbstractModelLikelihood {
         this.isLeftTruncated = isLeftTruncated;
 
         // construct a compact data table
-        String[] rowLabels = dataTable.getRowLabels();
-        String[] columnLabels = dataTable.getRowLabels();
+        String[] rowLabelsOriginal = dataTable.getRowLabels();
+//        String[] columnLabels = dataTable.getRowLabels();
 
         int rowCount = dataTable.getRowCount();
         locationCount = rowCount;
 
+        int[] permute = null;
+        if (reorderData) {
+            permute = getPermutation(rowLabelsOriginal, locationsParameter);
+        } else {
+            permute = new int[locationCount];
+            for (int i = 0; i < locationCount; ++i) {
+                permute[i] = i; // identity
+            }
+        }
+
+        String[] rowLabels = new String[locationCount];
+
         int observationCount = rowCount * rowCount;
         double[] observations = new double[observationCount];
         ObservationType[] observationTypes = new ObservationType[observationCount];
@@ -93,10 +110,12 @@ public class MultiDimensionalScalingLikelihood extends AbstractModelLikelihood {
         double[][] tmp = new double[rowCount][rowCount];
 
         for (int i = 0; i < rowCount; i++) {
-            double[] dataRow = dataTable.getRow(i);
+            rowLabels[i] = rowLabelsOriginal[permute[i]];
+
+            double[] dataRow = dataTable.getRow(permute[i]);
 
             for (int j = i + 1; j < rowCount; j++) {
-                tmp[i][j] = tmp[j][i] = dataRow[j];
+                tmp[i][j] = tmp[j][i] = dataRow[permute[j]];
             }
         }
 
@@ -107,29 +126,60 @@ public class MultiDimensionalScalingLikelihood extends AbstractModelLikelihood {
                 observationTypes[u] = ObservationType.POINT;
                 u++;
             }
-
         }
 
         initialize(mdsDimension, mdsPrecision, isLeftTruncated, locationsParameter,
                 rowLabels, observations, observationTypes);
     }
 
+    private int[] getPermutation(String[] source, MatrixParameterInterface destination) {
+
+        if (source.length != destination.getColumnDimension()) {
+            throw new IllegalArgumentException("Dimension mismatch");
+        }
+
+        final int length = source.length;
+
+        Map<String,Integer> map = new HashMap<String, Integer>(destination.getColumnDimension());
+        for (int i = 0; i < length; ++i) {
+            map.put(source[i],i);
+        }
+
+        int[] permute = new int[length];
+        for (int i = 0; i < length; ++i) {
+            Integer p = map.get(destination.getParameter(i).getParameterName());
+            if (p == null) {
+                throw new IllegalArgumentException("Missing label");
+            }
+            permute[i] = p;
+        }
+
+        return permute;
+    }
+
     private MultiDimensionalScalingCore getCore() {
-        int computeMode = 0;
-        String r = System.getProperty(NATIVE_MDS);
+        long computeMode = 0;
+        String r = System.getProperty(REQUIRED_FLAGS_PROPERTY);
         if (r != null) {
-            computeMode = Integer.parseInt(r.trim());
+            computeMode = Long.parseLong(r.trim());
         }
 
         MultiDimensionalScalingCore core;
-        switch (computeMode) {
-            case 1:
-                System.err.println("Attempting to use a native MDS core; may the force be with you ....");
-                core = new MassivelyParallelMDSImpl();
-                break;
-            default:
-                core = new MultiDimensionalScalingCoreImpl2();
+        if (computeMode > 0) {
+            System.err.println("Attempting to use a native MDS core with flag: " + computeMode + "; may the force be with you ....");
+            core = new MassivelyParallelMDSImpl();
+            flags = computeMode;
+        } else {
+            core = new MultiDimensionalScalingCoreImpl2();
         }
+//        switch (computeMode) {
+//            case 1:
+//                System.err.println("Attempting to use a native MDS core; may the force be with you ....");
+//                core = new MassivelyParallelMDSImpl();
+//                break;
+//            default:
+//                core = new MultiDimensionalScalingCoreImpl2();
+//        }
         return core;
     }
 
@@ -137,13 +187,20 @@ public class MultiDimensionalScalingLikelihood extends AbstractModelLikelihood {
             final int mdsDimension,
             final Parameter mdsPrecision,
             final boolean isLeftTruncated,
-            final MatrixParameter locationsParameter,
+            final MatrixParameterInterface locationsParameter,
             final String[] locationLabels,
             final double[] observations,
             final ObservationType[] observationTypes) {
 
         this.mdsCore = getCore();
-        this.mdsCore.initialize(mdsDimension, locationCount, isLeftTruncated);
+
+        if (isLeftTruncated) {
+            flags |= MultiDimensionalScalingCore.LEFT_TRUNCATION;
+        }
+
+        System.err.println("Initializing with flags: " + flags);
+
+        this.mdsCore.initialize(mdsDimension, locationCount, flags);
         this.locationLabels = locationLabels;
 
         this.locationsParameter = locationsParameter;
@@ -155,31 +212,43 @@ public class MultiDimensionalScalingLikelihood extends AbstractModelLikelihood {
 
         mdsCore.setParameters(mdsPrecisionParameter.getParameterValues());
         mdsCore.setPairwiseData(observations);
-        for (int i = 0; i < locationCount; i++) {
-            mdsCore.updateLocation(i, locationsParameter.getColumnValues(i));
-        }
+//        for (int i = 0; i < locationCount; i++) {
+//            mdsCore.updateLocation(i, locationsParameter.getColumnValues(i));
+//        }
+        mdsCore.updateLocation(-1, locationsParameter.getParameterValues());
 
         // make sure everything is calculated on first evaluation
         makeDirty();
     }
 
-    protected void setupLocationsParameter(MatrixParameter locationsParameter) {
-        if (locationsParameter.getColumnDimension() > 0){
+    protected void setupLocationsParameter(MatrixParameterInterface locationsParameter) {
+        final boolean exisitingParameter = locationsParameter.getColumnDimension() > 0;
+
+        if (exisitingParameter){
             if (locationsParameter.getColumnDimension() != locationCount){
                 throw new RuntimeException("locationsParameter column dimension ("+locationsParameter.getColumnDimension()+") is not equal to the locationCount ("+locationCount+")");
             }
             if (locationsParameter.getRowDimension() != mdsDimension){
                 throw new RuntimeException("locationsParameter row dimension ("+locationsParameter.getRowDimension()+") is not equal to the mdsDimension ("+mdsDimension+")");
             }
-        } else{
-            locationsParameter.setColumnDimension(mdsDimension);
-            locationsParameter.setRowDimension(locationCount);
+        } else {
+//            locationsParameter.setColumnDimension(mdsDimension);
+//            locationsParameter.setRowDimension(locationCount);
+            throw new IllegalArgumentException("Dimensions on matrix must be set");
         }
+
         for (int i = 0; i < locationLabels.length; i++) {
-            locationsParameter.getParameter(i).setId(locationLabels[i]);
+            if (exisitingParameter) {
+                if (locationsParameter.getParameter(i).getParameterName().compareTo(locationLabels[i]) != 0) {
+                    throw new RuntimeException("Mismatched trait parameter name (" + locationsParameter.getParameter(i).getParameterName() +
+                            ") and data dimension name (" + locationLabels[i] + ")");
+                }
+            } else {
+                locationsParameter.getParameter(i).setId(locationLabels[i]);
+            }
         }
 
-        for (int i = 0; i < locationsParameter.getParameterCount(); ++i) {
+        for (int i = 0; i < locationsParameter.getColumnDimension(); ++i) {
             Parameter param = locationsParameter.getParameter(i);
             try {
                 if (param.getBounds() != null) {
@@ -193,8 +262,7 @@ public class MultiDimensionalScalingLikelihood extends AbstractModelLikelihood {
     }
 
     @Override
-    protected void handleModelChangedEvent(Model model, Object object, int index) {
-    }
+    protected void handleModelChangedEvent(Model model, Object object, int index) { }
 
     @Override
     protected void handleVariableChangedEvent(Variable variable, int index, Variable.ChangeType type) {
@@ -268,6 +336,7 @@ public class MultiDimensionalScalingLikelihood extends AbstractModelLikelihood {
         public static final String MDS_PRECISION = "mdsPrecision";
         public static final String INCLUDE_TRUNCATION = "includeTruncation";
         public static final String USE_OLD = "useOld";
+        public static final String FORCE_REORDER = "forceReorder";
 
         public String getParserName() {
             return MULTIDIMENSIONAL_SCALING_LIKELIHOOD;
@@ -289,7 +358,7 @@ public class MultiDimensionalScalingLikelihood extends AbstractModelLikelihood {
 
             int mdsDimension = xo.getIntegerAttribute(MDS_DIMENSION);
 
-            MatrixParameter locationsParameter = (MatrixParameter) xo.getElementFirstChild(LOCATIONS);
+            MatrixParameterInterface locationsParameter = (MatrixParameterInterface) xo.getElementFirstChild(LOCATIONS);
 
             Parameter mdsPrecision = (Parameter) xo.getElementFirstChild(MDS_PRECISION);
 
@@ -297,12 +366,14 @@ public class MultiDimensionalScalingLikelihood extends AbstractModelLikelihood {
 
             boolean includeTrauncation = xo.getAttribute(INCLUDE_TRUNCATION, false);
 
+            boolean forceReorder = xo.getAttribute(FORCE_REORDER, false);
+
             if (useOld) {
                 System.err.println("USE OLD");
-                return new MultidimensionalScalingLikelihood(mdsDimension, includeTrauncation, mdsPrecision, locationsParameter, distanceTable);
+                return new MultidimensionalScalingLikelihood(mdsDimension, includeTrauncation, mdsPrecision, (MatrixParameter)locationsParameter, distanceTable);
             } else {
                 return new MultiDimensionalScalingLikelihood(mdsDimension, mdsPrecision, locationsParameter,
-                        distanceTable, includeTrauncation);
+                        distanceTable, includeTrauncation, forceReorder);
             }
         }
 
@@ -322,9 +393,10 @@ public class MultiDimensionalScalingLikelihood extends AbstractModelLikelihood {
         private final XMLSyntaxRule[] rules = {
                 AttributeRule.newStringRule(FILE_NAME, false, "The name of the file containing the assay table"),
                 AttributeRule.newIntegerRule(MDS_DIMENSION, false, "The dimension of the space for MDS"),
-                new ElementRule(LOCATIONS, MatrixParameter.class),
+                new ElementRule(LOCATIONS, MatrixParameterInterface.class),
                 AttributeRule.newBooleanRule(USE_OLD, true),
                 AttributeRule.newBooleanRule(INCLUDE_TRUNCATION, true),
+                AttributeRule.newBooleanRule(FORCE_REORDER, true),
                 new ElementRule(MDS_PRECISION, Parameter.class)
         };
 
@@ -342,9 +414,11 @@ public class MultiDimensionalScalingLikelihood extends AbstractModelLikelihood {
     private String[] locationLabels;
 
     private Parameter mdsPrecisionParameter;
-    private MatrixParameter locationsParameter;
+    private MatrixParameterInterface locationsParameter;
 
     private boolean likelihoodKnown = false;
     private double logLikelihood;
     private double storedLogLikelihood;
+
+    private long flags = 0;
 }
diff --git a/src/dr/app/beagle/multidimensionalscaling/NativeMDSSingleton.java b/src/dr/app/beagle/multidimensionalscaling/NativeMDSSingleton.java
index eba2f76..82b421f 100644
--- a/src/dr/app/beagle/multidimensionalscaling/NativeMDSSingleton.java
+++ b/src/dr/app/beagle/multidimensionalscaling/NativeMDSSingleton.java
@@ -43,6 +43,8 @@ public class NativeMDSSingleton {
     public static final String LIBRARY_NAME = "mds_jni";
     public static final String LIBRARY_PATH_LABEL = "mds.library.path";
     public static final String LIBRARY_PLATFORM_NAME = getPlatformSpecificLibraryName();
+    public static final String LIBRARY_PLATFORM_EXTENSION = getPlatformSpecificLibraryExtension();
+    public static final String LIBRARY_PLATFORM_PREFIX = getPlatformSpecificLibraryPrefix();
 
 
     private NativeMDSSingleton() {
@@ -52,10 +54,30 @@ public class NativeMDSSingleton {
         String osName = System.getProperty("os.name").toLowerCase();
         String osArch = System.getProperty("os.arch").toLowerCase();
         if (osName.startsWith("windows")) {
-            if (osArch.equals("i386")) return LIBRARY_NAME + "32";
+            if (osArch.equals("x86") || osArch.equals("i386")) return LIBRARY_NAME + "32";
             if (osArch.startsWith("amd64") || osArch.startsWith("x86_64")) return LIBRARY_NAME + "64";
         }
-        return "lib" + LIBRARY_NAME + ".dylib";
+        return LIBRARY_NAME;
+    }
+
+    private static String getPlatformSpecificLibraryExtension() {
+        String osName = System.getProperty("os.name").toLowerCase();
+        if (osName.startsWith("windows")) {
+            return ".dll";
+        } else if (osName.startsWith("mac")) {
+            return ".dylib";
+        } else {
+            return ".so";
+        }
+    }
+
+    private static String getPlatformSpecificLibraryPrefix() {
+        String osName = System.getProperty("os.name").toLowerCase();
+        if (osName.startsWith("windows")) {
+            return "";
+        } else {
+            return "lib";
+        }
     }
 
     public static NativeMDSSingleton loadLibrary() throws UnsatisfiedLinkError {
@@ -68,9 +90,11 @@ public class NativeMDSSingleton {
                 if (path.length() > 0 && !path.endsWith("/")) {
                     path += "/";
                 }
+                System.load(path + LIBRARY_PLATFORM_PREFIX + LIBRARY_NAME + LIBRARY_PLATFORM_EXTENSION);
+            } else {
+                System.loadLibrary(LIBRARY_PLATFORM_NAME);
             }
 
-            System.load(path + LIBRARY_PLATFORM_NAME);
             INSTANCE = new NativeMDSSingleton();
             System.err.println("MDS library loaded.");
         }
diff --git a/src/dr/app/beagle/tools/BeagleSequenceSimulator.java b/src/dr/app/beagle/tools/BeagleSequenceSimulator.java
index a2de2d5..2e972fa 100644
--- a/src/dr/app/beagle/tools/BeagleSequenceSimulator.java
+++ b/src/dr/app/beagle/tools/BeagleSequenceSimulator.java
@@ -120,7 +120,7 @@ public class BeagleSequenceSimulator {
 				partition.setPartitionNumber(partitionCount);
                 partition.setOutputAncestralSequences(outputAncestralSequences);
 				
-				simulatePartitionCallers.add(new simulatePartitionCallable(
+				simulatePartitionCallers.add(new SimulatePartitionCallable(
 						partition
 //						, partitionCount
 						));
@@ -144,12 +144,12 @@ public class BeagleSequenceSimulator {
 		return alignment;
 	}// END: simulate
 
-	private class simulatePartitionCallable implements Callable<Void> {
+	private class SimulatePartitionCallable implements Callable<Void> {
 
 		private Partition partition;
 //        private int partitionNumber;
 		
-		private simulatePartitionCallable(Partition partition
+		private SimulatePartitionCallable(Partition partition
 //				, int partitionNumber
 				) {
 			this.partition = partition;
@@ -170,7 +170,7 @@ public class BeagleSequenceSimulator {
 			return null;
 		}// END: call
 
-	}// END: simulatePartitionCallable class
+	}// END: SimulatePartitionCallable class
 
 	private SimpleAlignment compileAlignment() {
 
diff --git a/src/dr/app/beast/BeastMain.java b/src/dr/app/beast/BeastMain.java
index 2a95c84..b3c20f3 100644
--- a/src/dr/app/beast/BeastMain.java
+++ b/src/dr/app/beast/BeastMain.java
@@ -323,7 +323,7 @@ public class BeastMain {
                         new Arguments.IntegerOption("errors", "Specify maximum number of numerical errors before stopping"),
                         new Arguments.IntegerOption("threads", "The number of computational threads to use (default auto)"),
                         new Arguments.Option("java", "Use Java only, no native implementations"),
-                        new Arguments.RealOption("threshold", 0.0, Double.MAX_VALUE, "Full evaluation test threshold (default 1E-6)"),
+                        new Arguments.RealOption("threshold", 0.0, Double.MAX_VALUE, "Full evaluation test threshold (default 0.1)"),
 
                         new Arguments.Option("beagle_off", "Don't use the BEAGLE library"),
                         new Arguments.Option("beagle", "Use BEAGLE library if available (default on)"),
@@ -359,6 +359,13 @@ public class BeastMain {
 
         int argumentCount = 0;
 
+        StringBuilder commandLine = new StringBuilder(args[0]);
+        for (int i = 1; i < args.length; i++) {
+            commandLine.append(" ");
+            commandLine.append(args[i]);
+        }
+        System.setProperty("command_line", commandLine.toString());
+
         try {
             argumentCount = arguments.parseArguments(args);
         } catch (Arguments.ArgumentException ae) {
@@ -474,11 +481,13 @@ public class BeastMain {
         if (!arguments.hasOption("beagle_SSE_off")) {
             beagleFlags |= BeagleFlag.VECTOR_SSE.getMask();
         }
-        if (arguments.hasOption("beagle_double")) {
-            beagleFlags |= BeagleFlag.PRECISION_DOUBLE.getMask();
-        }
+//        if (arguments.hasOption("beagle_double")) {
+//            beagleFlags |= BeagleFlag.PRECISION_DOUBLE.getMask();
+//        }
         if (arguments.hasOption("beagle_single")) {
             beagleFlags |= BeagleFlag.PRECISION_SINGLE.getMask();
+        } else {
+            beagleFlags |= BeagleFlag.PRECISION_DOUBLE.getMask();
         }
         if (arguments.hasOption("beagle_async")) {
             beagleFlags |= BeagleFlag.COMPUTATION_ASYNCH.getMask();
@@ -497,7 +506,7 @@ public class BeastMain {
         }
 
         if (arguments.hasOption("beagle_rescale")) {
-            System.setProperty("beagle.rescale", Integer.toString(arguments.getIntegerOption("beagle_rescale")));
+            System.setProperty("beagle.rescale", Long.toString(arguments.getLongOption("beagle_rescale")));
         }
 
         // ============= Other settings =============
@@ -640,6 +649,8 @@ public class BeastMain {
                 }
                 if (dialog.preferBeagleSSE()) {
                     beagleFlags |= BeagleFlag.VECTOR_SSE.getMask();
+                } else {
+                    beagleFlags &= ~BeagleFlag.VECTOR_SSE.getMask();
                 }
                 if (dialog.preferBeagleGPU()) {
                     beagleFlags |= BeagleFlag.PROCESSOR_GPU.getMask();
diff --git a/src/dr/app/beast/BeastVersion.java b/src/dr/app/beast/BeastVersion.java
index 60379e8..dcbb55f 100644
--- a/src/dr/app/beast/BeastVersion.java
+++ b/src/dr/app/beast/BeastVersion.java
@@ -46,13 +46,13 @@ public class BeastVersion implements Version {
      */
     private static final String VERSION = "1.8.3";
 
-    private static final String DATE_STRING = "2002-2015";
+    private static final String DATE_STRING = "2002-2016";
 
-    private static final boolean IS_PRERELEASE = true;
+    private static final boolean IS_PRERELEASE = false;
 
     // this is now being manually updated since the move to GitHub. Using date in yyyymmdd format (suffix
     // with b,c,d etc if multiple revisions in a day.
-    private static final String REVISION = "GitHub 20150808";
+    private static final String REVISION = "GitHub 20160213";
 
     public String getVersion() {
         return VERSION;
diff --git a/src/dr/app/beast/RBeastMain.java b/src/dr/app/beast/RBeastMain.java
new file mode 100644
index 0000000..bd22d4f
--- /dev/null
+++ b/src/dr/app/beast/RBeastMain.java
@@ -0,0 +1,63 @@
+/*
+ * RBeastMain.java
+ *
+ * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
+ *
+ * This file is part of BEAST.
+ * See the NOTICE file distributed with this work for additional
+ * information regarding copyright ownership and licensing.
+ *
+ * BEAST is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+ *
+ *  BEAST is distributed in the hope that it will be useful,
+ *  but WITHOUT ANY WARRANTY; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ *  GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with BEAST; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA  02110-1301  USA
+ */
+
+package dr.app.beast;
+
+import java.io.IOException;
+import java.security.Permission;
+
+/**
+ * @author Marc A. Suchard
+ */
+public class RBeastMain {
+
+    // Adopted from http://www.avanderw.co.za/preventing-calls-to-system-exit-in-java/
+
+    private static class SystemExitControl {
+
+        public static class ExitTrappedException extends SecurityException { }
+
+        public static void forbidSystemExitCall() {
+            final SecurityManager securityManager = new SecurityManager() {
+                @Override
+                public void checkPermission(Permission permission) {
+                    if (permission.getName().contains("exitVM")) {
+                        throw new ExitTrappedException();
+                    }
+                }
+            };
+            System.setSecurityManager(securityManager);
+        }
+
+        public static void enableSystemExitCall() {
+            System.setSecurityManager(null);
+        }
+    }
+
+    public static void main(String[] args) throws IOException {
+        SystemExitControl.forbidSystemExitCall();
+        BeastMain.main(args);
+    }
+}
diff --git a/src/dr/app/beast/development_parsers.properties b/src/dr/app/beast/development_parsers.properties
index 6a62686..0242195 100644
--- a/src/dr/app/beast/development_parsers.properties
+++ b/src/dr/app/beast/development_parsers.properties
@@ -53,11 +53,11 @@ dr.inferencexml.model.MatrixMatrixProductParser
 dr.inferencexml.model.MatrixVectorProductParameterParser
 dr.inferencexml.model.DifferenceMatrixParameterParser
 dr.inferencexml.model.DifferenceParameterParser
+dr.inferencexml.model.SumParameterParser
+dr.inferencexml.model.ImmutableParameterParser
 
 # DISTRIBUTIONS
 
-dr.inferencexml.distribution.ScaledBetaDistributionModelParser
-dr.inferencexml.distribution.InverseGammaDistributionModelParser
 dr.inferencexml.distribution.MomentDistributionModelParser
 
 # STRUCTURED COALESCENT
@@ -76,6 +76,7 @@ dr.evomodel.approxPopTree.PopTreeModel
 
 # TREE OPERATORS
 dr.evomodelxml.operators.SubtreeJumpOperatorParser
+dr.evomodelxml.operators.LatentFactorHamiltonianMCParser
 
 # TREE LIKELIHOOD
 dr.evomodel.treelikelihood.AdvancedTreeLikelihood
@@ -123,8 +124,8 @@ dr.evomodel.continuous.TreeTraitSimulator
 dr.inferencexml.operators.SwapParameterOperatorParser
 
 #OPERATORS
-dr.inferencexml.operators.MsatFullAncestryImportanceSamplingOperatorParser
-dr.inferencexml.operators.MsatSingleAncestralStateGibbsOperatorParser
+dr.evomodelxml.operators.MsatFullAncestryImportanceSamplingOperatorParser
+dr.evomodelxml.operators.MsatSingleAncestralStateGibbsOperatorParser
 dr.evomodelxml.sitemodel.DiscretizedLociRatesParser
 dr.evomodelxml.sitemodel.SampleQuantileLociRatesParser
 dr.evoxml.MsatPatternStatisticParser
@@ -214,5 +215,41 @@ dr.app.beagle.evomodel.branchmodel.lineagespecific.RatioParameterParser
 # NEW Bayesian MDS
 dr.app.beagle.multidimensionalscaling.MultiDimensionalScalingLikelihood
 dr.inference.model.CompoundMatrixParameter
+dr.inference.model.CompoundFastMatrixParameter
 dr.inference.model.CopyParameterValuesParser
-
+dr.inferencexml.distribution.CompoundGaussianProcessParser
+dr.app.beagle.evomodel.parsers.NewBeagleTreeLikelihoodParser
+dr.inference.model.FastMatrixParameter
+
+###############################################################################################
+#Antigenic phylo-clustering:
+dr.evomodel.antigenic.phyloClustering.TreeClusteringVirusesPrior
+#Antigenic phylo-clustering's operators
+dr.evomodel.antigenic.phyloClustering.MCMCOperators.TreeClusterAlgorithmOperator
+dr.evomodel.antigenic.phyloClustering.MCMCOperators.RandomWalkOnActiveMu
+dr.evomodel.antigenic.phyloClustering.MCMCOperators.randomWalkSerumDriftAndMu
+dr.evomodel.antigenic.phyloClustering.MCMCOperators.serumDriftActiveScaledMu1Operator
+dr.evomodel.antigenic.phyloClustering.MCMCOperators.serumPrecisionSerumLocOperator
+dr.evomodel.antigenic.phyloClustering.MCMCOperators.muPrecisionInactiveMuOperator
+dr.evomodel.antigenic.phyloClustering.MCMCOperators.ProbSitesGibbsOperator
+dr.evomodel.antigenic.phyloClustering.MCMCOperators.ProbGenericSiteGibbsOperator
+#Antigenic phylo-clustering's statistics
+dr.evomodel.antigenic.phyloClustering.statistics.indicatorsStatistic
+dr.evomodel.antigenic.phyloClustering.statistics.ActiveIndicatorsStatistic
+dr.evomodel.antigenic.phyloClustering.statistics.PathStatistic
+dr.evomodel.antigenic.phyloClustering.statistics.KStatistic
+dr.evomodel.antigenic.phyloClustering.statistics.muStatistic
+dr.evomodel.antigenic.phyloClustering.statistics.DriftedMuStatistic
+dr.evomodel.antigenic.phyloClustering.statistics.ClusterLabelsVirusesStatistic
+dr.evomodel.antigenic.phyloClustering.statistics.DriftedTreeClusterLocationsStatistic
+dr.evomodel.antigenic.phyloClustering.statistics.DriverCountStatistic
+dr.evomodel.antigenic.phyloClustering.statistics.CausalMutationsLogger
+#Antigenic phylo-clustering's statistics for the tree
+dr.evomodel.antigenic.phyloClustering.statistics.AnnotateMuTreeTrait
+dr.evomodel.antigenic.phyloClustering.statistics.AnnotateLocationParameterTreeTrait
+dr.evomodel.antigenic.phyloClustering.statistics.NodeNumberTreeTrait
+dr.evomodel.antigenic.phyloClustering.statistics.MutationsTreeTrait
+dr.evomodel.antigenic.phyloClustering.statistics.ClusterLabelsVirusesTreeTrait
+#Simulate clusters and HI
+dr.evomodel.antigenic.phyloClustering.misc.simulateClusters
+###############################################################################################
diff --git a/src/dr/app/beast/release_parsers.properties b/src/dr/app/beast/release_parsers.properties
index 44b9d1a..14d9a47 100644
--- a/src/dr/app/beast/release_parsers.properties
+++ b/src/dr/app/beast/release_parsers.properties
@@ -85,6 +85,8 @@ dr.inferencexml.model.JointParameterParser
 dr.inferencexml.model.MaskedParameterParser
 dr.inferencexml.model.DuplicatedParameterParser
 dr.inferencexml.model.ProductParameterParser
+dr.inferencexml.model.DifferenceParameterParser
+dr.inferencexml.model.SumParameterParser
 
 
 # SITE MODELS
@@ -147,6 +149,7 @@ dr.evomodelxml.coalescent.ConstantExponentialModelParser
 dr.evomodelxml.coalescent.ConstantLogisticModelParser
 dr.evomodelxml.coalescent.ExpansionModelParser
 dr.evomodelxml.coalescent.ExponentialExponentialModelParser
+dr.evomodelxml.coalescent.MultiEpochExponentialModelParser
 
 dr.evomodelxml.coalescent.EmergingEpidemicModelParser
 
@@ -275,6 +278,15 @@ dr.evomodelxml.operators.MulTreeNodeSlideParser
 dr.evomodelxml.operators.MulTreeSequenceReassignmentParser
 dr.evomodelxml.operators.PopsIOTreeNodeSlideParser
 dr.evomodel.continuous.GibbsIndependentCoalescentOperator
+dr.evomodelxml.operators.MicrosatUpDownOperatorParser
+dr.evomodelxml.operators.MsatBitFlipOperatorParser
+dr.evomodelxml.operators.MicrosatelliteModelSelectOperatorParser
+dr.evomodelxml.operators.RandomWalkIntegerNodeHeightWeightedOperatorParser
+dr.evomodelxml.operators.RandomWalkIntegerSetSizeWeightedOperatorParser
+
+dr.evomodelxml.operators.SubtreeJumpOperatorParser
+dr.evomodelxml.operators.SubtreeLeapOperatorParser
+
 
 # RATE OPERATORS
 dr.evomodelxml.operators.RateScaleOperatorParser
@@ -309,6 +321,8 @@ dr.inferencexml.distribution.OnePGammaDistributionModelParser
 dr.inferencexml.distribution.DirichletProcessLikelihoodParser
 dr.inferencexml.distribution.BetaDistributionModelParser
 dr.inferencexml.distribution.SkewNormalDistributionModelParser
+dr.inferencexml.distribution.ScaledBetaDistributionModelParser
+dr.inferencexml.distribution.InverseGammaDistributionModelParser
 
 dr.inferencexml.distribution.PriorParsers
 dr.inferencexml.distribution.WorkingPriorParsers
@@ -342,7 +356,6 @@ dr.inferencexml.operators.LogRandomWalkOperatorParser
 dr.inferencexml.operators.UniformOperatorParser
 dr.inferencexml.operators.UniformIntegerOperatorParser
 dr.inferencexml.operators.UpDownOperatorParser
-dr.inferencexml.operators.MicrosatUpDownOperatorParser
 dr.inferencexml.operators.SetOperatorParser
 dr.inferencexml.operators.SwapOperatorParser
 dr.inferencexml.operators.DeltaExchangeOperatorParser
@@ -357,11 +370,6 @@ dr.inferencexml.operators.SelectorOperatorParser
 dr.inferencexml.operators.ValuesPoolSwapOperatorParser
 dr.inferencexml.operators.DirtyLikelihoodOperatorParser
 dr.inferencexml.operators.SimpleOperatorScheduleParser
-dr.inferencexml.operators.MsatBitFlipOperatorParser
-dr.inferencexml.operators.MicrosatelliteModelSelectOperatorParser
-dr.inferencexml.operators.RandomWalkIntegerNodeHeightWeightedOperatorParser
-dr.inferencexml.operators.RandomWalkIntegerSetSizeWeightedOperatorParser
-
 dr.inference.operators.GibbsIndependentGammaOperator
 dr.inference.operators.GibbsIndependentNormalDistributionOperator
 
diff --git a/src/dr/app/beauti/BeautiFrame.java b/src/dr/app/beauti/BeautiFrame.java
index ef8db38..f32f7f7 100644
--- a/src/dr/app/beauti/BeautiFrame.java
+++ b/src/dr/app/beauti/BeautiFrame.java
@@ -34,6 +34,7 @@
 package dr.app.beauti;
 
 import dr.app.beauti.ancestralStatesPanel.AncestralStatesPanel;
+import dr.app.beauti.clockModelsPanel.ClockModelsPanel;
 import dr.app.beauti.clockModelsPanel.OldClockModelsPanel;
 import dr.app.beauti.components.ComponentFactory;
 import dr.app.beauti.components.ancestralstates.AncestralStatesComponentFactory;
@@ -47,6 +48,7 @@ import dr.app.beauti.components.sequenceerror.SequenceErrorModelComponentFactory
 import dr.app.beauti.components.tipdatesampling.TipDateSamplingComponentFactory;
 import dr.app.beauti.datapanel.DataPanel;
 import dr.app.beauti.generator.BeastGenerator;
+import dr.app.beauti.generator.ComponentGenerator;
 import dr.app.beauti.generator.Generator;
 import dr.app.beauti.mcmcpanel.MCMCPanel;
 import dr.app.beauti.operatorspanel.OperatorsPanel;
@@ -122,7 +124,7 @@ public class BeautiFrame extends DocumentFrame {
     private SpeciesSetPanel speciesSetPanel;
     private SiteModelsPanel siteModelsPanel;
     private AncestralStatesPanel ancestralStatesPanel;
-    private OldClockModelsPanel clockModelsPanel;
+    private ClockModelsPanel clockModelsPanel;
     private TreesPanel treesPanel;
     private PriorsPanel priorsPanel;
     private OperatorsPanel operatorsPanel;
@@ -186,7 +188,8 @@ public class BeautiFrame extends DocumentFrame {
         speciesSetPanel = new SpeciesSetPanel(this);
         siteModelsPanel = new SiteModelsPanel(this, getDeleteAction());
         ancestralStatesPanel = new AncestralStatesPanel(this);
-        clockModelsPanel = new OldClockModelsPanel(this);
+        clockModelsPanel = new ClockModelsPanel(this);
+//        clockModelsPanel = new OldClockModelsPanel(this);
 //        oldTreesPanel = new OldTreesPanel(this);
         treesPanel = new TreesPanel(this, getDeleteAction());
 //        speciesTreesPanel = new SpeciesTreesPanel(this);
@@ -784,7 +787,7 @@ public class BeautiFrame extends DocumentFrame {
      * @param title
      * @return
      */
-    private File[] selectImportFiles(final String title, boolean multipleSelection, FileNameExtensionFilter[] fileNameExtensionFilters) {
+    public File[] selectImportFiles(final String title, boolean multipleSelection, FileNameExtensionFilter[] fileNameExtensionFilters) {
         if (Boolean.parseBoolean(System.getProperty("use.native.choosers", Boolean.toString(OSType.isMac())))) {
             FileDialog importDialog = fileDialogs.get(title);
             if (importDialog == null) {
diff --git a/src/dr/app/beauti/ancestralStatesPanel/AncestralStatesOptionsPanel.java b/src/dr/app/beauti/ancestralStatesPanel/AncestralStatesOptionsPanel.java
index 6af351d..191b9db 100644
--- a/src/dr/app/beauti/ancestralStatesPanel/AncestralStatesOptionsPanel.java
+++ b/src/dr/app/beauti/ancestralStatesPanel/AncestralStatesOptionsPanel.java
@@ -59,6 +59,10 @@ public class AncestralStatesOptionsPanel extends OptionsPanel {
             + "Suchard (2012). This model requires a 3-partition codon model to be<br>"
             + "selected in the Site model for this partition and NO Site Heterogeneity Model.</html>";
 
+    private static final String COMPLETE_HISTORY_LOGGING_TOOL_TIP = "<html>"
+            + "Log a complete history of realised state changes to the tree log file.<br>"
+            + "This can make the files very large but can be useful for post hoc analysis.</html>";
+
     // Components
     private static final long serialVersionUID = -1645661616353099424L;
 
@@ -73,6 +77,8 @@ public class AncestralStatesOptionsPanel extends OptionsPanel {
             "Reconstruct state change counts");
     private JCheckBox dNdSRobustCountingCheck = new JCheckBox(
             "Reconstruct synonymous/non-synonymous change counts");
+    private JCheckBox completeHistoryLoggingCheck = new JCheckBox(
+            "Reconstruct complete change history on tree");
 
 
     private JTextArea dNnSText = new JTextArea(
@@ -123,6 +129,9 @@ public class AncestralStatesOptionsPanel extends OptionsPanel {
         PanelUtils.setupComponent(dNdSRobustCountingCheck);
         dNdSRobustCountingCheck.setToolTipText(DNDS_ROBUST_COUNTING_TOOL_TIP);
 
+        PanelUtils.setupComponent(completeHistoryLoggingCheck);
+        completeHistoryLoggingCheck.setToolTipText(COMPLETE_HISTORY_LOGGING_TOOL_TIP);
+
         // ////////////////////////
         PanelUtils.setupComponent(errorModelCombo);
         errorModelCombo.setToolTipText("<html>Select how to model sequence error or<br>"
@@ -137,6 +146,7 @@ public class AncestralStatesOptionsPanel extends OptionsPanel {
         mrcaReconstructionCombo.setSelectedItem(ancestralStatesComponent.getMRCATaxonSet(partition));
         countingCheck.setSelected(ancestralStatesComponent.isCountingStates(partition));
         dNdSRobustCountingCheck.setSelected(ancestralStatesComponent.dNdSRobustCounting(partition));
+        completeHistoryLoggingCheck.setSelected(ancestralStatesComponent.isCompleteHistoryLogging(partition));
 
         sequenceErrorComponent = (SequenceErrorModelComponentOptions)options.getComponentOptions(SequenceErrorModelComponentOptions.class);
 //        sequenceErrorComponent.createParameters(options); // this cannot create correct param here, because of improper design
@@ -160,6 +170,7 @@ public class AncestralStatesOptionsPanel extends OptionsPanel {
 //                    boolean enableSimpleCounting = !dNdSRobustCountingCheck.isSelected();
 //                    countingCheck.setEnabled(enableSimpleCounting);
 //                }
+                completeHistoryLoggingCheck.setEnabled(countingCheck.isSelected() || dNdSRobustCountingCheck.isSelected());
             }
         };
 
@@ -168,6 +179,7 @@ public class AncestralStatesOptionsPanel extends OptionsPanel {
         mrcaReconstructionCombo.addItemListener(listener);
         countingCheck.addItemListener(listener);
         dNdSRobustCountingCheck.addItemListener(listener);
+        completeHistoryLoggingCheck.addItemListener(listener);
 
         errorModelCombo.addItemListener(listener);
     }
@@ -190,6 +202,7 @@ public class AncestralStatesOptionsPanel extends OptionsPanel {
         ancestralStatesComponent.setCountingStates(partition, countingCheck.isSelected());
 //        ancestralStatesComponent.setDNdSRobustCounting(partition, robustCountingCheck.isSelected());
         ancestralStatesComponent.setDNdSRobustCounting(partition, dNdSRobustCountingCheck.isSelected());
+        ancestralStatesComponent.setCompleteHistoryLogging(partition, completeHistoryLoggingCheck.isSelected());
 
         sequenceErrorComponent.setSequenceErrorType(partition, (SequenceErrorType)errorModelCombo.getSelectedItem());
         sequenceErrorComponent.createParameters(options);
@@ -248,7 +261,16 @@ public class AncestralStatesOptionsPanel extends OptionsPanel {
         removeAll();
 
         if (ancestralReconstructionAvailable) {
-            addSpanningComponent(new JLabel("Ancestral State Reconstruction:"));
+            if (partition.getPartitionSubstitutionModel().getCodonPartitionCount() == 2) {
+                // mergedPatterns for codon positions 1&2 will always be compressed...
+                // so cannot do any of this stuff. Disable it and provide an explanation.
+                addSpanningComponent(new JLabel("<html>Unable to provide these options with the 1+2,3 codon<br>" +
+                        "position model. Use a 1,2,3 codon position model instead.<br><html>"));
+            }
+
+
+            JLabel label1 = new JLabel("Ancestral State Reconstruction:");
+            addSpanningComponent(label1);
 
             addComponent(ancestralReconstructionCheck);
 
@@ -260,42 +282,64 @@ public class AncestralStatesOptionsPanel extends OptionsPanel {
             panel.add(mrcaReconstructionCheck);
             panel.add(mrcaReconstructionCombo);
             addComponent(panel);
+
+            boolean enabled = true;
+
+            if (partition.getPartitionSubstitutionModel().getCodonPartitionCount() == 2) {
+                // mergedPatterns for codon positions 1&2 will always be compressed...
+                // so cannot do any of this stuff. Disable it and provide an explanation.
+                ancestralReconstructionCheck.setEnabled(false);
+                enabled = false;
+            }
+
+            label1.setEnabled(enabled);
+            panel.setEnabled(enabled);
+            ancestralReconstructionCheck.setEnabled(enabled);
+            mrcaReconstructionCheck.setEnabled(enabled);
+            mrcaReconstructionCombo.setEnabled(enabled);
         }
 
         if (countingAvailable) {
             if (ancestralReconstructionAvailable) {
                 addSeparator();
             }
-            addSpanningComponent(new JLabel("State Change Count Reconstruction:"));
 
-            JTextArea text = new JTextArea(
+            JLabel label2 = new JLabel("State Change Count Reconstruction:");
+            addSpanningComponent(label2);
+
+            JTextArea text1 = new JTextArea(
                     "Select this option to reconstruct counts of state changes using " +
                             "Markov Jumps. This approach is described in Minin & Suchard (2008).");
-            text.setColumns(40);
-            PanelUtils.setupComponent(text);
-            addComponent(text);
+            text1.setColumns(40);
+            PanelUtils.setupComponent(text1);
+            addComponent(text1);
 
             addComponent(countingCheck);
 
             boolean enableSimpleCounting = true;
 
             // TODO Simple counting is currently not available for codon partitioned models due to BEAUti limitation
-            if (ancestralStatesComponent.dNdSRobustCountingAvailable(partition)) {
+            if (ancestralStatesComponent.dNdSRobustCountingAvailable(partition) || partition.getPartitionSubstitutionModel().getCodonPartitionCount() == 2) {
                 enableSimpleCounting = false;
                 countingCheck.setSelected(false);
             }
+
             countingCheck.setEnabled(enableSimpleCounting);
+            label2.setEnabled(enableSimpleCounting);
+            text1.setEnabled(enableSimpleCounting);
+
+            JTextArea text2 = null;
 
             if (dNdSRobustCountingAvailable) {
-                addSeparator();
-                text = new JTextArea(
+//                addSeparator();
+                text2 = new JTextArea(
                         "Renaissance counting: select this option to reconstruct counts of synonymous and nonsynonymous " +
                                 "changes using Robust Counting. This approach is described in O'Brien, Minin " +
                                 "& Suchard (2009) and Lemey, Minin, Bielejec, Kosakovsky-Pond & Suchard " +
                                 "(2012):");
-                text.setColumns(40);
-                PanelUtils.setupComponent(text);
-                addComponent(text);
+                text2.setColumns(40);
+                PanelUtils.setupComponent(text2);
+                addComponent(text2);
 
                 addComponent(dNdSRobustCountingCheck);
 
@@ -310,20 +354,34 @@ public class AncestralStatesOptionsPanel extends OptionsPanel {
 
                 ancestralStatesComponent.setDNdSRobustCounting(partition, enableRC && dNdSRobustCountingCheck.isSelected());
 
+                text2.setEnabled(enableRC);
                 dNnSText.setEnabled(enableRC);
                 if (!enableRC) {
                     dNdSRobustCountingCheck.setSelected(false);
                 }
             }
+
+            addComponent(completeHistoryLoggingCheck);
+            completeHistoryLoggingCheck.setEnabled(countingCheck.isSelected() || dNdSRobustCountingCheck.isSelected());
+
         }
 
         if (errorModelAvailable) {
             if (ancestralReconstructionAvailable || countingAvailable) {
                 addSeparator();
             }
-            addSpanningComponent(new JLabel("Sequence error model:"));
-            addComponentWithLabel("Error Model:", errorModelCombo);
+            JLabel label3 = new JLabel("Sequence error model:");
+            addSpanningComponent(label3);
+            JLabel label4 = addComponentWithLabel("Error Model:", errorModelCombo);
+
+            boolean enabled = (partition.getPartitionSubstitutionModel().getCodonPartitionCount() != 2);
+
+            label3.setEnabled(enabled);
+            label4.setEnabled(enabled);
+            errorModelCombo.setEnabled(enabled);
+
         }
+
         isUpdating = false;
 
     }
diff --git a/src/dr/app/beauti/clockModelsPanel/ClockModelsPanel.java b/src/dr/app/beauti/clockModelsPanel/ClockModelsPanel.java
index ea3e15f..218faea 100644
--- a/src/dr/app/beauti/clockModelsPanel/ClockModelsPanel.java
+++ b/src/dr/app/beauti/clockModelsPanel/ClockModelsPanel.java
@@ -45,35 +45,32 @@ import javax.swing.event.ListSelectionListener;
 import javax.swing.plaf.BorderUIResource;
 import javax.swing.table.AbstractTableModel;
 import javax.swing.table.TableColumn;
+import javax.swing.table.TableColumnModel;
 import java.awt.*;
+import java.awt.event.ActionEvent;
 import java.awt.event.ItemEvent;
 import java.awt.event.ItemListener;
+import java.util.ArrayList;
 import java.util.EnumSet;
 import java.util.HashMap;
 import java.util.Map;
+import java.util.logging.Logger;
 
 /**
  * @author Andrew Rambaut
- * @author Alexei Drummond
- * @author Walter Xie
  * @version $Id: ModelPanel.java,v 1.17 2006/09/05 13:29:34 rambaut Exp $
- * @deprecated
  */
 public class ClockModelsPanel extends BeautiPanel implements Exportable {
 
-    public final static boolean DEBUG = false;
+        public final static boolean DEBUG = false;
 
     private static final long serialVersionUID = 2778103564318492601L;
 
     private static final int MINIMUM_TABLE_WIDTH = 140;
 
-    private final String[] columnToolTips = {null, "Molecular clock model",
-            "Decide whether to estimate molecular clock model",
-            "Provide the rate if it is fixed"};
-
-    JTable clockTable = null;
-    ClockTableModel clockTableModel = null;
-    BeautiOptions options = null;
+    private JTable modelTable = null;
+    private ModelTableModel modelTableModel = null;
+    private BeautiOptions options = null;
 
     JPanel modelPanelParent;
     PartitionClockModel currentModel = null;
@@ -87,6 +84,9 @@ public class ClockModelsPanel extends BeautiPanel implements Exportable {
 //    CreateModelDialog createModelDialog = null;
     boolean settingOptions = false;
 
+    private CloneModelDialog cloneModelDialog = null;
+
+    CloneModelsAction cloneModelsAction = new CloneModelsAction();
 
     public ClockModelsPanel(BeautiFrame parent) {
 
@@ -94,30 +94,28 @@ public class ClockModelsPanel extends BeautiPanel implements Exportable {
 
         this.frame = parent;
 
-        clockTableModel = new ClockTableModel();
-        clockTable = new JTable(clockTableModel); // {
-            //Implement table header tool tips.
-//            protected JTableHeader createDefaultTableHeader() {
-//                return new JTableHeader(columnModel) {
-//                    public String getToolTipText(MouseEvent e) {
-//                        Point p = e.getPoint();
-//                        int index = columnModel.getColumnIndexAtX(p.x);
-//                        int realIndex = columnModel.getColumn(index).getModelIndex();
-//                        return columnToolTips[realIndex];
-//                    }
-//                };
-//            }
-//        };
-
-        initTable(clockTable);
-
-        clockTable.getSelectionModel().addListSelectionListener(new ListSelectionListener() {
+        modelTableModel = new ModelTableModel();
+        modelTable = new JTable(modelTableModel);
+
+        modelTable.getTableHeader().setReorderingAllowed(false);
+        modelTable.getTableHeader().setResizingAllowed(false);
+//        modelTable.getTableHeader().setDefaultRenderer(
+//                new HeaderRenderer(SwingConstants.LEFT, new Insets(0, 4, 0, 4)));
+
+        final TableColumnModel model = modelTable.getColumnModel();
+        final TableColumn tableColumn0 = model.getColumn(0);
+        tableColumn0.setCellRenderer(new ModelsTableCellRenderer(SwingConstants.LEFT, new Insets(0, 4, 0, 4)));
+
+        TableEditorStopper.ensureEditingStopWhenTableLosesFocus(modelTable);
+
+        modelTable.getSelectionModel().setSelectionMode(ListSelectionModel.MULTIPLE_INTERVAL_SELECTION);
+        modelTable.getSelectionModel().addListSelectionListener(new ListSelectionListener() {
             public void valueChanged(ListSelectionEvent evt) {
                 selectionChanged();
             }
         });
 
-        JScrollPane scrollPane = new JScrollPane(clockTable,
+        JScrollPane scrollPane = new JScrollPane(modelTable,
                 JScrollPane.VERTICAL_SCROLLBAR_ALWAYS, JScrollPane.HORIZONTAL_SCROLLBAR_AS_NEEDED);
         scrollPane.setOpaque(false);
 
@@ -135,6 +133,16 @@ public class ClockModelsPanel extends BeautiPanel implements Exportable {
 //        panel.add(controlPanel1, BorderLayout.SOUTH);
         panel.setMinimumSize(new Dimension(MINIMUM_TABLE_WIDTH, 0));
 
+        JToolBar toolBar = new JToolBar();
+        toolBar.setFloatable(false);
+        toolBar.setOpaque(false);
+
+        toolBar.setLayout(new FlowLayout(java.awt.FlowLayout.LEFT, 0, 0));
+        JButton button = new JButton(cloneModelsAction);
+        PanelUtils.setupComponent(button);
+        toolBar.add(button);
+        panel.add(toolBar, BorderLayout.SOUTH);
+
         modelPanelParent = new JPanel(new FlowLayout(FlowLayout.CENTER));
         modelPanelParent.setOpaque(false);
         modelBorder = new TitledBorder("Substitution Model");
@@ -148,82 +156,15 @@ public class ClockModelsPanel extends BeautiPanel implements Exportable {
         scrollPane2.getViewport().setOpaque(false);
 
         JSplitPane splitPane = new JSplitPane(JSplitPane.HORIZONTAL_SPLIT, panel, scrollPane2);
-        splitPane.setDividerLocation(0.5);
+        splitPane.setDividerLocation(MINIMUM_TABLE_WIDTH);
         splitPane.setContinuousLayout(true);
         splitPane.setBorder(BorderFactory.createEmptyBorder());
         splitPane.setOpaque(false);
 
-        PanelUtils.setupComponent(fixedMeanRateCheck);
-        fixedMeanRateCheck.setSelected(false); // default to FixRateType.ESTIMATE
-        fixedMeanRateCheck.addItemListener(new ItemListener() {
-            public void itemStateChanged(ItemEvent ev) {
-                meanRateField.setEnabled(fixedMeanRateCheck.isSelected());
-//                if (fixedMeanRateCheck.isSelected()) {
-//                    options.clockModelOptions.fixMeanRate();
-//                } else {
-//                    options.clockModelOptions.fixRateOfFirstClockPartition();
-//                }
-
-                clockTableModel.fireTableDataChanged();
-                fireModelsChanged();
-            }
-        });
-        fixedMeanRateCheck.setToolTipText("<html>Select this option to fix the mean substitution rate,<br>"
-                + "rather than try to infer it. If this option is turned off, then<br>"
-                + "either the sequences should have dates or the tree should have<br>"
-                + "sufficient calibration informations specified as priors.<br>"
-                + "In addition, it is only available for multi-clock partitions." + "</html>");// TODO Alexei
-
-        PanelUtils.setupComponent(meanRateField);
-        meanRateField.setEnabled(fixedMeanRateCheck.isSelected());
-        meanRateField.setValue(1.0);
-        meanRateField.addKeyListener(new java.awt.event.KeyAdapter() {
-            public void keyTyped(java.awt.event.KeyEvent ev) {
-                frame.setDirty();
-            }
-        });
-        meanRateField.setToolTipText("<html>Enter the fixed mean rate here.</html>");
-        meanRateField.setColumns(10);
-
-        OptionsPanel panel2 = new OptionsPanel(12, 12);
-        panel2.addComponents(fixedMeanRateCheck, meanRateField);
-
         setOpaque(false);
         setBorder(new BorderUIResource.EmptyBorderUIResource(new Insets(12, 12, 12, 12)));
         setLayout(new BorderLayout(0, 0));
         add(splitPane, BorderLayout.CENTER);
-        add(panel2, BorderLayout.SOUTH);
-    }
-
-    private void initTable(JTable clockTable){
-//        clockTable.setAutoResizeMode(JTable.AUTO_RESIZE_OFF);
-        clockTable.getTableHeader().setReorderingAllowed(false);
-//        clockTable.getTableHeader().setDefaultRenderer(new HeaderRenderer(SwingConstants.LEFT, new Insets(0, 4, 0, 4)));
-
-        TableColumn col = clockTable.getColumnModel().getColumn(0);
-        col.setCellRenderer(new ClockTableCellRenderer(SwingConstants.LEFT, new Insets(0, 4, 0, 4)));
-//        col.setMinWidth(80);
-
-        col = clockTable.getColumnModel().getColumn(1);
-        ComboBoxRenderer comboBoxRenderer = new ComboBoxRenderer();
-        comboBoxRenderer.putClientProperty("JComboBox.isTableCellEditor", Boolean.TRUE);
-//        col.setMinWidth(260);
-
-        col = clockTable.getColumnModel().getColumn(2);
-        col.setMinWidth(60);
-        col.setMaxWidth(60);
-
-        col = clockTable.getColumnModel().getColumn(3);
-        col.setCellRenderer(new ClockTableCellRenderer(SwingConstants.LEFT, new Insets(0, 4, 0, 4)));
-        col.setCellEditor(new RealNumberCellEditor(0, Double.POSITIVE_INFINITY));
-//        col.setMinWidth(80);
-
-        TableEditorStopper.ensureEditingStopWhenTableLosesFocus(clockTable);
-    }
-
-    private void modelsChanged() {
-        TableColumn col = clockTable.getColumnModel().getColumn(1);
-        col.setCellEditor(new DefaultCellEditor(new JComboBox(EnumSet.range(ClockType.STRICT_CLOCK, ClockType.RANDOM_LOCAL_CLOCK).toArray())));
     }
 
     private void resetPanel() {
@@ -239,34 +180,33 @@ public class ClockModelsPanel extends BeautiPanel implements Exportable {
 
     public void setOptions(BeautiOptions options) {
 
+        if (DEBUG) {
+            Logger.getLogger("dr.app.beauti").info("ModelsPanel.setOptions");
+        }
+
         this.options = options;
 
         resetPanel();
 
         settingOptions = true;
 
-//        fixedMeanRateCheck.setSelected(options.clockModelOptions.getRateOptionClockModel() == FixRateType.FIX_MEAN);
-//        fixedMeanRateCheck.setEnabled(!(options.clockModelOptions.getRateOptionClockModel() == FixRateType.TIP_CALIBRATED
-//                || options.clockModelOptions.getRateOptionClockModel() == FixRateType.NODE_CALIBRATED
-//                || options.clockModelOptions.getRateOptionClockModel() == FixRateType.RATE_CALIBRATED));
-//        meanRateField.setValue(options.clockModelOptions.getMeanRelativeRate());
-
-        int selRow = clockTable.getSelectedRow();
-        clockTableModel.fireTableDataChanged();
+        int selRow = modelTable.getSelectedRow();
+        modelTableModel.fireTableDataChanged();
         if (options.getPartitionSubstitutionModels().size() > 0) {
             if (selRow < 0) {
                 selRow = 0;
             }
-            clockTable.getSelectionModel().setSelectionInterval(selRow, selRow);
+            modelTable.getSelectionModel().setSelectionInterval(selRow, selRow);
         }
 
         if (currentModel == null && options.getPartitionClockModels().size() > 0) {
-            clockTable.getSelectionModel().setSelectionInterval(0, 0);
+            modelTable.getSelectionModel().setSelectionInterval(0, 0);
         }
 
-        modelsChanged();
-
         settingOptions = false;
+
+        validate();
+        repaint();
     }
 
     public void getOptions(BeautiOptions options) {
@@ -282,17 +222,34 @@ public class ClockModelsPanel extends BeautiPanel implements Exportable {
     }
 
     private void selectionChanged() {
-        int selRow = clockTable.getSelectedRow();
+        if (modelTable.getSelectedRowCount() == 1) {
+            int selRow = modelTable.getSelectedRow();
 
         if (selRow >= options.getPartitionClockModels().size()) {
             selRow = 0;
-            clockTable.getSelectionModel().setSelectionInterval(selRow, selRow);
+                modelTable.getSelectionModel().setSelectionInterval(selRow, selRow);
         }
 
         if (selRow >= 0) {
             setCurrentModel(options.getPartitionClockModels().get(selRow));
 //            frame.modelSelectionChanged(!isUsed(selRow));
+            }
+        } else {
+            setCurrentModels(getSelectedModels());
+        }
+    }
+
+    private java.util.List<PartitionClockModel> getSelectedModels() {
+        java.util.List<PartitionClockModel> models = new ArrayList<PartitionClockModel>();
+
+        for (int row : modelTable.getSelectedRows()) {
+            models.add(options.getPartitionClockModels().get(row));
         }
+        if (models.size() == 0) {
+            models.addAll(options.getPartitionClockModels());
+        }
+
+        return models;
     }
 
     /**
@@ -301,25 +258,46 @@ public class ClockModelsPanel extends BeautiPanel implements Exportable {
      * @param model the new model to display
      */
     private void setCurrentModel(PartitionClockModel model) {
+        modelPanelParent.removeAll();
 
-        if (model != null) {
-            if (currentModel != null) modelPanelParent.removeAll();
+        currentModel = model;
 
-            PartitionClockModelPanel panel = modelPanels.get(model);
+        if (currentModel != null) {
+            PartitionClockModelPanel panel = modelPanels.get(currentModel);
             if (panel == null) {
                 panel = new PartitionClockModelPanel(model);
                 modelPanels.put(model, panel);
             }
 
-            currentModel = model;
+            panel.setOptions();
             modelPanelParent.add(panel);
 
+        } else {
+
+        }
+
+        cloneModelsAction.setEnabled(true);
+
             updateBorder();
         }
+
+    private void setCurrentModels(java.util.List<PartitionClockModel> models) {
+        modelPanelParent.removeAll();
+
+        currentModel = null;
+
+        updateBorder();
+        cloneModelsAction.setEnabled(true);
+
+        repaint();
     }
 
     private void updateBorder() {
-        modelBorder.setTitle("Clock Model - " + currentModel.getName());
+        if (currentModel != null) {
+            modelBorder.setTitle("Clock Model - " + currentModel.getName());
+        } else {
+            modelBorder.setTitle("Multiple clock models selected");
+        }
         repaint();
     }
 
@@ -333,15 +311,43 @@ public class ClockModelsPanel extends BeautiPanel implements Exportable {
         return false;
     }
 
+
+    private void cloneModelSettings() {
+        if (cloneModelDialog == null) {
+            cloneModelDialog = new CloneModelDialog(frame);
+        }
+
+
+            java.util.List<PartitionClockModel> sourceModels = new ArrayList<PartitionClockModel>();
+
+            for (PartitionClockModel model : options.getPartitionClockModels()) {
+                    sourceModels.add(model);
+            }
+
+            int result = cloneModelDialog.showDialog(sourceModels);
+
+            if (result == -1 || result == JOptionPane.CANCEL_OPTION) {
+                return;
+            }
+
+        PartitionClockModel sourceModel = cloneModelDialog.getSourceModel();
+            for (PartitionClockModel model : getSelectedModels()) {
+                if (!model.equals(sourceModel)) {
+                    throw new UnsupportedOperationException("Not implemented yet");
+//                    model.copyFrom(sourceModel);
+                }
+            }
+
+            repaint();
+
+    }
+
     public JComponent getExportableComponent() {
         return this;
     }
 
     class ModelTableModel extends AbstractTableModel {
 
-        /**
-         *
-         */
         private static final long serialVersionUID = -6707994233020715574L;
         String[] columnNames = {"Clock Model"};
 
@@ -415,6 +421,7 @@ public class ClockModelsPanel extends BeautiPanel implements Exportable {
         }
     }
 
+
     class ModelsTableCellRenderer extends TableRenderer {
 
         public ModelsTableCellRenderer(int alignment, Insets insets) {
@@ -444,165 +451,16 @@ public class ClockModelsPanel extends BeautiPanel implements Exportable {
 
     }
 
-
-    class ClockTableModel extends AbstractTableModel {
-        private static final long serialVersionUID = -2852144669936634910L;
-
-//        String[] columnNames = {"Clock Model Name", "Molecular Clock Model"};
-        String[] columnNames = {"Name", "Model", "Estimate", "Rate"};
-
-        public ClockTableModel() {
-        }
-
-        public int getColumnCount() {
-//        	if (estimateRelatieRateCheck.isSelected()) {
-//        		return columnNames2.length;
-//        	} else {
-            return columnNames.length;
-//        	}
-        }
-
-        public int getRowCount() {
-            if (options == null) return 0;
-            if (options.getPartitionClockModels().size() < 2) {
-                fixedMeanRateCheck.setEnabled(false);
-            } else {
-                fixedMeanRateCheck.setEnabled(true);
-            }
-            return options.getPartitionClockModels().size();
-        }
-
-        public Object getValueAt(int row, int col) {
-            PartitionClockModel model = options.getPartitionClockModels().get(row);
-            switch (col) {
-                case 0:
-                    return model.getName();
-                case 1:
-                    return model.getClockType();
-                case 2:
-                    return model.isEstimatedRate();
-                case 3:
-                    return model.getRate();
-            }
-            return null;
-        }
-
-        public void setValueAt(Object aValue, int row, int col) {
-            PartitionClockModel model = options.getPartitionClockModels().get(row);
-            switch (col) {
-                case 0:
-                    String name = ((String) aValue).trim();
-                    if (name.length() > 0) {
-                        model.setName(name);
-                    }
-                    break;
-                case 1:
-                    model.setClockType((ClockType) aValue);
-                    break;
-                case 2:
-                    model.setEstimatedRate((Boolean) aValue);
-//                    if (options.clockModelOptions.getRateOptionClockModel() == FixRateType.RElATIVE_TO) {
-//                        if (!options.clockModelOptions.validateRelativeTo()) {
-//                            JOptionPane.showMessageDialog(frame, "It must have at least one clock rate to be fixed !",
-//                                    "Validation Of Relative To ?th Rate", JOptionPane.WARNING_MESSAGE);
-//                            model.setEstimatedRate(false);
-//                        }
-//                    }
-                    break;
-                case 3:
-                    model.setRate((Double) aValue, true);
-                    options.selectParameters();
-                    break;
-                default:
-                    throw new IllegalArgumentException("unknown column, " + col);
-            }
-            fireModelsChanged();
-        }
-
-        public boolean isCellEditable(int row, int col) {
-            switch (col) {
-                case 2:// Check box
-                    return !fixedMeanRateCheck.isSelected();
-                case 3:
-                    return !fixedMeanRateCheck.isSelected() && !((Boolean) getValueAt(row, 2));
-                default:
-                    return true;
-            }
-        }
-
-        public String getColumnName(int column) {
-            return columnNames[column];
-        }
-
-        public Class getColumnClass(int c) {
-            if (getRowCount() == 0) {
-                return Object.class;
-            }
-            return getValueAt(0, c).getClass();
-        }
-
-        public String toString() {
-            StringBuffer buffer = new StringBuffer();
-
-            buffer.append(getColumnName(0));
-            for (int j = 1; j < getColumnCount(); j++) {
-                buffer.append("\t");
-                buffer.append(getColumnName(j));
-            }
-            buffer.append("\n");
-
-            for (int i = 0; i < getRowCount(); i++) {
-                buffer.append(getValueAt(i, 0));
-                for (int j = 1; j < getColumnCount(); j++) {
-                    buffer.append("\t");
-                    buffer.append(getValueAt(i, j));
-                }
-                buffer.append("\n");
-            }
-
-            return buffer.toString();
-        }
-    }
-
-
-    class ClockTableCellRenderer extends TableRenderer {
-
-        public ClockTableCellRenderer(int alignment, Insets insets) {
-            super(alignment, insets);
+    public class CloneModelsAction extends AbstractAction {
+        public CloneModelsAction() {
+            super("Clone Settings...");
+            setToolTipText("Use this tool to copy settings to selected models");
         }
 
-        public Component getTableCellRendererComponent(JTable aTable,
-                                                       Object value,
-                                                       boolean aIsSelected,
-                                                       boolean aHasFocus,
-                                                       int aRow, int aColumn) {
-
-            if (value == null) return this;
-
-            Component renderer = super.getTableCellRendererComponent(aTable,
-                    value,
-                    aIsSelected,
-                    aHasFocus,
-                    aRow, aColumn);
-
-            if (fixedMeanRateCheck.isSelected() && aColumn > 1) {
-                renderer.setForeground(Color.gray);
-            } else if (!fixedMeanRateCheck.isSelected() && aColumn == 3 && (Boolean) aTable.getValueAt(aRow, 2)) {
-                renderer.setForeground(Color.gray);
-            } else {
-                renderer.setForeground(Color.black);
-            }
-
-            return this;
+        public void actionPerformed(ActionEvent ae) {
+            cloneModelSettings();
         }
-
     }
 
 
-
-//    Action addModelAction = new AbstractAction("+") {
-//        public void actionPerformed(ActionEvent ae) {
-//            createModel();
-//        }
-//    };
 }
\ No newline at end of file
diff --git a/src/dr/app/beauti/clockModelsPanel/CloneModelDialog.java b/src/dr/app/beauti/clockModelsPanel/CloneModelDialog.java
new file mode 100644
index 0000000..db09c48
--- /dev/null
+++ b/src/dr/app/beauti/clockModelsPanel/CloneModelDialog.java
@@ -0,0 +1,94 @@
+/*
+ * CloneModelDialog.java
+ *
+ * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
+ *
+ * This file is part of BEAST.
+ * See the NOTICE file distributed with this work for additional
+ * information regarding copyright ownership and licensing.
+ *
+ * BEAST is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+ *
+ *  BEAST is distributed in the hope that it will be useful,
+ *  but WITHOUT ANY WARRANTY; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ *  GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with BEAST; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA  02110-1301  USA
+ */
+
+package dr.app.beauti.clockModelsPanel;
+
+import dr.app.beauti.options.PartitionClockModel;
+import dr.app.beauti.util.PanelUtils;
+import jam.panels.OptionsPanel;
+
+import javax.swing.*;
+import javax.swing.border.EmptyBorder;
+import java.util.List;
+
+/**
+ * @author Andrew Rambaut
+ * @version $Id$
+ */
+public class CloneModelDialog {
+
+    private JFrame frame;
+
+    JComboBox sourceModelCombo;
+
+    OptionsPanel optionPanel;
+
+    public CloneModelDialog(JFrame frame) {
+        this.frame = frame;
+
+        sourceModelCombo = new JComboBox();
+        PanelUtils.setupComponent(sourceModelCombo);
+        sourceModelCombo
+                .setToolTipText("<html>Select the substitution model to act as a source<br>to copy to the other selected models.</html>");
+
+        optionPanel = new OptionsPanel(12, 12);
+        optionPanel.addSpanningComponent(new JLabel("<html>Select the substitution model to act as a source<br>to copy to the other selected models.</html>"));
+        optionPanel.addComponentWithLabel("Source Model:", sourceModelCombo);
+    }
+
+    public int showDialog(List<PartitionClockModel> sourceModels) {
+
+        JOptionPane optionPane = new JOptionPane(optionPanel,
+                JOptionPane.QUESTION_MESSAGE,
+                JOptionPane.OK_CANCEL_OPTION,
+                null,
+                null,
+                null);
+        optionPane.setBorder(new EmptyBorder(12, 12, 12, 12));
+
+        sourceModelCombo.removeAllItems();
+        for (PartitionClockModel model : sourceModels) {
+            sourceModelCombo.addItem(model);
+        }
+
+        final JDialog dialog = optionPane.createDialog(frame, "Clone model settings");
+        dialog.pack();
+
+        dialog.setVisible(true);
+
+        int result = JOptionPane.CANCEL_OPTION;
+        Integer value = (Integer) optionPane.getValue();
+        if (value != null && value != -1) {
+            result = value;
+        }
+
+        return result;
+    }
+
+    public PartitionClockModel getSourceModel() {
+        return (PartitionClockModel)sourceModelCombo.getSelectedItem();
+    }
+
+}
\ No newline at end of file
diff --git a/src/dr/app/beauti/clockModelsPanel/OldClockModelsPanel.java b/src/dr/app/beauti/clockModelsPanel/OldClockModelsPanel.java
index 36d9f5e..310b815 100644
--- a/src/dr/app/beauti/clockModelsPanel/OldClockModelsPanel.java
+++ b/src/dr/app/beauti/clockModelsPanel/OldClockModelsPanel.java
@@ -57,6 +57,7 @@ import java.util.List;
  * @author Alexei Drummond
  * @author Walter Xie
  * @version $Id: ClockModelPanel.java,v 1.17 2006/09/05 13:29:34 rambaut Exp $
+ * @deprecated
  */
 public class OldClockModelsPanel extends BeautiPanel implements Exportable {
 
diff --git a/src/dr/app/beauti/clockModelsPanel/PartitionClockModelPanel.java b/src/dr/app/beauti/clockModelsPanel/PartitionClockModelPanel.java
index fe65f52..4c08d85 100644
--- a/src/dr/app/beauti/clockModelsPanel/PartitionClockModelPanel.java
+++ b/src/dr/app/beauti/clockModelsPanel/PartitionClockModelPanel.java
@@ -27,6 +27,7 @@ package dr.app.beauti.clockModelsPanel;
 
 import dr.app.beauti.options.PartitionClockModel;
 import dr.app.beauti.types.ClockDistributionType;
+import dr.app.beauti.types.ClockType;
 import dr.app.beauti.util.PanelUtils;
 import dr.app.util.OSType;
 import jam.panels.OptionsPanel;
@@ -34,20 +35,25 @@ import jam.panels.OptionsPanel;
 import javax.swing.*;
 import java.awt.event.ItemEvent;
 import java.awt.event.ItemListener;
+import java.util.EnumSet;
 
 /**
- * @author Alexei Drummond
  * @author Andrew Rambaut
- * @author Walter Xie
- * @deprecated
  */
 public class PartitionClockModelPanel extends OptionsPanel {
 
     // Components
     private static final long serialVersionUID = -1645661616353099424L;
 
-//    private JComboBox clockTypeCombo = new JComboBox(ClockType.values());
-    private JComboBox clockDistributionCombo = new JComboBox(ClockDistributionType.values());
+    private JComboBox clockTypeCombo = new JComboBox(EnumSet.range(
+            ClockType.STRICT_CLOCK, ClockType.FIXED_LOCAL_CLOCK).toArray());
+    private JComboBox clockDistributionCombo = new JComboBox (new ClockDistributionType[] {
+            ClockDistributionType.LOGNORMAL,
+            ClockDistributionType.GAMMA,
+//            ClockDistributionType.CAUCHY,
+            ClockDistributionType.EXPONENTIAL
+    });
+    private JCheckBox continuousQuantileCheck = new JCheckBox("Use continuous quantile parameterization.");
 
     protected final PartitionClockModel model;
 
@@ -57,28 +63,40 @@ public class PartitionClockModelPanel extends OptionsPanel {
 
         this.model = partitionModel;
 
-//        PanelUtils.setupComponent(clockTypeCombo);
-//        clockTypeCombo.addItemListener(new ItemListener() {
-//            public void itemStateChanged(ItemEvent ev) {
-//                model.setClockType((ClockType) clockTypeCombo.getSelectedItem());
-//                setupPanel();
-//            }
-//        });
-//        clockTypeCombo.setToolTipText("<html>Select the type of molecular clock model.</html>");
-//
-//        clockTypeCombo.setSelectedItem(model.getClockType());
+        PanelUtils.setupComponent(clockTypeCombo);
+        clockTypeCombo.addItemListener(new ItemListener() {
+            public void itemStateChanged(ItemEvent ev) {
+                model.setClockType((ClockType) clockTypeCombo.getSelectedItem());
+                setupPanel();
+            }
+        });
+        clockTypeCombo.setToolTipText("<html>Select the type of molecular clock model.</html>");
+
+        clockTypeCombo.setSelectedItem(model.getClockType());
 
         PanelUtils.setupComponent(clockDistributionCombo);
         clockDistributionCombo.addItemListener(new ItemListener() {
             public void itemStateChanged(ItemEvent ev) {
                 model.setClockDistributionType((ClockDistributionType) clockDistributionCombo.getSelectedItem());
-                setupPanel();
             }
         });
         clockDistributionCombo.setToolTipText("<html>Select the distribution that describes the variation in rate.</html>");
 
         clockDistributionCombo.setSelectedItem(model.getClockDistributionType());
 
+        PanelUtils.setupComponent(continuousQuantileCheck);
+        continuousQuantileCheck.setToolTipText("<html>" +
+                "Select this option to use the continuous quantile form of the relaxed<br>" +
+                "clock model described by Li & Drummond (2012) MBE 29:751-61 instead of<br>" +
+                "the discretized categorical form.<html>");
+        continuousQuantileCheck.setSelected(model.isContinuousQuantile());
+        continuousQuantileCheck.addItemListener(
+                new ItemListener() {
+                    public void itemStateChanged(ItemEvent ev) {
+                        model.setContinuousQuantile(continuousQuantileCheck.isSelected());
+                    }
+                });
+
         setupPanel();
         setOpaque(false);
     }
@@ -89,24 +107,48 @@ public class PartitionClockModelPanel extends OptionsPanel {
      */
     public void setupPanel() {
         removeAll();
-//        addComponentWithLabel("Clock Type:", clockTypeCombo);
+        addComponentWithLabel("Clock Type:", clockTypeCombo);
 
         switch (model.getClockType()) {
             case STRICT_CLOCK:
                 break;
 
             case UNCORRELATED:
+                addComponent(new JLabel(
+                        "<html>" +
+                                "Using the uncorrelated relaxed clock model of Drummond, Ho, Phillips & <br>" +
+                                "Rambaut (2006) PLoS Biology 4, e88.<html>"));
+                addComponentWithLabel("Relaxed Distribution:", clockDistributionCombo);
+                addComponent(continuousQuantileCheck);
+                break;
+
             case AUTOCORRELATED:
                 addComponentWithLabel("Relaxed Distribution:", clockDistributionCombo);
                 break;
 
             case RANDOM_LOCAL_CLOCK:
+            case FIXED_LOCAL_CLOCK:
                 break;
 
             default:
-                throw new IllegalArgumentException("Unknown data type");
+                throw new IllegalArgumentException("Unknown clock model type");
 
         }
+
+    }
+
+    /**
+     * Sets the components up according to the partition model - but does not
+     * layout the top level options panel.
+     */
+    public void setOptions() {
+
+        if (model == null) {
+            return;
+        }
+        setupPanel();
+        setOpaque(false);
+
     }
 
 }
diff --git a/src/dr/app/beauti/components/ancestralstates/AncestralStatesComponentGenerator.java b/src/dr/app/beauti/components/ancestralstates/AncestralStatesComponentGenerator.java
index 263190e..fa3e539 100644
--- a/src/dr/app/beauti/components/ancestralstates/AncestralStatesComponentGenerator.java
+++ b/src/dr/app/beauti/components/ancestralstates/AncestralStatesComponentGenerator.java
@@ -173,7 +173,7 @@ public class AncestralStatesComponentGenerator extends BaseComponentGenerator {
         for (AbstractPartitionData partition : options.getDataPartitions()) {
 
             if (component.dNdSRobustCounting(partition)) {
-                writeCodonPartitionedRobustCounting(writer, partition);
+                writeCodonPartitionedRobustCounting(writer, partition, component.isCompleteHistoryLogging(partition));
             }
         }
     }
@@ -181,7 +181,8 @@ public class AncestralStatesComponentGenerator extends BaseComponentGenerator {
     // Called for each model that requires robust counting (can be more than
     // one)
     private void writeCodonPartitionedRobustCounting(XMLWriter writer,
-                                                     AbstractPartitionData partition) {
+                                                     AbstractPartitionData partition,
+                                                     boolean isCompleteHistoryLogging) {
 
 //        if (DEBUG) {
 //            System.err.println("DEBUG: Writing RC for " + partition.getName());
@@ -198,6 +199,8 @@ public class AncestralStatesComponentGenerator extends BaseComponentGenerator {
                         new Attribute.Default<String>("id", prefix + "robustCounting1"),
                         new Attribute.Default<String>("labeling", "S"),
                         new Attribute.Default<String>("prefix", prefix),
+                        new Attribute.Default<String>("saveCompleteHistory",
+                                isCompleteHistoryLogging ? "true" : "false"),
                         new Attribute.Default<String>("useUniformization",
                                 "true"),
                         new Attribute.Default<String>("unconditionedPerBranch",
diff --git a/src/dr/app/beauti/components/ancestralstates/AncestralStatesComponentOptions.java b/src/dr/app/beauti/components/ancestralstates/AncestralStatesComponentOptions.java
index 31fbe5c..40f7149 100644
--- a/src/dr/app/beauti/components/ancestralstates/AncestralStatesComponentOptions.java
+++ b/src/dr/app/beauti/components/ancestralstates/AncestralStatesComponentOptions.java
@@ -98,6 +98,15 @@ public class AncestralStatesComponentOptions implements ComponentOptions {
         getOptions(partition).countingStates = isCountingStates;
     }
 
+    public boolean isCompleteHistoryLogging(final AbstractPartitionData partition) {
+        return getOptions(partition).isCompleteHistoryLogging;
+    }
+
+    public void setCompleteHistoryLogging(final AbstractPartitionData partition, boolean isCompleteHistoryLogging) {
+        getOptions(partition).isCompleteHistoryLogging = isCompleteHistoryLogging;
+    }
+
+
     public boolean dNdSRobustCounting(final AbstractPartitionData partition) {
         return getOptions(partition).dNdSRobustCounting;
     }
@@ -125,6 +134,7 @@ public class AncestralStatesComponentOptions implements ComponentOptions {
         boolean reconstructAtMRCA = false;
         String mrcaTaxonSetName = null;
         boolean countingStates = false;
+        boolean isCompleteHistoryLogging = false;
         boolean dNdSRobustCounting = false;
     };
 
diff --git a/src/dr/app/beauti/components/discrete/DiscreteTraitsComponentGenerator.java b/src/dr/app/beauti/components/discrete/DiscreteTraitsComponentGenerator.java
index 24ccce6..d93446c 100644
--- a/src/dr/app/beauti/components/discrete/DiscreteTraitsComponentGenerator.java
+++ b/src/dr/app/beauti/components/discrete/DiscreteTraitsComponentGenerator.java
@@ -28,6 +28,8 @@ package dr.app.beauti.components.discrete;
 import dr.app.beagle.evomodel.parsers.MarkovJumpsTreeLikelihoodParser;
 import dr.app.beauti.components.ancestralstates.AncestralStatesComponentOptions;
 import dr.app.beauti.generator.BaseComponentGenerator;
+import dr.app.beauti.generator.BeastGenerator;
+import dr.app.beauti.generator.BranchRatesModelGenerator;
 import dr.app.beauti.generator.ComponentGenerator;
 import dr.app.beauti.options.*;
 import dr.app.beauti.util.XMLWriter;
@@ -402,27 +404,7 @@ public class DiscreteTraitsComponentGenerator extends BaseComponentGenerator {
         writer.writeIDref(SiteModel.SITE_MODEL, substModel.getName() + "." + SiteModel.SITE_MODEL);
         writer.writeIDref(GeneralSubstitutionModelParser.GENERAL_SUBSTITUTION_MODEL, substModel.getName() + "." + AbstractSubstitutionModel.MODEL);
 
-        switch (clockModel.getClockType()) {
-            case STRICT_CLOCK:
-                writer.writeIDref(StrictClockBranchRatesParser.STRICT_CLOCK_BRANCH_RATES,
-                        clockModel.getPrefix() + BranchRateModel.BRANCH_RATES);
-                break;
-            case UNCORRELATED:
-                writer.writeIDref(DiscretizedBranchRatesParser.DISCRETIZED_BRANCH_RATES,
-                        clockModel.getPrefix() + BranchRateModel.BRANCH_RATES);
-                break;
-            case RANDOM_LOCAL_CLOCK:
-                writer.writeIDref(RandomLocalClockModelParser.LOCAL_BRANCH_RATES,
-                        clockModel.getPrefix() + BranchRateModel.BRANCH_RATES);
-                break;
-            case AUTOCORRELATED:
-                writer.writeIDref(ACLikelihoodParser.AC_LIKELIHOOD,
-                        clockModel.getPrefix() + BranchRateModel.BRANCH_RATES);
-                break;
-
-            default:
-                throw new IllegalArgumentException("Unknown clock model");
-        }
+        BranchRatesModelGenerator.writeBranchRatesModelRef(clockModel, writer);
 
         if (substModel.getDiscreteSubstType() == DiscreteSubstModelType.ASYM_SUBST) {
             int stateCount = options.getStatesForDiscreteModel(substModel).size();
diff --git a/src/dr/app/beauti/components/marginalLikelihoodEstimation/MLEGSSDialog.java b/src/dr/app/beauti/components/marginalLikelihoodEstimation/MLEGSSDialog.java
index 4a38a9e..6018950 100644
--- a/src/dr/app/beauti/components/marginalLikelihoodEstimation/MLEGSSDialog.java
+++ b/src/dr/app/beauti/components/marginalLikelihoodEstimation/MLEGSSDialog.java
@@ -34,6 +34,7 @@ import jam.panels.OptionsPanel;
 import javax.swing.*;
 import javax.swing.border.EmptyBorder;
 import java.awt.event.ActionEvent;
+import java.awt.event.KeyEvent;
 
 
 /**
@@ -84,7 +85,7 @@ public class MLEGSSDialog {
         pathStepsField.setColumns(16);
         pathStepsField.setMinimumSize(pathStepsField.getPreferredSize());
         labelPathSteps = optionsPanel.addComponentWithLabel("Number of stepping stones:", pathStepsField);
-        /*pathStepsField.addKeyListener(new java.awt.event.KeyListener() {
+        pathStepsField.addKeyListener(new java.awt.event.KeyListener() {
             public void keyTyped(KeyEvent e) {
             }
 
@@ -92,15 +93,15 @@ public class MLEGSSDialog {
             }
 
             public void keyReleased(KeyEvent e) {
-                //options.pathSteps = pathStepsField.getValue();
+                options.pathSteps = pathStepsField.getValue();
             }
-        });*/
+        });
 
         chainLengthField.setValue(1000000);
         chainLengthField.setColumns(16);
         chainLengthField.setMinimumSize(chainLengthField.getPreferredSize());
         labelChainLength = optionsPanel.addComponentWithLabel("Length of chains:", chainLengthField);
-        /*chainLengthField.addKeyListener(new java.awt.event.KeyListener() {
+        chainLengthField.addKeyListener(new java.awt.event.KeyListener() {
             public void keyTyped(KeyEvent e) {
             }
 
@@ -108,9 +109,9 @@ public class MLEGSSDialog {
             }
 
             public void keyReleased(KeyEvent e) {
-                //options.mleChainLength = chainLengthField.getValue();
+                options.mleChainLength = chainLengthField.getValue();
             }
-        });*/
+        });
 
         optionsPanel.addSeparator();
 
@@ -118,7 +119,7 @@ public class MLEGSSDialog {
         logEveryField.setColumns(16);
         logEveryField.setMinimumSize(logEveryField.getPreferredSize());
         labelLogEvery = optionsPanel.addComponentWithLabel("Log likelihood every:", logEveryField);
-        /*logEveryField.addKeyListener(new java.awt.event.KeyListener() {
+        logEveryField.addKeyListener(new java.awt.event.KeyListener() {
             public void keyTyped(KeyEvent e) {
             }
 
@@ -126,9 +127,9 @@ public class MLEGSSDialog {
             }
 
             public void keyReleased(KeyEvent e) {
-                //options.mleLogEvery = logEveryField.getValue();
+                options.mleLogEvery = logEveryField.getValue();
             }
-        });*/
+        });
 
         optionsPanel.addSeparator();
 
@@ -136,7 +137,7 @@ public class MLEGSSDialog {
         logFileNameField.setEditable(false);
         logFileNameField.setMinimumSize(logFileNameField.getPreferredSize());
         labelLogFileName = optionsPanel.addComponentWithLabel("Log file name:", logFileNameField);
-        /*logFileNameField.addKeyListener(new java.awt.event.KeyListener() {
+        logFileNameField.addKeyListener(new java.awt.event.KeyListener() {
             public void keyTyped(KeyEvent e) {
             }
 
@@ -146,7 +147,7 @@ public class MLEGSSDialog {
             public void keyReleased(KeyEvent e) {
                 //options.mleFileName = logFileNameField.getText();
             }
-        });*/
+        });
 
         optionsPanel.addSeparator();
 
@@ -162,6 +163,11 @@ public class MLEGSSDialog {
         treeWorkingPrior.addActionListener(new java.awt.event.ActionListener() {
             public void actionPerformed(ActionEvent e) {
                 String selection = (String)((JComboBox)e.getSource()).getSelectedItem();
+                if (selection.equals("Matching coalescent model")) {
+                    beautiOptions.logCoalescentEventsStatistic = false;
+                } else {
+                    beautiOptions.logCoalescentEventsStatistic = true;
+                }
                 TreePriorType treePrior = beautiOptions.getPartitionTreePriors().get(0).getNodeHeightPrior();
                 boolean mcmAllowed = false;
                 if (treePrior.equals(TreePriorType.CONSTANT) || treePrior.equals(TreePriorType.EXPONENTIAL)
@@ -213,9 +219,8 @@ public class MLEGSSDialog {
         PanelUtils.setupComponent(mleTutorial);
         optionsPanel.addSpanningComponent(mleTutorial);
 
-        JTextArea citationText = new JTextArea("Baele G, Lemey P, Suchard MA (2015) Working priors for " +
-                "accurate model \nselection while accommodating phylogenetic uncertainty in a \ncoalescent-based " +
-                "framework [GSS Paper].");
+        JTextArea citationText = new JTextArea("Baele G, Lemey P, Suchard MA (2015) Genealogical working " +
+                "distributions for Bayesian \nmodel testing with phylogenetic uncertainty [GSS Paper].");
         citationText.setColumns(45);
         optionsPanel.addComponentWithLabel("Citation:", citationText);
 
diff --git a/src/dr/app/beauti/components/marginalLikelihoodEstimation/MarginalLikelihoodEstimationGenerator.java b/src/dr/app/beauti/components/marginalLikelihoodEstimation/MarginalLikelihoodEstimationGenerator.java
index 55cb072..af786ee 100644
--- a/src/dr/app/beauti/components/marginalLikelihoodEstimation/MarginalLikelihoodEstimationGenerator.java
+++ b/src/dr/app/beauti/components/marginalLikelihoodEstimation/MarginalLikelihoodEstimationGenerator.java
@@ -25,7 +25,11 @@
 
 package dr.app.beauti.components.marginalLikelihoodEstimation;
 
+import dr.app.beauti.BeautiFrame;
 import dr.app.beauti.generator.BaseComponentGenerator;
+import dr.app.beauti.generator.ComponentGenerator;
+import dr.app.beauti.generator.Generator;
+import dr.app.beauti.generator.TreePriorGenerator;
 import dr.app.beauti.options.*;
 import dr.app.beauti.types.*;
 import dr.app.beauti.util.XMLWriter;
@@ -46,6 +50,7 @@ import dr.util.Attribute;
 import dr.xml.XMLParser;
 
 import java.util.ArrayList;
+import java.util.EnumSet;
 import java.util.List;
 
 /**
@@ -55,7 +60,7 @@ import java.util.List;
  */
 public class MarginalLikelihoodEstimationGenerator extends BaseComponentGenerator {
 
-    public static final boolean DEBUG = true;
+    public static final boolean DEBUG = false;
 
     private BeautiOptions beautiOptions = null;
 
@@ -64,6 +69,44 @@ public class MarginalLikelihoodEstimationGenerator extends BaseComponentGenerato
         this.beautiOptions = options;
     }
 
+    @Override
+    public void checkOptions() throws GeneratorException {
+        MarginalLikelihoodEstimationOptions mleOptions = (MarginalLikelihoodEstimationOptions)options.getComponentOptions(MarginalLikelihoodEstimationOptions.class);
+
+        //++++++++++++++++ Improper priors ++++++++++++++++++
+        if (mleOptions.performMLE) {
+            for (Parameter param : options.selectParameters()) {
+                if (param.isPriorImproper() || (param.priorType == PriorType.ONE_OVER_X_PRIOR && !param.getBaseName().contains("popSize"))) {
+                    throw new GeneratorException("Parameter \"" + param.getName() + "\":" +
+                            "\nhas an improper prior and will not sample correctly when estimating " +
+                            "the marginal likelihood. " +
+                            "\nPlease check the Prior panel.", BeautiFrame.PRIORS);
+                }
+            }
+        }
+
+        //++++++++++++++++ Coalescent Events available for GSS ++++++++++++++++++
+        if (mleOptions.performMLEGSS) {
+            EnumSet<TreePriorType> allowedTypes = EnumSet.of(
+                    TreePriorType.CONSTANT, TreePriorType.EXPONENTIAL, TreePriorType.LOGISTIC, TreePriorType.EXPANSION, TreePriorType.SKYGRID, TreePriorType.GMRF_SKYRIDE
+            );
+            EnumSet<TreePriorType> allowedMCMTypes = EnumSet.of(TreePriorType.CONSTANT, TreePriorType.EXPONENTIAL, TreePriorType.LOGISTIC, TreePriorType.EXPANSION);
+            for (PartitionTreeModel model : options.getPartitionTreeModels()) {
+                PartitionTreePrior prior = model.getPartitionTreePrior();
+                if (!allowedTypes.contains(prior.getNodeHeightPrior())) {
+                    throw new GeneratorException("Generalized stepping stone sampling can only be performed\n" +
+                            "on standard parameteric coalescent tree priors and the Skyride and Skygrid models. " +
+                            "\nPlease check the Trees panel.", BeautiFrame.TREES);
+                }
+                if (mleOptions.choiceTreeWorkingPrior.equals("Matching coalescent model") && !allowedMCMTypes.contains(prior.getNodeHeightPrior())) {
+                    throw new GeneratorException("A Matching Coalescent Model cannot be constructed for\n" +
+                            "the Skyride and Skygrid models. Please check the Marginal Likelihood\n" +
+                            "Estimation settings via the MCMC panel.");
+                }
+            }
+        }
+    }
+
     public boolean usesInsertionPoint(final InsertionPoint point) {
         MarginalLikelihoodEstimationOptions component = (MarginalLikelihoodEstimationOptions) options.getComponentOptions(MarginalLikelihoodEstimationOptions.class);
 
@@ -74,6 +117,8 @@ public class MarginalLikelihoodEstimationGenerator extends BaseComponentGenerato
         switch (point) {
             case AFTER_MCMC:
                 return true;
+            case IN_FILE_LOG_PARAMETERS:
+                return options.logCoalescentEventsStatistic;
         }
         return false;
     }
@@ -90,6 +135,11 @@ public class MarginalLikelihoodEstimationGenerator extends BaseComponentGenerato
             case AFTER_MCMC:
                 writeMLE(writer, component);
                 break;
+            case IN_FILE_LOG_PARAMETERS:
+                if (options.logCoalescentEventsStatistic) {
+                    writeCoalescentEventsStatistic(writer);
+                }
+                break;
             default:
                 throw new IllegalArgumentException("This insertion point is not implemented for " + this.getClass().getName());
         }
@@ -179,12 +229,12 @@ public class MarginalLikelihoodEstimationGenerator extends BaseComponentGenerato
                 List<Attribute> attributes = new ArrayList<Attribute>();
                 attributes.add(new Attribute.Default<String>(XMLParser.ID, "exponentials"));
                 attributes.add(new Attribute.Default<String>("fileName", beautiOptions.logFileName));
-                attributes.add(new Attribute.Default<String>("burnin", "" + beautiOptions.chainLength*0.10));
+                attributes.add(new Attribute.Default<String>("burnin", "" + (int)(beautiOptions.chainLength*0.10)));
                 attributes.add(new Attribute.Default<String>("parameterColumn", "coalescentEventsStatistic"));
                 attributes.add(new Attribute.Default<String>("dimension", "" + (beautiOptions.taxonList.getTaxonCount()-1)));
 
                 writer.writeOpenTag(TreeWorkingPriorParsers.PRODUCT_OF_EXPONENTIALS_POSTERIOR_MEANS_LOESS, attributes);
-                writer.writeTag(TreeModel.TREE_MODEL, new Attribute.Default<String>(XMLParser.ID, TreeModel.TREE_MODEL), true);
+                writer.writeIDref(TreeModel.TREE_MODEL, TreeModel.TREE_MODEL);
                 writer.writeCloseTag(TreeWorkingPriorParsers.PRODUCT_OF_EXPONENTIALS_POSTERIOR_MEANS_LOESS);
 
             } else {
@@ -403,7 +453,7 @@ public class MarginalLikelihoodEstimationGenerator extends BaseComponentGenerato
                                                 new Attribute[]{
                                                         new Attribute.Default<String>("fileName", beautiOptions.logFileName),
                                                         new Attribute.Default<String>("parameterColumn", model.getPrefix(i) + "kappa"),
-                                                        new Attribute.Default<String>("burnin", "" + beautiOptions.chainLength*0.10)
+                                                        new Attribute.Default<String>("burnin", "" + (int)(beautiOptions.chainLength*0.10))
                                                 });
                                         writer.writeIDref(ParameterParser.PARAMETER, model.getPrefix(i) + "kappa");
                                         writer.writeCloseTag(WorkingPriorParsers.LOG_TRANSFORMED_NORMAL_REFERENCE_PRIOR);
@@ -413,7 +463,7 @@ public class MarginalLikelihoodEstimationGenerator extends BaseComponentGenerato
                                             new Attribute[]{
                                                     new Attribute.Default<String>("fileName", beautiOptions.logFileName),
                                                     new Attribute.Default<String>("parameterColumn", model.getPrefix() + "kappa"),
-                                                    new Attribute.Default<String>("burnin", "" + beautiOptions.chainLength*0.10)
+                                                    new Attribute.Default<String>("burnin", "" + (int)(beautiOptions.chainLength*0.10))
                                             });
                                     writer.writeIDref(ParameterParser.PARAMETER, model.getPrefix() + "kappa");
                                     writer.writeCloseTag(WorkingPriorParsers.LOG_TRANSFORMED_NORMAL_REFERENCE_PRIOR);
@@ -427,7 +477,7 @@ public class MarginalLikelihoodEstimationGenerator extends BaseComponentGenerato
                                                 new Attribute[]{
                                                         new Attribute.Default<String>("fileName", beautiOptions.logFileName),
                                                         new Attribute.Default<String>("parameterColumn", model.getPrefix(i) + "kappa1"),
-                                                        new Attribute.Default<String>("burnin", "" + beautiOptions.chainLength*0.10)
+                                                        new Attribute.Default<String>("burnin", "" + (int)(beautiOptions.chainLength*0.10))
                                                 });
                                         writer.writeIDref(ParameterParser.PARAMETER, model.getPrefix(i) + "kappa1");
                                         writer.writeCloseTag(WorkingPriorParsers.LOG_TRANSFORMED_NORMAL_REFERENCE_PRIOR);
@@ -435,7 +485,7 @@ public class MarginalLikelihoodEstimationGenerator extends BaseComponentGenerato
                                                 new Attribute[]{
                                                         new Attribute.Default<String>("fileName", beautiOptions.logFileName),
                                                         new Attribute.Default<String>("parameterColumn", model.getPrefix(i) + "kappa2"),
-                                                        new Attribute.Default<String>("burnin", "" + beautiOptions.chainLength*0.10)
+                                                        new Attribute.Default<String>("burnin", "" + (int)(beautiOptions.chainLength*0.10))
                                                 });
                                         writer.writeIDref(ParameterParser.PARAMETER, model.getPrefix(i) + "kappa2");
                                         writer.writeCloseTag(WorkingPriorParsers.LOG_TRANSFORMED_NORMAL_REFERENCE_PRIOR);
@@ -445,7 +495,7 @@ public class MarginalLikelihoodEstimationGenerator extends BaseComponentGenerato
                                             new Attribute[]{
                                                     new Attribute.Default<String>("fileName", beautiOptions.logFileName),
                                                     new Attribute.Default<String>("parameterColumn", model.getPrefix() + "kappa1"),
-                                                    new Attribute.Default<String>("burnin", "" + beautiOptions.chainLength*0.10)
+                                                    new Attribute.Default<String>("burnin", "" + (int)(beautiOptions.chainLength*0.10))
                                             });
                                     writer.writeIDref(ParameterParser.PARAMETER, model.getPrefix() + "kappa1");
                                     writer.writeCloseTag(WorkingPriorParsers.LOG_TRANSFORMED_NORMAL_REFERENCE_PRIOR);
@@ -453,7 +503,7 @@ public class MarginalLikelihoodEstimationGenerator extends BaseComponentGenerato
                                             new Attribute[]{
                                                     new Attribute.Default<String>("fileName", beautiOptions.logFileName),
                                                     new Attribute.Default<String>("parameterColumn", model.getPrefix() + "kappa2"),
-                                                    new Attribute.Default<String>("burnin", "" + beautiOptions.chainLength*0.10)
+                                                    new Attribute.Default<String>("burnin", "" + (int)(beautiOptions.chainLength*0.10))
                                             });
                                     writer.writeIDref(ParameterParser.PARAMETER, model.getPrefix() + "kappa2");
                                     writer.writeCloseTag(WorkingPriorParsers.LOG_TRANSFORMED_NORMAL_REFERENCE_PRIOR);
@@ -468,7 +518,7 @@ public class MarginalLikelihoodEstimationGenerator extends BaseComponentGenerato
                                                     new Attribute[]{
                                                             new Attribute.Default<String>("fileName", beautiOptions.logFileName),
                                                             new Attribute.Default<String>("parameterColumn", model.getPrefix(i) + rateName),
-                                                            new Attribute.Default<String>("burnin", "" + beautiOptions.chainLength * 0.10)
+                                                            new Attribute.Default<String>("burnin", "" + (int)(beautiOptions.chainLength * 0.10))
                                                     });
                                             writer.writeIDref(ParameterParser.PARAMETER, model.getPrefix(i) + rateName);
                                             writer.writeCloseTag(WorkingPriorParsers.LOG_TRANSFORMED_NORMAL_REFERENCE_PRIOR);
@@ -480,7 +530,7 @@ public class MarginalLikelihoodEstimationGenerator extends BaseComponentGenerato
                                                 new Attribute[]{
                                                         new Attribute.Default<String>("fileName", beautiOptions.logFileName),
                                                         new Attribute.Default<String>("parameterColumn", model.getPrefix() + rateName),
-                                                        new Attribute.Default<String>("burnin", "" + beautiOptions.chainLength * 0.10)
+                                                        new Attribute.Default<String>("burnin", "" + (int)(beautiOptions.chainLength * 0.10))
                                                 });
                                         writer.writeIDref(ParameterParser.PARAMETER, model.getPrefix() + rateName);
                                         writer.writeCloseTag(WorkingPriorParsers.LOG_TRANSFORMED_NORMAL_REFERENCE_PRIOR);
@@ -498,7 +548,7 @@ public class MarginalLikelihoodEstimationGenerator extends BaseComponentGenerato
                                                     new Attribute.Default<String>("fileName", beautiOptions.logFileName),
                                                     new Attribute.Default<String>("parameterColumn", model.getPrefix(i) + "frequencies"),
                                                     new Attribute.Default<Integer>("dimension", 4),
-                                                    new Attribute.Default<String>("burnin", "" + beautiOptions.chainLength*0.10)
+                                                    new Attribute.Default<String>("burnin", "" + (int)(beautiOptions.chainLength*0.10))
                                             });
                                     writer.writeIDref(ParameterParser.PARAMETER, model.getPrefix(i) + "frequencies");
                                     writer.writeCloseTag(WorkingPriorParsers.LOGIT_TRANSFORMED_NORMAL_REFERENCE_PRIOR);
@@ -509,7 +559,7 @@ public class MarginalLikelihoodEstimationGenerator extends BaseComponentGenerato
                                                 new Attribute.Default<String>("fileName", beautiOptions.logFileName),
                                                 new Attribute.Default<String>("parameterColumn", model.getPrefix() + "frequencies"),
                                                 new Attribute.Default<Integer>("dimension", 4),
-                                                new Attribute.Default<String>("burnin", "" + beautiOptions.chainLength*0.10)
+                                                new Attribute.Default<String>("burnin", "" + (int)(beautiOptions.chainLength*0.10))
                                         });
                                 writer.writeIDref(ParameterParser.PARAMETER, model.getPrefix() + "frequencies");
                                 writer.writeCloseTag(WorkingPriorParsers.LOGIT_TRANSFORMED_NORMAL_REFERENCE_PRIOR);
@@ -529,7 +579,7 @@ public class MarginalLikelihoodEstimationGenerator extends BaseComponentGenerato
                                     new Attribute[]{
                                             new Attribute.Default<String>("fileName", beautiOptions.logFileName),
                                             new Attribute.Default<String>("parameterColumn", model.getPrefix(i) + "alpha"),
-                                            new Attribute.Default<String>("burnin", "" + beautiOptions.chainLength*0.10)
+                                            new Attribute.Default<String>("burnin", "" + (int)(beautiOptions.chainLength*0.10))
                                     });
                             writer.writeIDref(ParameterParser.PARAMETER, model.getPrefix(i) + "alpha");
                             writer.writeCloseTag(WorkingPriorParsers.LOG_TRANSFORMED_NORMAL_REFERENCE_PRIOR);
@@ -539,7 +589,7 @@ public class MarginalLikelihoodEstimationGenerator extends BaseComponentGenerato
                                 new Attribute[]{
                                         new Attribute.Default<String>("fileName", beautiOptions.logFileName),
                                         new Attribute.Default<String>("parameterColumn", model.getPrefix() + "alpha"),
-                                        new Attribute.Default<String>("burnin", "" + beautiOptions.chainLength*0.10)
+                                        new Attribute.Default<String>("burnin", "" + (int)(beautiOptions.chainLength*0.10))
                                 });
                         writer.writeIDref(ParameterParser.PARAMETER, model.getPrefix() + "alpha");
                         writer.writeCloseTag(WorkingPriorParsers.LOG_TRANSFORMED_NORMAL_REFERENCE_PRIOR);
@@ -553,7 +603,7 @@ public class MarginalLikelihoodEstimationGenerator extends BaseComponentGenerato
                                     new Attribute[]{
                                             new Attribute.Default<String>("fileName", beautiOptions.logFileName),
                                             new Attribute.Default<String>("parameterColumn", model.getPrefix(i) + "pInv"),
-                                            new Attribute.Default<String>("burnin", "" + beautiOptions.chainLength*0.10)
+                                            new Attribute.Default<String>("burnin", "" + (int)(beautiOptions.chainLength*0.10))
                                     });
                             writer.writeIDref(ParameterParser.PARAMETER, model.getPrefix(i) + "pInv");
                             writer.writeCloseTag(WorkingPriorParsers.LOGIT_TRANSFORMED_NORMAL_REFERENCE_PRIOR);
@@ -563,7 +613,7 @@ public class MarginalLikelihoodEstimationGenerator extends BaseComponentGenerato
                                 new Attribute[]{
                                         new Attribute.Default<String>("fileName", beautiOptions.logFileName),
                                         new Attribute.Default<String>("parameterColumn", model.getPrefix() + "pInv"),
-                                        new Attribute.Default<String>("burnin", "" + beautiOptions.chainLength*0.10)
+                                        new Attribute.Default<String>("burnin", "" + (int)(beautiOptions.chainLength*0.10))
                                 });
                         writer.writeIDref(ParameterParser.PARAMETER, model.getPrefix() + "pInv");
                         writer.writeCloseTag(WorkingPriorParsers.LOGIT_TRANSFORMED_NORMAL_REFERENCE_PRIOR);
@@ -579,7 +629,7 @@ public class MarginalLikelihoodEstimationGenerator extends BaseComponentGenerato
                                 new Attribute[]{
                                         new Attribute.Default<String>("fileName", beautiOptions.logFileName),
                                         new Attribute.Default<String>("parameterColumn", model.getPrefix() + "clock.rate"),
-                                        new Attribute.Default<String>("burnin", "" + beautiOptions.chainLength*0.10)
+                                        new Attribute.Default<String>("burnin", "" + (int)(beautiOptions.chainLength*0.10))
                                 });
                         writer.writeIDref(ParameterParser.PARAMETER, model.getPrefix() + "clock.rate");
                         writer.writeCloseTag(WorkingPriorParsers.LOG_TRANSFORMED_NORMAL_REFERENCE_PRIOR);
@@ -593,7 +643,7 @@ public class MarginalLikelihoodEstimationGenerator extends BaseComponentGenerato
                                             new Attribute[]{
                                                     new Attribute.Default<String>("fileName", beautiOptions.logFileName),
                                                     new Attribute.Default<String>("parameterColumn", model.getPrefix() + ClockType.UCLD_MEAN),
-                                                    new Attribute.Default<String>("burnin", "" + beautiOptions.chainLength*0.10)
+                                                    new Attribute.Default<String>("burnin", "" + (int)(beautiOptions.chainLength*0.10))
                                             });
                                     writer.writeIDref(ParameterParser.PARAMETER, model.getPrefix() + ClockType.UCLD_MEAN);
                                     writer.writeCloseTag(WorkingPriorParsers.LOG_TRANSFORMED_NORMAL_REFERENCE_PRIOR);
@@ -601,7 +651,7 @@ public class MarginalLikelihoodEstimationGenerator extends BaseComponentGenerato
                                             new Attribute[]{
                                                     new Attribute.Default<String>("fileName", beautiOptions.logFileName),
                                                     new Attribute.Default<String>("parameterColumn", model.getPrefix() + ClockType.UCLD_STDEV),
-                                                    new Attribute.Default<String>("burnin", "" + beautiOptions.chainLength*0.10)
+                                                    new Attribute.Default<String>("burnin", "" + (int)(beautiOptions.chainLength*0.10))
                                             });
                                     writer.writeIDref(ParameterParser.PARAMETER, model.getPrefix() + ClockType.UCLD_STDEV);
                                     writer.writeCloseTag(WorkingPriorParsers.LOG_TRANSFORMED_NORMAL_REFERENCE_PRIOR);
@@ -610,7 +660,7 @@ public class MarginalLikelihoodEstimationGenerator extends BaseComponentGenerato
                                             new Attribute[]{
                                                     new Attribute.Default<String>("fileName", beautiOptions.logFileName),
                                                     new Attribute.Default<String>("parameterColumn", model.getPrefix() + ClockType.UCLD_MEAN),
-                                                    new Attribute.Default<String>("burnin", "" + beautiOptions.chainLength*0.10)
+                                                    new Attribute.Default<String>("burnin", "" + (int)(beautiOptions.chainLength*0.10))
                                             });
                                     writer.writeIDref(ParameterParser.PARAMETER, model.getPrefix() + ClockType.UCLD_MEAN);
                                     writer.writeCloseTag(WorkingPriorParsers.NORMAL_REFERENCE_PRIOR);
@@ -618,7 +668,7 @@ public class MarginalLikelihoodEstimationGenerator extends BaseComponentGenerato
                                             new Attribute[]{
                                                     new Attribute.Default<String>("fileName", beautiOptions.logFileName),
                                                     new Attribute.Default<String>("parameterColumn", model.getPrefix() + ClockType.UCLD_STDEV),
-                                                    new Attribute.Default<String>("burnin", "" + beautiOptions.chainLength*0.10)
+                                                    new Attribute.Default<String>("burnin", "" + (int)(beautiOptions.chainLength*0.10))
                                             });
                                     writer.writeIDref(ParameterParser.PARAMETER, model.getPrefix() + ClockType.UCLD_STDEV);
                                     writer.writeCloseTag(WorkingPriorParsers.LOG_TRANSFORMED_NORMAL_REFERENCE_PRIOR);
@@ -630,7 +680,7 @@ public class MarginalLikelihoodEstimationGenerator extends BaseComponentGenerato
                                         new Attribute[]{
                                                 new Attribute.Default<String>("fileName", beautiOptions.logFileName),
                                                 new Attribute.Default<String>("parameterColumn", model.getPrefix() + ClockType.UCED_MEAN),
-                                                new Attribute.Default<String>("burnin", "" + beautiOptions.chainLength*0.10)
+                                                new Attribute.Default<String>("burnin", "" + (int)(beautiOptions.chainLength*0.10))
                                         });
                                 writer.writeIDref(ParameterParser.PARAMETER, model.getPrefix() + ClockType.UCED_MEAN);
                                 writer.writeCloseTag(WorkingPriorParsers.LOG_TRANSFORMED_NORMAL_REFERENCE_PRIOR);
@@ -648,13 +698,17 @@ public class MarginalLikelihoodEstimationGenerator extends BaseComponentGenerato
                 TreePriorType nodeHeightPrior = model.getNodeHeightPrior();
                 TreePriorParameterizationType parameterization = model.getParameterization();
 
+                if (DEBUG) {
+                    System.err.println("nodeHeightPrior: " + nodeHeightPrior);
+                }
+
                 switch (nodeHeightPrior) {
                     case CONSTANT:
                         writer.writeOpenTag(WorkingPriorParsers.LOG_TRANSFORMED_NORMAL_REFERENCE_PRIOR,
                                 new Attribute[]{
                                         new Attribute.Default<String>("fileName", beautiOptions.logFileName),
                                         new Attribute.Default<String>("parameterColumn", "constant.popSize"),
-                                        new Attribute.Default<String>("burnin", "" + beautiOptions.chainLength*0.10)
+                                        new Attribute.Default<String>("burnin", "" + (int)(beautiOptions.chainLength*0.10))
                                 });
                         writer.writeIDref(ParameterParser.PARAMETER, "constant.popSize");
                         writer.writeCloseTag(WorkingPriorParsers.LOG_TRANSFORMED_NORMAL_REFERENCE_PRIOR);
@@ -665,7 +719,7 @@ public class MarginalLikelihoodEstimationGenerator extends BaseComponentGenerato
                                 new Attribute[]{
                                         new Attribute.Default<String>("fileName", beautiOptions.logFileName),
                                         new Attribute.Default<String>("parameterColumn", "exponential.popSize"),
-                                        new Attribute.Default<String>("burnin", "" + beautiOptions.chainLength*0.10)
+                                        new Attribute.Default<String>("burnin", "" + (int)(beautiOptions.chainLength*0.10))
                                 });
                         writer.writeIDref(ParameterParser.PARAMETER, "exponential.popSize");
                         writer.writeCloseTag(WorkingPriorParsers.LOG_TRANSFORMED_NORMAL_REFERENCE_PRIOR);
@@ -675,7 +729,7 @@ public class MarginalLikelihoodEstimationGenerator extends BaseComponentGenerato
                                     new Attribute[]{
                                             new Attribute.Default<String>("fileName", beautiOptions.logFileName),
                                             new Attribute.Default<String>("parameterColumn", "exponential.growthRate"),
-                                            new Attribute.Default<String>("burnin", "" + beautiOptions.chainLength * 0.10)
+                                            new Attribute.Default<String>("burnin", "" + (int)(beautiOptions.chainLength * 0.10))
                                     });
                             writer.writeIDref(ParameterParser.PARAMETER, "exponential.growthRate");
                             writer.writeCloseTag(WorkingPriorParsers.NORMAL_REFERENCE_PRIOR);
@@ -684,7 +738,7 @@ public class MarginalLikelihoodEstimationGenerator extends BaseComponentGenerato
                                     new Attribute[]{
                                             new Attribute.Default<String>("fileName", beautiOptions.logFileName),
                                             new Attribute.Default<String>("parameterColumn", "exponential.doublingTime"),
-                                            new Attribute.Default<String>("burnin", "" + beautiOptions.chainLength * 0.10)
+                                            new Attribute.Default<String>("burnin", "" + (int)(beautiOptions.chainLength * 0.10))
                                     });
                             writer.writeIDref(ParameterParser.PARAMETER, "exponential.doublingTime");
                             writer.writeCloseTag(WorkingPriorParsers.LOG_TRANSFORMED_NORMAL_REFERENCE_PRIOR);
@@ -697,7 +751,7 @@ public class MarginalLikelihoodEstimationGenerator extends BaseComponentGenerato
                                 new Attribute[]{
                                         new Attribute.Default<String>("fileName", beautiOptions.logFileName),
                                         new Attribute.Default<String>("parameterColumn", "logistic.popSize"),
-                                        new Attribute.Default<String>("burnin", "" + beautiOptions.chainLength*0.10)
+                                        new Attribute.Default<String>("burnin", "" + (int)(beautiOptions.chainLength*0.10))
                                 });
                         writer.writeIDref(ParameterParser.PARAMETER, "logistic.popSize");
                         writer.writeCloseTag(WorkingPriorParsers.LOG_TRANSFORMED_NORMAL_REFERENCE_PRIOR);
@@ -707,7 +761,7 @@ public class MarginalLikelihoodEstimationGenerator extends BaseComponentGenerato
                                     new Attribute[]{
                                             new Attribute.Default<String>("fileName", beautiOptions.logFileName),
                                             new Attribute.Default<String>("parameterColumn", "logistic.growthRate"),
-                                            new Attribute.Default<String>("burnin", "" + beautiOptions.chainLength * 0.10)
+                                            new Attribute.Default<String>("burnin", "" + (int)(beautiOptions.chainLength * 0.10))
                                     });
                             writer.writeIDref(ParameterParser.PARAMETER, "logistic.growthRate");
                             writer.writeCloseTag(WorkingPriorParsers.NORMAL_REFERENCE_PRIOR);
@@ -716,7 +770,7 @@ public class MarginalLikelihoodEstimationGenerator extends BaseComponentGenerato
                                     new Attribute[]{
                                             new Attribute.Default<String>("fileName", beautiOptions.logFileName),
                                             new Attribute.Default<String>("parameterColumn", "logistic.doublingTime"),
-                                            new Attribute.Default<String>("burnin", "" + beautiOptions.chainLength * 0.10)
+                                            new Attribute.Default<String>("burnin", "" + (int)(beautiOptions.chainLength * 0.10))
                                     });
                             writer.writeIDref(ParameterParser.PARAMETER, "logistic.doublingTime");
                             writer.writeCloseTag(WorkingPriorParsers.LOG_TRANSFORMED_NORMAL_REFERENCE_PRIOR);
@@ -726,7 +780,7 @@ public class MarginalLikelihoodEstimationGenerator extends BaseComponentGenerato
                                 new Attribute[]{
                                         new Attribute.Default<String>("fileName", beautiOptions.logFileName),
                                         new Attribute.Default<String>("parameterColumn", "logistic.t50"),
-                                        new Attribute.Default<String>("burnin", "" + beautiOptions.chainLength*0.10)
+                                        new Attribute.Default<String>("burnin", "" + (int)(beautiOptions.chainLength*0.10))
                                 });
                         writer.writeIDref(ParameterParser.PARAMETER, "logistic.t50");
                         writer.writeCloseTag(WorkingPriorParsers.LOG_TRANSFORMED_NORMAL_REFERENCE_PRIOR);
@@ -738,7 +792,7 @@ public class MarginalLikelihoodEstimationGenerator extends BaseComponentGenerato
                                 new Attribute[]{
                                         new Attribute.Default<String>("fileName", beautiOptions.logFileName),
                                         new Attribute.Default<String>("parameterColumn", "expansion.popSize"),
-                                        new Attribute.Default<String>("burnin", "" + beautiOptions.chainLength*0.10)
+                                        new Attribute.Default<String>("burnin", "" + (int)(beautiOptions.chainLength*0.10))
                                 });
                         writer.writeIDref(ParameterParser.PARAMETER, "expansion.popSize");
                         writer.writeCloseTag(WorkingPriorParsers.LOG_TRANSFORMED_NORMAL_REFERENCE_PRIOR);
@@ -748,7 +802,7 @@ public class MarginalLikelihoodEstimationGenerator extends BaseComponentGenerato
                                     new Attribute[]{
                                             new Attribute.Default<String>("fileName", beautiOptions.logFileName),
                                             new Attribute.Default<String>("parameterColumn", "expansion.growthRate"),
-                                            new Attribute.Default<String>("burnin", "" + beautiOptions.chainLength * 0.10)
+                                            new Attribute.Default<String>("burnin", "" + (int)(beautiOptions.chainLength * 0.10))
                                     });
                             writer.writeIDref(ParameterParser.PARAMETER, "expansion.growthRate");
                             writer.writeCloseTag(WorkingPriorParsers.NORMAL_REFERENCE_PRIOR);
@@ -757,7 +811,7 @@ public class MarginalLikelihoodEstimationGenerator extends BaseComponentGenerato
                                     new Attribute[]{
                                             new Attribute.Default<String>("fileName", beautiOptions.logFileName),
                                             new Attribute.Default<String>("parameterColumn", "expansion.doublingTime"),
-                                            new Attribute.Default<String>("burnin", "" + beautiOptions.chainLength * 0.10)
+                                            new Attribute.Default<String>("burnin", "" + (int)(beautiOptions.chainLength * 0.10))
                                     });
                             writer.writeIDref(ParameterParser.PARAMETER, "expansion.doublingTime");
                             writer.writeCloseTag(WorkingPriorParsers.LOG_TRANSFORMED_NORMAL_REFERENCE_PRIOR);
@@ -767,19 +821,64 @@ public class MarginalLikelihoodEstimationGenerator extends BaseComponentGenerato
                                 new Attribute[]{
                                         new Attribute.Default<String>("fileName", beautiOptions.logFileName),
                                         new Attribute.Default<String>("parameterColumn", "expansion.ancestralProportion"),
-                                        new Attribute.Default<String>("burnin", "" + beautiOptions.chainLength*0.10)
+                                        new Attribute.Default<String>("burnin", "" + (int)(beautiOptions.chainLength*0.10))
                                 });
                         writer.writeIDref(ParameterParser.PARAMETER, "expansion.ancestralProportion");
                         writer.writeCloseTag(WorkingPriorParsers.LOGIT_TRANSFORMED_NORMAL_REFERENCE_PRIOR);
 
                         break;
+
+                    case GMRF_SKYRIDE:
+
+                        writer.writeOpenTag(WorkingPriorParsers.NORMAL_REFERENCE_PRIOR,
+                                new Attribute[]{
+                                        new Attribute.Default<String>("fileName", beautiOptions.logFileName),
+                                        new Attribute.Default<String>("parameterColumn", "skyride.logPopSize"),
+                                        new Attribute.Default<Integer>("dimension", beautiOptions.taxonList.getTaxonCount() - 1),
+                                        new Attribute.Default<String>("burnin", "" + (int)(beautiOptions.chainLength*0.10))
+                                });
+                        writer.writeIDref(ParameterParser.PARAMETER, "skyride.logPopSize");
+                        writer.writeCloseTag(WorkingPriorParsers.NORMAL_REFERENCE_PRIOR);
+
+                        writer.writeOpenTag(WorkingPriorParsers.LOG_TRANSFORMED_NORMAL_REFERENCE_PRIOR,
+                                new Attribute[]{
+                                        new Attribute.Default<String>("fileName", beautiOptions.logFileName),
+                                        new Attribute.Default<String>("parameterColumn", "skyride.precision"),
+                                        new Attribute.Default<String>("burnin", "" + (int)(beautiOptions.chainLength * 0.10))
+                                });
+                        writer.writeIDref(ParameterParser.PARAMETER, "skyride.precision");
+                        writer.writeCloseTag(WorkingPriorParsers.LOG_TRANSFORMED_NORMAL_REFERENCE_PRIOR);
+
+                        break;
+
+                    case SKYGRID:
+
+                        writer.writeOpenTag(WorkingPriorParsers.NORMAL_REFERENCE_PRIOR,
+                                new Attribute[]{
+                                        new Attribute.Default<String>("fileName", beautiOptions.logFileName),
+                                        new Attribute.Default<String>("parameterColumn", "skygrid.logPopSize"),
+                                        new Attribute.Default<Integer>("dimension", model.getSkyGridCount()),
+                                        new Attribute.Default<String>("burnin", "" + (int)(beautiOptions.chainLength*0.10))
+                                });
+                        writer.writeIDref(ParameterParser.PARAMETER, "skygrid.logPopSize");
+                        writer.writeCloseTag(WorkingPriorParsers.NORMAL_REFERENCE_PRIOR);
+
+                        writer.writeOpenTag(WorkingPriorParsers.LOG_TRANSFORMED_NORMAL_REFERENCE_PRIOR,
+                                new Attribute[]{
+                                        new Attribute.Default<String>("fileName", beautiOptions.logFileName),
+                                        new Attribute.Default<String>("parameterColumn", "skygrid.precision"),
+                                        new Attribute.Default<String>("burnin", "" + (int)(beautiOptions.chainLength * 0.10))
+                                });
+                        writer.writeIDref(ParameterParser.PARAMETER, "skygrid.precision");
+                        writer.writeCloseTag(WorkingPriorParsers.LOG_TRANSFORMED_NORMAL_REFERENCE_PRIOR);
+
+                        break;
+
                 }
             }
 
             //TODO: take care of anything else I missed
 
-
-
             if (options.choiceTreeWorkingPrior.equals("Product of exponential distributions")) {
                 writer.writeIDref("productOfExponentialsPosteriorMeansLoess", "exponentials");
             } else {
@@ -813,6 +912,26 @@ public class MarginalLikelihoodEstimationGenerator extends BaseComponentGenerato
 
     }
 
+    private void writeCoalescentEventsStatistic(XMLWriter writer) {
+        writer.writeOpenTag("coalescentEventsStatistic");
+        // coalescentLikelihood
+        for (PartitionTreeModel model : options.getPartitionTreeModels()) {
+            PartitionTreePrior prior = model.getPartitionTreePrior();
+            TreePriorGenerator.writePriorLikelihoodReferenceLog(prior, model, writer);
+            writer.writeText("");
+        }
+
+            /*for (PartitionTreePrior prior : options.getPartitionTreePriors()) {
+                if (prior.getNodeHeightPrior() == TreePriorType.EXTENDED_SKYLINE) {
+                    writer.writeIDref(CoalescentLikelihoodParser.COALESCENT_LIKELIHOOD, prior.getPrefix() + COALESCENT); // only 1 coalescent
+                } else if (prior.getNodeHeightPrior() == TreePriorType.SKYGRID) {
+                    writer.writeIDref(GMRFSkyrideLikelihoodParser.SKYGRID_LIKELIHOOD, prior.getPrefix() + "skygrid");
+                }
+            }*/
+        writer.writeCloseTag("coalescentEventsStatistic");
+    }
+
+
     private void writeParameterIdref(XMLWriter writer, Parameter parameter) {
         if (parameter.isStatistic) {
             writer.writeIDref("statistic", parameter.getName());
diff --git a/src/dr/app/beauti/generator/BaseComponentGenerator.java b/src/dr/app/beauti/generator/BaseComponentGenerator.java
index 5c1b9cf..567cfbd 100644
--- a/src/dr/app/beauti/generator/BaseComponentGenerator.java
+++ b/src/dr/app/beauti/generator/BaseComponentGenerator.java
@@ -41,6 +41,11 @@ public abstract class BaseComponentGenerator extends Generator implements Compon
         super(options, null);
     }
 
+    @Override
+    public void checkOptions() throws GeneratorException {
+        // default is to do nothing
+    }
+
     public void generateAtInsertionPoint(Generator generator, final InsertionPoint point, final Object item, final XMLWriter writer) {
         callingGenerator = generator;
         writer.writeComment("START " + getCommentLabel());
diff --git a/src/dr/app/beauti/generator/BeastGenerator.java b/src/dr/app/beauti/generator/BeastGenerator.java
index e2d66ea..5ef0066 100644
--- a/src/dr/app/beauti/generator/BeastGenerator.java
+++ b/src/dr/app/beauti/generator/BeastGenerator.java
@@ -156,95 +156,95 @@ public class BeastGenerator extends Generator {
                 }
             }
 
-        //++++++++++++++++ Taxon List ++++++++++++++++++
-        TaxonList taxonList = options.taxonList;
-        Set<String> ids = new HashSet<String>();
+            //++++++++++++++++ Taxon List ++++++++++++++++++
+            TaxonList taxonList = options.taxonList;
+            Set<String> ids = new HashSet<String>();
 
-        ids.add(TaxaParser.TAXA);
-        ids.add(AlignmentParser.ALIGNMENT);
-        ids.add(TraitData.TRAIT_SPECIES);
+            ids.add(TaxaParser.TAXA);
+            ids.add(AlignmentParser.ALIGNMENT);
+            ids.add(TraitData.TRAIT_SPECIES);
 
-        if (taxonList != null) {
-            if (taxonList.getTaxonCount() < 2) {
-                throw new GeneratorException("BEAST requires at least two taxa to run.");
-            }
+            if (taxonList != null) {
+                if (taxonList.getTaxonCount() < 2) {
+                    throw new GeneratorException("BEAST requires at least two taxa to run.");
+                }
 
-            for (int i = 0; i < taxonList.getTaxonCount(); i++) {
-                Taxon taxon = taxonList.getTaxon(i);
-                if (ids.contains(taxon.getId())) {
-                    throw new GeneratorException("A taxon has the same id," + taxon.getId() +  MESSAGE_CAL);
+                for (int i = 0; i < taxonList.getTaxonCount(); i++) {
+                    Taxon taxon = taxonList.getTaxon(i);
+                    if (ids.contains(taxon.getId())) {
+                        throw new GeneratorException("A taxon has the same id," + taxon.getId() +  MESSAGE_CAL);
+                    }
+                    ids.add(taxon.getId());
                 }
-                ids.add(taxon.getId());
             }
-        }
 
-        //++++++++++++++++ Taxon Sets ++++++++++++++++++
-        for (PartitionTreeModel model : options.getPartitionTreeModels()) {
-            // should be only 1 calibrated internal node with a proper prior and monophyletic for each tree at moment
-            if (model.getPartitionTreePrior().getNodeHeightPrior() == TreePriorType.YULE_CALIBRATION) {
-                if (options.treeModelOptions.isNodeCalibrated(model) < 0) // invalid node calibration
-                    throw new GeneratorException(MESSAGE_CAL_YULE);
-
-                if (options.treeModelOptions.isNodeCalibrated(model) > 0) { // internal node calibration
-                    List taxonSetsList = options.getKeysFromValue(options.taxonSetsTreeModel, model);
-                    if (taxonSetsList.size() != 1 || !options.taxonSetsMono.get(taxonSetsList.get(0))) { // 1 tmrca per tree && monophyletic
-                        throw new GeneratorException(MESSAGE_CAL_YULE, BeautiFrame.TAXON_SETS);
+            //++++++++++++++++ Taxon Sets ++++++++++++++++++
+            for (PartitionTreeModel model : options.getPartitionTreeModels()) {
+                // should be only 1 calibrated internal node with a proper prior and monophyletic for each tree at moment
+                if (model.getPartitionTreePrior().getNodeHeightPrior() == TreePriorType.YULE_CALIBRATION) {
+                    if (options.treeModelOptions.isNodeCalibrated(model) < 0) // invalid node calibration
+                        throw new GeneratorException(MESSAGE_CAL_YULE);
+
+                    if (options.treeModelOptions.isNodeCalibrated(model) > 0) { // internal node calibration
+                        List taxonSetsList = options.getKeysFromValue(options.taxonSetsTreeModel, model);
+                        if (taxonSetsList.size() != 1 || !options.taxonSetsMono.get(taxonSetsList.get(0))) { // 1 tmrca per tree && monophyletic
+                            throw new GeneratorException(MESSAGE_CAL_YULE, BeautiFrame.TAXON_SETS);
+                        }
                     }
                 }
             }
-        }
 
-        for (Taxa taxa : options.taxonSets) {
-            // AR - we should allow single taxon taxon sets...
-            if (taxa.getTaxonCount() < 1 // && !options.taxonSetsIncludeStem.get(taxa)
-                    ) {
-                throw new GeneratorException(
-                        "Taxon set, " + taxa.getId() + ", should contain \n" +
-                                "at least one taxa. Please go back to Taxon Sets \n" +
-                                "panel to correct this.", BeautiFrame.TAXON_SETS);
-            }
-            if (ids.contains(taxa.getId())) {
-                throw new GeneratorException("A taxon set has the same id," + taxa.getId() +
-                        MESSAGE_CAL, BeautiFrame.TAXON_SETS);
+            for (Taxa taxa : options.taxonSets) {
+                // AR - we should allow single taxon taxon sets...
+                if (taxa.getTaxonCount() < 1 // && !options.taxonSetsIncludeStem.get(taxa)
+                        ) {
+                    throw new GeneratorException(
+                            "Taxon set, " + taxa.getId() + ", should contain \n" +
+                                    "at least one taxa. Please go back to Taxon Sets \n" +
+                                    "panel to correct this.", BeautiFrame.TAXON_SETS);
+                }
+                if (ids.contains(taxa.getId())) {
+                    throw new GeneratorException("A taxon set has the same id," + taxa.getId() +
+                            MESSAGE_CAL, BeautiFrame.TAXON_SETS);
+                }
+                ids.add(taxa.getId());
             }
-            ids.add(taxa.getId());
-        }
 
-        //++++++++++++++++ *BEAST ++++++++++++++++++
-        if (options.useStarBEAST) {
-            if (!options.traitExists(TraitData.TRAIT_SPECIES))
-                throw new GeneratorException("A trait labelled \"species\" is required for *BEAST species designations." +
-                        "\nPlease create or import the species designations in the Traits table.", BeautiFrame.TRAITS);
+            //++++++++++++++++ *BEAST ++++++++++++++++++
+            if (options.useStarBEAST) {
+                if (!options.traitExists(TraitData.TRAIT_SPECIES))
+                    throw new GeneratorException("A trait labelled \"species\" is required for *BEAST species designations." +
+                            "\nPlease create or import the species designations in the Traits table.", BeautiFrame.TRAITS);
 
-            //++++++++++++++++ Species Sets ++++++++++++++++++
-            // should be only 1 calibrated internal node with monophyletic at moment
-            if (options.getPartitionTreePriors().get(0).getNodeHeightPrior() == TreePriorType.SPECIES_YULE_CALIBRATION) {
-                if (options.speciesSets.size() != 1 || !options.speciesSetsMono.get(options.speciesSets.get(0))) {
-                    throw new GeneratorException(MESSAGE_CAL_YULE, BeautiFrame.TAXON_SETS);
+                //++++++++++++++++ Species Sets ++++++++++++++++++
+                // should be only 1 calibrated internal node with monophyletic at moment
+                if (options.getPartitionTreePriors().get(0).getNodeHeightPrior() == TreePriorType.SPECIES_YULE_CALIBRATION) {
+                    if (options.speciesSets.size() != 1 || !options.speciesSetsMono.get(options.speciesSets.get(0))) {
+                        throw new GeneratorException(MESSAGE_CAL_YULE, BeautiFrame.TAXON_SETS);
+                    }
                 }
-            }
 
-            for (Taxa species : options.speciesSets) {
-                if (species.getTaxonCount() < 2) {
-                    throw new GeneratorException("Species set, " + species.getId() + ",\n should contain" +
-                            "at least two species. \nPlease go back to Species Sets panel to select included species.", BeautiFrame.TAXON_SETS);
-                }
-                if (ids.contains(species.getId())) {
-                    throw new GeneratorException("A species set has the same id," + species.getId() +
-                            MESSAGE_CAL, BeautiFrame.TAXON_SETS);
+                for (Taxa species : options.speciesSets) {
+                    if (species.getTaxonCount() < 2) {
+                        throw new GeneratorException("Species set, " + species.getId() + ",\n should contain" +
+                                "at least two species. \nPlease go back to Species Sets panel to select included species.", BeautiFrame.TAXON_SETS);
+                    }
+                    if (ids.contains(species.getId())) {
+                        throw new GeneratorException("A species set has the same id," + species.getId() +
+                                MESSAGE_CAL, BeautiFrame.TAXON_SETS);
+                    }
+                    ids.add(species.getId());
                 }
-                ids.add(species.getId());
-            }
 
-            int tId = options.starBEASTOptions.getEmptySpeciesIndex();
-            if (tId >= 0) {
-                throw new GeneratorException("The taxon " + options.taxonList.getTaxonId(tId) +
-                        " has NULL value for \"species\" trait", BeautiFrame.TRAITS);
+                int tId = options.starBEASTOptions.getEmptySpeciesIndex();
+                if (tId >= 0) {
+                    throw new GeneratorException("The taxon " + options.taxonList.getTaxonId(tId) +
+                            " has NULL value for \"species\" trait", BeautiFrame.TRAITS);
+                }
             }
-        }
 
-        //++++++++++++++++ Traits ++++++++++++++++++
-        // missing data is not necessarily an issue...
+            //++++++++++++++++ Traits ++++++++++++++++++
+            // missing data is not necessarily an issue...
 //        for (TraitData trait : options.traits) {
 //            for (int i = 0; i < trait.getTaxaCount(); i++) {
 ////                System.out.println("Taxon " + trait.getTaxon(i).getId() + " : [" + trait.getTaxon(i).getAttribute(trait.getName()) + "]");
@@ -254,103 +254,93 @@ public class BeastGenerator extends Generator {
 //            }
 //        }
 
-        //++++++++++++++++ Tree Prior ++++++++++++++++++
+            //++++++++++++++++ Tree Prior ++++++++++++++++++
 //        if (options.isShareSameTreePrior()) {
-        if (options.getPartitionTreeModels().size() > 1) { //TODO not allowed multi-prior yet
-            for (PartitionTreePrior prior : options.getPartitionTreePriors()) {
-                if (prior.getNodeHeightPrior() == TreePriorType.GMRF_SKYRIDE) {
-                    throw new GeneratorException("For the Skyride, tree model/tree prior combination not implemented by BEAST." +
-                            "\nThe Skyride is only available for a single tree model partition in this release.", BeautiFrame.TREES);
-                }
-            }
-        }
-
-        //+++++++++++++++ Starting tree ++++++++++++++++
-        for (PartitionTreeModel model : options.getPartitionTreeModels()) {
-            if (model.getStartingTreeType() == StartingTreeType.USER) {
-                if (model.getUserStartingTree() == null) {
-                    throw new GeneratorException("Please select a starting tree in " + BeautiFrame.TREES + " panel, " +
-                            "\nwhen choosing user specified starting tree option.", BeautiFrame.TREES);
+            if (options.getPartitionTreeModels().size() > 1) { //TODO not allowed multi-prior yet
+                for (PartitionTreePrior prior : options.getPartitionTreePriors()) {
+                    if (prior.getNodeHeightPrior() == TreePriorType.GMRF_SKYRIDE) {
+                        throw new GeneratorException("For the Skyride, tree model/tree prior combination not implemented by BEAST." +
+                                "\nThe Skyride is only available for a single tree model partition in this release.", BeautiFrame.TREES);
+                    }
                 }
             }
-        }
 
-        //++++++++++++++++ Random local clock model validation ++++++++++++++++++
-        for (PartitionClockModel model : options.getPartitionClockModels()) {
-            // 1 random local clock CANNOT have different tree models
-            if (model.getClockType() == ClockType.RANDOM_LOCAL_CLOCK) { // || AUTOCORRELATED_LOGNORMAL
-                PartitionTreeModel treeModel = null;
-                for (AbstractPartitionData pd : options.getDataPartitions(model)) { // only the PDs linked to this tree model
-                    if (treeModel != null && treeModel != pd.getPartitionTreeModel()) {
-                        throw new GeneratorException("A single random local clock cannot be applied to multiple trees.", BeautiFrame.CLOCK_MODELS);
+            //+++++++++++++++ Starting tree ++++++++++++++++
+            for (PartitionTreeModel model : options.getPartitionTreeModels()) {
+                if (model.getStartingTreeType() == StartingTreeType.USER) {
+                    if (model.getUserStartingTree() == null) {
+                        throw new GeneratorException("Please select a starting tree in " + BeautiFrame.TREES + " panel, " +
+                                "\nwhen choosing user specified starting tree option.", BeautiFrame.TREES);
                     }
-                    treeModel = pd.getPartitionTreeModel();
                 }
             }
-        }
 
-        //++++++++++++++++ Tree Model ++++++++++++++++++
-        for (PartitionTreeModel model : options.getPartitionTreeModels()) {
-            int numOfTaxa = -1;
-            for (AbstractPartitionData pd : options.getDataPartitions(model)) {
-                if (pd.getTaxonCount() > 0) {
-                    if (numOfTaxa > 0) {
-                        if (numOfTaxa != pd.getTaxonCount()) {
-                            throw new GeneratorException("Partitions with different taxa cannot share the same tree.", BeautiFrame.DATA_PARTITIONS);
+            //++++++++++++++++ Random local clock model validation ++++++++++++++++++
+            for (PartitionClockModel model : options.getPartitionClockModels()) {
+                // 1 random local clock CANNOT have different tree models
+                if (model.getClockType() == ClockType.RANDOM_LOCAL_CLOCK) { // || AUTOCORRELATED_LOGNORMAL
+                    PartitionTreeModel treeModel = null;
+                    for (AbstractPartitionData pd : options.getDataPartitions(model)) { // only the PDs linked to this tree model
+                        if (treeModel != null && treeModel != pd.getPartitionTreeModel()) {
+                            throw new GeneratorException("A single random local clock cannot be applied to multiple trees.", BeautiFrame.CLOCK_MODELS);
                         }
-                    } else {
-                        numOfTaxa = pd.getTaxonCount();
+                        treeModel = pd.getPartitionTreeModel();
                     }
                 }
             }
-        }
-
-        //++++++++++++++++ Prior Bounds ++++++++++++++++++
-        for (Parameter param : options.selectParameters()) {
-            if (param.initial != Double.NaN) {
-                if (param.isTruncated && (param.initial < param.truncationLower || param.initial > param.truncationUpper)) {
-                    throw new GeneratorException("Parameter \"" + param.getName() + "\":" +
-                            "\ninitial value " + param.initial + " is NOT in the range [" + param.truncationLower + ", " + param.truncationUpper + "]," +
-                            "\nor this range is wrong. Please check the Prior panel.", BeautiFrame.PRIORS);
-                } else if (param.priorType == PriorType.UNIFORM_PRIOR && (param.initial < param.uniformLower || param.initial > param.uniformUpper)) {
-                    throw new GeneratorException("Parameter \"" + param.getName() + "\":" +
-                            "\ninitial value " + param.initial + " is NOT in the range [" + param.uniformLower + ", " + param.uniformUpper + "]," +
-                            "\nor this range is wrong. Please check the Prior panel.", BeautiFrame.PRIORS);
-                }
-                if (param.isNonNegative && param.initial < 0.0) {
-                    throw new GeneratorException("Parameter \"" + param.getName() + "\":" +
-                            "\ninitial value " + param.initial + " should be non-negative. Please check the Prior panel.", BeautiFrame.PRIORS);
-                }
 
-                if (param.isZeroOne && (param.initial < 0.0 || param.initial > 1.0)) {
-                    throw new GeneratorException("Parameter \"" + param.getName() + "\":" +
-                            "\ninitial value " + param.initial + " should lie in the interval [0, 1]. Please check the Prior panel.", BeautiFrame.PRIORS);
+            //++++++++++++++++ Tree Model ++++++++++++++++++
+            for (PartitionTreeModel model : options.getPartitionTreeModels()) {
+                int numOfTaxa = -1;
+                for (AbstractPartitionData pd : options.getDataPartitions(model)) {
+                    if (pd.getTaxonCount() > 0) {
+                        if (numOfTaxa > 0) {
+                            if (numOfTaxa != pd.getTaxonCount()) {
+                                throw new GeneratorException("Partitions with different taxa cannot share the same tree.", BeautiFrame.DATA_PARTITIONS);
+                            }
+                        } else {
+                            numOfTaxa = pd.getTaxonCount();
+                        }
+                    }
                 }
             }
-        }
 
-        //++++++++++++++++ Improper priors ++++++++++++++++++
-        MarginalLikelihoodEstimationOptions mleOptions = (MarginalLikelihoodEstimationOptions)options.getComponentOptions(MarginalLikelihoodEstimationOptions.class);
-        if (mleOptions.performMLE) {
+            //++++++++++++++++ Prior Bounds ++++++++++++++++++
             for (Parameter param : options.selectParameters()) {
-                    if (param.isPriorImproper() || (param.priorType == PriorType.ONE_OVER_X_PRIOR && !param.getBaseName().contains("popSize"))) {
+                if (param.initial != Double.NaN) {
+                    if (param.isTruncated && (param.initial < param.truncationLower || param.initial > param.truncationUpper)) {
+                        throw new GeneratorException("Parameter \"" + param.getName() + "\":" +
+                                "\ninitial value " + param.initial + " is NOT in the range [" + param.truncationLower + ", " + param.truncationUpper + "]," +
+                                "\nor this range is wrong. Please check the Prior panel.", BeautiFrame.PRIORS);
+                    } else if (param.priorType == PriorType.UNIFORM_PRIOR && (param.initial < param.uniformLower || param.initial > param.uniformUpper)) {
+                        throw new GeneratorException("Parameter \"" + param.getName() + "\":" +
+                                "\ninitial value " + param.initial + " is NOT in the range [" + param.uniformLower + ", " + param.uniformUpper + "]," +
+                                "\nor this range is wrong. Please check the Prior panel.", BeautiFrame.PRIORS);
+                    }
+                    if (param.isNonNegative && param.initial < 0.0) {
                         throw new GeneratorException("Parameter \"" + param.getName() + "\":" +
-                                "\nhas an improper prior and will not sample correctly when estimating " +
-                                "the marginal likelihood. " +
-                                "\nPlease check the Prior panel.", BeautiFrame.PRIORS);
+                                "\ninitial value " + param.initial + " should be non-negative. Please check the Prior panel.", BeautiFrame.PRIORS);
                     }
+
+                    if (param.isZeroOne && (param.initial < 0.0 || param.initial > 1.0)) {
+                        throw new GeneratorException("Parameter \"" + param.getName() + "\":" +
+                                "\ninitial value " + param.initial + " should lie in the interval [0, 1]. Please check the Prior panel.", BeautiFrame.PRIORS);
+                    }
+                }
             }
-        }
 
-        // add other tests and warnings here
-        // Speciation model with dated tips
-        // Sampling rates without dated tips or priors on rate or nodes
+            checkComponentOptions();
+
+            // add other tests and warnings here
+            // Speciation model with dated tips
+            // Sampling rates without dated tips or priors on rate or nodes
 
         } catch (Exception e) {
             // catch any other exceptions here and rethrow to generate messages
             throw new GeneratorException(e.getMessage());
         }
 
+
     }
 
     /**
@@ -783,7 +773,7 @@ public class BeastGenerator extends Generator {
     private void writeTaxon(Taxon taxon, boolean hasDate, boolean hasAttr, XMLWriter writer) throws Arguments.ArgumentException {
 
         writer.writeTag(TaxonParser.TAXON, new Attribute[]{
-                new Attribute.Default<String>(XMLParser.ID, taxon.getId())},
+                        new Attribute.Default<String>(XMLParser.ID, taxon.getId())},
                 !(hasDate || hasAttr)); // false if any of hasDate or hasAttr is true
 
 
diff --git a/src/dr/app/beauti/generator/BranchRatesModelGenerator.java b/src/dr/app/beauti/generator/BranchRatesModelGenerator.java
index d25f89c..bb5881e 100644
--- a/src/dr/app/beauti/generator/BranchRatesModelGenerator.java
+++ b/src/dr/app/beauti/generator/BranchRatesModelGenerator.java
@@ -32,19 +32,18 @@ import dr.app.beauti.types.PriorType;
 import dr.app.beauti.util.XMLWriter;
 import dr.evolution.util.Taxa;
 import dr.evomodel.branchratemodel.BranchRateModel;
+import dr.evomodel.branchratemodel.ContinuousBranchRates;
 import dr.evomodel.branchratemodel.LocalClockModel;
 import dr.evomodel.clock.RateEvolutionLikelihood;
 import dr.evomodel.tree.TreeModel;
-import dr.evomodelxml.branchratemodel.DiscretizedBranchRatesParser;
-import dr.evomodelxml.branchratemodel.LocalClockModelParser;
-import dr.evomodelxml.branchratemodel.RandomLocalClockModelParser;
-import dr.evomodelxml.branchratemodel.StrictClockBranchRatesParser;
+import dr.evomodelxml.branchratemodel.*;
 import dr.evomodelxml.clock.ACLikelihoodParser;
 import dr.evomodelxml.tree.RateCovarianceStatisticParser;
 import dr.evomodelxml.tree.RateStatisticParser;
 import dr.evomodelxml.tree.TreeModelParser;
 import dr.evoxml.TaxaParser;
 import dr.inference.distribution.ExponentialDistributionModel;
+import dr.inference.distribution.GammaDistributionModel;
 import dr.inference.model.ParameterParser;
 import dr.inferencexml.distribution.LogNormalDistributionModelParser;
 import dr.inferencexml.model.CompoundParameterParser;
@@ -75,7 +74,6 @@ public class BranchRatesModelGenerator extends Generator {
         setModelPrefix(model.getPrefix());
 
         Attribute[] attributes;
-        int categoryCount = 0;
         String treePrefix;
         List<PartitionTreeModel> activeTrees = options.getPartitionTreeModels(options.getDataPartitions(model));
 
@@ -90,11 +88,42 @@ public class BranchRatesModelGenerator extends Generator {
                 writeParameter("rate", "clock.rate", model, writer);
                 writer.writeCloseTag(StrictClockBranchRatesParser.STRICT_CLOCK_BRANCH_RATES);
 
+                for (PartitionTreeModel tree : activeTrees) {
+                    treePrefix = tree.getPrefix();
+
+                    PartitionClockModelTreeModelLink clockTree = options.getPartitionClockTreeLink(model, tree);
+                    if (clockTree == null) {
+                        throw new IllegalArgumentException("Cannot find PartitionClockTreeLink, given clock model = " + model.getName()
+                                + ", tree model = " + tree.getName());
+                    }
+                    writer.writeText("");
+                    writer.writeOpenTag(
+                            RateStatisticParser.RATE_STATISTIC,
+                            new Attribute[]{
+                                    new Attribute.Default<String>(XMLParser.ID, options.noDuplicatedPrefix(modelPrefix, treePrefix) + "meanRate"),
+                                    new Attribute.Default<String>("name", options.noDuplicatedPrefix(modelPrefix, treePrefix) + "meanRate"),
+                                    new Attribute.Default<String>("mode", "mean"),
+                                    new Attribute.Default<String>("internal", "true"),
+                                    new Attribute.Default<String>("external", "true")
+                            }
+                    );
+                    writer.writeIDref(TreeModel.TREE_MODEL, treePrefix + TreeModel.TREE_MODEL);
+                    writer.writeIDref(StrictClockBranchRatesParser.STRICT_CLOCK_BRANCH_RATES, options.noDuplicatedPrefix(modelPrefix, treePrefix)
+                            + BranchRateModel.BRANCH_RATES);
+                    writer.writeCloseTag(RateStatisticParser.RATE_STATISTIC);
+                }
                 break;
 
             case UNCORRELATED:
                 writer.writeComment("The uncorrelated relaxed clock (Drummond, Ho, Phillips & Rambaut (2006) PLoS Biology 4, e88 )");
 
+                String branchRateElementName = DiscretizedBranchRatesParser.DISCRETIZED_BRANCH_RATES;
+
+                if (model.isContinuousQuantile()) {
+                    writer.writeComment("  Continuous quantile implementation (Li & Drummond (2012) Mol Biol Evol 29:751-61)");
+                    branchRateElementName = ContinuousBranchRatesParser.CONTINUOUS_BRANCH_RATES;
+                }
+
                 for (PartitionTreeModel tree : activeTrees) {
                     treePrefix = tree.getPrefix();
 
@@ -113,7 +142,7 @@ public class BranchRatesModelGenerator extends Generator {
                     attributes = new Attribute[]{new Attribute.Default<String>(XMLParser.ID, options.noDuplicatedPrefix(modelPrefix, treePrefix)
                             + BranchRateModel.BRANCH_RATES)};
                     //}
-                    writer.writeOpenTag(DiscretizedBranchRatesParser.DISCRETIZED_BRANCH_RATES, attributes);
+                    writer.writeOpenTag(branchRateElementName, attributes);
                     // tree
                     writer.writeIDref(TreeModel.TREE_MODEL, treePrefix + TreeModel.TREE_MODEL);
 
@@ -136,8 +165,18 @@ public class BranchRatesModelGenerator extends Generator {
                             writer.writeCloseTag(LogNormalDistributionModelParser.LOGNORMAL_DISTRIBUTION_MODEL);
                             break;
                         case GAMMA:
-                            throw new UnsupportedOperationException("Uncorrelated gamma model not implemented yet");
-//                            break;
+                            writer.writeOpenTag(GammaDistributionModel.GAMMA_DISTRIBUTION_MODEL);
+
+                            if (activeTrees.indexOf(tree) < 1) {
+                                writeParameter("mean", ClockType.UCGD_MEAN, model, writer);
+                                writeParameter("shape", ClockType.UCGD_SHAPE, model, writer);
+                            } else {
+                                writeParameterRef("mean", modelPrefix + ClockType.UCGD_MEAN, writer);
+                                writeParameterRef("shape", modelPrefix + ClockType.UCGD_SHAPE, writer);
+                            }
+
+                            writer.writeCloseTag(GammaDistributionModel.GAMMA_DISTRIBUTION_MODEL);
+                            break;
                         case CAUCHY:
                             throw new UnsupportedOperationException("Uncorrelated Cauchy model not implemented yet");
 //                            break;
@@ -156,23 +195,19 @@ public class BranchRatesModelGenerator extends Generator {
 
                     writer.writeCloseTag("distribution");
 
-                    writer.writeOpenTag(DiscretizedBranchRatesParser.RATE_CATEGORIES);
-                    // AR - this parameter will now set its dimension automatically when BEAST is run
-//                    if (!options.hasIdenticalTaxa()) {
-//                        for (AbstractPartitionData dataPartition : options.dataPartitions) {
-//                            if (dataPartition.getPartitionClockModel().equals(model)) {
-//                                categoryCount = (dataPartition.getTaxonCount() - 1) * 2;
-//                            }
-//                        }
-//                    } else {
-//                        categoryCount = (options.taxonList.getTaxonCount() - 1) * 2;
-//                    }
-//                    writeParameter(clockTree.getParameter("branchRates.categories"), categoryCount, writer);
-                    writeParameter(clockTree.getParameter("branchRates.categories"), -1, writer);
-                    writer.writeCloseTag(DiscretizedBranchRatesParser.RATE_CATEGORIES);
-                    writer.writeCloseTag(DiscretizedBranchRatesParser.DISCRETIZED_BRANCH_RATES);
+                    if (model.isContinuousQuantile()) {
+                        writer.writeOpenTag(ContinuousBranchRatesParser.RATE_QUANTILES);
+                        writeParameter(clockTree.getParameter("branchRates.quantiles"), -1, writer);
+                        writer.writeCloseTag(ContinuousBranchRatesParser.RATE_QUANTILES);
+                        writer.writeCloseTag(branchRateElementName);
+                    } else {
+                        writer.writeOpenTag(DiscretizedBranchRatesParser.RATE_CATEGORIES);
+                        writeParameter(clockTree.getParameter("branchRates.categories"), -1, writer);
+                        writer.writeCloseTag(DiscretizedBranchRatesParser.RATE_CATEGORIES);
+                        writer.writeCloseTag(branchRateElementName);
+                    }
 
-                    writer.writeText("");
+                      writer.writeText("");
                     writer.writeOpenTag(
                             RateStatisticParser.RATE_STATISTIC,
                             new Attribute[]{
@@ -184,7 +219,7 @@ public class BranchRatesModelGenerator extends Generator {
                             }
                     );
                     writer.writeIDref(TreeModel.TREE_MODEL, treePrefix + TreeModel.TREE_MODEL);
-                    writer.writeIDref(DiscretizedBranchRatesParser.DISCRETIZED_BRANCH_RATES, options.noDuplicatedPrefix(modelPrefix, treePrefix)
+                    writer.writeIDref(branchRateElementName, options.noDuplicatedPrefix(modelPrefix, treePrefix)
                             + BranchRateModel.BRANCH_RATES);
                     writer.writeCloseTag(RateStatisticParser.RATE_STATISTIC);
 
@@ -200,7 +235,7 @@ public class BranchRatesModelGenerator extends Generator {
                             }
                     );
                     writer.writeIDref(TreeModel.TREE_MODEL, treePrefix + TreeModel.TREE_MODEL);
-                    writer.writeIDref(DiscretizedBranchRatesParser.DISCRETIZED_BRANCH_RATES, options.noDuplicatedPrefix(modelPrefix, treePrefix)
+                    writer.writeIDref(branchRateElementName, options.noDuplicatedPrefix(modelPrefix, treePrefix)
                             + BranchRateModel.BRANCH_RATES);
                     writer.writeCloseTag(RateStatisticParser.RATE_STATISTIC);
 
@@ -213,7 +248,7 @@ public class BranchRatesModelGenerator extends Generator {
                             }
                     );
                     writer.writeIDref(TreeModel.TREE_MODEL, treePrefix + TreeModel.TREE_MODEL);
-                    writer.writeIDref(DiscretizedBranchRatesParser.DISCRETIZED_BRANCH_RATES, options.noDuplicatedPrefix(modelPrefix, treePrefix) + BranchRateModel.BRANCH_RATES);
+                    writer.writeIDref(branchRateElementName, options.noDuplicatedPrefix(modelPrefix, treePrefix) + BranchRateModel.BRANCH_RATES);
                     writer.writeCloseTag(RateCovarianceStatisticParser.RATE_COVARIANCE_STATISTIC);
                 }
 
@@ -351,12 +386,12 @@ public class BranchRatesModelGenerator extends Generator {
 
                 writer.writeOpenTag("rates");
                 writer.writeTag(ParameterParser.PARAMETER, new Attribute.Default<String>
-                        (XMLParser.ID, modelPrefix + ClockType.LOCAL_CLOCK + ".relativeRates")
+                                (XMLParser.ID, modelPrefix + ClockType.LOCAL_CLOCK + ".relativeRates")
                         , true);
                 writer.writeCloseTag("rates");
                 writer.writeOpenTag("rateIndicator");
                 writer.writeTag(ParameterParser.PARAMETER, new Attribute.Default<String>
-                        (XMLParser.ID, modelPrefix + ClockType.LOCAL_CLOCK + ".changes")
+                                (XMLParser.ID, modelPrefix + ClockType.LOCAL_CLOCK + ".changes")
                         , true);
                 writer.writeCloseTag("rateIndicator");
 
@@ -510,10 +545,53 @@ public class BranchRatesModelGenerator extends Generator {
 
     }
 
+    /**
+     * Write the branch rates model reference.
+     *
+     * @param model  PartitionClockModel
+     * @param writer the writer
+     */
+    public static void writeBranchRatesModelRef(PartitionClockModel model, XMLWriter writer) {
+        String tag = "";
+        String id = "";
+
+        switch (model.getClockType()) {
+            case STRICT_CLOCK:
+                tag = StrictClockBranchRatesParser.STRICT_CLOCK_BRANCH_RATES;
+                id = model.getPrefix() + BranchRateModel.BRANCH_RATES;
+                break;
+
+            case UNCORRELATED:
+                tag = model.isContinuousQuantile() ?
+                        ContinuousBranchRatesParser.CONTINUOUS_BRANCH_RATES :
+                        DiscretizedBranchRatesParser.DISCRETIZED_BRANCH_RATES;
+                id = model.getPrefix() + BranchRateModel.BRANCH_RATES;
+                break;
+
+            case RANDOM_LOCAL_CLOCK:
+                tag = RandomLocalClockModelParser.LOCAL_BRANCH_RATES;
+                id = model.getPrefix() + BranchRateModel.BRANCH_RATES;
+                break;
+
+            case FIXED_LOCAL_CLOCK:
+                tag = LocalClockModelParser.LOCAL_CLOCK_MODEL;
+                id = model.getPrefix() + BranchRateModel.BRANCH_RATES;
+                break;
+            case AUTOCORRELATED:
+                tag = ACLikelihoodParser.AC_LIKELIHOOD;
+                throw new UnsupportedOperationException("Autocorrelated relaxed clock model not implemented yet");
+
+            default:
+                throw new IllegalArgumentException("Unknown clock model");
+        }
+        writer.writeIDref(tag, id);
+    }
+
     public void writeAllClockRateRefs(PartitionClockModel model, XMLWriter writer) {
         writer.writeIDref(ParameterParser.PARAMETER, getClockRateString(model));
     }
 
+
     public String getClockRateString(PartitionClockModel model) {
         setModelPrefix(model.getPrefix());
 
@@ -529,11 +607,10 @@ public class BranchRatesModelGenerator extends Generator {
                     case LOGNORMAL:
                         return modelPrefix + ClockType.UCLD_MEAN;
                     case GAMMA:
-                        throw new UnsupportedOperationException("Uncorrelated gamma model not supported yet");
-//                        return modelPrefix + ClockType.UCGD_SCALE;
+                        return modelPrefix + ClockType.UCGD_MEAN;
                     case CAUCHY:
                         throw new UnsupportedOperationException("Uncorrelated Cauchy model not supported yet");
-//                        return modelPrefix + ClockType.UCCD_MEAN;
+//                        return null;
                     case EXPONENTIAL:
                         return modelPrefix + ClockType.UCED_MEAN;
                 }
@@ -574,9 +651,12 @@ public class BranchRatesModelGenerator extends Generator {
                         writer.writeIDref(ParameterParser.PARAMETER, model.getPrefix() + ClockType.UCLD_STDEV);
                         break;
                     case GAMMA:
-                        throw new UnsupportedOperationException("Uncorrelated gamma model not supported yet");
+                        writer.writeIDref(ParameterParser.PARAMETER, model.getPrefix() + ClockType.UCGD_MEAN);
+                        writer.writeIDref(ParameterParser.PARAMETER, model.getPrefix() + ClockType.UCGD_SHAPE);
+                        break;
                     case CAUCHY:
                         throw new UnsupportedOperationException("Uncorrelated Couchy model not supported yet");
+//                        break;
                     case EXPONENTIAL:
                         writer.writeIDref(ParameterParser.PARAMETER, model.getPrefix() + ClockType.UCED_MEAN);
                         break;
@@ -597,6 +677,9 @@ public class BranchRatesModelGenerator extends Generator {
 
         switch (model.getClockType()) {
             case STRICT_CLOCK:
+                for (PartitionTreeModel tree : options.getPartitionTreeModels(options.getDataPartitions(model))) {
+                    writer.writeIDref(RateStatisticParser.RATE_STATISTIC, options.noDuplicatedPrefix(model.getPrefix(), tree.getPrefix()) + "meanRate");
+                }
                 break;
 
             case UNCORRELATED:
@@ -637,11 +720,7 @@ public class BranchRatesModelGenerator extends Generator {
     public void writeClockLikelihoodReferences(XMLWriter writer) {
         for (AbstractPartitionData partition : options.dataPartitions) { // Each PD has one TreeLikelihood
             PartitionClockModel clockModel = partition.getPartitionClockModel();
-
-            if (clockModel != null && clockModel.getClockType() == ClockType.AUTOCORRELATED) {
-                throw new UnsupportedOperationException("Autocorrelated relaxed clock model not implemented yet");
-//                writer.writeIDref(ACLikelihoodParser.AC_LIKELIHOOD, clockModel.getPrefix() + BranchRateModel.BRANCH_RATES);
-            }
+            writeBranchRatesModelRef(clockModel, writer);
         }
     }
 
diff --git a/src/dr/app/beauti/generator/ComponentGenerator.java b/src/dr/app/beauti/generator/ComponentGenerator.java
index 02b0ed3..e5e2f60 100644
--- a/src/dr/app/beauti/generator/ComponentGenerator.java
+++ b/src/dr/app/beauti/generator/ComponentGenerator.java
@@ -67,6 +67,14 @@ public interface ComponentGenerator {
     }
 
     /**
+     * Opportunity for a component to do some pre-generation checks of the options selected.
+     * Throwing a GeneratorException will result in a dialog box being shown to the user and
+     * generation being disallowed.
+     * @throws Generator.GeneratorException
+     */
+    void checkOptions() throws Generator.GeneratorException;
+
+    /**
      * Returns whether this component requires access to a particular insertion point
      * @param point the insertion point
      * @return whether it requires it
diff --git a/src/dr/app/beauti/generator/Generator.java b/src/dr/app/beauti/generator/Generator.java
index 93922e6..652a290 100644
--- a/src/dr/app/beauti/generator/Generator.java
+++ b/src/dr/app/beauti/generator/Generator.java
@@ -77,6 +77,12 @@ public abstract class Generator {
         }
     }
 
+    public final void checkComponentOptions() throws GeneratorException {
+        for (ComponentGenerator component : components) {
+                component.checkOptions();
+        }
+    }
+
     public String getModelPrefix() {
         return modelPrefix;
     }
diff --git a/src/dr/app/beauti/generator/LogGenerator.java b/src/dr/app/beauti/generator/LogGenerator.java
index f8aa741..8675793 100644
--- a/src/dr/app/beauti/generator/LogGenerator.java
+++ b/src/dr/app/beauti/generator/LogGenerator.java
@@ -35,10 +35,7 @@ import dr.evolution.datatype.DataType;
 import dr.evolution.util.Taxa;
 import dr.evomodel.branchratemodel.BranchRateModel;
 import dr.evomodel.tree.TreeModel;
-import dr.evomodelxml.branchratemodel.DiscretizedBranchRatesParser;
-import dr.evomodelxml.branchratemodel.LocalClockModelParser;
-import dr.evomodelxml.branchratemodel.RandomLocalClockModelParser;
-import dr.evomodelxml.branchratemodel.StrictClockBranchRatesParser;
+import dr.evomodelxml.branchratemodel.*;
 import dr.evomodelxml.clock.ACLikelihoodParser;
 import dr.evomodelxml.coalescent.CoalescentLikelihoodParser;
 import dr.evomodelxml.coalescent.GMRFSkyrideLikelihoodParser;
@@ -314,25 +311,6 @@ public class LogGenerator extends Generator {
             branchRatesModelGenerator.writeLogStatistic(model, writer);
         }
 
-        if (options.logCoalescentEventsStatistic) {
-            writer.writeOpenTag("coalescentEventsStatistic");
-            // coalescentLikelihood
-            for (PartitionTreeModel model : options.getPartitionTreeModels()) {
-                PartitionTreePrior prior = model.getPartitionTreePrior();
-                treePriorGenerator.writePriorLikelihoodReferenceLog(prior, model, writer);
-                writer.writeText("");
-            }
-
-            /*for (PartitionTreePrior prior : options.getPartitionTreePriors()) {
-                if (prior.getNodeHeightPrior() == TreePriorType.EXTENDED_SKYLINE) {
-                    writer.writeIDref(CoalescentLikelihoodParser.COALESCENT_LIKELIHOOD, prior.getPrefix() + COALESCENT); // only 1 coalescent
-                } else if (prior.getNodeHeightPrior() == TreePriorType.SKYGRID) {
-                    writer.writeIDref(GMRFSkyrideLikelihoodParser.SKYGRID_LIKELIHOOD, prior.getPrefix() + "skygrid");
-                }
-            }*/
-            writer.writeCloseTag("coalescentEventsStatistic");
-        }
-
         generateInsertionPoint(ComponentGenerator.InsertionPoint.IN_FILE_LOG_PARAMETERS, writer);
 
         treeLikelihoodGenerator.writeTreeLikelihoodReferences(writer);
@@ -581,7 +559,6 @@ public class LogGenerator extends Generator {
                         });
                 writer.writeIDref(TreeModel.TREE_MODEL, tree.getPrefix() + TreeModel.TREE_MODEL);
 
-                // assume the first clock model is the one... (not sure if this makes sense)
                 PartitionClockModel model = options.getPartitionClockModels(options.getDataPartitions(tree)).get(0);
                 String tag = "";
                 String id = "";
@@ -593,7 +570,9 @@ public class LogGenerator extends Generator {
                         break;
 
                     case UNCORRELATED:
-                        tag = DiscretizedBranchRatesParser.DISCRETIZED_BRANCH_RATES;
+                        tag = model.isContinuousQuantile() ?
+                                ContinuousBranchRatesParser.CONTINUOUS_BRANCH_RATES :
+                                DiscretizedBranchRatesParser.DISCRETIZED_BRANCH_RATES;
                         id = model.getPrefix() + BranchRateModel.BRANCH_RATES;
                         break;
 
@@ -635,7 +614,9 @@ public class LogGenerator extends Generator {
                     break;
 
                 case UNCORRELATED:
-                    writeTreeTrait(writer, DiscretizedBranchRatesParser.DISCRETIZED_BRANCH_RATES,
+                    writeTreeTrait(writer, model.isContinuousQuantile() ?
+                                    ContinuousBranchRatesParser.CONTINUOUS_BRANCH_RATES :
+                                    DiscretizedBranchRatesParser.DISCRETIZED_BRANCH_RATES,
                             options.noDuplicatedPrefix(model.getPrefix(), tree.getPrefix()) + BranchRateModel.BRANCH_RATES,
                             BranchRateModel.RATE, model.getPrefix() + BranchRateModel.RATE);
                     break;
diff --git a/src/dr/app/beauti/generator/OperatorsGenerator.java b/src/dr/app/beauti/generator/OperatorsGenerator.java
index 2ec427f..99fd690 100644
--- a/src/dr/app/beauti/generator/OperatorsGenerator.java
+++ b/src/dr/app/beauti/generator/OperatorsGenerator.java
@@ -46,8 +46,8 @@ import dr.evomodelxml.speciation.SpeciesTreeModelParser;
 import dr.evomodelxml.speciation.YuleModelParser;
 import dr.evomodelxml.substmodel.GeneralSubstitutionModelParser;
 import dr.inference.model.ParameterParser;
+import dr.inference.operators.OperatorSchedule;
 import dr.inference.operators.RateBitExchangeOperator;
-import dr.inference.operators.SimpleOperatorSchedule;
 import dr.inferencexml.model.CompoundParameterParser;
 import dr.inferencexml.operators.*;
 import dr.util.Attribute;
@@ -95,7 +95,9 @@ public class OperatorsGenerator extends Generator {
         operatorAttributes = new Attribute[] {
                 new Attribute.Default<String>(XMLParser.ID, "operators"),
                 new Attribute.Default<String>(SimpleOperatorScheduleParser.OPTIMIZATION_SCHEDULE,
-                        (shouldLogCool ? SimpleOperatorSchedule.LOG_STRING : SimpleOperatorSchedule.DEFAULT_STRING))
+                        (shouldLogCool ?
+                                OperatorSchedule.OptimizationTransform.LOG.toString() :
+                                OperatorSchedule.OptimizationTransform.DEFAULT.toString()))
         };
 
         writer.writeComment("Define operators");
@@ -185,6 +187,9 @@ public class OperatorsGenerator extends Generator {
             case INTEGER_UNIFORM:
                 writeIntegerUniformOperator(operator, writer);
                 break;
+            case SUBTREE_LEAP:
+                writeSubtreeLeapOperator(operator, writer);
+                break;
             case SUBTREE_SLIDE:
                 writeSubtreeSlideOperator(operator, writer);
                 break;
@@ -348,7 +353,7 @@ public class OperatorsGenerator extends Generator {
 
         if (operator.getBaseName().startsWith(RelativeRatesType.MU_RELATIVE_RATES.toString())) {
 
-            int[] parameterWeights = ((PartitionSubstitutionModel) operator.parameter1.getOptions()).getPartitionCodonWeights();
+            int[] parameterWeights = operator.parameter1.getParameterDimensionWeights();
 
             if (parameterWeights != null && parameterWeights.length > 1) {
                 String pw = "" + parameterWeights[0];
@@ -562,6 +567,17 @@ public class OperatorsGenerator extends Generator {
         writer.writeCloseTag(ScaleOperatorParser.SCALE_OPERATOR);
     }
 
+    private void writeSubtreeLeapOperator(Operator operator, XMLWriter writer) {
+        writer.writeOpenTag(SubtreeLeapOperatorParser.SUBTREE_LEAP,
+                new Attribute[]{
+                        new Attribute.Default<Double>("size", operator.tuning),
+                        getWeightAttribute(operator.weight)
+                }
+        );
+        writer.writeIDref(TreeModel.TREE_MODEL, modelPrefix + TreeModel.TREE_MODEL);
+        writer.writeCloseTag(SubtreeLeapOperatorParser.SUBTREE_LEAP);
+    }
+
     private void writeSubtreeSlideOperator(Operator operator, XMLWriter writer) {
         writer.writeOpenTag(SubtreeSlideOperatorParser.SUBTREE_SLIDE,
                 new Attribute[]{
diff --git a/src/dr/app/beauti/generator/ParameterPriorGenerator.java b/src/dr/app/beauti/generator/ParameterPriorGenerator.java
index 5305e29..add36ee 100644
--- a/src/dr/app/beauti/generator/ParameterPriorGenerator.java
+++ b/src/dr/app/beauti/generator/ParameterPriorGenerator.java
@@ -265,6 +265,19 @@ public class ParameterPriorGenerator extends Generator {
             case LOGNORMAL_HPM_PRIOR:
                 // Do nothing, densities are already in a distributionLikelihood
                 break;
+            case DIRICHLET_PRIOR:
+                int dimensions = parameter.getParameterDimensionWeights().length;
+                String counts = "1.0";
+                for (int i = 1; i < dimensions; i++) {
+                   counts += " 1.0";
+                }
+                writer.writeOpenTag(PriorParsers.DIRICHLET_PRIOR,
+                        new Attribute[]{
+                                new Attribute.Default<String>(PriorParsers.COUNTS, counts),
+                        });
+                writeParameterIdref(writer, parameter);
+                writer.writeCloseTag(PriorParsers.DIRICHLET_PRIOR);
+                break;
             default:
                 throw new IllegalArgumentException("Unknown priorType");
         }
diff --git a/src/dr/app/beauti/generator/SubstitutionModelGenerator.java b/src/dr/app/beauti/generator/SubstitutionModelGenerator.java
index adf9fdc..16f994d 100644
--- a/src/dr/app/beauti/generator/SubstitutionModelGenerator.java
+++ b/src/dr/app/beauti/generator/SubstitutionModelGenerator.java
@@ -112,8 +112,6 @@ public class SubstitutionModelGenerator extends Generator {
                     writer.writeCloseTag(HKYParser.KAPPA);
                     writer.writeCloseTag(NucModelType.HKY.getXMLName());
 
-                    throw new IllegalArgumentException("AR: Need to check that kappa = 1 for JC (I have feeling it should be 0.5)");
-
                 } else {
                     // Hasegawa Kishino and Yano 85 model
                     if (model.getNucSubstitutionModel() == NucModelType.HKY) {
diff --git a/src/dr/app/beauti/generator/TreeLikelihoodGenerator.java b/src/dr/app/beauti/generator/TreeLikelihoodGenerator.java
index 02107da..7f26eb7 100644
--- a/src/dr/app/beauti/generator/TreeLikelihoodGenerator.java
+++ b/src/dr/app/beauti/generator/TreeLikelihoodGenerator.java
@@ -132,12 +132,16 @@ public class TreeLikelihoodGenerator extends Generator {
 
         Attribute[] attributes;
         if (tag.equals(MarkovJumpsTreeLikelihoodParser.MARKOV_JUMP_TREE_LIKELIHOOD)) {
+            AncestralStatesComponentOptions ancestralStatesOptions = (AncestralStatesComponentOptions) options
+                    .getComponentOptions(AncestralStatesComponentOptions.class);
+            boolean saveCompleteHistory = ancestralStatesOptions.isCompleteHistoryLogging(partition);
             attributes = new Attribute[]{
                     new Attribute.Default<String>(XMLParser.ID, idString),
                     new Attribute.Default<Boolean>(TreeLikelihoodParser.USE_AMBIGUITIES, substModel.isUseAmbiguitiesTreeLikelihood()),
                     new Attribute.Default<Boolean>(MarkovJumpsTreeLikelihoodParser.USE_UNIFORMIZATION, true),
                     new Attribute.Default<Integer>(MarkovJumpsTreeLikelihoodParser.NUMBER_OF_SIMULANTS, 1),
                     new Attribute.Default<String>(AncestralStateTreeLikelihoodParser.RECONSTRUCTION_TAG_NAME, prefix + AncestralStateTreeLikelihoodParser.RECONSTRUCTION_TAG),
+                    new Attribute.Default<String>(MarkovJumpsTreeLikelihoodParser.SAVE_HISTORY, saveCompleteHistory ? "true" : "false"),
             };
         } else if (tag.equals(TreeLikelihoodParser.ANCESTRAL_TREE_LIKELIHOOD)) {
             attributes = new Attribute[]{
@@ -173,34 +177,7 @@ public class TreeLikelihoodGenerator extends Generator {
             writer.writeIDref(GammaSiteModel.SITE_MODEL, substModel.getPrefix() + SiteModel.SITE_MODEL);
         }
 
-        switch (clockModel.getClockType()) {
-            case STRICT_CLOCK:
-                writer.writeIDref(StrictClockBranchRatesParser.STRICT_CLOCK_BRANCH_RATES, clockModel.getPrefix()
-                        + BranchRateModel.BRANCH_RATES);
-                break;
-            case UNCORRELATED:
-                writer.writeIDref(DiscretizedBranchRatesParser.DISCRETIZED_BRANCH_RATES, options.noDuplicatedPrefix(clockModel.getPrefix(), treeModel.getPrefix())
-                        + BranchRateModel.BRANCH_RATES);
-                break;
-            case RANDOM_LOCAL_CLOCK:
-                writer.writeIDref(RandomLocalClockModelParser.LOCAL_BRANCH_RATES, clockModel.getPrefix()
-                        + BranchRateModel.BRANCH_RATES);
-                break;
-            case FIXED_LOCAL_CLOCK:
-                writer.writeIDref(LocalClockModelParser.LOCAL_CLOCK_MODEL, clockModel.getPrefix()
-                        + BranchRateModel.BRANCH_RATES);
-                break;
-
-
-            case AUTOCORRELATED:
-                throw new UnsupportedOperationException("Autocorrelated relaxed clock model not implemented yet");
-//            	writer.writeIDref(ACLikelihoodParser.AC_LIKELIHOOD, options.noDuplicatedPrefix(clockModel.getPrefix(), treeModel.getPrefix())
-//                        + BranchRateModel.BRANCH_RATES);
-//                break;
-
-            default:
-                throw new IllegalArgumentException("Unknown clock model");
-        }
+        BranchRatesModelGenerator.writeBranchRatesModelRef(clockModel, writer);
 
         generateInsertionPoint(ComponentGenerator.InsertionPoint.IN_TREE_LIKELIHOOD, partition, prefix, writer);
 
diff --git a/src/dr/app/beauti/generator/TreePriorGenerator.java b/src/dr/app/beauti/generator/TreePriorGenerator.java
index ba47a2c..ee39a96 100644
--- a/src/dr/app/beauti/generator/TreePriorGenerator.java
+++ b/src/dr/app/beauti/generator/TreePriorGenerator.java
@@ -1009,9 +1009,9 @@ public class TreePriorGenerator extends Generator {
         writer.writeCloseTag(ExponentialMarkovModel.EXPONENTIAL_MARKOV_MODEL);
     }
 
-    public void writePriorLikelihoodReferenceLog(PartitionTreePrior prior, PartitionTreeModel model, XMLWriter writer) {
+    public static void writePriorLikelihoodReferenceLog(PartitionTreePrior prior, PartitionTreeModel model, XMLWriter writer) {
         //tree model prefix
-        setModelPrefix(model.getPrefix()); // only has prefix, if (options.getPartitionTreePriors().size() > 1)
+        String modelPrefix = model.getPrefix(); // only has prefix, if (options.getPartitionTreePriors().size() > 1)
 
         switch (prior.getNodeHeightPrior()) {
 
@@ -1031,7 +1031,7 @@ public class TreePriorGenerator extends Generator {
                 writer.writeIDref(GMRFSkyrideLikelihoodParser.SKYLINE_LIKELIHOOD, modelPrefix + "skyride");
                 break;
             case SKYGRID:
-//                writer.writeIDref(GMRFSkyrideLikelihoodParser.SKYLINE_LIKELIHOOD, modelPrefix + "skygrid");
+                writer.writeIDref(GMRFSkyrideLikelihoodParser.SKYLINE_LIKELIHOOD, modelPrefix + "skygrid");
                 // only 1 coalescent, so write it separately after this method
                 break;
             case LOGISTIC:
diff --git a/src/dr/app/beauti/mcmcpanel/MCMCPanel.java b/src/dr/app/beauti/mcmcpanel/MCMCPanel.java
index 8c46880..efbba47 100644
--- a/src/dr/app/beauti/mcmcpanel/MCMCPanel.java
+++ b/src/dr/app/beauti/mcmcpanel/MCMCPanel.java
@@ -284,6 +284,7 @@ public class MCMCPanel extends BeautiPanel {
             public void actionPerformed(ActionEvent e) {
                 if (performMLECombo.getSelectedIndex() == 1) {
                     mleOptions.performMLE = true;
+                    options.logCoalescentEventsStatistic = false;
                     buttonMLE.setEnabled(true);
                     updateMLEFileNameStem();
                 } else if (performMLECombo.getSelectedIndex() == 2) {
@@ -294,6 +295,7 @@ public class MCMCPanel extends BeautiPanel {
                     updateMLEFileNameStem();
                 } else {
                     mleOptions.performMLE = false;
+                    mleOptions.performMLEGSS = false;
                     mleOptions.printOperatorAnalysis = false;
                     options.logCoalescentEventsStatistic = false;
                     buttonMLE.setEnabled(false);
diff --git a/src/dr/app/beauti/operatorspanel/OperatorsPanel.java b/src/dr/app/beauti/operatorspanel/OperatorsPanel.java
index 7960394..36b755c 100644
--- a/src/dr/app/beauti/operatorspanel/OperatorsPanel.java
+++ b/src/dr/app/beauti/operatorspanel/OperatorsPanel.java
@@ -29,6 +29,8 @@ import dr.app.beauti.BeautiFrame;
 import dr.app.beauti.BeautiPanel;
 import dr.app.beauti.options.BeautiOptions;
 import dr.app.beauti.options.Operator;
+import dr.app.beauti.types.OperatorSetType;
+import dr.app.beauti.util.PanelUtils;
 import dr.app.gui.table.RealNumberCellEditor;
 import jam.framework.Exportable;
 import jam.table.HeaderRenderer;
@@ -59,6 +61,12 @@ public class OperatorsPanel extends BeautiPanel implements Exportable {
 
     JCheckBox autoOptimizeCheck = null;
 
+    JComboBox operatorSetCombo = new JComboBox(new OperatorSetType[] {
+            OperatorSetType.DEFAULT,
+            OperatorSetType.NEW_TREE_MIX,
+            OperatorSetType.FIXED_TREE_TOPOLOGY
+    });
+
     public List<Operator> operators = new ArrayList<Operator>();
 
     private BeautiOptions options;
@@ -126,12 +134,29 @@ public class OperatorsPanel extends BeautiPanel implements Exportable {
         toolBar1.setOpaque(false);
         toolBar1.setLayout(new FlowLayout(java.awt.FlowLayout.LEFT, 0, 0));
         toolBar1.add(autoOptimizeCheck);
+        toolBar1.add(new JToolBar.Separator(new Dimension(12, 12)));
+        final JLabel label = new JLabel("Operator mix: ");
+        toolBar1.add(label);
+        PanelUtils.setupComponent(operatorSetCombo);
+        toolBar1.add(operatorSetCombo);
 
         setOpaque(false);
         setLayout(new BorderLayout(0, 0));
         setBorder(new BorderUIResource.EmptyBorderUIResource(new java.awt.Insets(12, 12, 12, 12)));
         add(toolBar1, BorderLayout.NORTH);
         add(scrollPane, BorderLayout.CENTER);
+
+
+        operatorSetCombo.addItemListener(
+                new java.awt.event.ItemListener() {
+                    public void itemStateChanged(java.awt.event.ItemEvent ev) {
+                        options.operatorSetType = (OperatorSetType)operatorSetCombo.getSelectedItem();
+                        operators = options.selectOperators();
+                        operatorTableModel.fireTableDataChanged();
+                        operatorsChanged();
+                    }
+                }
+        );
     }
 
     public final void operatorsChanged() {
@@ -141,6 +166,8 @@ public class OperatorsPanel extends BeautiPanel implements Exportable {
     public void setOptions(BeautiOptions options) {
         this.options = options;
 
+        operatorSetCombo.setSelectedItem(options.operatorSetType);
+
         autoOptimizeCheck.setSelected(options.autoOptimize);
         operators = options.selectOperators();
         operatorTableModel.fireTableDataChanged();
diff --git a/src/dr/app/beauti/options/BeautiOptions.java b/src/dr/app/beauti/options/BeautiOptions.java
index 9a1186d..0df86b2 100644
--- a/src/dr/app/beauti/options/BeautiOptions.java
+++ b/src/dr/app/beauti/options/BeautiOptions.java
@@ -32,6 +32,7 @@ import dr.app.beauti.components.ancestralstates.AncestralStatesComponentOptions;
 import dr.app.beauti.components.continuous.ContinuousComponentOptions;
 import dr.app.beauti.components.discrete.DiscreteTraitsComponentOptions;
 import dr.app.beauti.mcmcpanel.MCMCPanel;
+import dr.app.beauti.types.OperatorSetType;
 import dr.app.beauti.types.TreePriorType;
 import dr.app.beauti.util.BeautiTemplate;
 import dr.evolution.alignment.Alignment;
@@ -119,7 +120,7 @@ public class BeautiOptions extends ModelOptions {
         units = Units.Type.SUBSTITUTIONS;
 
         // Operator schedule options
-        coolingSchedule = OperatorSchedule.DEFAULT_SCHEDULE;
+        optimizationTransform = OperatorSchedule.OptimizationTransform.DEFAULT;
 
         // MCMC options
         chainLength = 10000000;
@@ -1399,7 +1400,7 @@ public class BeautiOptions extends ModelOptions {
     public Units.Type units = Units.Type.YEARS;
 
     // Operator schedule options
-    public int coolingSchedule = OperatorSchedule.DEFAULT_SCHEDULE;
+    public OperatorSchedule.OptimizationTransform optimizationTransform = OperatorSchedule.OptimizationTransform.DEFAULT;
 
     // MCMC options
     public int chainLength = 10000000;
@@ -1432,6 +1433,8 @@ public class BeautiOptions extends ModelOptions {
     public ClockModelOptions clockModelOptions = new ClockModelOptions(this);
     public TreeModelOptions treeModelOptions = new TreeModelOptions(this);
 
+    public OperatorSetType operatorSetType = OperatorSetType.DEFAULT;
+
     public boolean useStarBEAST = false;
     public List<Taxa> speciesSets = new ArrayList<Taxa>();
     public Map<Taxa, Boolean> speciesSetsMono = new HashMap<Taxa, Boolean>();
diff --git a/src/dr/app/beauti/options/DateGuesser.java b/src/dr/app/beauti/options/DateGuesser.java
index 282ef96..6efb6a0 100644
--- a/src/dr/app/beauti/options/DateGuesser.java
+++ b/src/dr/app/beauti/options/DateGuesser.java
@@ -30,16 +30,15 @@ import dr.evolution.util.Taxon;
 import dr.evolution.util.TaxonList;
 import dr.evolution.util.Units;
 
-import java.io.Serializable;
+import java.io.*;
 import java.text.*;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.TimeZone;
+import java.util.*;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
 /**
  * @author Andrew Rambaut
+ * @author Tommy Lam
  */
 public class DateGuesser implements Serializable {
     private static final long serialVersionUID = -9106689400887615213L;
@@ -56,6 +55,8 @@ public class DateGuesser implements Serializable {
     public int order = 0;
     public String prefix;
     public String regex;
+    public File loadFile;
+    public HashMap<String, String> load;
     public double offset = 0.0;
     public double unlessLessThan = 0.0;
     public double offset2 = 0.0;
@@ -76,12 +77,29 @@ public class DateGuesser implements Serializable {
         guessDates(taxa);
     }
 
+    public void guessDates(TaxonList taxonList, Map<Taxon, String> taxonDateMap) {
+        // To avoid duplicating code, add all the taxa into a list and
+        // pass it to guessDates(List<Taxon> taxonList)
+        List<Taxon> taxa = new ArrayList<Taxon>();
+        for (Taxon taxon : taxonList) {
+            taxa.add(taxon);
+        }
+
+        guessDates(taxa, taxonDateMap);
+    }
+
     public void guessDates(List<Taxon> taxonList) {
+        guessDates(taxonList, null);
+    }
+
+    public void guessDates(List<Taxon> taxonList, Map<Taxon, String> taxonDateMap) {
 
         dateFormat = new SimpleDateFormat(calendarDateFormat);
         dateFormat.setTimeZone(TimeZone.getTimeZone("GMT"));
 
         for (int i = 0; i < taxonList.size(); i++) {
+            Taxon taxon = taxonList.get(i);
+
             // Allocates a Date object and initializes it to represent the specified number of milliseconds since the
             // standard base time known as "the epoch", namely January 1, 1970, 00:00:00 GMT
             java.util.Date origin = new java.util.Date(0);
@@ -89,22 +107,28 @@ public class DateGuesser implements Serializable {
             double[] values = new double[2];
 
             try {
-                switch (guessType) {
-                    case ORDER:
-                        guessDateFromOrder(taxonList.get(i).getId(), order, fromLast, values);
-                        break;
-                    case PREFIX:
-                        guessDateFromPrefix(taxonList.get(i).getId(), prefix, order, fromLast, values);
-                        break;
-                    case REGEX:
-                        guessDateFromRegex(taxonList.get(i).getId(), regex, values);
-                        break;
-                    default:
-                        throw new IllegalArgumentException("unknown GuessType");
+
+                if (taxonDateMap != null) {
+                    String dateString = taxonDateMap.get(taxon);
+                    parseDate(taxon.getId(), dateString, values);
+                } else {
+                    switch (guessType) {
+                        case ORDER:
+                            guessDateFromOrder(taxonList.get(i).getId(), order, fromLast, values);
+                            break;
+                        case PREFIX:
+                            guessDateFromPrefix(taxonList.get(i).getId(), prefix, order, fromLast, values);
+                            break;
+                        case REGEX:
+                            guessDateFromRegex(taxonList.get(i).getId(), regex, values);
+                            break;
+                        default:
+                            throw new IllegalArgumentException("unknown GuessType");
+                    }
                 }
 
             } catch (GuessDatesException gfe) {
-                //
+                // @todo catch errors and give to user
             }
 
             double d = values[0];
@@ -120,9 +144,11 @@ public class DateGuesser implements Serializable {
                 }
             }
 
+            // @todo if any taxa aren't set then return warning
+
             Date date = Date.createTimeSinceOrigin(d, Units.Type.YEARS, origin);
             date.setPrecision(values[1]);
-            taxonList.get(i).setAttribute("date", date);
+            taxon.setAttribute("date", date);
         }
     }
 
@@ -281,6 +307,18 @@ public class DateGuesser implements Serializable {
     }
 
 
+    private void parseDateFromValue(String label, HashMap<String, String> myload, double[] values) throws GuessDatesException {
+        String dateStr = "";
+        if (myload.containsKey(label)) {
+            dateStr = (String)(myload.get(label));
+        } else {
+            throw new GuessDatesException("The imported table doesn't contain the taxon label, " + label);
+        }
+
+        parseDate(label, dateStr, values);
+    }
+
+
     private DateFormat dateFormat1 = null;
     private DateFormat dateFormat2 = null;
     private DateFormat dateFormat3 = null;
@@ -321,7 +359,7 @@ public class DateGuesser implements Serializable {
                         p = 1.0;
 
                     } catch (ParseException pe3) {
-                        throw new GuessDatesException("Badly formatted date in taxon label, " + label);
+                        throw new GuessDatesException("Badly formatted date for taxon, " + label);
                     }
                 }
             }
@@ -333,14 +371,14 @@ public class DateGuesser implements Serializable {
 
                 d = date.getTimeValue();
             } catch (ParseException pe) {
-                throw new GuessDatesException("Badly formatted date in taxon label, " + label);
+                throw new GuessDatesException("Badly formatted date for taxon, " + label);
             }
 
         } else {
             try {
                 d = Double.parseDouble(value);
             } catch (NumberFormatException nfe) {
-                throw new GuessDatesException("Badly formatted date in taxon label, " + label);
+                throw new GuessDatesException("Badly formatted date for taxon, " + label);
             }
         }
 
diff --git a/src/dr/app/beauti/options/ModelOptions.java b/src/dr/app/beauti/options/ModelOptions.java
index aba755d..2f30e88 100644
--- a/src/dr/app/beauti/options/ModelOptions.java
+++ b/src/dr/app/beauti/options/ModelOptions.java
@@ -50,11 +50,11 @@ public class ModelOptions implements Serializable {
     protected final Map<TaxonList, Parameter> statistics = new HashMap<TaxonList, Parameter>();
 
     public static final double demoTuning = 0.75;
-    public static final double demoWeights = 3.0;
+    public static final double demoWeights = 30.0;
 
-    protected static final double branchWeights = 30.0;
-    protected static final double treeWeights = 15.0;
-    protected static final double rateWeights = 3.0;
+    protected static final double branchWeights = 300.0;
+    protected static final double treeWeights = 150.0;
+    protected static final double rateWeights = 30.0;
 
     private final List<ComponentOptions> components = new ArrayList<ComponentOptions>();
 
@@ -67,14 +67,28 @@ public class ModelOptions implements Serializable {
         return new Parameter.Builder(name, description).initial(initial).isFixed(true).build(parameters);
     }
 
+    public Parameter createZeroOneParameter(String name, String description, double initial) {
+        return new Parameter.Builder(name, description)
+                .initial(initial).isZeroOne(true).build(parameters);
+    }
+
     public Parameter createZeroOneParameterUniformPrior(String name, String description, double initial) {
         return new Parameter.Builder(name, description).prior(PriorType.UNIFORM_PRIOR)
                 .initial(initial).isZeroOne(true).build(parameters);
     }
 
+    public Parameter createNonNegativeParameterDirichletPrior(String name, String description, PartitionOptions options, PriorScaleType scaleType, double initial) {
+        return new Parameter.Builder(name, description).scaleType(scaleType).prior(PriorType.DIRICHLET_PRIOR).isNonNegative(true)
+                .partitionOptions(options).initial(initial).build(parameters);
+    }
+
     public Parameter createNonNegativeParameterInfinitePrior(String name, String description, PriorScaleType scaleType, double initial) {
+        return createNonNegativeParameterInfinitePrior(name, description, null, scaleType, initial);
+    }
+
+    public Parameter createNonNegativeParameterInfinitePrior(String name, String description, PartitionOptions options, PriorScaleType scaleType, double initial) {
         return new Parameter.Builder(name, description).scaleType(scaleType).prior(PriorType.NONE_IMPROPER).isNonNegative(true)
-                .initial(initial).build(parameters);
+                .partitionOptions(options).initial(initial).build(parameters);
     }
 
     public Parameter createNonNegativeParameterUniformPrior(String name, String description, PriorScaleType scaleType, double initial,
diff --git a/src/dr/app/beauti/options/Parameter.java b/src/dr/app/beauti/options/Parameter.java
index 4abf3f6..1c87d70 100644
--- a/src/dr/app/beauti/options/Parameter.java
+++ b/src/dr/app/beauti/options/Parameter.java
@@ -25,6 +25,7 @@
 
 package dr.app.beauti.options;
 
+import cern.colt.bitvector.QuickBitVector;
 import dr.app.beauti.types.PriorScaleType;
 import dr.app.beauti.types.PriorType;
 import dr.math.distributions.Distribution;
@@ -493,6 +494,14 @@ public class Parameter implements Serializable {
         this.meanInRealSpace = meanInRealSpace;
     }
 
+    public int[] getParameterDimensionWeights() {
+        if (getOptions() != null && getOptions() instanceof PartitionSubstitutionModel) {
+            return ((PartitionSubstitutionModel)getOptions()).getPartitionCodonWeights();
+        }
+        return new int[] { 1 };
+    }
+
+
     @Override
     public String toString() {
         return getName();
diff --git a/src/dr/app/beauti/options/PartitionClockModel.java b/src/dr/app/beauti/options/PartitionClockModel.java
index 35cd7bc..a3b1b58 100644
--- a/src/dr/app/beauti/options/PartitionClockModel.java
+++ b/src/dr/app/beauti/options/PartitionClockModel.java
@@ -39,10 +39,12 @@ import java.util.List;
 public class PartitionClockModel extends PartitionOptions {
     private static final long serialVersionUID = -6904595851602060488L;
 
-    private static final boolean DEFAULT_CMTC_RATE_REFERENCE_PRIOR = false;
+    private static final boolean DEFAULT_CMTC_RATE_REFERENCE_PRIOR = true;
 
     private ClockType clockType = ClockType.STRICT_CLOCK;
     private ClockDistributionType clockDistributionType = ClockDistributionType.LOGNORMAL;
+    private boolean continuousQuantile = false;
+
     private double rate; // move to initModelParametersAndOpererators() to initial
 
     private ClockModelGroup clockModelGroup = null;
@@ -101,6 +103,10 @@ public class PartitionClockModel extends PartitionOptions {
             new Parameter.Builder(ClockType.UCLD_MEAN, "uncorrelated lognormal relaxed clock mean").
                     prior(PriorType.CTMC_RATE_REFERENCE_PRIOR).initial(rate)
                     .isCMTCRate(true).isNonNegative(true).partitionOptions(this).build(parameters);
+
+            new Parameter.Builder(ClockType.UCGD_MEAN, "uncorrelated gamma relaxed clock mean").
+                    prior(PriorType.CTMC_RATE_REFERENCE_PRIOR).initial(rate)
+                    .isCMTCRate(true).isNonNegative(true).partitionOptions(this).build(parameters);
         } else {
             new Parameter.Builder("clock.rate", "substitution rate").
                     prior(PriorType.UNDEFINED).initial(rate)
@@ -113,12 +119,20 @@ public class PartitionClockModel extends PartitionOptions {
             new Parameter.Builder(ClockType.UCLD_MEAN, "uncorrelated lognormal relaxed clock mean").
                     prior(PriorType.UNDEFINED).initial(rate)
                     .isCMTCRate(true).isNonNegative(true).partitionOptions(this).build(parameters);
+
+            new Parameter.Builder(ClockType.UCGD_MEAN, "uncorrelated gamma relaxed clock mean").
+                    prior(PriorType.UNDEFINED).initial(rate)
+                    .isCMTCRate(true).isNonNegative(true).partitionOptions(this).build(parameters);
         }
 
         new Parameter.Builder(ClockType.UCLD_STDEV, "uncorrelated lognormal relaxed clock stdev").
                 scaleType(PriorScaleType.LOG_STDEV_SCALE).prior(PriorType.EXPONENTIAL_PRIOR).isNonNegative(true)
                 .initial(1.0 / 3.0).mean(1.0 / 3.0).offset(0.0).partitionOptions(this).build(parameters);
 
+        new Parameter.Builder(ClockType.UCGD_SHAPE, "uncorrelated gamma relaxed clock shape").
+                prior(PriorType.EXPONENTIAL_PRIOR).isNonNegative(true)
+                .initial(1.0 / 3.0).mean(1.0 / 3.0).offset(0.0).partitionOptions(this).build(parameters);
+
         // Random local clock
         createParameterGammaPrior(ClockType.LOCAL_CLOCK + ".relativeRates", "random local clock relative rates",
                 PriorScaleType.SUBSTITUTION_RATE_SCALE, 1.0, 0.5, 2.0, false);
@@ -128,6 +142,8 @@ public class PartitionClockModel extends PartitionOptions {
         createScaleOperator(ClockType.UCED_MEAN, demoTuning, rateWeights);
         createScaleOperator(ClockType.UCLD_MEAN, demoTuning, rateWeights);
         createScaleOperator(ClockType.UCLD_STDEV, demoTuning, rateWeights);
+        createScaleOperator(ClockType.UCGD_MEAN, demoTuning, rateWeights);
+        createScaleOperator(ClockType.UCGD_SHAPE, demoTuning, rateWeights);
         // Random local clock
         createScaleOperator(ClockType.LOCAL_CLOCK + ".relativeRates", demoTuning, treeWeights);
         createOperator(ClockType.LOCAL_CLOCK + ".changes", OperatorType.BITFLIP, 1, treeWeights);
@@ -173,26 +189,22 @@ public class PartitionClockModel extends PartitionOptions {
                 case UNCORRELATED:
                     switch (clockDistributionType) {
                         case LOGNORMAL:
-//                            rateParam = getParameter(ClockType.UCLD_MEAN);
                             params.add(getParameter(ClockType.UCLD_STDEV));
                             break;
                         case GAMMA:
-                            throw new UnsupportedOperationException("Uncorrelated gamma clock not implemented yet");
-//                            rateParam = getParameter(ClockType.UCGD_SCALE);
-//                            params.add(getParameter(ClockType.UCGD_SHAPE));
-//                            break;
+                            params.add(getParameter(ClockType.UCGD_MEAN));
+                            params.add(getParameter(ClockType.UCGD_SHAPE));
+                            break;
                         case CAUCHY:
                             throw new UnsupportedOperationException("Uncorrelated Cauchy clock not implemented yet");
 //                            break;
                         case EXPONENTIAL:
-//                            rateParam = getParameter(ClockType.UCED_MEAN);
                             break;
                     }
                     break;
 
                 case AUTOCORRELATED:
                     throw new UnsupportedOperationException("Autocorrelated clock not implemented yet");
-//                    rateParam = getParameter("treeModel.rootRate");//TODO fix tree?
 //                    params.add(getParameter("branchRates.var"));
 //                    break;
 
@@ -245,9 +257,8 @@ public class PartitionClockModel extends PartitionOptions {
                         rateParam = getParameter(ClockType.UCLD_MEAN);
                         break;
                     case GAMMA:
-                        throw new UnsupportedOperationException("Uncorrelated gamma clock not implemented yet");
-//                            rateParam = getParameter(ClockType.UCGD_SCALE);
-//                            break;
+                        rateParam = getParameter(ClockType.UCGD_MEAN);
+                        break;
                     case CAUCHY:
                         throw new UnsupportedOperationException("Uncorrelated Cauchy clock not implemented yet");
 //                            break;
@@ -305,13 +316,12 @@ public class PartitionClockModel extends PartitionOptions {
                                 ops.add(getOperator(ClockType.UCLD_STDEV));
                                 break;
                             case GAMMA:
-                                throw new UnsupportedOperationException("Uncorrelated gamma clock not implemented yet");
-//                                ops.add(getOperator(ClockType.UCGD_SCALE));
-//                                ops.add(getOperator(ClockType.UCGD_SHAPE));
-//                                break;
+                                ops.add(getOperator(ClockType.UCGD_MEAN));
+                                ops.add(getOperator(ClockType.UCGD_SHAPE));
+                                break;
                             case CAUCHY:
-                                throw new UnsupportedOperationException("Uncorrelated Couchy clock not implemented yet");
-//                                break;
+//                                throw new UnsupportedOperationException("Uncorrelated Couchy clock not implemented yet");
+                                break;
                             case EXPONENTIAL:
                                 ops.add(getOperator(ClockType.UCED_MEAN));
                                 break;
@@ -337,12 +347,11 @@ public class PartitionClockModel extends PartitionOptions {
                                 ops.add(getOperator(ClockType.UCLD_STDEV));
                                 break;
                             case GAMMA:
-                                throw new UnsupportedOperationException("Uncorrelated gamma clock not implemented yet");
-//                                ops.add(getOperator(ClockType.UCGD_SCALE));
-//                                break;
+                                ops.add(getOperator(ClockType.UCGD_SHAPE));
+                                break;
                             case CAUCHY:
-                                throw new UnsupportedOperationException("Uncorrelated Cauchy clock not implemented yet");
-//                                break;
+//                                throw new UnsupportedOperationException("Uncorrelated Cauchy clock not implemented yet");
+                                break;
                             case EXPONENTIAL:
                                 break;
                         }
@@ -394,6 +403,14 @@ public class PartitionClockModel extends PartitionOptions {
         this.clockDistributionType = clockDistributionType;
     }
 
+    public boolean isContinuousQuantile() {
+        return continuousQuantile;
+    }
+
+    public void setContinuousQuantile(boolean continuousQuantile) {
+        this.continuousQuantile = continuousQuantile;
+    }
+
     // important to set all clock rate rateParam.isFixed same, which keeps isEstimatedRate() correct when change clock type
     public void setEstimatedRate(boolean isEstimatedRate) {
 //        for (ClockType clockType : new ClockType[]{ClockType.STRICT_CLOCK, ClockType.UNCORRELATED, ClockType.RANDOM_LOCAL_CLOCK}) {
diff --git a/src/dr/app/beauti/options/PartitionClockModelTreeModelLink.java b/src/dr/app/beauti/options/PartitionClockModelTreeModelLink.java
index b5891d5..9b615c9 100644
--- a/src/dr/app/beauti/options/PartitionClockModelTreeModelLink.java
+++ b/src/dr/app/beauti/options/PartitionClockModelTreeModelLink.java
@@ -65,6 +65,7 @@ public class PartitionClockModelTreeModelLink extends PartitionOptions {
         createParameterGammaPrior("branchRates.var", "autocorrelated lognormal relaxed clock rate variance",
                 PriorScaleType.LOG_VAR_SCALE, 0.1, 1, 0.0001, false);
         createParameter("branchRates.categories", "relaxed clock branch rate categories");
+        createZeroOneParameter("branchRates.quantiles", "relaxed clock branch rate quantiles", 0.5);
 
 //        {
 //            final Parameter p = createParameter("treeModel.rootRate", "autocorrelated lognormal relaxed clock root rate", PriorScaleType.ROOT_RATE_SCALE, 1.0, 0.0, Double.POSITIVE_INFINITY);
@@ -97,6 +98,9 @@ public class PartitionClockModelTreeModelLink extends PartitionOptions {
         createOperator("uniformBranchRateCategories", "branchRates.categories", "Performs an integer uniform draw of branch rate categories",
                 "branchRates.categories", OperatorType.INTEGER_UNIFORM, 1, branchWeights / 3);
 
+        createOperator("uniformBranchRateQuantiles", "branchRates.quantiles", "Performs an uniform draw of branch rate quantiles",
+                "branchRates.quantiles", OperatorType.UNIFORM, 0, branchWeights);
+
         createUpDownOperator("upDownRateHeights", "Substitution rate and heights",
                 "Scales substitution rates inversely to node heights of the tree", model.getParameter("clock.rate"),
                 tree.getParameter("treeModel.allInternalNodeHeights"), OperatorType.UP_DOWN, true, demoTuning, rateWeights);
@@ -106,6 +110,9 @@ public class PartitionClockModelTreeModelLink extends PartitionOptions {
         createUpDownOperator("upDownUCLDMeanHeights", "UCLD mean and heights",
                 "Scales UCLD mean inversely to node heights of the tree", model.getParameter(ClockType.UCLD_MEAN),
                 tree.getParameter("treeModel.allInternalNodeHeights"), OperatorType.UP_DOWN, true, demoTuning, rateWeights);
+        createUpDownOperator("upDownUCGDMeanHeights", "UCGD mean and heights",
+                "Scales UCGD mean inversely to node heights of the tree", model.getParameter(ClockType.UCGD_MEAN),
+                tree.getParameter("treeModel.allInternalNodeHeights"), OperatorType.UP_DOWN, true, demoTuning, rateWeights);
 
 
         // These are statistics which could have priors on...
@@ -185,12 +192,13 @@ public class PartitionClockModelTreeModelLink extends PartitionOptions {
                                 op = getOperator("upDownUCLDMeanHeights");
                                 op.setClockModelGroup(model.getClockModelGroup());
                                 ops.add(op);
-
-                                addBranchRateCategories(ops);
                                 break;
                             case GAMMA:
-                                throw new UnsupportedOperationException("Uncorrelated gamma model not implemented yet");
-//                            break;
+//                                throw new UnsupportedOperationException("Uncorrelated gamma model not implemented yet");
+                                op = getOperator("upDownUCGDMeanHeights");
+                                op.setClockModelGroup(model.getClockModelGroup());
+                                ops.add(op);
+                            break;
                             case CAUCHY:
                                 throw new UnsupportedOperationException("Uncorrelated Cauchy model not implemented yet");
 //                            break;
@@ -198,10 +206,14 @@ public class PartitionClockModelTreeModelLink extends PartitionOptions {
                                 op = getOperator("upDownUCEDMeanHeights");
                                 op.setClockModelGroup(model.getClockModelGroup());
                                 ops.add(op);
-
-                                addBranchRateCategories(ops);
                                 break;
                         }
+                        if (model.isContinuousQuantile()) {
+                            ops.add(getOperator("uniformBranchRateQuantiles"));
+                        } else {
+                            ops.add(getOperator("swapBranchRateCategories"));
+                            ops.add(getOperator("uniformBranchRateCategories"));
+                        }
                         break;
 
                     case AUTOCORRELATED:
@@ -237,13 +249,6 @@ public class PartitionClockModelTreeModelLink extends PartitionOptions {
         }
     }
 
-    private void addBranchRateCategories(List<Operator> ops) {
-        ops.add(getOperator("swapBranchRateCategories"));
-//        ops.add(getOperator("randomWalkBranchRateCategories"));
-        ops.add(getOperator("uniformBranchRateCategories"));
-    }
-//TODO    ops.add(tree.getOperator("treeBitMove"));
-
     /**
      * return a list of parameters that are required
      *
diff --git a/src/dr/app/beauti/options/PartitionOptions.java b/src/dr/app/beauti/options/PartitionOptions.java
index bc6acf7..10f83b7 100644
--- a/src/dr/app/beauti/options/PartitionOptions.java
+++ b/src/dr/app/beauti/options/PartitionOptions.java
@@ -105,10 +105,6 @@ public abstract class PartitionOptions extends ModelOptions {
                 .isNonNegative(true).initial(value).partitionOptions(options).build(parameters);
     }
 
-    protected void createAllMusParameter(PartitionOptions options, String name, String description) {
-        new Parameter.Builder(name, description).partitionOptions(options).build(parameters);
-    }
-
     public Parameter getParameter(String name) {
 
         Parameter parameter = parameters.get(name);
diff --git a/src/dr/app/beauti/options/PartitionSubstitutionModel.java b/src/dr/app/beauti/options/PartitionSubstitutionModel.java
index 87c4f1f..b796aef 100644
--- a/src/dr/app/beauti/options/PartitionSubstitutionModel.java
+++ b/src/dr/app/beauti/options/PartitionSubstitutionModel.java
@@ -47,6 +47,8 @@ import java.util.Set;
 public class PartitionSubstitutionModel extends PartitionOptions {
     private static final long serialVersionUID = -2570346396317131108L;
 
+    private final static boolean USE_DIRICHLET_PRIOR_FOR_MUS = false;
+
     // Instance variables
 
     public static final String[] GTR_RATE_NAMES = {"ac", "ag", "at", "cg", "gt"};
@@ -139,7 +141,7 @@ public class PartitionSubstitutionModel extends PartitionOptions {
 
     // only init in PartitionSubstitutionModel
     protected void initModelParametersAndOpererators() {
-        double substWeights = 0.1;
+        double substWeights = 1.0;
 
         //Substitution model parameters
         createZeroOneParameterUniformPrior("frequencies", "base frequencies", 0.25);
@@ -253,15 +255,18 @@ public class PartitionSubstitutionModel extends PartitionOptions {
                 PriorScaleType.SUBSTITUTION_PARAMETER_SCALE, 1.0);
 
         // A vector of relative rates across all partitions...
-        createAllMusParameter(this, "allMus", "All the relative rates regarding codon positions");
 
-        // This only works if the partitions are of the same size...
-//      createOperator("centeredMu", "Relative rates",
-//              "Scales codon position rates relative to each other maintaining mean", "allMus",
-//              OperatorType.CENTERED_SCALE, 0.75, 3.0);
-        createOperator("deltaMu", RelativeRatesType.MU_RELATIVE_RATES.toString(),
-                "Currently use to scale codon position rates relative to each other maintaining mean", "allMus",
-                OperatorType.DELTA_EXCHANGE, 0.75, 3.0);
+        if (USE_DIRICHLET_PRIOR_FOR_MUS) {
+            createNonNegativeParameterDirichletPrior("allMus", "relative rates amongst partitions parameter", this, PriorScaleType.SUBSTITUTION_PARAMETER_SCALE, 1.0);
+            createOperator("scaleMus", RelativeRatesType.MU_RELATIVE_RATES.toString(),
+                    "Scale codon position rates relative to each other", "allMus",
+                    OperatorType.SCALE_INDEPENDENTLY, 0.75, 3.0);
+        } else {
+            createNonNegativeParameterInfinitePrior("allMus", "relative rates amongst partitions parameter", this, PriorScaleType.SUBSTITUTION_PARAMETER_SCALE, 1.0);
+            createOperator("deltaMus", RelativeRatesType.MU_RELATIVE_RATES.toString(),
+                    "Scale codon position rates relative to each other maintaining mean", "allMus",
+                    OperatorType.DELTA_EXCHANGE, 0.75, 3.0);
+        }
 
         createScaleOperator("kappa", demoTuning, substWeights);
         createScaleOperator("CP1.kappa", demoTuning, substWeights);
@@ -344,6 +349,8 @@ public class PartitionSubstitutionModel extends PartitionOptions {
                 if (includeRelativeRates && unlinkedSubstitutionModel) {
                     if (codonHeteroPattern.equals("123")) {
                         switch (nucSubstitutionModel) {
+                            case JC:
+                                break;
                             case HKY:
                                 params.add(getParameter("CP1.kappa"));
                                 params.add(getParameter("CP2.kappa"));
@@ -371,6 +378,8 @@ public class PartitionSubstitutionModel extends PartitionOptions {
 
                     } else if (codonHeteroPattern.equals("112")) {
                         switch (nucSubstitutionModel) {
+                            case JC:
+                                break;
                             case HKY:
                                 params.add(getParameter("CP1+2.kappa"));
                                 params.add(getParameter("CP3.kappa"));
@@ -399,6 +408,8 @@ public class PartitionSubstitutionModel extends PartitionOptions {
                     }
                 } else { // no codon partitioning, or unlinkedSubstitutionModel
                     switch (nucSubstitutionModel) {
+                        case JC:
+                            break;
                         case HKY:
                             params.add(getParameter("kappa"));
                             break;
@@ -417,30 +428,14 @@ public class PartitionSubstitutionModel extends PartitionOptions {
                     }
                 }
 
-                if (includeRelativeRates) {
-                    if (codonHeteroPattern.equals("123")) {
-                        params.add(getParameter("CP1.mu"));
-                        params.add(getParameter("CP2.mu"));
-                        params.add(getParameter("CP3.mu"));
-                    } else if (codonHeteroPattern.equals("112")) {
-                        params.add(getParameter("CP1+2.mu"));
-                        params.add(getParameter("CP3.mu"));
-                    } else {
-                        throw new IllegalArgumentException("codonHeteroPattern must be one of '111', '112' or '123'");
-                    }
-
-                } else { // no codon partitioning
-//TODO
-                }
-
                 // only AMINO_ACIDS not addFrequency
                 addFrequencyParams(params, includeRelativeRates);
                 break;
 
             case DataType.AMINO_ACIDS:
-                if (includeRelativeRates) {
-                    params.add(getParameter("mu"));
-                }
+//                if (includeRelativeRates) {
+//                    params.add(getParameter("mu"));
+//                }
                 break;
 
             case DataType.TWO_STATES:
@@ -460,9 +455,9 @@ public class PartitionSubstitutionModel extends PartitionOptions {
                     default:
                         throw new IllegalArgumentException("Unknown binary substitution model");
                 }
-                if (includeRelativeRates) {
-                    params.add(getParameter("mu"));
-                }
+//                if (includeRelativeRates) {
+//                    params.add(getParameter("mu"));
+//                }
 
                 // only AMINO_ACIDS not addFrequency
                 addFrequencyParams(params, includeRelativeRates);
@@ -541,7 +536,10 @@ public class PartitionSubstitutionModel extends PartitionOptions {
             }
         }
 
-        if (hasCodon()) getParameter("allMus");
+        if (includeRelativeRates) {
+            params.add(getParameter("allMus"));
+//                    params.add(getParameter("mu"));
+        }
     }
 
     private void addFrequencyParams(List<Parameter> params, boolean includeRelativeRates) {
@@ -573,6 +571,8 @@ public class PartitionSubstitutionModel extends PartitionOptions {
                 if (includeRelativeRates && unlinkedSubstitutionModel) {
                     if (codonHeteroPattern.equals("123")) {
                         switch (nucSubstitutionModel) {
+                            case JC:
+                                break;
                             case HKY:
                                 ops.add(getOperator("CP1.kappa"));
                                 ops.add(getOperator("CP2.kappa"));
@@ -602,6 +602,8 @@ public class PartitionSubstitutionModel extends PartitionOptions {
 
                     } else if (codonHeteroPattern.equals("112")) {
                         switch (nucSubstitutionModel) {
+                            case JC:
+                                break;
                             case HKY:
                                 ops.add(getOperator("CP1+2.kappa"));
                                 ops.add(getOperator("CP3.kappa"));
@@ -633,6 +635,8 @@ public class PartitionSubstitutionModel extends PartitionOptions {
 
                 } else { // no codon partitioning, or unlinkedSubstitutionModel
                     switch (nucSubstitutionModel) {
+                        case JC:
+                            break;
                         case HKY:
                             ops.add(getOperator("kappa"));
                             break;
@@ -706,7 +710,7 @@ public class PartitionSubstitutionModel extends PartitionOptions {
                 if (phase == MicroSatModelType.Phase.ONE_PHASE) {
 
                 } else if (phase == MicroSatModelType.Phase.TWO_PHASE) {
-                      ops.add(getOperator("randomWalkGeom"));
+                    ops.add(getOperator("randomWalkGeom"));
                 } else if (phase == MicroSatModelType.Phase.TWO_PHASE_STAR) {
 //                    ops.add(getOperator("randomWalkGeom"));
 //                    ops.add(getOperator("onePhaseProb"));
@@ -752,16 +756,15 @@ public class PartitionSubstitutionModel extends PartitionOptions {
             }
         }
 
-        if (hasCodon()) {
-            Operator deltaMuOperator = getOperator("deltaMu");
+        if (includeRelativeRates) {
+            Operator muOperator;
 
-            // update delta mu operator weight
-            deltaMuOperator.weight = 0.0;
-            for (PartitionSubstitutionModel pm : options.getPartitionSubstitutionModels()) {
-                deltaMuOperator.weight += pm.getCodonPartitionCount();
+            if (USE_DIRICHLET_PRIOR_FOR_MUS) {
+                muOperator = getOperator("scaleMus");
+            } else {
+                muOperator = getOperator("deltaMus");
             }
-
-            ops.add(deltaMuOperator);
+            ops.add(muOperator);
         }
     }
 
@@ -1037,7 +1040,7 @@ public class PartitionSubstitutionModel extends PartitionOptions {
     public String getPrefix() {
         String prefix = "";
         if (options.getPartitionSubstitutionModels(Nucleotides.INSTANCE).size() +
-            options.getPartitionSubstitutionModels(AminoAcids.INSTANCE).size()  > 1) {
+                options.getPartitionSubstitutionModels(AminoAcids.INSTANCE).size()  > 1) {
             // There is more than one active partition model, or doing species analysis
             prefix += getName() + ".";
         }
diff --git a/src/dr/app/beauti/options/PartitionTreeModel.java b/src/dr/app/beauti/options/PartitionTreeModel.java
index 3da06b2..c176476 100644
--- a/src/dr/app/beauti/options/PartitionTreeModel.java
+++ b/src/dr/app/beauti/options/PartitionTreeModel.java
@@ -25,10 +25,7 @@
 
 package dr.app.beauti.options;
 
-import dr.app.beauti.types.OperatorType;
-import dr.app.beauti.types.PriorType;
-import dr.app.beauti.types.StartingTreeType;
-import dr.app.beauti.types.TreePriorType;
+import dr.app.beauti.types.*;
 import dr.evolution.datatype.PloidyType;
 import dr.evolution.tree.Tree;
 
@@ -75,7 +72,6 @@ public class PartitionTreeModel extends PartitionOptions {
         userStartingTree = source.userStartingTree;
 
         isNewick = source.isNewick;
-        fixedTree = source.fixedTree;
 //        initialRootHeight = source.initialRootHeight;
         ploidyType = source.ploidyType;
     }
@@ -103,6 +99,10 @@ public class PartitionTreeModel extends PartitionOptions {
                 OperatorType.WIDE_EXCHANGE, -1, demoWeights);
         createOperator("wilsonBalding", "Tree", "Performs the Wilson-Balding rearrangement of the tree", "tree",
                 OperatorType.WILSON_BALDING, -1, demoWeights);
+
+        createOperator("subtreeLeap", "Tree", "Performs the subtree-leap rearrangement of the tree", "tree",
+                OperatorType.SUBTREE_LEAP, 1.0, options.taxonList.getTaxonCount());
+
     }
 
     /**
@@ -141,21 +141,51 @@ public class PartitionTreeModel extends PartitionOptions {
     public void selectOperators(List<Operator> operators) {
         setAvgRootAndRate();
 
-        // if not a fixed tree then sample tree space
-        if (!fixedTree) {
-            Operator subtreeSlideOp = getOperator("subtreeSlide");
-            if (!subtreeSlideOp.tuningEdited) {
-                subtreeSlideOp.tuning = getInitialRootHeight() / 10.0;
-            }
-
-            operators.add(subtreeSlideOp);
-            operators.add(getOperator("narrowExchange"));
-            operators.add(getOperator("wideExchange"));
-            operators.add(getOperator("wilsonBalding"));
+        Operator subtreeSlideOp = getOperator("subtreeSlide");
+        if (!subtreeSlideOp.tuningEdited) {
+            subtreeSlideOp.tuning = getInitialRootHeight() / 10.0;
         }
 
+        operators.add(subtreeSlideOp);
+        operators.add(getOperator("narrowExchange"));
+        operators.add(getOperator("wideExchange"));
+        operators.add(getOperator("wilsonBalding"));
+
         operators.add(getOperator("treeModel.rootHeight"));
         operators.add(getOperator("uniformHeights"));
+
+        operators.add(getOperator("subtreeLeap"));
+
+        boolean defaultInUse;
+        boolean branchesInUse;
+        boolean newMixInUse;
+
+        // if not a fixed tree then sample tree space
+        if (options.operatorSetType == OperatorSetType.DEFAULT) {
+            defaultInUse = true;
+            branchesInUse = true;
+            newMixInUse = false;
+        } else if (options.operatorSetType == OperatorSetType.NEW_TREE_MIX) {
+            defaultInUse = false;
+            branchesInUse = false;
+            newMixInUse = true;
+        } else if (options.operatorSetType == OperatorSetType.FIXED_TREE_TOPOLOGY) {
+            defaultInUse = false;
+            branchesInUse = true;
+            newMixInUse = false;
+        } else {
+            throw new IllegalArgumentException("Unknown operator set type");
+        }
+
+        getOperator("subtreeSlide").inUse = defaultInUse;
+        getOperator("narrowExchange").inUse = defaultInUse;
+        getOperator("wideExchange").inUse = defaultInUse;
+        getOperator("wilsonBalding").inUse = defaultInUse;
+
+        getOperator("treeModel.rootHeight").inUse = branchesInUse;
+        getOperator("uniformHeights").inUse = branchesInUse;
+
+        getOperator("subtreeLeap").inUse = newMixInUse;
     }
 
     /////////////////////////////////////////////////////////////
diff --git a/src/dr/app/beauti/siteModelsPanel/PartitionModelPanel.java b/src/dr/app/beauti/siteModelsPanel/PartitionModelPanel.java
index 408eff4..dec3102 100644
--- a/src/dr/app/beauti/siteModelsPanel/PartitionModelPanel.java
+++ b/src/dr/app/beauti/siteModelsPanel/PartitionModelPanel.java
@@ -62,7 +62,7 @@ public class PartitionModelPanel extends OptionsPanel {
     private static final long serialVersionUID = -1645661616353099424L;
 
     private JComboBox nucSubstCombo = new JComboBox(EnumSet.range(
-            NucModelType.HKY, NucModelType.TN93).toArray());
+            NucModelType.JC, NucModelType.TN93).toArray());
     private JComboBox aaSubstCombo = new JComboBox(AminoAcidModelType.values());
     private JComboBox binarySubstCombo = new JComboBox(
             new BinaryModelType[] { BinaryModelType.BIN_SIMPLE, BinaryModelType.BIN_COVARION });
@@ -149,6 +149,12 @@ public class PartitionModelPanel extends OptionsPanel {
             public void itemStateChanged(ItemEvent ev) {
                 model.setNucSubstitutionModel((NucModelType) nucSubstCombo
                         .getSelectedItem());
+                if (model.getNucSubstitutionModel() == NucModelType.JC) {
+                    frequencyCombo.getSelectedItem();
+
+                    frequencyCombo.setSelectedItem(FrequencyPolicyType.ALLEQUAL);
+                }
+                frequencyCombo.setEnabled(model.getNucSubstitutionModel() != NucModelType.JC);
             }
         });
         nucSubstCombo
diff --git a/src/dr/app/beauti/tipdatepanel/GuessDatesDialog.java b/src/dr/app/beauti/tipdatepanel/GuessDatesDialog.java
index bb9e7f3..96f8bf1 100644
--- a/src/dr/app/beauti/tipdatepanel/GuessDatesDialog.java
+++ b/src/dr/app/beauti/tipdatepanel/GuessDatesDialog.java
@@ -32,6 +32,9 @@ import dr.app.gui.components.RealNumberField;
 import jam.mac.Utils;
 import jam.panels.OptionsPanel;
 
+
+import java.io.*;
+import javax.swing.filechooser.FileNameExtensionFilter;
 import javax.swing.*;
 import javax.swing.border.EmptyBorder;
 import java.awt.*;
@@ -52,6 +55,7 @@ public class GuessDatesDialog {
     public static final String ORDER_COMBO_KEY = "orderCombo";
     public static final String PREFIX_TEXT_KEY = "prefixText";
     public static final String REGEX_TEXT_KEY = "regexText";
+    public static final String LOAD_TEXT_KEY = "loadText";
     public static final String PARSE_RADIO_KEY = "parseRadio";
     public static final String OFFSET_CHECK_KEY = "offsetCheck";
     public static final String OFFSET_TEXT_KEY = "offsetText";
@@ -62,12 +66,16 @@ public class GuessDatesDialog {
 
     private JFrame frame;
 
+    private File loadFile;
+
     private final OptionsPanel optionPanel;
 
     private final JRadioButton orderRadio = new JRadioButton("Defined just by its order", true);
     private final JComboBox orderCombo = new JComboBox(new String[]{"first", "second", "third",
             "fourth", "fourth from last",
             "third from last", "second from last", "last"});
+    private final JLabel orderLabel = new JLabel("Order:");
+    private final JLabel prefixLabel = new JLabel("Prefix:");
 
     private final JRadioButton prefixRadio = new JRadioButton("Defined by a prefix and its order", false);
     private final JTextField prefixText = new JTextField(16);
@@ -90,41 +98,60 @@ public class GuessDatesDialog {
     private final JTextField dateFormatText = new JTextField(16);
     private String description = "Guess Dates for Taxa";
 
+    private final int defaultDelimitRadioOption;
+    private final int defaultOrderCombo;
+    private final String defaultPrefixText;
+    private final String defaultRegexText;
+    private final int defaultParseRadioOption;
+    private final boolean defaultOffsetCheckOption;
+    private final String defaultOffsetText;
+    private final boolean defaultUnlessCheckOption;
+    private final String defaultUnlessText;
+    private final String defaultOffset2Text;
+    private final String defaultDateFormatText;
+
     public GuessDatesDialog(final JFrame frame) {
         this.frame = frame;
-
-        final int defaultDelimitRadioOption = PREFS.getInt(DELIMIT_RADIO_KEY, 0);
-        final int defaultOrderCombo = PREFS.getInt(ORDER_COMBO_KEY, 0);
-        final String defaultPrefixText = PREFS.get(PREFIX_TEXT_KEY, "");
-        final String defaultRegexText = PREFS.get(REGEX_TEXT_KEY, "");
-        final int defaultParseRadioOption = PREFS.getInt(PARSE_RADIO_KEY, 0);
-        final boolean defaultOffsetCheckOption = PREFS.getBoolean(OFFSET_CHECK_KEY, false);
-        final String defaultOffsetText = PREFS.get(OFFSET_TEXT_KEY, "1900");
-        final boolean defaultUnlessCheckOption = PREFS.getBoolean(UNLESS_CHECK_KEY, false);
-        final String defaultUnlessText = PREFS.get(UNLESS_TEXT_KEY, "16");
-        final String defaultOffset2Text = PREFS.get(OFFSET2_TEXT_KEY, "2000");
-        final String defaultDateFormatText = PREFS.get(DATE_FORMAT_TEXT_KEY, "yyyy-MM-dd");
+        defaultDelimitRadioOption = PREFS.getInt(DELIMIT_RADIO_KEY, 0);
+        defaultOrderCombo = PREFS.getInt(ORDER_COMBO_KEY, 0);
+        defaultPrefixText = PREFS.get(PREFIX_TEXT_KEY, "");
+        defaultRegexText = PREFS.get(REGEX_TEXT_KEY, "");
+        defaultParseRadioOption = PREFS.getInt(PARSE_RADIO_KEY, 0);
+        defaultOffsetCheckOption = PREFS.getBoolean(OFFSET_CHECK_KEY, false);
+        defaultOffsetText = PREFS.get(OFFSET_TEXT_KEY, "1900");
+        defaultUnlessCheckOption = PREFS.getBoolean(UNLESS_CHECK_KEY, false);
+        defaultUnlessText = PREFS.get(UNLESS_TEXT_KEY, "16");
+        defaultOffset2Text = PREFS.get(OFFSET2_TEXT_KEY, "2000");
+        defaultDateFormatText = PREFS.get(DATE_FORMAT_TEXT_KEY, "yyyy-MM-dd");
 
         optionPanel = new OptionsPanel(12, 12);
+    }
+
+    private void setupPanel(boolean parsingFromFile) {
 
-        optionPanel.addLabel("The date is given by a numerical field in the taxon label that is:");
+        optionPanel.removeAll();
 
-        optionPanel.addSpanningComponent(orderRadio);
+        if (parsingFromFile) {
+
+        } else {
+            optionPanel.addLabel("The date is given by a numerical field in the taxon label that is:");
+
+            optionPanel.addSpanningComponent(orderRadio);
 //        optionPanel.addSeparator();
 
-        optionPanel.addSpanningComponent(prefixRadio);
+            optionPanel.addSpanningComponent(prefixRadio);
 
-        final JLabel orderLabel = new JLabel("Order:");
-        optionPanel.addComponents(orderLabel, orderCombo);
-        final JLabel prefixLabel = new JLabel("Prefix:");
-        optionPanel.addComponents(prefixLabel, prefixText);
+            optionPanel.addComponents(orderLabel, orderCombo);
+            optionPanel.addComponents(prefixLabel, prefixText);
 
-        prefixLabel.setEnabled(false);
-        prefixText.setEnabled(false);
-        regexText.setEnabled(false);
+            prefixLabel.setEnabled(false);
+            prefixText.setEnabled(false);
+            regexText.setEnabled(false);
 
-        optionPanel.addComponents(regexRadio, regexText);
-        optionPanel.addSeparator();
+            optionPanel.addComponents(regexRadio, regexText);
+
+            optionPanel.addSeparator();
+        }
 
         optionPanel.addSpanningComponent(numericalRadio);
 
@@ -195,28 +222,31 @@ public class GuessDatesDialog {
             }
         });
 
-        ButtonGroup group = new ButtonGroup();
-        group.add(orderRadio);
-        group.add(prefixRadio);
-        group.add(regexRadio);
-        ItemListener listener = new ItemListener() {
-            public void itemStateChanged(ItemEvent e) {
-                orderLabel.setEnabled(!regexRadio.isSelected());
-                orderCombo.setEnabled(!regexRadio.isSelected());
-                prefixLabel.setEnabled(prefixRadio.isSelected());
-                prefixText.setEnabled(prefixRadio.isSelected());
-                regexText.setEnabled(regexRadio.isSelected());
-            }
-        };
-        orderRadio.addItemListener(listener);
-        prefixRadio.addItemListener(listener);
-        regexRadio.addItemListener(listener);
+        if (!parsingFromFile) {
+            ButtonGroup group = new ButtonGroup();
+            group.add(orderRadio);
+            group.add(prefixRadio);
+            group.add(regexRadio);
+            orderRadio.setSelected(true);
+            ItemListener listener = new ItemListener() {
+                public void itemStateChanged(ItemEvent e) {
+                    orderLabel.setEnabled(!regexRadio.isSelected());
+                    orderCombo.setEnabled(!regexRadio.isSelected());
+                    prefixLabel.setEnabled(prefixRadio.isSelected());
+                    prefixText.setEnabled(prefixRadio.isSelected());
+                    regexText.setEnabled(regexRadio.isSelected());
+                }
+            };
+            orderRadio.addItemListener(listener);
+            prefixRadio.addItemListener(listener);
+            regexRadio.addItemListener(listener);
+        }
 
-        group = new ButtonGroup();
+        ButtonGroup group = new ButtonGroup();
         group.add(numericalRadio);
         group.add(calendarRadio);
         group.add(calendar2Radio);
-        listener = new ItemListener() {
+        ItemListener listener = new ItemListener() {
             public void itemStateChanged(ItemEvent e) {
                 offsetCheck.setEnabled(numericalRadio.isSelected());
                 offsetText.setEnabled(numericalRadio.isSelected() && offsetCheck.isSelected());
@@ -238,6 +268,7 @@ public class GuessDatesDialog {
             case 0: orderRadio.setSelected(true); break;
             case 1: prefixRadio.setSelected(true); break;
             case 2: regexRadio.setSelected(true); break;
+            //case 3: loadRadio.setSelected(true); break; // Not allow loadRadio in prefs to avoid dialog confusion.
             default: throw new IllegalArgumentException("unknown radio option");
         }
 
@@ -262,6 +293,12 @@ public class GuessDatesDialog {
     }
 
     public int showDialog() {
+        return showDialog(false);
+    }
+
+    public int showDialog(boolean parsingFromFile) {
+
+        setupPanel(parsingFromFile);
 
         JOptionPane optionPane = new JOptionPane(optionPanel,
                 JOptionPane.QUESTION_MESSAGE,
diff --git a/src/dr/app/beauti/tipdatepanel/TipDatesPanel.java b/src/dr/app/beauti/tipdatepanel/TipDatesPanel.java
index c67d0ef..c4d9aca 100644
--- a/src/dr/app/beauti/tipdatepanel/TipDatesPanel.java
+++ b/src/dr/app/beauti/tipdatepanel/TipDatesPanel.java
@@ -28,17 +28,18 @@ package dr.app.beauti.tipdatepanel;
 import dr.app.beauti.BeautiFrame;
 import dr.app.beauti.BeautiPanel;
 import dr.app.beauti.components.tipdatesampling.TipDateSamplingComponentOptions;
-import dr.app.beauti.options.BeautiOptions;
-import dr.app.beauti.options.ClockModelGroup;
-import dr.app.beauti.options.DateGuesser;
-import dr.app.beauti.options.GuessDatesException;
+import dr.app.beauti.options.*;
 import dr.app.beauti.types.TipDateSamplingType;
+import dr.app.beauti.util.BEAUTiImporter;
 import dr.app.beauti.util.PanelUtils;
 import dr.app.gui.table.DateCellEditor;
 import dr.app.gui.table.TableEditorStopper;
 import dr.app.gui.table.TableSorter;
+import dr.app.util.Utils;
 import dr.evolution.util.*;
+import dr.evolution.util.Date;
 import dr.evoxml.util.DateUnitsType;
+import dr.util.DataTable;
 import jam.framework.Exportable;
 import jam.table.HeaderRenderer;
 import jam.table.TableRenderer;
@@ -46,15 +47,22 @@ import jam.table.TableRenderer;
 import javax.swing.*;
 import javax.swing.event.ListSelectionEvent;
 import javax.swing.event.ListSelectionListener;
+import javax.swing.filechooser.FileNameExtensionFilter;
 import javax.swing.plaf.BorderUIResource;
 import javax.swing.table.AbstractTableModel;
 import java.awt.*;
 import java.awt.event.*;
-import java.util.EnumSet;
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileReader;
+import java.io.IOException;
+import java.util.*;
+import java.util.List;
 
 /**
  * @author Andrew Rambaut
  * @author Alexei Drummond
+ * @author Tommy Lam
  * @version $Id: DataPanel.java,v 1.17 2006/09/05 13:29:34 rambaut Exp $
  */
 public class TipDatesPanel extends BeautiPanel implements Exportable {
@@ -71,6 +79,7 @@ public class TipDatesPanel extends BeautiPanel implements Exportable {
     SetDatesAction setDatesAction = new SetDatesAction();
     ClearDatesAction clearDatesAction = new ClearDatesAction();
     GuessDatesAction guessDatesAction = new GuessDatesAction();
+    ImportDatesAction importDatesAction = new ImportDatesAction();
 
     SetPrecisionAction setPrecisionAction = new SetPrecisionAction();
 
@@ -164,6 +173,10 @@ public class TipDatesPanel extends BeautiPanel implements Exportable {
         PanelUtils.setupComponent(button);
         toolBar1.add(button);
 
+        button = new JButton(importDatesAction);
+        PanelUtils.setupComponent(button);
+        toolBar1.add(button);
+
         button = new JButton(setDatesAction);
         PanelUtils.setupComponent(button);
         toolBar1.add(button);
@@ -264,6 +277,7 @@ public class TipDatesPanel extends BeautiPanel implements Exportable {
 
         clearDatesAction.setEnabled(false);
         guessDatesAction.setEnabled(false);
+        importDatesAction.setEnabled(false);
         setDatesAction.setEnabled(false);
         setPrecisionAction.setEnabled(false);
         directionCombo.setEnabled(false);
@@ -284,6 +298,7 @@ public class TipDatesPanel extends BeautiPanel implements Exportable {
                 boolean enabled = usingTipDates.isSelected();
                 clearDatesAction.setEnabled(enabled);
                 guessDatesAction.setEnabled(enabled);
+                importDatesAction.setEnabled(enabled);
                 setDatesAction.setEnabled(enabled);
                 setPrecisionAction.setEnabled(enabled);
                 unitsLabel.setEnabled(enabled);
@@ -577,7 +592,7 @@ public class TipDatesPanel extends BeautiPanel implements Exportable {
 
         String warningMessage = null;
 
-            if (selRows.length > 0) {
+        if (selRows.length > 0) {
             Taxa selectedTaxa = new Taxa();
 
             for (int row : selRows) {
@@ -601,6 +616,129 @@ public class TipDatesPanel extends BeautiPanel implements Exportable {
         dataTableModel.fireTableDataChanged();
     }
 
+    public void importDates() {
+
+        File[] files = frame.selectImportFiles("Import Dates File...", false, new FileNameExtensionFilter[]{
+                new FileNameExtensionFilter("Tab-delimited text files", "txt", "tab", "dat")});
+
+        DataTable<String[]> dataTable;
+
+        if (files != null && files.length != 0) {
+            try {
+                // Load the file as a table
+                dataTable = DataTable.Text.parse(new FileReader(files[0]));
+
+            } catch (FileNotFoundException fnfe) {
+                JOptionPane.showMessageDialog(this, "Unable to open file: File not found",
+                        "Unable to open file",
+                        JOptionPane.ERROR_MESSAGE);
+                return;
+            } catch (IOException ioe) {
+                JOptionPane.showMessageDialog(this, "Unable to read file: " + ioe.getMessage(),
+                        "Unable to read file",
+                        JOptionPane.ERROR_MESSAGE);
+                return;
+            } catch (Exception ex) {
+                ex.printStackTrace(System.err);
+                JOptionPane.showMessageDialog(this, "Fatal exception: " + ex,
+                        "Error reading file",
+                        JOptionPane.ERROR_MESSAGE);
+                ex.printStackTrace();
+                return;
+            }
+        } else {
+            return;
+        }
+
+        if (dataTable.getColumnCount() == 0) {
+            // expecting at least 2 columns - labels and dates
+            JOptionPane.showMessageDialog(frame,
+                    "Expecting a tab delimited file with at\n" +
+                            "least 2 columns (taxon labels and dates).",
+                    "Incompatible values", JOptionPane.ERROR_MESSAGE);
+            return;
+        }
+
+        String[] columnLabels = dataTable.getColumnLabels();
+        String[] taxonNames = dataTable.getRowLabels();
+
+        // assume the first column contains the dates
+        int dateColumn = 0;
+
+        if (columnLabels.length > 1) {
+            List<Integer> dateColumns = new ArrayList<Integer>();
+
+            // see if there is a column labelled 'dates' or something
+            for (int i = 0; i < dataTable.getColumnCount(); i++) {
+                if (columnLabels[i].toLowerCase().contains("date")) {
+                    dateColumns.add(i);
+                }
+            }
+
+            if (dateColumns.size() > 0) {
+                // if there are multiple date column possibilities, take the first
+                // @todo - allow the user to select the column to use
+                dateColumn = dateColumns.get(0);
+            }
+        }
+
+        Map<Taxon, String> taxonDateMap = new HashMap<Taxon, String>();
+        int matchCount = 0;
+        int mismatchCount = 0;
+
+        String[] values = dataTable.getColumn(dateColumn);
+
+        int j = 0;
+        for (final String taxonName : taxonNames) {
+
+            final int index = options.taxonList.getTaxonIndex(taxonName);
+            if (index >= 0) {
+                taxonDateMap.put(options.taxonList.getTaxon(index), values[j]);
+                matchCount ++;
+            } else {
+                mismatchCount ++;
+            }
+            j++;
+        }
+
+        if (guessDatesDialog == null) {
+            guessDatesDialog = new GuessDatesDialog(frame);
+        }
+
+        guessDatesDialog.setDescription("Parse date values from file");
+
+        int result = guessDatesDialog.showDialog(true);
+
+        if (result == -1 || result == JOptionPane.CANCEL_OPTION) {
+            return;
+        }
+
+        DateGuesser guesser = options.dateGuesser;
+
+        guesser.guessDates = true;
+        guessDatesDialog.setupGuesser(guesser);
+
+        String warningMessage = null;
+
+        guesser.guessDates(options.taxonList, taxonDateMap);
+
+        if (warningMessage != null) {
+            JOptionPane.showMessageDialog(this, "Warning: some dates may not be set correctly - \n" + warningMessage,
+                    "Error guessing dates",
+                    JOptionPane.WARNING_MESSAGE);
+        }
+
+        // adjust the dates to the current timescale...
+        timeScaleChanged();
+
+        dataTableModel.fireTableDataChanged();
+    }
+
+    public boolean isMissingValue(String value) {
+        return (value.equals("?") || value.equals("NA") || value.length() == 0);
+    }
+
+
     public class SetDatesAction extends AbstractAction {
         /**
          *
@@ -649,6 +787,22 @@ public class TipDatesPanel extends BeautiPanel implements Exportable {
         }
     }
 
+    public class ImportDatesAction extends AbstractAction {
+        /**
+         *
+         */
+        private static final long serialVersionUID = 8514706149822252033L;
+
+        public ImportDatesAction() {
+            super("Import Dates");
+            setToolTipText("Use this tool to import the sampling dates from a file");
+        }
+
+        public void actionPerformed(ActionEvent ae) {
+            importDates();
+        }
+    }
+
     public class SetPrecisionAction extends AbstractAction {
         /**
          *
diff --git a/src/dr/app/beauti/treespanel/PartitionTreePriorPanel.java b/src/dr/app/beauti/treespanel/PartitionTreePriorPanel.java
index 3eed383..4b742c3 100644
--- a/src/dr/app/beauti/treespanel/PartitionTreePriorPanel.java
+++ b/src/dr/app/beauti/treespanel/PartitionTreePriorPanel.java
@@ -462,10 +462,14 @@ public class PartitionTreePriorPanel extends OptionsPanel {
 
         } else {
 
-            for (TreePriorType treePriorType : EnumSet.range(TreePriorType.CONSTANT, TreePriorType.BIRTH_DEATH_BASIC_REPRODUCTIVE_NUMBER)) {
+            for (TreePriorType treePriorType : EnumSet.range(TreePriorType.CONSTANT, TreePriorType.BIRTH_DEATH_SERIAL_SAMPLING)) {
                 treePriorCombo.addItem(treePriorType);
             }
 
+            // REMOVED due to unresolved issues with model
+            // treePriorCombo.addItem(TreePriorType.BIRTH_DEATH_BASIC_REPRODUCTIVE_NUMBER);
+
+
             // would be much better to disable these rather than removing them
             if (isMultiLocus) {
                 treePriorCombo.removeItem(TreePriorType.SKYLINE);
diff --git a/src/dr/app/beauti/types/ClockType.java b/src/dr/app/beauti/types/ClockType.java
index f8b2ade..473638b 100644
--- a/src/dr/app/beauti/types/ClockType.java
+++ b/src/dr/app/beauti/types/ClockType.java
@@ -50,7 +50,7 @@ public enum ClockType {
     final public static String UCED_MEAN = "uced.mean";
     final public static String UCLD_MEAN = "ucld.mean";
     final public static String UCLD_STDEV = "ucld.stdev";
-    final public static String UCGD_SCALE = "ucgd.scale";
+    final public static String UCGD_MEAN = "ucgd.mean";
     final public static String UCGD_SHAPE = "ucgd.shape";
 
     final public static String ACLD_MEAN = "acld.mean";
diff --git a/src/dr/app/oldbeauti/GuessDatesException.java b/src/dr/app/beauti/types/OperatorSetType.java
similarity index 71%
rename from src/dr/app/oldbeauti/GuessDatesException.java
rename to src/dr/app/beauti/types/OperatorSetType.java
index 94978e4..0b1de72 100644
--- a/src/dr/app/oldbeauti/GuessDatesException.java
+++ b/src/dr/app/beauti/types/OperatorSetType.java
@@ -1,5 +1,5 @@
 /*
- * GuessDatesException.java
+ * OperatorType.java
  *
  * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
  *
@@ -23,14 +23,24 @@
  * Boston, MA  02110-1301  USA
  */
 
-package dr.app.oldbeauti;
+package dr.app.beauti.types;
 
 /**
  * @author Andrew Rambaut
- * @version $Id$
  */
-public class GuessDatesException extends Exception {
-	public GuessDatesException(String message) {
-		super(message);
-	}
+public enum OperatorSetType {
+
+    DEFAULT("classic operator mix"),
+    NEW_TREE_MIX("new experimental mix"),
+    FIXED_TREE_TOPOLOGY("fixed tree topology");
+
+    OperatorSetType(String displayName) {
+        this.displayName = displayName;
+    }
+
+    public String toString() {
+        return displayName;
+    }
+
+    private final String displayName;
 }
diff --git a/src/dr/app/beauti/types/OperatorType.java b/src/dr/app/beauti/types/OperatorType.java
index b8ad47f..9380c08 100644
--- a/src/dr/app/beauti/types/OperatorType.java
+++ b/src/dr/app/beauti/types/OperatorType.java
@@ -59,6 +59,8 @@ public enum OperatorType {
     UNIFORM("uniform"),
     INTEGER_UNIFORM("integerUniform"),
     SUBTREE_SLIDE("subtreeSlide"),
+    SUBTREE_JUMP("subtreeJump"),
+    SUBTREE_LEAP("subtreeLeap"),
     NARROW_EXCHANGE("narrowExchange"),
     WIDE_EXCHANGE("wideExchange"),
     GMRF_GIBBS_OPERATOR("gmrfGibbsOperator"),
diff --git a/src/dr/app/beauti/types/PriorType.java b/src/dr/app/beauti/types/PriorType.java
index 1fe4751..e257eb9 100644
--- a/src/dr/app/beauti/types/PriorType.java
+++ b/src/dr/app/beauti/types/PriorType.java
@@ -48,6 +48,7 @@ public enum PriorType {
     INVERSE_GAMMA_PRIOR("Inverse Gamma", true, true, true),
     BETA_PRIOR("Beta", true, true, true),
     ONE_OVER_X_PRIOR("1/x", true, true, false),
+    DIRICHLET_PRIOR("Dirichlet", false, false, false),
     CTMC_RATE_REFERENCE_PRIOR("CTMC Rate Reference", true, false, false),
     LOGNORMAL_HPM_PRIOR("Lognormal HPM", true, false, false),
     NORMAL_HPM_PRIOR("Normal HPM", true, false, false),
@@ -204,8 +205,11 @@ public enum PriorType {
                 buffer.append(NumberUtil.formatDecimal(parameter.scale, 10, 6));
                 buffer.append("]");
                 break;
+            case DIRICHLET_PRIOR:
+                buffer.append("Dirichlet [1,1]");
+                break;
             case ONE_OVER_X_PRIOR:
-                buffer.append("1/x"); // rename Jeffreys prior to 1/x prior everywhere in Beauti
+                buffer.append("1/x");
                 break;
             case POISSON_PRIOR:
                 buffer.append("Poisson [");
diff --git a/src/dr/app/beauti/types/TreePriorType.java b/src/dr/app/beauti/types/TreePriorType.java
index 289104f..5409492 100644
--- a/src/dr/app/beauti/types/TreePriorType.java
+++ b/src/dr/app/beauti/types/TreePriorType.java
@@ -34,10 +34,10 @@ public enum TreePriorType {
     EXPONENTIAL("Coalescent: Exponential Growth"),
     LOGISTIC("Coalescent: Logistic Growth"),
     EXPANSION("Coalescent: Expansion Growth"),
+    SKYGRID("Coalescent: Bayesian SkyGrid"),
+    GMRF_SKYRIDE("Coalescent: GMRF Bayesian Skyride"),
     SKYLINE("Coalescent: Bayesian Skyline"),
     EXTENDED_SKYLINE("Coalescent: Extended Bayesian Skyline Plot"),
-    GMRF_SKYRIDE("Coalescent: GMRF Bayesian Skyride"),
-    SKYGRID("Coalescent: Bayesian SkyGrid"),
     YULE("Speciation: Yule Process"),
     YULE_CALIBRATION("Speciation: Calibrated Yule"),
     BIRTH_DEATH("Speciation: Birth-Death Process"),
diff --git a/src/dr/app/bss/BeagleSequenceSimulatorApp.java b/src/dr/app/bss/BeagleSequenceSimulatorApp.java
index 3e4512c..c72b3da 100644
--- a/src/dr/app/bss/BeagleSequenceSimulatorApp.java
+++ b/src/dr/app/bss/BeagleSequenceSimulatorApp.java
@@ -149,9 +149,17 @@ public class BeagleSequenceSimulatorApp {
 		
 		if (args.length > 0) {
 			
+			try {
+			
 				BeagleSequenceSimulatorConsoleApp app = new BeagleSequenceSimulatorConsoleApp();
 				app.simulate(args);
 				
+			} catch (UnsupportedClassVersionError e) {
+
+				Utils.handleException(e, "Your Java Runtime Environment is too old. Please update");
+
+			}//END: try-catch block
+				
 		} else {
 
 			try {
@@ -164,17 +172,16 @@ public class BeagleSequenceSimulatorApp {
 
 			} catch (UnsupportedClassVersionError e) {
 
-				System.out
-						.println("Your Java Runtime Environment is too old. Please update");
-
+				Utils.handleException(e, "Your Java Runtime Environment is too old. Please update");
+				
 			} catch (ClassNotFoundException e) {
-				e.printStackTrace();
+				Utils.handleException(e, e.getMessage());
 			} catch (InstantiationException e) {
-				e.printStackTrace();
+				Utils.handleException(e, e.getMessage());
 			} catch (IllegalAccessException e) {
-				e.printStackTrace();
+				Utils.handleException(e, e.getMessage());
 			} catch (UnsupportedLookAndFeelException e) {
-				e.printStackTrace();
+				Utils.handleException(e, e.getMessage());
 			}// END: try catch block
 
 		}// END: command line check
diff --git a/src/dr/app/bss/Changelog b/src/dr/app/bss/Changelog
index 1589c78..0fe977b 100644
--- a/src/dr/app/bss/Changelog
+++ b/src/dr/app/bss/Changelog
@@ -27,11 +27,11 @@ September 2014: PIBUSS version 1.3.7
 * Added additional parameter to the NewickParser which allows the user to use node heights without specifying dates for taxa.
 * Sensible starting values for UCLD clock
 
-PIBUSS version 1.3.8 release candidate
+August 2015: PIBUSS version 1.3.8
 --------------------------------------
 * Parameters of the relaxed lognormal clock are now on the log scale by default and can be changed in XML or in CLI (-lrcParametersInRealSpace true|false). 
   GUI is hardcoded to true.
 * Tool-tip for ucld mean and stdev fields that says these parameters are in the real space
 * XML parser checks siteCounts per partition; previously this behaviour could throw an exception if using rootSequences with multiple partitions
 * Fixed a bug in SiteRate model XML generation; when using multiple partitions alpha parameter was not raised by suffix
-* Fixed a labelling of the Gamma Site Rate model in the appropriate editor
\ No newline at end of file
+* Fixed a labelling of the Gamma Site Rate model in the appropriate editor
diff --git a/src/dr/app/bss/PartitionData.java b/src/dr/app/bss/PartitionData.java
index 421f6ba..52eaa5a 100644
--- a/src/dr/app/bss/PartitionData.java
+++ b/src/dr/app/bss/PartitionData.java
@@ -983,7 +983,8 @@ public class PartitionData implements Serializable {
 	                false, // 
 	                Double.NaN, //
 	                true, //randomizeRates
-	                false // keepRates
+	                false, // keepRates
+					false // cacheRates
 	                );
 
 		} else if(this.clockModelIndex == 2) { // Exponential relaxed clock
@@ -1004,7 +1005,8 @@ public class PartitionData implements Serializable {
 	                false, // 
 	                Double.NaN, //
 	                true, //randomizeRates
-	                false // keepRates
+					false, // keepRates
+					false // cacheRates
 	                );
 	        
 		} else if(this.clockModelIndex == 3) { // Inverse Gaussian
@@ -1024,7 +1026,8 @@ public class PartitionData implements Serializable {
 	                false, // 
 	                Double.NaN, //
 	                true, //randomizeRates
-	                false // keepRates
+					false, // keepRates
+					false // cacheRates
 	                );
 	        
 		} else {
diff --git a/src/dr/app/bss/README.textile b/src/dr/app/bss/README.textile
index 4e08563..8d09e33 100644
--- a/src/dr/app/bss/README.textile
+++ b/src/dr/app/bss/README.textile
@@ -32,7 +32,10 @@ h1. CITING
           }
    
 h1. TODO
-
+* export tree with branches in subst/site
+* lognormal clock real scale / log scale in GUI 
+* change save/load to JSON
+* error handling
 * PIBUSS bug with newick import (Importer line 199 read method). Does not happen if first char is newline
 * boolean switch for real/log scales for the parameters of lognormal clock model 
 * parser for output type should be case insensitive
diff --git a/src/dr/app/bss/Utils.java b/src/dr/app/bss/Utils.java
index 20f84a1..d8cc833 100644
--- a/src/dr/app/bss/Utils.java
+++ b/src/dr/app/bss/Utils.java
@@ -348,7 +348,7 @@ public class Utils {
 	public static final String SAVE_ICON = "icons/save.png";
 	public static final String TEXT_FILE_ICON = "icons/file.png";
 
-	public static double[] UNIFORM_CODON_FREQUENCIES = new double[] {
+	public static final double[] UNIFORM_CODON_FREQUENCIES = new double[] {
 			0.0163936, 0.01639344, 0.01639344, 0.01639344, 0.01639344,
 			0.01639344, 0.01639344, 0.01639344, 0.01639344, 0.01639344,
 			0.01639344, 0.01639344, 0.01639344, 0.01639344, 0.01639344,
@@ -363,6 +363,8 @@ public class Utils {
 			0.01639344, 0.01639344, 0.01639344, 0.01639344, 0.01639344,
 			0.01639344 };
 
+	public static final String STOP_CODONS[] = new String[] { "TAA", "TAG", "TGA" };
+	
 	// ///////////////////////////////
 	// ---GENERAL UTILITY METHODS---//
 	// ///////////////////////////////
@@ -1088,6 +1090,14 @@ public class Utils {
 		System.out.println();
 	}// END: printArray
 
+	public static void printArray(boolean[] x) {
+		for (int i = 0; i < x.length; i++) {
+			System.out.print(x[i] + " ");
+		}
+		System.out.println();
+		
+	}
+	
 	public static void printArray(String[] x) {
 		for (int i = 0; i < x.length; i++) {
 			System.out.println(x[i]);
diff --git a/src/dr/app/bss/test/BeagleSeqSimTest.java b/src/dr/app/bss/test/BeagleSeqSimTest.java
index bfa8375..1b4b9a4 100644
--- a/src/dr/app/bss/test/BeagleSeqSimTest.java
+++ b/src/dr/app/bss/test/BeagleSeqSimTest.java
@@ -690,8 +690,8 @@ public class BeagleSeqSimTest {
 
 			if (calculateLikelihood) {
 
-				// NewBeagleTreeLikelihood nbtl = new
-				// NewBeagleTreeLikelihood(alignment, treeModel,
+				// NewBeagleSequenceLikelihood nbtl = new
+				// NewBeagleSequenceLikelihood(alignment, treeModel,
 				// substitutionModel, (SiteModel) siteRateModel,
 				// branchRateModel, null, false,
 				// PartialsRescalingScheme.DEFAULT);
diff --git a/src/dr/app/gui/chart/JChart.java b/src/dr/app/gui/chart/JChart.java
index fcbf4fb..6b51b7f 100644
--- a/src/dr/app/gui/chart/JChart.java
+++ b/src/dr/app/gui/chart/JChart.java
@@ -521,7 +521,8 @@ public class JChart extends JPanel {
         }
         int n = axis.getMajorTickCount();
         for (int i = 0; i < n; i++) {
-            label = axis.format(axis.getMajorTickValue(i));
+            double value = axis.getMajorTickValue(i);
+            label = axis.format(value);
             width = g2.getFontMetrics().stringWidth(label);
             if (maxWidth < width)
                 maxWidth = width;
diff --git a/src/dr/app/oldbeauti/BeastGenerator.java b/src/dr/app/oldbeauti/BeastGenerator.java
deleted file mode 100644
index 70f6f50..0000000
--- a/src/dr/app/oldbeauti/BeastGenerator.java
+++ /dev/null
@@ -1,3007 +0,0 @@
-/*
- * BeastGenerator.java
- *
- * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
- *
- * This file is part of BEAST.
- * See the NOTICE file distributed with this work for additional
- * information regarding copyright ownership and licensing.
- *
- * BEAST is free software; you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as
- * published by the Free Software Foundation; either version 2
- * of the License, or (at your option) any later version.
- *
- *  BEAST is distributed in the hope that it will be useful,
- *  but WITHOUT ANY WARRANTY; without even the implied warranty of
- *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
- *  GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public
- * License along with BEAST; if not, write to the
- * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
- * Boston, MA  02110-1301  USA
- */
-
-package dr.app.oldbeauti;
-
-import dr.app.beast.BeastVersion;
-import dr.app.beauti.generator.InitialTreeGenerator;
-import dr.evolution.alignment.SitePatterns;
-import dr.evolution.datatype.DataType;
-import dr.evolution.datatype.Nucleotides;
-import dr.evolution.datatype.TwoStateCovarion;
-import dr.evolution.tree.NodeRef;
-import dr.evolution.tree.Tree;
-import dr.evolution.util.Taxa;
-import dr.evolution.util.Taxon;
-import dr.evolution.util.TaxonList;
-import dr.evolution.util.Units;
-import dr.evomodel.branchratemodel.BranchRateModel;
-import dr.evomodel.coalescent.VariableDemographicModel;
-import dr.evomodel.sitemodel.GammaSiteModel;
-import dr.evomodel.sitemodel.SiteModel;
-import dr.evomodel.speciation.BirthDeathGernhard08Model;
-import dr.evomodel.substmodel.AminoAcidModelType;
-import dr.evomodel.substmodel.NucModelType;
-import dr.evomodel.tree.TreeModel;
-import dr.evomodelxml.CSVExporterParser;
-import dr.evomodelxml.branchratemodel.DiscretizedBranchRatesParser;
-import dr.evomodelxml.branchratemodel.RandomLocalClockModelParser;
-import dr.evomodelxml.branchratemodel.StrictClockBranchRatesParser;
-import dr.evomodelxml.coalescent.*;
-import dr.evomodelxml.coalescent.operators.SampleNonActiveGibbsOperatorParser;
-import dr.evomodelxml.operators.ExchangeOperatorParser;
-import dr.evomodelxml.operators.SubtreeSlideOperatorParser;
-import dr.evomodelxml.operators.TreeBitMoveOperatorParser;
-import dr.evomodelxml.operators.WilsonBaldingParser;
-import dr.evomodelxml.sitemodel.GammaSiteModelParser;
-import dr.evomodelxml.speciation.BirthDeathModelParser;
-import dr.evomodelxml.speciation.SpeciationLikelihoodParser;
-import dr.evomodelxml.speciation.YuleModelParser;
-import dr.evomodelxml.substmodel.*;
-import dr.evomodelxml.tree.*;
-import dr.evomodelxml.treelikelihood.TreeLikelihoodParser;
-import dr.evoxml.*;
-import dr.inference.distribution.ExponentialDistributionModel;
-import dr.inference.distribution.ExponentialMarkovModel;
-import dr.inference.model.ParameterParser;
-import dr.inferencexml.distribution.*;
-import dr.inferencexml.loggers.ColumnsParser;
-import dr.inferencexml.loggers.LoggerParser;
-import dr.inferencexml.model.*;
-import dr.inferencexml.operators.*;
-import dr.util.Attribute;
-import dr.util.Version;
-import dr.xml.XMLParser;
-
-import java.io.Writer;
-import java.util.ArrayList;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Set;
-
-/**
- * This class holds all the data for the current BEAUti Document
- *
- * @author Andrew Rambaut
- * @author Alexei Drummond
- * @version $Id: BeastGenerator.java,v 1.4 2006/09/05 13:29:34 rambaut Exp $
- */
-public class BeastGenerator extends BeautiOptions {
-
-    private final static Version version = new BeastVersion();
-
-    public BeastGenerator() {
-        super();
-    }
-
-    /**
-     * Checks various options to check they are valid. Throws IllegalArgumentExceptions with
-     * descriptions of the problems.
-     *
-     * @throws IllegalArgumentException if there is a problem with the current settings
-     */
-    public void checkOptions() throws IllegalArgumentException {
-        Set<String> ids = new HashSet<String>();
-
-        ids.add(TaxaParser.TAXA);
-        ids.add(AlignmentParser.ALIGNMENT);
-
-        if (taxonList != null) {
-            for (int i = 0; i < taxonList.getTaxonCount(); i++) {
-                Taxon taxon = taxonList.getTaxon(i);
-                if (ids.contains(taxon.getId())) {
-                    throw new IllegalArgumentException("A taxon has the same id," + taxon.getId() +
-                            "\nas another element (taxon, sequence, taxon set etc.):\nAll ids should be unique.");
-                }
-                ids.add(taxon.getId());
-            }
-        }
-
-        for (Taxa taxa : taxonSets) {
-            if (taxa.getTaxonCount() < 2) {
-                throw new IllegalArgumentException("Taxon set, " + taxa.getId() + ", should contain\n" +
-                        "at least two taxa.");
-            }
-            if (ids.contains(taxa.getId())) {
-                throw new IllegalArgumentException("A taxon sets has the same id," + taxa.getId() +
-                        "\nas another element (taxon, sequence, taxon set etc.):\nAll ids should be unique.");
-            }
-            ids.add(taxa.getId());
-        }
-
-        getPartionCount(codonHeteroPattern);
-    }
-
-    /**
-     * Generate a beast xml file from these beast options
-     *
-     * @param w the writer
-     */
-    public void generateXML(Writer w) {
-
-        XMLWriter writer = new XMLWriter(w);
-
-        writer.writeText("<?xml version=\"1.0\" standalone=\"yes\"?>");
-        writer.writeComment("Generated by BEAUti " + version.getVersionString());
-        writer.writeComment("      by Alexei J. Drummond, Andrew Rambaut and Marc A. Suchard");
-        writer.writeComment("      Department of Computer Science, University of Auckland,");
-        writer.writeComment("      Institute of Evolutionary Biology, University of Edinburgh and");
-        writer.writeComment("      David Geffen School of Medicine, University of California, Los Angeles");
-        writer.writeComment("      http://beast.bio.ed.ac.uk/");
-        writer.writeOpenTag("beast");
-        writer.writeText("");
-        writeTaxa(writer);
-
-        if (taxonSets != null && taxonSets.size() > 0) {
-            writeTaxonSets(writer);
-        }
-
-        if (alignment != null) {
-            writeAlignment(writer);
-            writePatternLists(writer);
-        }
-
-        writer.writeText("");
-        writeNodeHeightPriorModel(writer);
-
-        writer.writeText("");
-        writeStartingTree(writer);
-        writer.writeText("");
-        writeTreeModel(writer);
-        writer.writeText("");
-        writeNodeHeightPrior(writer);
-        if (nodeHeightPrior == LOGISTIC) {
-            writer.writeText("");
-            writeBooleanLikelihood(writer);
-        } else if (nodeHeightPrior == SKYLINE) {
-            writer.writeText("");
-            writeExponentialMarkovLikelihood(writer);
-        }
-
-        writer.writeText("");
-        writeBranchRatesModel(writer);
-
-        if (alignment != null) {
-            writer.writeText("");
-            writeSubstitutionModel(writer);
-            writer.writeText("");
-            writeSiteModel(writer);
-            writer.writeText("");
-            writeTreeLikelihood(writer);
-        }
-
-        writer.writeText("");
-
-        if (taxonSets != null && taxonSets.size() > 0) {
-            writeTMRCAStatistics(writer);
-        }
-
-        ArrayList<Operator> operators = selectOperators();
-        writeOperatorSchedule(operators, writer);
-        writer.writeText("");
-        writeMCMC(writer);
-        writer.writeText("");
-        writeTimerReport(writer);
-        writer.writeText("");
-        if (performTraceAnalysis) {
-            writeTraceAnalysis(writer);
-        }
-        if (generateCSV) {
-            writeAnalysisToCSVfile(writer);
-        }
-
-        writer.writeCloseTag("beast");
-        writer.flush();
-    }
-
-    /**
-     * Generate a taxa block from these beast options
-     *
-     * @param writer the writer
-     */
-    public void writeTaxa(XMLWriter writer) {
-
-        writer.writeComment("The list of taxa to be analysed (can also include dates/ages).");
-        writer.writeComment("ntax=" + taxonList.getTaxonCount());
-        writer.writeOpenTag(TaxaParser.TAXA, new Attribute[]{new Attribute.Default<String>(XMLParser.ID, TaxaParser.TAXA)});
-
-        boolean firstDate = true;
-        for (int i = 0; i < taxonList.getTaxonCount(); i++) {
-            Taxon taxon = taxonList.getTaxon(i);
-
-            boolean hasDate = false;
-
-            if (maximumTipHeight > 0.0) {
-                hasDate = TaxonList.Utils.hasAttribute(taxonList, i, dr.evolution.util.Date.DATE);
-            }
-
-            writer.writeTag(TaxonParser.TAXON, new Attribute[]{new Attribute.Default<String>(XMLParser.ID, taxon.getId())}, !hasDate);
-
-            if (hasDate) {
-                dr.evolution.util.Date date = (dr.evolution.util.Date) taxon.getAttribute(dr.evolution.util.Date.DATE);
-
-                if (firstDate) {
-                    units = date.getUnits();
-                    firstDate = false;
-                } else {
-                    if (units != date.getUnits()) {
-                        System.err.println("Error: Units in dates do not match.");
-                    }
-                }
-
-                Attribute[] attributes = {
-                        new Attribute.Default<Double>(DateParser.VALUE, date.getTimeValue()),
-                        new Attribute.Default<String>(DateParser.DIRECTION, date.isBackwards() ? DateParser.BACKWARDS : DateParser.FORWARDS),
-                        new Attribute.Default<String>(DateParser.UNITS, Units.Utils.getDefaultUnitName(units))
-                        /*,
-                                                                                new Attribute.Default("origin", date.getOrigin()+"")*/
-                };
-
-                writer.writeTag(dr.evolution.util.Date.DATE, attributes, true);
-                writer.writeCloseTag(TaxonParser.TAXON);
-            }
-        }
-
-        writer.writeCloseTag(TaxaParser.TAXA);
-    }
-
-    /**
-     * Generate additional taxon sets
-     *
-     * @param writer the writer
-     */
-    public void writeTaxonSets(XMLWriter writer) {
-
-        writer.writeText("");
-        for (Taxa taxa : taxonSets) {
-            writer.writeOpenTag(
-                    TaxaParser.TAXA,
-                    new Attribute[]{
-                            new Attribute.Default<String>(XMLParser.ID, taxa.getId())
-                    }
-            );
-
-            for (int j = 0; j < taxa.getTaxonCount(); j++) {
-                Taxon taxon = taxa.getTaxon(j);
-
-                writer.writeTag(TaxonParser.TAXON, new Attribute[]{new Attribute.Default<String>(XMLParser.IDREF, taxon.getId())}, true);
-            }
-            writer.writeCloseTag(TaxaParser.TAXA);
-        }
-    }
-
-    /**
-     * Determine and return the datatype description for these beast options
-     * note that the datatype in XML may differ from the actual datatype
-     *
-     * @return description
-     */
-
-    private String getAlignmentDataTypeDescription() {
-        String description;
-
-        switch (dataType) {
-            case DataType.TWO_STATES:
-            case DataType.COVARION:
-
-                switch (binarySubstitutionModel) {
-                    case BIN_COVARION:
-                        description = TwoStateCovarion.DESCRIPTION;
-                        break;
-
-                    default:
-                        description = alignment.getDataType().getDescription();
-                }
-                break;
-
-            default:
-                description = alignment.getDataType().getDescription();
-        }
-
-        return description;
-    }
-
-
-    /**
-     * Generate an alignment block from these beast options
-     *
-     * @param writer the writer
-     */
-    public void writeAlignment(XMLWriter writer) {
-
-        writer.writeText("");
-        writer.writeComment("The sequence alignment (each sequence refers to a taxon above).");
-        writer.writeComment("ntax=" + alignment.getTaxonCount() + " nchar=" + alignment.getSiteCount());
-        if (samplePriorOnly) {
-            writer.writeComment("Null sequences generated in order to sample from the prior only.");
-        }
-
-
-        writer.writeOpenTag(
-                AlignmentParser.ALIGNMENT,
-                new Attribute[]{
-                        new Attribute.Default<String>(XMLParser.ID, AlignmentParser.ALIGNMENT),
-                        new Attribute.Default<String>("dataType", getAlignmentDataTypeDescription())
-                }
-        );
-
-        for (int i = 0; i < alignment.getTaxonCount(); i++) {
-            Taxon taxon = alignment.getTaxon(i);
-
-            writer.writeOpenTag("sequence");
-            writer.writeTag(TaxonParser.TAXON, new Attribute[]{new Attribute.Default<String>(XMLParser.IDREF, taxon.getId())}, true);
-            if (!samplePriorOnly) {
-                writer.writeText(alignment.getAlignedSequenceString(i));
-            } else {
-                // 3 Ns written in case 3 codon positions selected...
-                writer.writeText("NNN");
-            }
-            writer.writeCloseTag("sequence");
-        }
-        writer.writeCloseTag(AlignmentParser.ALIGNMENT);
-    }
-
-    /**
-     * Write a demographic model
-     *
-     * @param writer the writer
-     */
-    public void writeNodeHeightPriorModel(XMLWriter writer) {
-
-        String initialPopSize = null;
-
-        if (nodeHeightPrior == CONSTANT) {
-
-            writer.writeComment("A prior assumption that the population size has remained constant");
-            writer.writeComment("throughout the time spanned by the genealogy.");
-            writer.writeOpenTag(
-                    ConstantPopulationModelParser.CONSTANT_POPULATION_MODEL,
-                    new Attribute[]{
-                            new Attribute.Default<String>(XMLParser.ID, "constant"),
-                            new Attribute.Default<String>("units", Units.Utils.getDefaultUnitName(units))
-                    }
-            );
-
-            writer.writeOpenTag(ConstantPopulationModelParser.POPULATION_SIZE);
-            writeParameter("constant.popSize", writer);
-            writer.writeCloseTag(ConstantPopulationModelParser.POPULATION_SIZE);
-            writer.writeCloseTag(ConstantPopulationModelParser.CONSTANT_POPULATION_MODEL);
-
-        } else if (nodeHeightPrior == EXPONENTIAL) {
-            // generate an exponential prior tree
-
-            writer.writeComment("A prior assumption that the population size has grown exponentially");
-            writer.writeComment("throughout the time spanned by the genealogy.");
-            writer.writeOpenTag(
-                    ExponentialGrowthModelParser.EXPONENTIAL_GROWTH_MODEL,
-                    new Attribute[]{
-                            new Attribute.Default<String>(XMLParser.ID, "exponential"),
-                            new Attribute.Default<String>("units", Units.Utils.getDefaultUnitName(units))
-                    }
-            );
-
-            // write pop size socket
-            writer.writeOpenTag(ExponentialGrowthModelParser.POPULATION_SIZE);
-            writeParameter("exponential.popSize", writer);
-            writer.writeCloseTag(ExponentialGrowthModelParser.POPULATION_SIZE);
-
-            if (parameterization == GROWTH_RATE) {
-                // write growth rate socket
-                writer.writeOpenTag(ExponentialGrowthModelParser.GROWTH_RATE);
-                writeParameter("exponential.growthRate", writer);
-                writer.writeCloseTag(ExponentialGrowthModelParser.GROWTH_RATE);
-            } else {
-                // write doubling time socket
-                writer.writeOpenTag(ExponentialGrowthModelParser.DOUBLING_TIME);
-                writeParameter("exponential.doublingTime", writer);
-                writer.writeCloseTag(ExponentialGrowthModelParser.DOUBLING_TIME);
-            }
-
-            writer.writeCloseTag(ExponentialGrowthModelParser.EXPONENTIAL_GROWTH_MODEL);
-        } else if (nodeHeightPrior == LOGISTIC) {
-            // generate an exponential prior tree
-
-            writer.writeComment("A prior assumption that the population size has grown logistically");
-            writer.writeComment("throughout the time spanned by the genealogy.");
-            writer.writeOpenTag(
-                    LogisticGrowthModelParser.LOGISTIC_GROWTH_MODEL,
-                    new Attribute[]{
-                            new Attribute.Default<String>(XMLParser.ID, "logistic"),
-                            new Attribute.Default<String>("units", Units.Utils.getDefaultUnitName(units))
-                    }
-            );
-
-            // write pop size socket
-            writer.writeOpenTag(LogisticGrowthModelParser.POPULATION_SIZE);
-            writeParameter("logistic.popSize", writer);
-            writer.writeCloseTag(LogisticGrowthModelParser.POPULATION_SIZE);
-
-            if (parameterization == GROWTH_RATE) {
-                // write growth rate socket
-                writer.writeOpenTag(LogisticGrowthModelParser.GROWTH_RATE);
-                writeParameter("logistic.growthRate", writer);
-                writer.writeCloseTag(LogisticGrowthModelParser.GROWTH_RATE);
-            } else {
-                // write doubling time socket
-                writer.writeOpenTag(LogisticGrowthModelParser.DOUBLING_TIME);
-                writeParameter("logistic.doublingTime", writer);
-                writer.writeCloseTag(LogisticGrowthModelParser.DOUBLING_TIME);
-            }
-
-            // write logistic t50 socket
-            writer.writeOpenTag(LogisticGrowthModelParser.TIME_50);
-            writeParameter("logistic.t50", writer);
-            writer.writeCloseTag(LogisticGrowthModelParser.TIME_50);
-
-            writer.writeCloseTag(LogisticGrowthModelParser.LOGISTIC_GROWTH_MODEL);
-
-            initialPopSize = "logistic.popSize";
-
-        } else if (nodeHeightPrior == EXPANSION) {
-            // generate an exponential prior tree
-
-            writer.writeComment("A prior assumption that the population size has grown exponentially");
-            writer.writeComment("from some ancestral population size in the past.");
-            writer.writeOpenTag(
-                    ExpansionModelParser.EXPANSION_MODEL,
-                    new Attribute[]{
-                            new Attribute.Default<String>(XMLParser.ID, "expansion"),
-                            new Attribute.Default<String>("units", Units.Utils.getDefaultUnitName(units))
-                    }
-            );
-
-            // write pop size socket
-            writer.writeOpenTag(ExpansionModelParser.POPULATION_SIZE);
-            writeParameter("expansion.popSize", writer);
-            writer.writeCloseTag(ExpansionModelParser.POPULATION_SIZE);
-
-            if (parameterization == GROWTH_RATE) {
-                // write growth rate socket
-                writer.writeOpenTag(ExpansionModelParser.GROWTH_RATE);
-                writeParameter("expansion.growthRate", writer);
-                writer.writeCloseTag(ExpansionModelParser.GROWTH_RATE);
-            } else {
-                // write doubling time socket
-                writer.writeOpenTag(ExpansionModelParser.DOUBLING_TIME);
-                writeParameter("expansion.doublingTime", writer);
-                writer.writeCloseTag(ExpansionModelParser.DOUBLING_TIME);
-            }
-
-            // write ancestral proportion socket
-            writer.writeOpenTag(ExpansionModelParser.ANCESTRAL_POPULATION_PROPORTION);
-            writeParameter("expansion.ancestralProportion", writer);
-            writer.writeCloseTag(ExpansionModelParser.ANCESTRAL_POPULATION_PROPORTION);
-
-            writer.writeCloseTag(ExpansionModelParser.EXPANSION_MODEL);
-
-            initialPopSize = "expansion.popSize";
-
-        } else if (nodeHeightPrior == YULE) {
-            writer.writeComment("A prior on the distribution node heights defined given");
-            writer.writeComment("a Yule speciation process (a pure birth process).");
-            writer.writeOpenTag(
-                    YuleModelParser.YULE_MODEL,
-                    new Attribute[]{
-                            new Attribute.Default<String>(XMLParser.ID, "yule"),
-                            new Attribute.Default<String>("units", Units.Utils.getDefaultUnitName(units))
-                    }
-            );
-
-            writer.writeOpenTag(YuleModelParser.BIRTH_RATE);
-            writeParameter("yule.birthRate", writer);
-            writer.writeCloseTag(YuleModelParser.BIRTH_RATE);
-            writer.writeCloseTag(YuleModelParser.YULE_MODEL);
-        } else if (nodeHeightPrior == BIRTH_DEATH) {
-            writer.writeComment("A prior on the distribution node heights defined given");
-            writer.writeComment("a Birth-Death speciation process (Gernhard 2008).");
-            writer.writeOpenTag(
-                    BirthDeathGernhard08Model.BIRTH_DEATH_MODEL,
-                    new Attribute[]{
-                            new Attribute.Default<String>(XMLParser.ID, "birthDeath"),
-                            new Attribute.Default<String>("units", Units.Utils.getDefaultUnitName(units))
-                    }
-            );
-
-            writer.writeOpenTag(BirthDeathModelParser.BIRTHDIFF_RATE);
-            writeParameter(BirthDeathModelParser.MEAN_GROWTH_RATE_PARAM_NAME, writer);
-            writer.writeCloseTag(BirthDeathModelParser.BIRTHDIFF_RATE);
-            writer.writeOpenTag(BirthDeathModelParser.RELATIVE_DEATH_RATE);
-            writeParameter(BirthDeathModelParser.RELATIVE_DEATH_RATE_PARAM_NAME, writer);
-            writer.writeCloseTag(BirthDeathModelParser.RELATIVE_DEATH_RATE);
-
-            writer.writeCloseTag(BirthDeathGernhard08Model.BIRTH_DEATH_MODEL);
-        }
-
-        if (nodeHeightPrior != CONSTANT && nodeHeightPrior != EXPONENTIAL) {
-            // If the node height prior is not one of these two then we need to simulate a
-            // random starting tree under a constant size coalescent.
-
-            writer.writeComment("This is a simple constant population size coalescent model");
-            writer.writeComment("that is used to generate an initial tree for the chain.");
-            writer.writeOpenTag(
-                    ConstantPopulationModelParser.CONSTANT_POPULATION_MODEL,
-                    new Attribute[]{
-                            new Attribute.Default<String>(XMLParser.ID, "initialDemo"),
-                            new Attribute.Default<String>("units", Units.Utils.getDefaultUnitName(units))
-                    }
-            );
-
-            writer.writeOpenTag(ConstantPopulationModelParser.POPULATION_SIZE);
-            if (initialPopSize != null) {
-                writer.writeTag(ParameterParser.PARAMETER,
-                        new Attribute[]{
-                                new Attribute.Default<String>(XMLParser.IDREF, initialPopSize),
-                        }, true);
-            } else {
-                writeParameter("initialDemo.popSize", 1, 100.0, Double.NaN, Double.NaN, writer);
-            }
-            writer.writeCloseTag(ConstantPopulationModelParser.POPULATION_SIZE);
-            writer.writeCloseTag(ConstantPopulationModelParser.CONSTANT_POPULATION_MODEL);
-        }
-
-    }
-
-    /**
-     * Writes the pattern lists
-     *
-     * @param writer the writer
-     */
-    public void writePatternLists(XMLWriter writer) {
-
-        partitionCount = getPartionCount(codonHeteroPattern);
-
-        writer.writeText("");
-        if (alignment.getDataType() == Nucleotides.INSTANCE && codonHeteroPattern != null && partitionCount > 1) {
-
-            if (codonHeteroPattern.equals("112")) {
-                writer.writeComment("The unique patterns for codon positions 1 & 2");
-                writer.writeOpenTag(MergePatternsParser.MERGE_PATTERNS,
-                        new Attribute[]{
-                                new Attribute.Default<String>(XMLParser.ID, "patterns1+2"),
-                        }
-                );
-                writePatternList(1, 3, writer);
-                writePatternList(2, 3, writer);
-                writer.writeCloseTag(MergePatternsParser.MERGE_PATTERNS);
-
-                writePatternList(3, 3, writer);
-
-            } else {
-                // pattern is 123
-                // write pattern lists for all three codon positions
-                for (int i = 1; i <= 3; i++) {
-                    writePatternList(i, 3, writer);
-                }
-
-            }
-        } else {
-            partitionCount = 1;
-            writePatternList(-1, 0, writer);
-        }
-    }
-
-    private int getPartionCount(String codonPattern) {
-
-        if (codonPattern == null || codonPattern.equals("111")) {
-            return 1;
-        }
-        if (codonPattern.equals("123")) {
-            return 3;
-        }
-        if (codonPattern.equals("112")) {
-            return 2;
-        }
-        throw new IllegalArgumentException("codonPattern must be one of '111', '112' or '123'");
-    }
-
-    /**
-     * Write a single pattern list
-     *
-     * @param writer the writer
-     * @param from   from site
-     * @param every  skip every
-     */
-    private void writePatternList(int from, int every, XMLWriter writer) {
-
-        String id = SitePatternsParser.PATTERNS;
-        if (from < 1) {
-            writer.writeComment("The unique patterns for all positions");
-            from = 1;
-        } else {
-            writer.writeComment("The unique patterns for codon position " + from);
-            id += Integer.toString(from);
-        }
-
-        SitePatterns patterns = new SitePatterns(alignment, from - 1, 0, every);
-        writer.writeComment("npatterns=" + patterns.getPatternCount());
-        if (every != 0) {
-            writer.writeOpenTag(SitePatternsParser.PATTERNS,
-                    new Attribute[]{
-                            new Attribute.Default<String>(XMLParser.ID, id),
-                            new Attribute.Default<String>("from", "" + from),
-                            new Attribute.Default<String>("every", "" + every)
-                    }
-            );
-        } else {
-            writer.writeOpenTag(SitePatternsParser.PATTERNS,
-                    new Attribute[]{
-                            new Attribute.Default<String>(XMLParser.ID, id),
-                            new Attribute.Default<String>("from", "" + from)
-                    }
-            );
-        }
-
-        writer.writeTag(AlignmentParser.ALIGNMENT, new Attribute.Default<String>(XMLParser.IDREF, AlignmentParser.ALIGNMENT), true);
-        writer.writeCloseTag(SitePatternsParser.PATTERNS);
-    }
-
-    /**
-     * Write tree model XML block.
-     *
-     * @param writer the writer
-     */
-    private void writeTreeModel(XMLWriter writer) {
-
-        writer.writeTag(TreeModel.TREE_MODEL, new Attribute.Default<String>(XMLParser.ID, "treeModel"), false);
-
-        if (userTree) {
-            writer.writeTag("tree", new Attribute.Default<String>(XMLParser.IDREF, InitialTreeGenerator.STARTING_TREE), true);
-        } else {
-            writer.writeTag(OldCoalescentSimulatorParser.COALESCENT_TREE, new Attribute.Default<String>(XMLParser.IDREF, InitialTreeGenerator.STARTING_TREE), true);
-        }
-
-        writer.writeOpenTag(TreeModelParser.ROOT_HEIGHT);
-        writer.writeTag(ParameterParser.PARAMETER, new Attribute.Default<String>(XMLParser.ID, "treeModel.rootHeight"), true);
-        writer.writeCloseTag(TreeModelParser.ROOT_HEIGHT);
-
-
-        writer.writeOpenTag(TreeModelParser.NODE_HEIGHTS, new Attribute.Default<String>(TreeModelParser.INTERNAL_NODES, "true"));
-        writer.writeTag(ParameterParser.PARAMETER, new Attribute.Default<String>(XMLParser.ID, "treeModel.internalNodeHeights"), true);
-        writer.writeCloseTag(TreeModelParser.NODE_HEIGHTS);
-
-        writer.writeOpenTag(TreeModelParser.NODE_HEIGHTS,
-                new Attribute[]{
-                        new Attribute.Default<String>(TreeModelParser.INTERNAL_NODES, "true"),
-                        new Attribute.Default<String>(TreeModelParser.ROOT_NODE, "true")
-                });
-        writer.writeTag(ParameterParser.PARAMETER, new Attribute.Default<String>(XMLParser.ID, "treeModel.allInternalNodeHeights"), true);
-        writer.writeCloseTag(TreeModelParser.NODE_HEIGHTS);
-
-        if (clockModel == RANDOM_LOCAL_CLOCK) {
-            writer.writeOpenTag(TreeModelParser.NODE_RATES,
-                    new Attribute[]{
-                            new Attribute.Default<String>(TreeModelParser.ROOT_NODE, "false"),
-                            new Attribute.Default<String>(TreeModelParser.INTERNAL_NODES, "true"),
-                            new Attribute.Default<String>(TreeModelParser.LEAF_NODES, "true")
-                    });
-            writer.writeTag(ParameterParser.PARAMETER, new Attribute.Default<String>(XMLParser.ID, LOCAL_CLOCK + "." + "rates"), true);
-            writer.writeCloseTag(TreeModelParser.NODE_RATES);
-
-            writer.writeOpenTag(TreeModelParser.NODE_TRAITS,
-                    new Attribute[]{
-                            new Attribute.Default<String>(TreeModelParser.ROOT_NODE, "false"),
-                            new Attribute.Default<String>(TreeModelParser.INTERNAL_NODES, "true"),
-                            new Attribute.Default<String>(TreeModelParser.LEAF_NODES, "true")
-                    });
-            writer.writeTag(ParameterParser.PARAMETER, new Attribute.Default<String>(XMLParser.ID, LOCAL_CLOCK + "." + "changes"), true);
-            writer.writeCloseTag(TreeModelParser.NODE_TRAITS);
-        }
-
-        writer.writeCloseTag(TreeModel.TREE_MODEL);
-    }
-
-    /**
-     * Writes the substitution model to XML.
-     *
-     * @param writer the writer
-     */
-    public void writeSubstitutionModel(XMLWriter writer) {
-
-
-        switch (dataType) {
-            case DataType.NUCLEOTIDES:
-                // Jukes-Cantor model
-                if (nucSubstitutionModel == JC) {
-                    writer.writeComment("The JC substitution model (Jukes & Cantor, 1969)");
-                    writer.writeOpenTag(
-                            NucModelType.HKY.getXMLName(),
-                            new Attribute[]{new Attribute.Default<String>(XMLParser.ID, "jc")}
-                    );
-                    writer.writeOpenTag(HKYParser.FREQUENCIES);
-                    writer.writeOpenTag(
-                            FrequencyModelParser.FREQUENCY_MODEL,
-                            new Attribute[]{
-                                    new Attribute.Default<String>("dataType", alignment.getDataType().getDescription())
-                            }
-                    );
-                    writer.writeOpenTag(FrequencyModelParser.FREQUENCIES);
-                    writer.writeTag(
-                            ParameterParser.PARAMETER,
-                            new Attribute[]{
-                                    new Attribute.Default<String>(XMLParser.ID, "jc.frequencies"),
-                                    new Attribute.Default<String>("value", "0.25 0.25 0.25 0.25")
-                            },
-                            true
-                    );
-                    writer.writeCloseTag(FrequencyModelParser.FREQUENCIES);
-
-                    writer.writeCloseTag(FrequencyModelParser.FREQUENCY_MODEL);
-                    writer.writeCloseTag(HKYParser.FREQUENCIES);
-
-                    writer.writeOpenTag(HKYParser.KAPPA);
-                    writeParameter("jc.kappa", 1, 1.0, Double.NaN, Double.NaN, writer);
-                    writer.writeCloseTag(HKYParser.KAPPA);
-                    writer.writeCloseTag(NucModelType.HKY.getXMLName());
-
-                } else {
-                    // Hasegawa Kishino and Yano 85 model
-                    if (nucSubstitutionModel == HKY) {
-                        if (unlinkedSubstitutionModel) {
-                            for (int i = 1; i <= partitionCount; i++) {
-                                writeHKYModel(i, writer);
-                            }
-                        } else {
-                            writeHKYModel(-1, writer);
-                        }
-                    } else {
-                        // General time reversible model
-                        if (nucSubstitutionModel == GTR) {
-                            if (unlinkedSubstitutionModel) {
-                                for (int i = 1; i <= partitionCount; i++) {
-                                    writeGTRModel(i, writer);
-                                }
-                            } else {
-                                writeGTRModel(-1, writer);
-                            }
-                        }
-                    }
-                }
-                break;
-
-            case DataType.AMINO_ACIDS:
-                // Amino Acid model
-                String aaModel = "";
-
-                switch (aaSubstitutionModel) {
-                    case 0:
-                        aaModel = AminoAcidModelType.BLOSUM_62.getXMLName();
-                        break;
-                    case 1:
-                        aaModel = AminoAcidModelType.DAYHOFF.getXMLName();
-                        break;
-                    case 2:
-                        aaModel = AminoAcidModelType.JTT.getXMLName();
-                        break;
-                    case 3:
-                        aaModel = AminoAcidModelType.MT_REV_24.getXMLName();
-                        break;
-                    case 4:
-                        aaModel = AminoAcidModelType.CP_REV_45.getXMLName();
-                        break;
-                    case 5:
-                        aaModel = AminoAcidModelType.WAG.getXMLName();
-                        break;
-                }
-
-                writer.writeComment("The " + aaModel + " substitution model");
-                writer.writeTag(
-                        EmpiricalAminoAcidModelParser.EMPIRICAL_AMINO_ACID_MODEL,
-                        new Attribute[]{new Attribute.Default<String>(XMLParser.ID, "aa"),
-                                new Attribute.Default<String>("type", aaModel)}, true
-                );
-
-                break;
-
-            case DataType.TWO_STATES:
-            case DataType.COVARION:
-
-                switch (binarySubstitutionModel) {
-                    case BIN_SIMPLE:
-                        writeBinarySimpleModel(writer);
-                        break;
-                    case BIN_COVARION:
-                        writeBinaryCovarionModel(writer);
-                        break;
-                }
-
-                break;
-        }
-    }
-
-    /**
-     * Write the HKY model XML block.
-     *
-     * @param num    the model number
-     * @param writer the writer
-     */
-    public void writeHKYModel(int num, XMLWriter writer) {
-        String id = "hky";
-        if (num > 0) {
-            id += Integer.toString(num);
-        }
-        // Hasegawa Kishino and Yano 85 model
-        writer.writeComment("The HKY substitution model (Hasegawa, Kishino & Yano, 1985)");
-        writer.writeOpenTag(
-                NucModelType.HKY.getXMLName(),
-                new Attribute[]{new Attribute.Default<String>(XMLParser.ID, id)}
-        );
-        writer.writeOpenTag(HKYParser.FREQUENCIES);
-        writer.writeOpenTag(
-                FrequencyModelParser.FREQUENCY_MODEL,
-                new Attribute[]{
-                        new Attribute.Default<String>("dataType", alignment.getDataType().getDescription())
-                }
-        );
-        writer.writeTag(AlignmentParser.ALIGNMENT, new Attribute[]{new Attribute.Default<String>(XMLParser.IDREF, AlignmentParser.ALIGNMENT)}, true);
-        writer.writeOpenTag(FrequencyModelParser.FREQUENCIES);
-        if (frequencyPolicy == ALLEQUAL)
-            writeParameter(id + ".frequencies", 4, writer);
-        else
-            writeParameter(id + ".frequencies", 4, Double.NaN, Double.NaN, Double.NaN, writer);
-        writer.writeCloseTag(FrequencyModelParser.FREQUENCIES);
-        writer.writeCloseTag(FrequencyModelParser.FREQUENCY_MODEL);
-        writer.writeCloseTag(HKYParser.FREQUENCIES);
-
-        writer.writeOpenTag(HKYParser.KAPPA);
-        writeParameter(id + ".kappa", writer);
-        writer.writeCloseTag(HKYParser.KAPPA);
-        writer.writeCloseTag(NucModelType.HKY.getXMLName());
-    }
-
-    /**
-     * Write the GTR model XML block.
-     *
-     * @param num    the model number
-     * @param writer the writer
-     */
-    public void writeGTRModel(int num, XMLWriter writer) {
-        String id = "gtr";
-        if (num > 0) {
-            id += Integer.toString(num);
-        }
-
-        writer.writeComment("The general time reversible (GTR) substitution model");
-        writer.writeOpenTag(
-                GTRParser.GTR_MODEL,
-                new Attribute[]{new Attribute.Default<String>(XMLParser.ID, id)}
-        );
-        writer.writeOpenTag(GTRParser.FREQUENCIES);
-        writer.writeOpenTag(
-                FrequencyModelParser.FREQUENCY_MODEL,
-                new Attribute[]{
-                        new Attribute.Default<String>("dataType", alignment.getDataType().getDescription())
-                }
-        );
-        writer.writeTag(AlignmentParser.ALIGNMENT, new Attribute[]{new Attribute.Default<String>(XMLParser.IDREF, AlignmentParser.ALIGNMENT)}, true);
-        writer.writeOpenTag(FrequencyModelParser.FREQUENCIES);
-        if (frequencyPolicy == ALLEQUAL)
-            writeParameter(id + ".frequencies", 4, writer);
-        else
-            writeParameter(id + ".frequencies", 4, Double.NaN, Double.NaN, Double.NaN, writer);
-        writer.writeCloseTag(FrequencyModelParser.FREQUENCIES);
-        writer.writeCloseTag(FrequencyModelParser.FREQUENCY_MODEL);
-        writer.writeCloseTag(GTRParser.FREQUENCIES);
-
-        writer.writeOpenTag(GTRParser.A_TO_C);
-        writeParameter(id + ".ac", writer);
-        writer.writeCloseTag(GTRParser.A_TO_C);
-
-        writer.writeOpenTag(GTRParser.A_TO_G);
-        writeParameter(id + ".ag", writer);
-        writer.writeCloseTag(GTRParser.A_TO_G);
-
-        writer.writeOpenTag(GTRParser.A_TO_T);
-        writeParameter(id + ".at", writer);
-        writer.writeCloseTag(GTRParser.A_TO_T);
-
-        writer.writeOpenTag(GTRParser.C_TO_G);
-        writeParameter(id + ".cg", writer);
-        writer.writeCloseTag(GTRParser.C_TO_G);
-
-        writer.writeOpenTag(GTRParser.G_TO_T);
-        writeParameter(id + ".gt", writer);
-        writer.writeCloseTag(GTRParser.G_TO_T);
-        writer.writeCloseTag(GTRParser.GTR_MODEL);
-    }
-
-
-    /**
-     * Write the Binary  simple model XML block.
-     *
-     * @param writer the writer
-     */
-    public void writeBinarySimpleModel(XMLWriter writer) {
-        final String id = "bsimple";
-
-        writer.writeComment("The Binary simple model (based on the general substitution model)");
-        writer.writeOpenTag(
-                BinarySubstitutionModelParser.BINARY_SUBSTITUTION_MODEL,
-                new Attribute[]{new Attribute.Default<String>(XMLParser.ID, id)}
-        );
-        writer.writeOpenTag(GeneralSubstitutionModelParser.FREQUENCIES);
-        writer.writeOpenTag(
-                FrequencyModelParser.FREQUENCY_MODEL,
-                new Attribute[]{
-                        new Attribute.Default<String>("dataType", alignment.getDataType().getDescription())
-                }
-        );
-        writer.writeTag(AlignmentParser.ALIGNMENT, new Attribute[]{new Attribute.Default<String>(XMLParser.IDREF, AlignmentParser.ALIGNMENT)}, true);
-        writer.writeOpenTag(FrequencyModelParser.FREQUENCIES);
-        writeParameter(id + ".frequencies", 2, Double.NaN, Double.NaN, Double.NaN, writer);
-        writer.writeCloseTag(FrequencyModelParser.FREQUENCIES);
-        writer.writeCloseTag(FrequencyModelParser.FREQUENCY_MODEL);
-        writer.writeCloseTag(GeneralSubstitutionModelParser.FREQUENCIES);
-
-        writer.writeCloseTag(BinarySubstitutionModelParser.BINARY_SUBSTITUTION_MODEL);
-    }
-
-
-    /**
-     * Write the Binary covarion model XML block
-     *
-     * @param writer the writer
-     */
-
-    public void writeBinaryCovarionModel(XMLWriter writer) {
-        String id = "bcov";
-
-        writer.writeComment("The Binary covarion model");
-        writer.writeOpenTag(
-                BinaryCovarionModelParser.COVARION_MODEL,
-                new Attribute[]{new Attribute.Default<String>(XMLParser.ID, id)}
-        );
-
-        writer.writeOpenTag(BinaryCovarionModelParser.FREQUENCIES);
-        writeParameter(id + ".frequencies", 2, 0.5, 0.0, 1.0, writer);
-        writer.writeCloseTag(BinaryCovarionModelParser.FREQUENCIES);
-
-        writer.writeOpenTag(BinaryCovarionModelParser.HIDDEN_FREQUENCIES);
-        writeParameter(id + ".hfrequencies", 2, 0.5, 0.0, 1.0, writer);
-        writer.writeCloseTag(BinaryCovarionModelParser.HIDDEN_FREQUENCIES);
-
-        writer.writeOpenTag(BinaryCovarionModelParser.ALPHA);
-        writeParameter(id + ".alpha", writer);
-        writer.writeCloseTag(BinaryCovarionModelParser.ALPHA);
-
-        writer.writeOpenTag(BinaryCovarionModelParser.SWITCHING_RATE);
-        writeParameter(id + ".s", writer);
-        writer.writeCloseTag(BinaryCovarionModelParser.SWITCHING_RATE);
-
-        writer.writeCloseTag(BinaryCovarionModelParser.COVARION_MODEL);
-    }
-
-    /**
-     * Write the site model XML block.
-     *
-     * @param writer the writer
-     */
-    public void writeSiteModel(XMLWriter writer) {
-
-        switch (dataType) {
-            case DataType.NUCLEOTIDES:
-                if (codonHeteroPattern != null) {
-                    for (int i = 1; i <= partitionCount; i++) {
-                        writeNucSiteModel(i, writer);
-                    }
-                    writer.println();
-                    writer.writeOpenTag(CompoundParameterParser.COMPOUND_PARAMETER, new Attribute[]{new Attribute.Default<String>(XMLParser.ID, "allMus")});
-                    for (int i = 1; i <= partitionCount; i++) {
-                        writer.writeTag(ParameterParser.PARAMETER,
-                                new Attribute[]{new Attribute.Default<String>(XMLParser.IDREF, SiteModel.SITE_MODEL + i + ".mu")}, true);
-                    }
-                    writer.writeCloseTag(CompoundParameterParser.COMPOUND_PARAMETER);
-                } else {
-                    writeNucSiteModel(-1, writer);
-                }
-                break;
-
-            case DataType.AMINO_ACIDS:
-                writeAASiteModel(writer);
-                break;
-
-            case DataType.TWO_STATES:
-            case DataType.COVARION:
-                writeTwoStateSiteModel(writer);
-                break;
-
-            default:
-                throw new IllegalArgumentException("Unknown data type");
-        }
-    }
-
-    /**
-     * Write the nucleotide site model XML block.
-     *
-     * @param num    the model number
-     * @param writer the writer
-     */
-    public void writeNucSiteModel(int num, XMLWriter writer) {
-
-        String id = SiteModel.SITE_MODEL;
-        if (num > 0) {
-            id += Integer.toString(num);
-        }
-
-        writer.writeComment("site model");
-        writer.writeOpenTag(GammaSiteModel.SITE_MODEL, new Attribute[]{new Attribute.Default<String>(XMLParser.ID, id)});
-
-
-        writer.writeOpenTag(GammaSiteModelParser.SUBSTITUTION_MODEL);
-
-        if (unlinkedSubstitutionModel) {
-            switch (nucSubstitutionModel) {
-                // JC cannot be unlinked because it has no parameters
-                case JC:
-                    writer.writeTag(NucModelType.HKY.getXMLName(), new Attribute.Default<String>(XMLParser.IDREF, "jc"), true);
-                    break;
-                case HKY:
-                    writer.writeTag(NucModelType.HKY.getXMLName(), new Attribute.Default<String>(XMLParser.IDREF, "hky" + num), true);
-                    break;
-                case GTR:
-                    writer.writeTag(GTRParser.GTR_MODEL, new Attribute.Default<String>(XMLParser.IDREF, "gtr" + num), true);
-                    break;
-                default:
-                    throw new IllegalArgumentException("Unknown substitution model.");
-            }
-        } else {
-            switch (nucSubstitutionModel) {
-                case JC:
-                    writer.writeTag(NucModelType.HKY.getXMLName(), new Attribute.Default<String>(XMLParser.IDREF, "jc"), true);
-                    break;
-                case HKY:
-                    writer.writeTag(NucModelType.HKY.getXMLName(), new Attribute.Default<String>(XMLParser.IDREF, "hky"), true);
-                    break;
-                case GTR:
-                    writer.writeTag(GTRParser.GTR_MODEL, new Attribute.Default<String>(XMLParser.IDREF, "gtr"), true);
-                    break;
-                default:
-                    throw new IllegalArgumentException("Unknown substitution model.");
-            }
-        }
-        writer.writeCloseTag(GammaSiteModelParser.SUBSTITUTION_MODEL);
-
-        if (num != -1) {
-            writer.writeOpenTag(GammaSiteModelParser.RELATIVE_RATE);
-            writeParameter(id + ".mu", writer);
-            writer.writeCloseTag(GammaSiteModelParser.RELATIVE_RATE);
-        } else {
-//            The actual mutation rate is now in the BranchRateModel so relativeRate can be missing
-        }
-
-        if (gammaHetero) {
-            writer.writeOpenTag(GammaSiteModelParser.GAMMA_SHAPE, new Attribute.Default<String>(GammaSiteModelParser.GAMMA_CATEGORIES, "" + gammaCategories));
-            if (num == -1 || unlinkedHeterogeneityModel) {
-                writeParameter(id + ".alpha", writer);
-            } else {
-                // multiple partitions but linked heterogeneity
-                if (num == 1) {
-                    writeParameter(SiteModel.SITE_MODEL + "." + "alpha", writer);
-                } else {
-                    writer.writeTag(ParameterParser.PARAMETER, new Attribute.Default<String>(XMLParser.IDREF, SiteModel.SITE_MODEL + "." + "alpha"), true);
-                }
-            }
-            writer.writeCloseTag(GammaSiteModelParser.GAMMA_SHAPE);
-        }
-
-        if (invarHetero) {
-            writer.writeOpenTag(GammaSiteModelParser.PROPORTION_INVARIANT);
-            if (num == -1 || unlinkedHeterogeneityModel) {
-                writeParameter(id + ".pInv", writer);
-            } else {
-                // multiple partitions but linked heterogeneity
-                if (num == 1) {
-                    writeParameter(SiteModel.SITE_MODEL + "." + "pInv", writer);
-                } else {
-                    writer.writeTag(ParameterParser.PARAMETER, new Attribute.Default<String>(XMLParser.IDREF, SiteModel.SITE_MODEL + "." + "pInv"), true);
-                }
-            }
-            writer.writeCloseTag(GammaSiteModelParser.PROPORTION_INVARIANT);
-        }
-
-        writer.writeCloseTag(GammaSiteModel.SITE_MODEL);
-    }
-
-    /**
-     * Write the two states site model XML block.
-     *
-     * @param writer the writer
-     */
-    public void writeTwoStateSiteModel(XMLWriter writer) {
-
-        String id = SiteModel.SITE_MODEL;
-
-        writer.writeComment("site model");
-        writer.writeOpenTag(GammaSiteModel.SITE_MODEL, new Attribute[]{new Attribute.Default<String>(XMLParser.ID, id)});
-
-
-        writer.writeOpenTag(GammaSiteModelParser.SUBSTITUTION_MODEL);
-
-        switch (binarySubstitutionModel) {
-            case BIN_SIMPLE:
-                //writer.writeTag(dr.evomodel.substmodel.GeneralSubstitutionModel.GENERAL_SUBSTITUTION_MODEL, new Attribute.Default<String>(XMLParser.IDREF, "bsimple"), true);
-                writer.writeTag(BinarySubstitutionModelParser.BINARY_SUBSTITUTION_MODEL, new Attribute.Default<String>(XMLParser.IDREF, "bsimple"), true);
-                break;
-            case BIN_COVARION:
-                writer.writeTag(BinaryCovarionModelParser.COVARION_MODEL, new Attribute.Default<String>(XMLParser.IDREF, "bcov"), true);
-                break;
-            default:
-                throw new IllegalArgumentException("Unknown substitution model.");
-        }
-
-        writer.writeCloseTag(GammaSiteModelParser.SUBSTITUTION_MODEL);
-
-        if (gammaHetero) {
-            writer.writeOpenTag(GammaSiteModelParser.GAMMA_SHAPE, new Attribute.Default<String>(GammaSiteModelParser.GAMMA_CATEGORIES, "" + gammaCategories));
-            writeParameter(id + ".alpha", writer);
-            writer.writeCloseTag(GammaSiteModelParser.GAMMA_SHAPE);
-        }
-
-        if (invarHetero) {
-            writer.writeOpenTag(GammaSiteModelParser.PROPORTION_INVARIANT);
-            writeParameter(id + ".pInv", writer);
-            writer.writeCloseTag(GammaSiteModelParser.PROPORTION_INVARIANT);
-        }
-
-        writer.writeCloseTag(GammaSiteModel.SITE_MODEL);
-    }
-
-
-    /**
-     * Write the AA site model XML block.
-     *
-     * @param writer the writer
-     */
-    public void writeAASiteModel(XMLWriter writer) {
-
-        writer.writeComment("site model");
-        writer.writeOpenTag(GammaSiteModel.SITE_MODEL, new Attribute[]{
-                new Attribute.Default<String>(XMLParser.ID, SiteModel.SITE_MODEL)});
-
-
-        writer.writeOpenTag(GammaSiteModelParser.SUBSTITUTION_MODEL);
-        writer.writeTag(EmpiricalAminoAcidModelParser.EMPIRICAL_AMINO_ACID_MODEL,
-                new Attribute.Default<String>(XMLParser.IDREF, "aa"), true);
-        writer.writeCloseTag(GammaSiteModelParser.SUBSTITUTION_MODEL);
-
-//            The actual mutation rate is now in the BranchRateModel so relativeRate can be missing
-
-        if (gammaHetero) {
-            writer.writeOpenTag(GammaSiteModelParser.GAMMA_SHAPE, new Attribute.Default<String>(GammaSiteModelParser.GAMMA_CATEGORIES, "" + gammaCategories));
-            writeParameter(SiteModel.SITE_MODEL + "." + "alpha", writer);
-            writer.writeCloseTag(GammaSiteModelParser.GAMMA_SHAPE);
-        }
-
-        if (invarHetero) {
-            writer.writeOpenTag(GammaSiteModelParser.PROPORTION_INVARIANT);
-            writeParameter(SiteModel.SITE_MODEL + "." + "pInv", writer);
-            writer.writeCloseTag(GammaSiteModelParser.PROPORTION_INVARIANT);
-        }
-
-        writer.writeCloseTag(GammaSiteModel.SITE_MODEL);
-    }
-
-
-    /**
-     * Write the relaxed clock branch rates block.
-     *
-     * @param writer the writer
-     */
-    public void writeBranchRatesModel(XMLWriter writer) {
-        if (clockModel == STRICT_CLOCK) {
-            if (fixedSubstitutionRate) {
-                fixParameter("clock.rate", meanSubstitutionRate);
-            }
-
-            writer.writeComment("The strict clock (Uniform rates across branches)");
-            writer.writeOpenTag(
-                    StrictClockBranchRatesParser.STRICT_CLOCK_BRANCH_RATES,
-                    new Attribute[]{new Attribute.Default<String>(XMLParser.ID, BranchRateModel.BRANCH_RATES)}
-            );
-            writer.writeOpenTag("rate");
-
-            writeParameter("clock.rate", writer);
-            writer.writeCloseTag("rate");
-            writer.writeCloseTag(StrictClockBranchRatesParser.STRICT_CLOCK_BRANCH_RATES);
-        } else if (clockModel == RANDOM_LOCAL_CLOCK) {
-            if (fixedSubstitutionRate) {
-                fixParameter("clock.rate", meanSubstitutionRate);
-            }
-
-            writer.writeComment("The random local clock model (Drummond & Suchard, 2007)");
-            writer.writeOpenTag(
-                    RandomLocalClockModelParser.LOCAL_BRANCH_RATES,
-                    new Attribute[]{
-                            new Attribute.Default<String>(XMLParser.ID, BranchRateModel.BRANCH_RATES),
-                            new Attribute.Default<String>("ratesAreMultipliers", "false")
-                    }
-            );
-            writer.writeTag(TreeModel.TREE_MODEL, new Attribute.Default<String>(XMLParser.IDREF, "treeModel"), true);
-
-            writer.writeOpenTag("rates");
-            writer.writeTag("parameter", new Attribute.Default<String>(XMLParser.IDREF, LOCAL_CLOCK + "." + "rates"), true);
-            writer.writeCloseTag("rates");
-
-            writer.writeOpenTag("rateIndicator");
-            writer.writeTag("parameter", new Attribute.Default<String>(XMLParser.IDREF, LOCAL_CLOCK + "." + "changes"), true);
-            writer.writeCloseTag("rateIndicator");
-
-            writer.writeOpenTag("clockRate");
-            writeParameter("clock.rate", writer);
-            writer.writeCloseTag("clockRate");
-
-            writer.writeCloseTag(RandomLocalClockModelParser.LOCAL_BRANCH_RATES);
-
-            writer.writeText("");
-            writer.writeOpenTag(
-                    SumStatisticParser.SUM_STATISTIC,
-                    new Attribute[]{
-                            new Attribute.Default<String>(XMLParser.ID, "rateChanges"),
-                            new Attribute.Default<String>("name", "rateChangeCount"),
-                            new Attribute.Default<String>("elementwise", "true"),
-                    }
-            );
-            writer.writeTag("parameter", new Attribute.Default<String>(XMLParser.IDREF, LOCAL_CLOCK + "." + "changes"), true);
-            writer.writeCloseTag(SumStatisticParser.SUM_STATISTIC);
-
-            writer.writeText("");
-
-            writer.writeOpenTag(
-                    RateStatisticParser.RATE_STATISTIC,
-                    new Attribute[]{
-                            new Attribute.Default<String>(XMLParser.ID, "meanRate"),
-                            new Attribute.Default<String>("name", "meanRate"),
-                            new Attribute.Default<String>("mode", "mean"),
-                            new Attribute.Default<String>("internal", "true"),
-                            new Attribute.Default<String>("external", "true")
-                    }
-            );
-            writer.writeTag(TreeModel.TREE_MODEL, new Attribute.Default<String>(XMLParser.IDREF, "treeModel"), true);
-            writer.writeTag(RandomLocalClockModelParser.LOCAL_BRANCH_RATES, new Attribute.Default<String>(XMLParser.IDREF, BranchRateModel.BRANCH_RATES), true);
-            writer.writeCloseTag(RateStatisticParser.RATE_STATISTIC);
-
-            writer.writeText("");
-            writer.writeOpenTag(
-                    RateStatisticParser.RATE_STATISTIC,
-                    new Attribute[]{
-                            new Attribute.Default<String>(XMLParser.ID, RateStatisticParser.COEFFICIENT_OF_VARIATION),
-                            new Attribute.Default<String>("name", RateStatisticParser.COEFFICIENT_OF_VARIATION),
-                            new Attribute.Default<String>("mode", RateStatisticParser.COEFFICIENT_OF_VARIATION),
-                            new Attribute.Default<String>("internal", "true"),
-                            new Attribute.Default<String>("external", "true")
-                    }
-            );
-            writer.writeTag(TreeModel.TREE_MODEL, new Attribute.Default<String>(XMLParser.IDREF, "treeModel"), true);
-            writer.writeTag(RandomLocalClockModelParser.LOCAL_BRANCH_RATES, new Attribute.Default<String>(XMLParser.IDREF, BranchRateModel.BRANCH_RATES), true);
-            writer.writeCloseTag(RateStatisticParser.RATE_STATISTIC);
-
-            writer.writeText("");
-            writer.writeOpenTag(
-                    RateCovarianceStatisticParser.RATE_COVARIANCE_STATISTIC,
-                    new Attribute[]{
-                            new Attribute.Default<String>(XMLParser.ID, "covariance"),
-                            new Attribute.Default<String>("name", "covariance")
-                    }
-            );
-            writer.writeTag(TreeModel.TREE_MODEL, new Attribute.Default<String>(XMLParser.IDREF, "treeModel"), true);
-            writer.writeTag(RandomLocalClockModelParser.LOCAL_BRANCH_RATES, new Attribute.Default<String>(XMLParser.IDREF, BranchRateModel.BRANCH_RATES), true);
-            writer.writeCloseTag(RateCovarianceStatisticParser.RATE_COVARIANCE_STATISTIC);
-
-        } else {
-            writer.writeComment("The uncorrelated relaxed clock (Drummond, Ho, Phillips & Rambaut, 2006)");
-            writer.writeOpenTag(
-                    DiscretizedBranchRatesParser.DISCRETIZED_BRANCH_RATES,
-                    new Attribute[]{new Attribute.Default<String>(XMLParser.ID, BranchRateModel.BRANCH_RATES)}
-            );
-            writer.writeTag(TreeModel.TREE_MODEL, new Attribute.Default<String>(XMLParser.IDREF, "treeModel"), true);
-            writer.writeOpenTag("distribution");
-            if (clockModel == UNCORRELATED_EXPONENTIAL) {
-                if (fixedSubstitutionRate) {
-                    fixParameter(UCED_MEAN, meanSubstitutionRate);
-                }
-
-                final String eModelName = ExponentialDistributionModel.EXPONENTIAL_DISTRIBUTION_MODEL;
-                writer.writeOpenTag(eModelName);
-                writer.writeOpenTag("mean");
-                writeParameter(UCED_MEAN, writer);
-                writer.writeCloseTag("mean");
-                writer.writeCloseTag(eModelName);
-            } else if (clockModel == UNCORRELATED_LOGNORMAL) {
-                if (fixedSubstitutionRate) {
-                    fixParameter(UCLD_MEAN, meanSubstitutionRate);
-                }
-
-                writer.writeOpenTag(LogNormalDistributionModelParser.LOGNORMAL_DISTRIBUTION_MODEL,
-                        new Attribute.Default<String>(LogNormalDistributionModelParser.MEAN_IN_REAL_SPACE, "true"));
-                writer.writeOpenTag("mean");
-                writeParameter(UCLD_MEAN, writer);
-                writer.writeCloseTag("mean");
-                writer.writeOpenTag("stdev");
-                writeParameter(UCLD_STDEV, writer);
-                writer.writeCloseTag("stdev");
-                writer.writeCloseTag(LogNormalDistributionModelParser.LOGNORMAL_DISTRIBUTION_MODEL);
-            } else {
-                throw new RuntimeException("Unrecognised relaxed clock model");
-            }
-            writer.writeCloseTag("distribution");
-            writer.writeOpenTag("rateCategories");
-            int categoryCount = (alignment.getSequenceCount() - 1) * 2;
-            writeParameter("branchRates.categories", categoryCount, writer);
-            writer.writeCloseTag("rateCategories");
-            writer.writeCloseTag(DiscretizedBranchRatesParser.DISCRETIZED_BRANCH_RATES);
-
-            writer.writeText("");
-            writer.writeOpenTag(
-                    RateStatisticParser.RATE_STATISTIC,
-                    new Attribute[]{
-                            new Attribute.Default<String>(XMLParser.ID, "meanRate"),
-                            new Attribute.Default<String>("name", "meanRate"),
-                            new Attribute.Default<String>("mode", "mean"),
-                            new Attribute.Default<String>("internal", "true"),
-                            new Attribute.Default<String>("external", "true")
-                    }
-            );
-            writer.writeTag(TreeModel.TREE_MODEL, new Attribute.Default<String>(XMLParser.IDREF, "treeModel"), true);
-            writer.writeTag(DiscretizedBranchRatesParser.DISCRETIZED_BRANCH_RATES, new Attribute.Default<String>(XMLParser.IDREF, BranchRateModel.BRANCH_RATES), true);
-            writer.writeCloseTag(RateStatisticParser.RATE_STATISTIC);
-
-            writer.writeText("");
-            writer.writeOpenTag(
-                    RateStatisticParser.RATE_STATISTIC,
-                    new Attribute[]{
-                            new Attribute.Default<String>(XMLParser.ID, RateStatisticParser.COEFFICIENT_OF_VARIATION),
-                            new Attribute.Default<String>("name", RateStatisticParser.COEFFICIENT_OF_VARIATION),
-                            new Attribute.Default<String>("mode", RateStatisticParser.COEFFICIENT_OF_VARIATION),
-                            new Attribute.Default<String>("internal", "true"),
-                            new Attribute.Default<String>("external", "true")
-                    }
-            );
-            writer.writeTag(TreeModel.TREE_MODEL, new Attribute.Default<String>(XMLParser.IDREF, "treeModel"), true);
-            writer.writeTag(DiscretizedBranchRatesParser.DISCRETIZED_BRANCH_RATES, new Attribute.Default<String>(XMLParser.IDREF, BranchRateModel.BRANCH_RATES), true);
-            writer.writeCloseTag(RateStatisticParser.RATE_STATISTIC);
-
-            writer.writeText("");
-            writer.writeOpenTag(
-                    RateCovarianceStatisticParser.RATE_COVARIANCE_STATISTIC,
-                    new Attribute[]{
-                            new Attribute.Default<String>(XMLParser.ID, "covariance"),
-                            new Attribute.Default<String>("name", "covariance")
-                    }
-            );
-            writer.writeTag(TreeModel.TREE_MODEL, new Attribute.Default<String>(XMLParser.IDREF, "treeModel"), true);
-            writer.writeTag(DiscretizedBranchRatesParser.DISCRETIZED_BRANCH_RATES, new Attribute.Default<String>(XMLParser.IDREF, BranchRateModel.BRANCH_RATES), true);
-            writer.writeCloseTag(RateCovarianceStatisticParser.RATE_COVARIANCE_STATISTIC);
-        }
-    }
-
-    /**
-     * Write the prior on node heights (coalescent or speciational models)
-     *
-     * @param writer the writer
-     */
-    public void writeNodeHeightPrior(XMLWriter writer) {
-        if (nodeHeightPrior == YULE || nodeHeightPrior == BIRTH_DEATH) {
-            // generate a speciational process
-
-            writer.writeOpenTag(
-                    SpeciationLikelihoodParser.SPECIATION_LIKELIHOOD,
-                    new Attribute[]{
-                            new Attribute.Default<String>(XMLParser.ID, "speciation")
-                    }
-            );
-
-            // write pop size socket
-            writer.writeOpenTag(SpeciationLikelihoodParser.MODEL);
-            writeNodeHeightPriorModelRef(writer);
-            writer.writeCloseTag(SpeciationLikelihoodParser.MODEL);
-            writer.writeOpenTag(SpeciationLikelihoodParser.TREE);
-            writer.writeTag(TreeModel.TREE_MODEL, new Attribute.Default<String>(XMLParser.IDREF, "treeModel"), true);
-            writer.writeCloseTag(SpeciationLikelihoodParser.TREE);
-
-            writer.writeCloseTag(SpeciationLikelihoodParser.SPECIATION_LIKELIHOOD);
-
-        } else if (nodeHeightPrior == SKYLINE) {
-            // generate a Bayesian skyline plot
-
-            writer.writeOpenTag(
-                    BayesianSkylineLikelihoodParser.SKYLINE_LIKELIHOOD,
-                    new Attribute[]{
-                            new Attribute.Default<String>(XMLParser.ID, "skyline"),
-                            new Attribute.Default<String>("linear", skylineModel == LINEAR_SKYLINE ? "true" : "false")
-                    }
-            );
-
-            // write pop size socket
-            writer.writeOpenTag(BayesianSkylineLikelihoodParser.POPULATION_SIZES);
-            if (skylineModel == LINEAR_SKYLINE) {
-                writeParameter("skyline.popSize", skylineGroupCount + 1, writer);
-            } else {
-                writeParameter("skyline.popSize", skylineGroupCount, writer);
-            }
-            writer.writeCloseTag(BayesianSkylineLikelihoodParser.POPULATION_SIZES);
-
-            // write group size socket
-            writer.writeOpenTag(BayesianSkylineLikelihoodParser.GROUP_SIZES);
-            writeParameter("skyline.groupSize", skylineGroupCount, writer);
-            writer.writeCloseTag(BayesianSkylineLikelihoodParser.GROUP_SIZES);
-
-            writer.writeOpenTag(CoalescentLikelihoodParser.POPULATION_TREE);
-            writer.writeTag(TreeModel.TREE_MODEL, new Attribute.Default<String>(XMLParser.IDREF, "treeModel"), true);
-            writer.writeCloseTag(CoalescentLikelihoodParser.POPULATION_TREE);
-
-            writer.writeCloseTag(BayesianSkylineLikelihoodParser.SKYLINE_LIKELIHOOD);
-        } else if (nodeHeightPrior == EXTENDED_SKYLINE) {
-            final String tagName = VariableDemographicModelParser.MODEL_NAME;
-
-            writer.writeOpenTag(
-                    tagName,
-                    new Attribute[]{
-                            new Attribute.Default<String>(XMLParser.ID, VariableDemographicModelParser.demoElementName),
-                            new Attribute.Default<String>(VariableDemographicModelParser.TYPE, extendedSkylineModel)
-                    }
-            );
-
-            writer.writeOpenTag(VariableDemographicModelParser.POPULATION_SIZES);
-            final int nTax = taxonList.getTaxonCount();
-            final int nPops = nTax - (extendedSkylineModel.equals(VariableDemographicModel.Type.STEPWISE.toString()) ? 1 : 0);
-            writeParameter(VariableDemographicModelParser.demoElementName + ".popSize", nPops, writer);
-            writer.writeCloseTag(VariableDemographicModelParser.POPULATION_SIZES);
-
-            writer.writeOpenTag(VariableDemographicModelParser.INDICATOR_PARAMETER);
-            writeParameter(VariableDemographicModelParser.demoElementName + ".indicators", nPops - 1, writer);
-            writer.writeCloseTag(VariableDemographicModelParser.INDICATOR_PARAMETER);
-
-            writer.writeOpenTag(VariableDemographicModelParser.POPULATION_TREES);
-
-            writer.writeOpenTag(VariableDemographicModelParser.POP_TREE);
-            writer.writeTag(TreeModel.TREE_MODEL, new Attribute.Default<String>(XMLParser.IDREF, "treeModel"), true);
-            writer.writeCloseTag(VariableDemographicModelParser.POP_TREE);
-
-            writer.writeCloseTag(VariableDemographicModelParser.POPULATION_TREES);
-
-            writer.writeCloseTag(tagName);
-
-            writer.writeOpenTag(CoalescentLikelihoodParser.COALESCENT_LIKELIHOOD, new Attribute.Default<String>(XMLParser.ID, "coalescent"));
-            writer.writeOpenTag(CoalescentLikelihoodParser.MODEL);
-            writer.writeTag(tagName, new Attribute.Default<String>(XMLParser.IDREF, VariableDemographicModelParser.demoElementName), true);
-            writer.writeCloseTag(CoalescentLikelihoodParser.MODEL);
-            writer.writeComment("Take population Tree from demographic");
-            writer.writeCloseTag(CoalescentLikelihoodParser.COALESCENT_LIKELIHOOD);
-
-            writer.writeOpenTag(SumStatisticParser.SUM_STATISTIC,
-                    new Attribute[]{
-                            new Attribute.Default<String>(XMLParser.ID, VariableDemographicModelParser.demoElementName + ".populationSizeChanges"),
-                            new Attribute.Default<String>("elementwise", "true")
-                    });
-            writer.writeTag(ParameterParser.PARAMETER,
-                    new Attribute.Default<String>(XMLParser.IDREF, VariableDemographicModelParser.demoElementName + ".indicators"), true);
-            writer.writeCloseTag(SumStatisticParser.SUM_STATISTIC);
-            writer.writeOpenTag(ExponentialDistributionModel.EXPONENTIAL_DISTRIBUTION_MODEL,
-                    new Attribute[]{
-                            new Attribute.Default<String>(XMLParser.ID, VariableDemographicModelParser.demoElementName + ".populationMeanDist")
-                            //,new Attribute.Default<String>("elementwise", "true")
-                    });
-            writer.writeOpenTag(DistributionModelParser.MEAN);
-            writer.writeTag(ParameterParser.PARAMETER,
-                    new Attribute[]{
-                            new Attribute.Default<String>(XMLParser.ID, VariableDemographicModelParser.demoElementName + ".populationMean"),
-                            new Attribute.Default<String>("value", "1")}, true);
-            writer.writeCloseTag(DistributionModelParser.MEAN);
-            writer.writeCloseTag(ExponentialDistributionModel.EXPONENTIAL_DISTRIBUTION_MODEL);
-
-        } else {
-            // generate a coalescent process
-
-            writer.writeOpenTag(
-                    CoalescentLikelihoodParser.COALESCENT_LIKELIHOOD,
-                    new Attribute[]{new Attribute.Default<String>(XMLParser.ID, "coalescent")}
-            );
-            writer.writeOpenTag(CoalescentLikelihoodParser.MODEL);
-            writeNodeHeightPriorModelRef(writer);
-            writer.writeCloseTag(CoalescentLikelihoodParser.MODEL);
-            writer.writeOpenTag(CoalescentLikelihoodParser.POPULATION_TREE);
-            writer.writeTag(TreeModel.TREE_MODEL, new Attribute.Default<String>(XMLParser.IDREF, "treeModel"), true);
-            writer.writeCloseTag(CoalescentLikelihoodParser.POPULATION_TREE);
-            writer.writeCloseTag(CoalescentLikelihoodParser.COALESCENT_LIKELIHOOD);
-        }
-    }
-
-    /**
-     * Write the boolean likelihood
-     *
-     * @param writer the writer
-     */
-    public void writeBooleanLikelihood(XMLWriter writer) {
-        writer.writeOpenTag(
-                BooleanLikelihoodParser.BOOLEAN_LIKELIHOOD,
-                new Attribute[]{new Attribute.Default<String>(XMLParser.ID, "booleanLikelihood1")}
-        );
-        writer.writeOpenTag(
-                TestStatisticParser.TEST_STATISTIC,
-                new Attribute[]{
-                        new Attribute.Default<String>(XMLParser.ID, "test1"),
-                        new Attribute.Default<String>("name", "test1")
-                }
-        );
-        writer.writeTag(ParameterParser.PARAMETER, new Attribute.Default<String>(XMLParser.IDREF, "logistic.t50"), true);
-        writer.writeOpenTag("lessThan");
-        writer.writeTag(ParameterParser.PARAMETER, new Attribute.Default<String>(XMLParser.IDREF, "treeModel.rootHeight"), true);
-        writer.writeCloseTag("lessThan");
-        writer.writeCloseTag(TestStatisticParser.TEST_STATISTIC);
-        writer.writeCloseTag(BooleanLikelihoodParser.BOOLEAN_LIKELIHOOD);
-    }
-
-    public void writeExponentialMarkovLikelihood(XMLWriter writer) {
-        writer.writeOpenTag(
-                ExponentialMarkovModel.EXPONENTIAL_MARKOV_MODEL,
-                new Attribute[]{new Attribute.Default<String>(XMLParser.ID, "eml1"),
-                        new Attribute.Default<String>("jeffreys", "true")}
-        );
-        writer.writeOpenTag(ExponentialMarkovModelParser.CHAIN_PARAMETER);
-        writer.writeTag(ParameterParser.PARAMETER, new Attribute.Default<String>(XMLParser.IDREF, "skyline.popSize"), true);
-        writer.writeCloseTag(ExponentialMarkovModelParser.CHAIN_PARAMETER);
-        writer.writeCloseTag(ExponentialMarkovModel.EXPONENTIAL_MARKOV_MODEL);
-    }
-
-
-    /**
-     * Write the tree likelihood XML block.
-     *
-     * @param writer the writer
-     */
-    public void writeTreeLikelihood(XMLWriter writer) {
-
-        boolean nucs = alignment.getDataType() == Nucleotides.INSTANCE;
-        if (nucs && codonHeteroPattern != null) {
-            for (int i = 1; i <= partitionCount; i++) {
-                writeTreeLikelihood(i, writer);
-            }
-        } else {
-            writeTreeLikelihood(-1, writer);
-        }
-    }
-
-
-    /**
-     * Determine and return the datatype description for these beast options
-     * note that the datatype in XML may differ from the actual datatype
-     *
-     * @return description
-     */
-
-    private Boolean useAmbiguities() {
-        Boolean useAmbiguities = false;
-
-        switch (dataType) {
-            case DataType.TWO_STATES:
-            case DataType.COVARION:
-
-                switch (binarySubstitutionModel) {
-                    case BIN_COVARION:
-                        useAmbiguities = true;
-                        break;
-
-                    default:
-                }
-                break;
-
-            default:
-                useAmbiguities = false;
-        }
-
-        return useAmbiguities;
-    }
-
-    /**
-     * Write the tree likelihood XML block.
-     *
-     * @param num    the likelihood number
-     * @param writer the writer
-     */
-    public void writeTreeLikelihood(int num, XMLWriter writer) {
-
-        if (num > 0) {
-            writer.writeOpenTag(
-                    TreeLikelihoodParser.TREE_LIKELIHOOD,
-                    new Attribute[]{
-                            new Attribute.Default<String>(XMLParser.ID, TreeLikelihoodParser.TREE_LIKELIHOOD + num),
-                            new Attribute.Default<Boolean>(TreeLikelihoodParser.USE_AMBIGUITIES, useAmbiguities())}
-            );
-            if (codonHeteroPattern.equals("112")) {
-                if (num == 1) {
-                    writer.writeTag(SitePatternsParser.PATTERNS, new Attribute[]{new Attribute.Default<String>(XMLParser.IDREF, "patterns1+2")}, true);
-                } else {
-                    writer.writeTag(SitePatternsParser.PATTERNS, new Attribute[]{new Attribute.Default<String>(XMLParser.IDREF, "patterns3")}, true);
-                }
-            } else {
-                writer.writeTag(SitePatternsParser.PATTERNS, new Attribute[]{new Attribute.Default<String>(XMLParser.IDREF, SitePatternsParser.PATTERNS + num)}, true);
-            }
-            writer.writeTag(TreeModel.TREE_MODEL, new Attribute[]{new Attribute.Default<String>(XMLParser.IDREF, "treeModel")}, true);
-            writer.writeTag(GammaSiteModel.SITE_MODEL, new Attribute[]{new Attribute.Default<String>(XMLParser.IDREF, SiteModel.SITE_MODEL + num)}, true);
-        } else {
-            writer.writeOpenTag(
-                    TreeLikelihoodParser.TREE_LIKELIHOOD,
-                    new Attribute[]{
-                            new Attribute.Default<String>(XMLParser.ID, TreeLikelihoodParser.TREE_LIKELIHOOD),
-                            new Attribute.Default<Boolean>(TreeLikelihoodParser.USE_AMBIGUITIES, useAmbiguities())
-                    }
-            );
-            writer.writeTag(SitePatternsParser.PATTERNS, new Attribute[]{new Attribute.Default<String>(XMLParser.IDREF, SitePatternsParser.PATTERNS)}, true);
-            writer.writeTag(TreeModel.TREE_MODEL, new Attribute[]{new Attribute.Default<String>(XMLParser.IDREF, "treeModel")}, true);
-            writer.writeTag(GammaSiteModel.SITE_MODEL, new Attribute[]{new Attribute.Default<String>(XMLParser.IDREF, SiteModel.SITE_MODEL)}, true);
-        }
-        if (clockModel == STRICT_CLOCK) {
-            writer.writeTag(StrictClockBranchRatesParser.STRICT_CLOCK_BRANCH_RATES, new Attribute[]{new Attribute.Default<String>(XMLParser.IDREF, BranchRateModel.BRANCH_RATES)}, true);
-        } else {
-            writer.writeTag(DiscretizedBranchRatesParser.DISCRETIZED_BRANCH_RATES, new Attribute[]{new Attribute.Default<String>(XMLParser.IDREF, BranchRateModel.BRANCH_RATES)}, true);
-        }
-
-        writer.writeCloseTag(TreeLikelihoodParser.TREE_LIKELIHOOD);
-    }
-
-    /**
-     * Generate tmrca statistics
-     *
-     * @param writer the writer
-     */
-    public void writeTMRCAStatistics(XMLWriter writer) {
-
-        writer.writeText("");
-        for (Taxa taxa : taxonSets) {
-            writer.writeOpenTag(
-                    TMRCAStatisticParser.TMRCA_STATISTIC,
-                    new Attribute[]{
-                            new Attribute.Default<String>(XMLParser.ID, "tmrca(" + taxa.getId() + ")"),
-                    }
-            );
-            writer.writeOpenTag(TMRCAStatisticParser.MRCA);
-            writer.writeTag(TaxaParser.TAXA, new Attribute[]{new Attribute.Default<String>(XMLParser.IDREF, taxa.getId())}, true);
-            writer.writeCloseTag(TMRCAStatisticParser.MRCA);
-            writer.writeTag(TreeModel.TREE_MODEL, new Attribute[]{new Attribute.Default<String>(XMLParser.IDREF, "treeModel")}, true);
-            writer.writeCloseTag(TMRCAStatisticParser.TMRCA_STATISTIC);
-
-            if (taxonSetsMono.get(taxa)) {
-                writer.writeOpenTag(
-                        MonophylyStatisticParser.MONOPHYLY_STATISTIC,
-                        new Attribute[]{
-                                new Attribute.Default<String>(XMLParser.ID, "monophyly(" + taxa.getId() + ")"),
-                        });
-                writer.writeOpenTag(MonophylyStatisticParser.MRCA);
-                writer.writeTag(TaxaParser.TAXA, new Attribute[]{new Attribute.Default<String>(XMLParser.IDREF, taxa.getId())}, true);
-                writer.writeCloseTag(MonophylyStatisticParser.MRCA);
-                writer.writeTag(TreeModel.TREE_MODEL, new Attribute[]{new Attribute.Default<String>(XMLParser.IDREF, "treeModel")}, true);
-                writer.writeCloseTag(MonophylyStatisticParser.MONOPHYLY_STATISTIC);
-            }
-        }
-    }
-
-    /**
-     * Write the operator schedule XML block.
-     *
-     * @param operators the list of operators
-     * @param writer    the writer
-     */
-    public void writeOperatorSchedule(ArrayList<Operator> operators, XMLWriter writer) {
-        writer.writeOpenTag(
-                SimpleOperatorScheduleParser.OPERATOR_SCHEDULE,
-                new Attribute[]{new Attribute.Default<String>(XMLParser.ID, "operators")}
-        );
-
-        for (Operator operator : operators) {
-            if (operator.weight > 0. && operator.inUse)
-                writeOperator(operator, writer);
-        }
-
-        writer.writeCloseTag(SimpleOperatorScheduleParser.OPERATOR_SCHEDULE);
-    }
-
-    private void writeOperator(Operator operator, XMLWriter writer) {
-        if (operator.type.equals(SCALE)) {
-            writeScaleOperator(operator, writer);
-        } else if (operator.type.equals(RANDOM_WALK)) {
-            writeRandomWalkOperator(operator, writer);
-        } else if (operator.type.equals(INTEGER_RANDOM_WALK)) {
-            writeIntegerRandomWalkOperator(operator, writer);
-        } else if (operator.type.equals(UP_DOWN)) {
-            writeUpDownOperator(operator, writer);
-        } else if (operator.type.equals(SCALE_ALL)) {
-            writeScaleAllOperator(operator, writer);
-        } else if (operator.type.equals(CENTERED_SCALE)) {
-            writeCenteredOperator(operator, writer);
-        } else if (operator.type.equals(DELTA_EXCHANGE)) {
-            writeDeltaOperator(operator, writer);
-        } else if (operator.type.equals(INTEGER_DELTA_EXCHANGE)) {
-            writeIntegerDeltaOperator(operator, writer);
-        } else if (operator.type.equals(SWAP)) {
-            writeSwapOperator(operator, writer);
-        } else if (operator.type.equals(BITFLIP)) {
-            writeBitFlipOperator(operator, writer);
-        } else if (operator.type.equals(TREE_BIT_MOVE)) {
-            writeTreeBitMoveOperator(operator, writer);
-        } else if (operator.type.equals(UNIFORM)) {
-            writeUniformOperator(operator, writer);
-        } else if (operator.type.equals(INTEGER_UNIFORM)) {
-            writeIntegerUniformOperator(operator, writer);
-        } else if (operator.type.equals(SUBTREE_SLIDE)) {
-            writeSubtreeSlideOperator(operator, writer);
-        } else if (operator.type.equals(NARROW_EXCHANGE)) {
-            writeNarrowExchangeOperator(operator, writer);
-        } else if (operator.type.equals(WIDE_EXCHANGE)) {
-            writeWideExchangeOperator(operator, writer);
-        } else if (operator.type.equals(WILSON_BALDING)) {
-            writeWilsonBaldingOperator(operator, writer);
-        } else if (operator.type.equals(SAMPLE_NONACTIVE)) {
-            writeSampleNonActiveOperator(operator, writer);
-        } else if (operator.type.equals(SCALE_WITH_INDICATORS)) {
-            writeScaleWithIndicatorsOperator(operator, writer);
-        }
-    }
-
-    private Attribute getRef(String name) {
-        return new Attribute.Default<String>(XMLParser.IDREF, name);
-    }
-
-    private void writeParameterRefByName(XMLWriter writer, String name) {
-        writer.writeTag(ParameterParser.PARAMETER, getRef(name), true);
-    }
-
-    private void writeParameter1Ref(XMLWriter writer, Operator operator) {
-        writeParameterRefByName(writer, operator.parameter1.getName());
-    }
-
-    private void writeScaleOperator(Operator operator, XMLWriter writer) {
-        writer.writeOpenTag(
-                ScaleOperatorParser.SCALE_OPERATOR,
-                new Attribute[]{
-                        new Attribute.Default<Double>(ScaleOperatorParser.SCALE_FACTOR, operator.tuning),
-                        new Attribute.Default<Double>("weight", operator.weight),
-                });
-        writeParameter1Ref(writer, operator);
-        writer.writeCloseTag(ScaleOperatorParser.SCALE_OPERATOR);
-    }
-
-    private void writeRandomWalkOperator(Operator operator, XMLWriter writer) {
-        writer.writeOpenTag(
-                "randomWalkOperator",
-                new Attribute[]{
-                        new Attribute.Default<Double>("windowSize", operator.tuning),
-                        new Attribute.Default<Double>("weight", operator.weight)
-                });
-        writeParameter1Ref(writer, operator);
-        writer.writeCloseTag("randomWalkOperator");
-    }
-
-    private void writeIntegerRandomWalkOperator(Operator operator, XMLWriter writer) {
-        writer.writeOpenTag(
-                RandomWalkIntegerOperatorParser.RANDOM_WALK_INTEGER_OPERATOR,
-                new Attribute[]{
-                        new Attribute.Default<Double>("windowSize", operator.tuning),
-                        new Attribute.Default<Double>("weight", operator.weight)
-                });
-        writeParameter1Ref(writer, operator);
-        writer.writeCloseTag(RandomWalkIntegerOperatorParser.RANDOM_WALK_INTEGER_OPERATOR);
-    }
-
-    private void writeScaleAllOperator(Operator operator, XMLWriter writer) {
-        writer.writeOpenTag(
-                ScaleOperatorParser.SCALE_OPERATOR,
-                new Attribute[]{
-                        new Attribute.Default<Double>(ScaleOperatorParser.SCALE_FACTOR, operator.tuning),
-                        new Attribute.Default<String>(ScaleOperatorParser.SCALE_ALL, "true"),
-                        new Attribute.Default<Double>("weight", operator.weight),
-                });
-        writer.writeOpenTag(CompoundParameterParser.COMPOUND_PARAMETER);
-        writeParameter1Ref(writer, operator);
-        writer.writeTag(ParameterParser.PARAMETER, new Attribute[]{new Attribute.Default<String>(XMLParser.IDREF, operator.parameter2.getName())}, true);
-        writer.writeCloseTag(CompoundParameterParser.COMPOUND_PARAMETER);
-        writer.writeCloseTag(ScaleOperatorParser.SCALE_OPERATOR);
-    }
-
-    private void writeUpDownOperator(Operator operator, XMLWriter writer) {
-        writer.writeOpenTag(UpDownOperatorParser.UP_DOWN_OPERATOR,
-                new Attribute[]{
-                        new Attribute.Default<Double>(UpDownOperatorParser.SCALE_FACTOR, operator.tuning),
-                        new Attribute.Default<Double>("weight", operator.weight),
-                }
-        );
-
-        writer.writeOpenTag(UpDownOperatorParser.UP);
-        writeParameter1Ref(writer, operator);
-        writer.writeCloseTag(UpDownOperatorParser.UP);
-
-        writer.writeOpenTag(UpDownOperatorParser.DOWN);
-        writer.writeTag(ParameterParser.PARAMETER, new Attribute[]{new Attribute.Default<String>(XMLParser.IDREF, operator.parameter2.getName())}, true);
-        writer.writeCloseTag(UpDownOperatorParser.DOWN);
-
-        writer.writeCloseTag(UpDownOperatorParser.UP_DOWN_OPERATOR);
-    }
-
-    private void writeCenteredOperator(Operator operator, XMLWriter writer) {
-        writer.writeOpenTag(CenteredScaleOperatorParser.CENTERED_SCALE,
-                new Attribute[]{
-                        new Attribute.Default<Double>(CenteredScaleOperatorParser.SCALE_FACTOR, operator.tuning),
-                        new Attribute.Default<Double>("weight", operator.weight),
-                }
-        );
-        writeParameter1Ref(writer, operator);
-        writer.writeCloseTag(CenteredScaleOperatorParser.CENTERED_SCALE);
-    }
-
-    private void writeDeltaOperator(Operator operator, XMLWriter writer) {
-        partitionCount = getPartionCount(codonHeteroPattern);
-
-        if (operator.name.equals("Relative rates") && codonHeteroPattern.equals("112")) {
-            writer.writeOpenTag(DeltaExchangeOperatorParser.DELTA_EXCHANGE,
-                    new Attribute[]{
-                            new Attribute.Default<Double>(DeltaExchangeOperatorParser.DELTA, operator.tuning),
-                            new Attribute.Default<String>(DeltaExchangeOperatorParser.PARAMETER_WEIGHTS, "2 1"),
-                            new Attribute.Default<Double>("weight", operator.weight),
-                    }
-            );
-        } else {
-            writer.writeOpenTag(DeltaExchangeOperatorParser.DELTA_EXCHANGE,
-                    new Attribute[]{
-                            new Attribute.Default<Double>(DeltaExchangeOperatorParser.DELTA, operator.tuning),
-                            new Attribute.Default<Double>("weight", operator.weight),
-                    }
-            );
-        }
-
-        writeParameter1Ref(writer, operator);
-        writer.writeCloseTag(DeltaExchangeOperatorParser.DELTA_EXCHANGE);
-    }
-
-    private void writeIntegerDeltaOperator(Operator operator, XMLWriter writer) {
-        writer.writeOpenTag(DeltaExchangeOperatorParser.DELTA_EXCHANGE,
-                new Attribute[]{
-                        new Attribute.Default<String>(DeltaExchangeOperatorParser.DELTA, Integer.toString((int) operator.tuning)),
-                        new Attribute.Default<String>("integer", "true"),
-                        new Attribute.Default<Double>("weight", operator.weight),
-                        new Attribute.Default<String>("autoOptimize", "false")
-                }
-        );
-        writeParameter1Ref(writer, operator);
-        writer.writeCloseTag(DeltaExchangeOperatorParser.DELTA_EXCHANGE);
-    }
-
-    private void writeSwapOperator(Operator operator, XMLWriter writer) {
-        writer.writeOpenTag(SwapOperatorParser.SWAP_OPERATOR,
-                new Attribute[]{
-                        new Attribute.Default<String>("size", Integer.toString((int) operator.tuning)),
-                        new Attribute.Default<Double>("weight", operator.weight),
-                        new Attribute.Default<String>("autoOptimize", "false")
-                }
-        );
-        writeParameter1Ref(writer, operator);
-        writer.writeCloseTag(SwapOperatorParser.SWAP_OPERATOR);
-    }
-
-    private void writeBitFlipOperator(Operator operator, XMLWriter writer) {
-        writer.writeOpenTag(BitFlipOperatorParser.BIT_FLIP_OPERATOR,
-                new Attribute[]{
-                        new Attribute.Default<Double>("weight", operator.weight),
-                }
-        );
-        writeParameter1Ref(writer, operator);
-        writer.writeCloseTag(BitFlipOperatorParser.BIT_FLIP_OPERATOR);
-    }
-
-    private void writeTreeBitMoveOperator(Operator operator, XMLWriter writer) {
-        writer.writeOpenTag(TreeBitMoveOperatorParser.BIT_MOVE_OPERATOR,
-                new Attribute[]{
-                        new Attribute.Default<Double>("weight", operator.weight),
-                }
-        );
-        writer.writeTag(TreeModel.TREE_MODEL, new Attribute[]{new Attribute.Default<String>(XMLParser.IDREF, "treeModel")}, true);
-        writer.writeCloseTag(TreeBitMoveOperatorParser.BIT_MOVE_OPERATOR);
-    }
-
-    private void writeUniformOperator(Operator operator, XMLWriter writer) {
-        writer.writeOpenTag("uniformOperator",
-                new Attribute.Default<Double>("weight", operator.weight));
-        writeParameter1Ref(writer, operator);
-        writer.writeCloseTag("uniformOperator");
-    }
-
-    private void writeIntegerUniformOperator(Operator operator, XMLWriter writer) {
-        writer.writeOpenTag("uniformIntegerOperator",
-                new Attribute.Default<Double>("weight", operator.weight));
-        writeParameter1Ref(writer, operator);
-        writer.writeCloseTag("uniformIntegerOperator");
-    }
-
-    private void writeNarrowExchangeOperator(Operator operator, XMLWriter writer) {
-        writer.writeOpenTag(ExchangeOperatorParser.NARROW_EXCHANGE,
-                new Attribute.Default<Double>("weight", operator.weight));
-        writer.writeTag(TreeModel.TREE_MODEL, new Attribute[]{new Attribute.Default<String>(XMLParser.IDREF, "treeModel")}, true);
-        writer.writeCloseTag(ExchangeOperatorParser.NARROW_EXCHANGE);
-    }
-
-    private void writeWideExchangeOperator(Operator operator, XMLWriter writer) {
-        writer.writeOpenTag(ExchangeOperatorParser.WIDE_EXCHANGE,
-                new Attribute.Default<Double>("weight", operator.weight));
-        writer.writeTag(TreeModel.TREE_MODEL, new Attribute[]{new Attribute.Default<String>(XMLParser.IDREF, "treeModel")}, true);
-        writer.writeCloseTag(ExchangeOperatorParser.WIDE_EXCHANGE);
-    }
-
-    private void writeWilsonBaldingOperator(Operator operator, XMLWriter writer) {
-        writer.writeOpenTag(WilsonBaldingParser.WILSON_BALDING,
-                new Attribute.Default<Double>("weight", operator.weight));
-        writer.writeTag(TreeModel.TREE_MODEL, new Attribute[]{new Attribute.Default<String>(XMLParser.IDREF, "treeModel")}, true);
-        if (nodeHeightPrior == CONSTANT) {
-            writeNodeHeightPriorModelRef(writer);
-        }
-        writer.writeCloseTag(WilsonBaldingParser.WILSON_BALDING);
-    }
-
-    private void writeSampleNonActiveOperator(Operator operator, XMLWriter writer) {
-        writer.writeOpenTag(SampleNonActiveGibbsOperatorParser.SAMPLE_NONACTIVE_GIBBS_OPERATOR,
-                new Attribute.Default<Double>("weight", operator.weight));
-
-        writer.writeOpenTag(SampleNonActiveGibbsOperatorParser.DISTRIBUTION);
-        writeParameterRefByName(writer, operator.name);
-        writer.writeCloseTag(SampleNonActiveGibbsOperatorParser.DISTRIBUTION);
-
-        writer.writeOpenTag(SampleNonActiveGibbsOperatorParser.DATA_PARAMETER);
-        writeParameter1Ref(writer, operator);
-        writer.writeCloseTag(SampleNonActiveGibbsOperatorParser.DATA_PARAMETER);
-
-        writer.writeOpenTag(SampleNonActiveGibbsOperatorParser.INDICATOR_PARAMETER);
-        writeParameterRefByName(writer, operator.parameter2.getName());
-        writer.writeCloseTag(SampleNonActiveGibbsOperatorParser.INDICATOR_PARAMETER);
-
-        writer.writeCloseTag(SampleNonActiveGibbsOperatorParser.SAMPLE_NONACTIVE_GIBBS_OPERATOR);
-    }
-
-    private void writeScaleWithIndicatorsOperator(Operator operator, XMLWriter writer) {
-        writer.writeOpenTag(
-                ScaleOperatorParser.SCALE_OPERATOR,
-                new Attribute[]{
-                        new Attribute.Default<Double>(ScaleOperatorParser.SCALE_FACTOR, operator.tuning),
-                        new Attribute.Default<Double>("weight", operator.weight),
-                });
-        writeParameter1Ref(writer, operator);
-        writer.writeOpenTag(ScaleOperatorParser.INDICATORS, new Attribute.Default<String>(ScaleOperatorParser.PICKONEPROB, "1.0"));
-        writeParameterRefByName(writer, operator.parameter2.getName());
-        writer.writeCloseTag(ScaleOperatorParser.INDICATORS);
-        writer.writeCloseTag(ScaleOperatorParser.SCALE_OPERATOR);
-    }
-
-    private void writeSubtreeSlideOperator(Operator operator, XMLWriter writer) {
-        writer.writeOpenTag(SubtreeSlideOperatorParser.SUBTREE_SLIDE,
-                new Attribute[]{
-                        new Attribute.Default<Double>("size", operator.tuning),
-                        new Attribute.Default<String>("gaussian", "true"),
-                        new Attribute.Default<Double>("weight", operator.weight)
-                }
-        );
-        writer.writeTag(TreeModel.TREE_MODEL, new Attribute[]{new Attribute.Default<String>(XMLParser.IDREF, "treeModel")}, true);
-        writer.writeCloseTag(SubtreeSlideOperatorParser.SUBTREE_SLIDE);
-    }
-
-    /**
-     * Write the timer report block.
-     *
-     * @param writer the writer
-     */
-    public void writeTimerReport(XMLWriter writer) {
-        writer.writeOpenTag("report");
-        writer.writeOpenTag("property", new Attribute.Default<String>("name", "timer"));
-        writer.writeTag("object", new Attribute.Default<String>(XMLParser.IDREF, "mcmc"), true);
-        writer.writeCloseTag("property");
-        writer.writeCloseTag("report");
-    }
-
-    /**
-     * Write the trace analysis block.
-     *
-     * @param writer the writer
-     */
-    public void writeTraceAnalysis(XMLWriter writer) {
-        writer.writeTag(
-                "traceAnalysis",
-                new Attribute[]{
-                        new Attribute.Default<String>("fileName", logFileName)
-                },
-                true
-        );
-    }
-
-    public void writeAnalysisToCSVfile(XMLWriter writer) {
-        if (nodeHeightPrior == EXTENDED_SKYLINE) {
-            writer.writeOpenTag(EBSPAnalysisParser.VD_ANALYSIS, new Attribute[]{
-                    new Attribute.Default<String>(XMLParser.ID, "demographic.analysis"),
-                    new Attribute.Default<Double>(EBSPAnalysisParser.BURN_IN, 0.1)}
-            );
-
-            writer.writeOpenTag(EBSPAnalysisParser.LOG_FILE_NAME);
-            writer.writeText(logFileName);
-            writer.writeCloseTag(EBSPAnalysisParser.LOG_FILE_NAME);
-
-            writer.writeOpenTag(EBSPAnalysisParser.TREE_FILE_NAMES);
-            writer.writeOpenTag(EBSPAnalysisParser.TREE_LOG);
-            writer.writeText(treeFileName);
-            writer.writeCloseTag(EBSPAnalysisParser.TREE_LOG);
-            writer.writeCloseTag(EBSPAnalysisParser.TREE_FILE_NAMES);
-
-            writer.writeOpenTag(EBSPAnalysisParser.MODEL_TYPE);
-            writer.writeText(extendedSkylineModel);
-            writer.writeCloseTag(EBSPAnalysisParser.MODEL_TYPE);
-
-            writer.writeOpenTag(EBSPAnalysisParser.POPULATION_FIRST_COLUMN);
-            writer.writeText(VariableDemographicModelParser.demoElementName + ".popSize" + 1);
-            writer.writeCloseTag(EBSPAnalysisParser.POPULATION_FIRST_COLUMN);
-
-            writer.writeOpenTag(EBSPAnalysisParser.INDICATORS_FIRST_COLUMN);
-            writer.writeText(VariableDemographicModelParser.demoElementName + ".indicators" + 1);
-            writer.writeCloseTag(EBSPAnalysisParser.INDICATORS_FIRST_COLUMN);
-
-            writer.writeCloseTag(EBSPAnalysisParser.VD_ANALYSIS);
-
-            writer.writeOpenTag(CSVExporterParser.CSV_EXPORT,
-                    new Attribute[]{
-                            new Attribute.Default<String>(CSVExporterParser.FILE_NAME,
-                                    logFileName.subSequence(0, logFileName.length() - 4) + ".csv"),
-                            new Attribute.Default<String>(CSVExporterParser.SEPARATOR, ",")
-                    });
-            writer.writeOpenTag(CSVExporterParser.COLUMNS);
-            writer.writeTag(EBSPAnalysisParser.VD_ANALYSIS,
-                    new Attribute[]{new Attribute.Default<String>(XMLParser.IDREF, "demographic.analysis")}, true);
-            writer.writeCloseTag(CSVExporterParser.COLUMNS);
-            writer.writeCloseTag(CSVExporterParser.CSV_EXPORT);
-        }
-
-    }
-
-    /**
-     * Write the MCMC block.
-     *
-     * @param writer the writer
-     */
-    public void writeMCMC(XMLWriter writer) {
-        writer.writeOpenTag(
-                "mcmc",
-                new Attribute[]{
-                        new Attribute.Default<String>(XMLParser.ID, "mcmc"),
-                        new Attribute.Default<Integer>("chainLength", chainLength),
-                        new Attribute.Default<String>("autoOptimize", autoOptimize ? "true" : "false")
-                });
-
-        if (alignment != null) {
-            // we have data...
-            writer.writeOpenTag(CompoundLikelihoodParser.POSTERIOR, new Attribute.Default<String>(XMLParser.ID, "posterior"));
-        }
-
-        // write prior block
-        writer.writeOpenTag(CompoundLikelihoodParser.PRIOR, new Attribute.Default<String>(XMLParser.ID, "prior"));
-
-        writeParameterPriors(writer);
-
-
-        if (nodeHeightPrior == YULE || nodeHeightPrior == BIRTH_DEATH) {
-            writer.writeTag(SpeciationLikelihoodParser.SPECIATION_LIKELIHOOD, new Attribute.Default<String>(XMLParser.IDREF, "speciation"), true);
-        } else if (nodeHeightPrior == SKYLINE) {
-            writer.writeTag(BayesianSkylineLikelihoodParser.SKYLINE_LIKELIHOOD, new Attribute.Default<String>(XMLParser.IDREF, "skyline"), true);
-        } else {
-            writer.writeTag(CoalescentLikelihoodParser.COALESCENT_LIKELIHOOD, new Attribute.Default<String>(XMLParser.IDREF, "coalescent"), true);
-        }
-
-        if (nodeHeightPrior == LOGISTIC) {
-            writer.writeTag(BooleanLikelihoodParser.BOOLEAN_LIKELIHOOD, new Attribute.Default<String>(XMLParser.IDREF, "booleanLikelihood1"), true);
-        }
-
-        if (nodeHeightPrior == SKYLINE) {
-            writer.writeTag(ExponentialMarkovModel.EXPONENTIAL_MARKOV_MODEL, new Attribute.Default<String>(XMLParser.IDREF, "eml1"), true);
-        }
-        if (nodeHeightPrior == EXTENDED_SKYLINE) {
-            writer.writeOpenTag(MixedDistributionLikelihoodParser.DISTRIBUTION_LIKELIHOOD);
-
-            writer.writeOpenTag(MixedDistributionLikelihoodParser.DISTRIBUTION0);
-            writer.writeTag(ExponentialDistributionModel.EXPONENTIAL_DISTRIBUTION_MODEL,
-                    new Attribute.Default<String>(XMLParser.IDREF, "demographic.populationMeanDist"), true);
-            writer.writeCloseTag(MixedDistributionLikelihoodParser.DISTRIBUTION0);
-
-            writer.writeOpenTag(MixedDistributionLikelihoodParser.DISTRIBUTION1);
-            writer.writeTag(ExponentialDistributionModel.EXPONENTIAL_DISTRIBUTION_MODEL,
-                    new Attribute.Default<String>(XMLParser.IDREF, "demographic.populationMeanDist"), true);
-            writer.writeCloseTag(MixedDistributionLikelihoodParser.DISTRIBUTION1);
-
-            writer.writeOpenTag(MixedDistributionLikelihoodParser.DATA);
-            writer.writeTag(ParameterParser.PARAMETER,
-                    new Attribute.Default<String>(XMLParser.IDREF, "demographic.popSize"), true);
-            writer.writeCloseTag(MixedDistributionLikelihoodParser.DATA);
-
-            writer.writeOpenTag(MixedDistributionLikelihoodParser.INDICATORS);
-            writer.writeTag(ParameterParser.PARAMETER,
-                    new Attribute.Default<String>(XMLParser.IDREF, "demographic.indicators"), true);
-            writer.writeCloseTag(MixedDistributionLikelihoodParser.INDICATORS);
-
-            writer.writeCloseTag(MixedDistributionLikelihoodParser.DISTRIBUTION_LIKELIHOOD);
-        }
-        writer.writeCloseTag(CompoundLikelihoodParser.PRIOR);
-
-        if (alignment != null) {
-            // write likelihood block
-            writer.writeOpenTag(CompoundLikelihoodParser.LIKELIHOOD, new Attribute.Default<String>(XMLParser.ID, "likelihood"));
-
-            boolean nucs = alignment.getDataType() == Nucleotides.INSTANCE;
-            if (nucs && codonHeteroPattern != null) {
-                for (int i = 1; i <= partitionCount; i++) {
-                    writer.writeTag(TreeLikelihoodParser.TREE_LIKELIHOOD, new Attribute.Default<String>(XMLParser.IDREF, TreeLikelihoodParser.TREE_LIKELIHOOD + i), true);
-                }
-            } else {
-                writer.writeTag(TreeLikelihoodParser.TREE_LIKELIHOOD, new Attribute.Default<String>(XMLParser.IDREF, TreeLikelihoodParser.TREE_LIKELIHOOD), true);
-            }
-
-            writer.writeCloseTag(CompoundLikelihoodParser.LIKELIHOOD);
-
-
-            writer.writeCloseTag(CompoundLikelihoodParser.POSTERIOR);
-        }
-
-        writer.writeTag(SimpleOperatorScheduleParser.OPERATOR_SCHEDULE, new Attribute.Default<String>(XMLParser.IDREF, "operators"), true);
-
-        // write log to screen
-        writer.writeOpenTag(LoggerParser.LOG,
-                new Attribute[]{
-                        new Attribute.Default<String>(XMLParser.ID, "screenLog"),
-                        new Attribute.Default<String>(LoggerParser.LOG_EVERY, echoEvery + "")
-                });
-        writeScreenLog(writer);
-        writer.writeCloseTag(LoggerParser.LOG);
-
-        // write log to file
-        if (logFileName == null) {
-            logFileName = fileNameStem + ".log";
-        }
-        writer.writeOpenTag(LoggerParser.LOG,
-                new Attribute[]{
-                        new Attribute.Default<String>(XMLParser.ID, "fileLog"),
-                        new Attribute.Default<String>(LoggerParser.LOG_EVERY, logEvery + ""),
-                        new Attribute.Default<String>(LoggerParser.FILE_NAME, logFileName)
-                });
-        writeLog(writer);
-        writer.writeCloseTag(LoggerParser.LOG);
-
-        // write tree log to file
-        if (treeFileName == null) {
-            if (substTreeLog) {
-                treeFileName = fileNameStem + "(time).trees";
-            } else {
-                treeFileName = fileNameStem + ".trees";
-            }
-        }
-        writer.writeOpenTag(TreeLoggerParser.LOG_TREE,
-                new Attribute[]{
-                        new Attribute.Default<String>(XMLParser.ID, "treeFileLog"),
-                        new Attribute.Default<String>(TreeLoggerParser.LOG_EVERY, logEvery + ""),
-                        new Attribute.Default<String>(TreeLoggerParser.NEXUS_FORMAT, "true"),
-                        new Attribute.Default<String>(TreeLoggerParser.FILE_NAME, treeFileName),
-                        new Attribute.Default<String>(TreeLoggerParser.SORT_TRANSLATION_TABLE, "true")
-                });
-        writer.writeTag(TreeModel.TREE_MODEL, new Attribute.Default<String>(XMLParser.IDREF, "treeModel"), true);
-        if (clockModel != STRICT_CLOCK) {
-            writer.writeTag(DiscretizedBranchRatesParser.DISCRETIZED_BRANCH_RATES, new Attribute[]{new Attribute.Default<String>(XMLParser.IDREF, BranchRateModel.BRANCH_RATES)}, true);
-        }
-        if (alignment != null) {
-            // we have data...
-            writer.writeTag("posterior", new Attribute.Default<String>(XMLParser.IDREF, "posterior"), true);
-        }
-        writer.writeCloseTag(TreeLoggerParser.LOG_TREE);
-
-//        if (mapTreeLog) {
-//            // write tree log to file
-//            if (mapTreeFileName == null) {
-//                mapTreeFileName = fileNameStem + ".MAP.tree";
-//            }
-//            writer.writeOpenTag("logML",
-//                    new Attribute[] {
-//                        new Attribute.Default<String>(TreeLogger.FILE_NAME, mapTreeFileName)
-//                    });
-//            writer.writeOpenTag("ml");
-//            writer.writeTag(CompoundLikelihood.POSTERIOR, new Attribute.Default<String>(XMLParser.IDREF, "posterior"), true);
-//            writer.writeCloseTag("ml");
-//            writer.writeOpenTag("column", new Attribute[] {
-//                        new Attribute.Default<String>("label", "MAP tree")
-//                    });
-//            writer.writeTag(TreeModel.TREE_MODEL, new Attribute.Default<String>(XMLParser.IDREF, "treeModel"), true);
-//            writer.writeCloseTag("column");
-//            writer.writeCloseTag("logML");
-//        }
-
-        if (substTreeLog) {
-            // write tree log to file
-            if (substTreeFileName == null) {
-                substTreeFileName = fileNameStem + "(subst).trees";
-            }
-            writer.writeOpenTag(TreeLoggerParser.LOG_TREE,
-                    new Attribute[]{
-                            new Attribute.Default<String>(XMLParser.ID, "substTreeFileLog"),
-                            new Attribute.Default<String>(TreeLoggerParser.LOG_EVERY, logEvery + ""),
-                            new Attribute.Default<String>(TreeLoggerParser.NEXUS_FORMAT, "true"),
-                            new Attribute.Default<String>(TreeLoggerParser.FILE_NAME, substTreeFileName),
-                            new Attribute.Default<String>(TreeLoggerParser.BRANCH_LENGTHS, TreeLoggerParser.SUBSTITUTIONS)
-                    });
-            writer.writeTag(TreeModel.TREE_MODEL, new Attribute.Default<String>(XMLParser.IDREF, "treeModel"), true);
-            if (clockModel == STRICT_CLOCK) {
-                writer.writeTag(StrictClockBranchRatesParser.STRICT_CLOCK_BRANCH_RATES, new Attribute[]{new Attribute.Default<String>(XMLParser.IDREF, BranchRateModel.BRANCH_RATES)}, true);
-            } else {
-                writer.writeTag(DiscretizedBranchRatesParser.DISCRETIZED_BRANCH_RATES, new Attribute[]{new Attribute.Default<String>(XMLParser.IDREF, BranchRateModel.BRANCH_RATES)}, true);
-            }
-            writer.writeCloseTag(TreeLoggerParser.LOG_TREE);
-        }
-
-        writer.writeCloseTag("mcmc");
-    }
-
-    /**
-     * Write the priors for each parameter
-     *
-     * @param writer the writer
-     */
-    private void writeParameterPriors(XMLWriter writer) {
-        boolean first = true;
-
-        for (Map.Entry<Taxa, Boolean> taxa : taxonSetsMono.entrySet()) {
-            if ( taxa.getValue() ) {
-                if (first) {
-                    writer.writeOpenTag(BooleanLikelihoodParser.BOOLEAN_LIKELIHOOD);
-                    first = false;
-                }
-                final String taxaRef = "monophyly(" + taxa.getKey().getId() + ")";
-                final Attribute.Default attr = new Attribute.Default<String>(XMLParser.IDREF, taxaRef);
-                writer.writeTag(MonophylyStatisticParser.MONOPHYLY_STATISTIC, new Attribute[]{attr}, true);
-            }
-        }
-        if (!first) {
-            writer.writeCloseTag(BooleanLikelihoodParser.BOOLEAN_LIKELIHOOD);
-        }
-
-        ArrayList<Parameter> parameters = selectParameters();
-        for (Parameter parameter : parameters) {
-            if (parameter.priorType != PriorType.NONE) {
-                if (parameter.priorType != PriorType.UNIFORM_PRIOR || parameter.isNodeHeight) {
-                    writeParameterPrior(parameter, writer);
-                }
-            }
-        }
-
-    }
-
-    /**
-     * Write the priors for each parameter
-     *
-     * @param parameter the parameter
-     * @param writer    the writer
-     */
-    private void writeParameterPrior(Parameter parameter, XMLWriter writer) {
-        switch (parameter.priorType) {
-            case UNIFORM_PRIOR:
-                writer.writeOpenTag(PriorParsers.UNIFORM_PRIOR,
-                        new Attribute[]{
-                                new Attribute.Default<String>(PriorParsers.LOWER, "" + parameter.uniformLower),
-                                new Attribute.Default<String>(PriorParsers.UPPER, "" + parameter.uniformUpper)
-                        });
-                writeParameterIdref(writer, parameter);
-                writer.writeCloseTag(PriorParsers.UNIFORM_PRIOR);
-                break;
-            case EXPONENTIAL_PRIOR:
-                writer.writeOpenTag(PriorParsers.EXPONENTIAL_PRIOR,
-                        new Attribute[]{
-                                new Attribute.Default<String>(PriorParsers.MEAN, "" + parameter.exponentialMean),
-                                new Attribute.Default<String>(PriorParsers.OFFSET, "" + parameter.exponentialOffset)
-                        });
-                writeParameterIdref(writer, parameter);
-                writer.writeCloseTag(PriorParsers.EXPONENTIAL_PRIOR);
-                break;
-            case NORMAL_PRIOR:
-                writer.writeOpenTag(PriorParsers.NORMAL_PRIOR,
-                        new Attribute[]{
-                                new Attribute.Default<String>(PriorParsers.MEAN, "" + parameter.normalMean),
-                                new Attribute.Default<String>(PriorParsers.STDEV, "" + parameter.normalStdev)
-                        });
-                writeParameterIdref(writer, parameter);
-                writer.writeCloseTag(PriorParsers.NORMAL_PRIOR);
-                break;
-            case LOGNORMAL_PRIOR:
-                writer.writeOpenTag(PriorParsers.LOG_NORMAL_PRIOR,
-                        new Attribute[]{
-                                new Attribute.Default<String>(PriorParsers.MEAN, "" + parameter.logNormalMean),
-                                new Attribute.Default<String>(PriorParsers.STDEV, "" + parameter.logNormalStdev),
-                                new Attribute.Default<String>(PriorParsers.OFFSET, "" + parameter.logNormalOffset),
-
-                                // this is to be implemented...
-                                new Attribute.Default<String>(PriorParsers.MEAN_IN_REAL_SPACE, "false")
-                        });
-                writeParameterIdref(writer, parameter);
-                writer.writeCloseTag(PriorParsers.LOG_NORMAL_PRIOR);
-                break;
-            case GAMMA_PRIOR:
-                writer.writeOpenTag(PriorParsers.GAMMA_PRIOR,
-                        new Attribute[]{
-                                new Attribute.Default<String>(PriorParsers.SHAPE, "" + parameter.gammaAlpha),
-                                new Attribute.Default<String>(PriorParsers.SCALE, "" + parameter.gammaBeta),
-                                new Attribute.Default<String>(PriorParsers.OFFSET, "" + parameter.gammaOffset)
-                        });
-                writeParameterIdref(writer, parameter);
-                writer.writeCloseTag(PriorParsers.GAMMA_PRIOR);
-                break;
-            case JEFFREYS_PRIOR:
-                writer.writeOpenTag(OneOnXPriorParser.ONE_ONE_X_PRIOR);
-                writeParameterIdref(writer, parameter);
-                writer.writeCloseTag(OneOnXPriorParser.ONE_ONE_X_PRIOR);
-                break;
-            case POISSON_PRIOR:
-                writer.writeOpenTag(PriorParsers.POISSON_PRIOR,
-                        new Attribute[]{
-                                new Attribute.Default<String>(PriorParsers.MEAN, "" + parameter.poissonMean),
-                                new Attribute.Default<String>(PriorParsers.OFFSET, "" + parameter.poissonOffset)
-                        });
-                writeParameterIdref(writer, parameter);
-                writer.writeCloseTag(PriorParsers.POISSON_PRIOR);
-                break;
-            case TRUNC_NORMAL_PRIOR:
-                writer.writeOpenTag(PriorParsers.UNIFORM_PRIOR,
-                        new Attribute[]{
-                                new Attribute.Default<String>(PriorParsers.LOWER, "" + parameter.uniformLower),
-                                new Attribute.Default<String>(PriorParsers.UPPER, "" + parameter.uniformUpper)
-                        });
-                writeParameterIdref(writer, parameter);
-                writer.writeCloseTag(PriorParsers.UNIFORM_PRIOR);
-                writer.writeOpenTag(PriorParsers.NORMAL_PRIOR,
-                        new Attribute[]{
-                                new Attribute.Default<String>(PriorParsers.MEAN, "" + parameter.normalMean),
-                                new Attribute.Default<String>(PriorParsers.STDEV, "" + parameter.normalStdev)
-                        });
-                writeParameterIdref(writer, parameter);
-                writer.writeCloseTag(PriorParsers.NORMAL_PRIOR);
-                break;
-            default:
-                throw new IllegalArgumentException("Unknown priorType");
-        }
-    }
-
-    private void writeParameterIdref(XMLWriter writer, Parameter parameter) {
-        if (parameter.isStatistic) {
-            writer.writeTag("statistic", new Attribute[]{new Attribute.Default<String>(XMLParser.IDREF, parameter.getName())}, true);
-        } else {
-            writer.writeTag(ParameterParser.PARAMETER, new Attribute[]{new Attribute.Default<String>(XMLParser.IDREF, parameter.getName())}, true);
-        }
-    }
-
-    private void writeSumStatisticColumn(XMLWriter writer, String name, String label) {
-        writer.writeOpenTag(ColumnsParser.COLUMN,
-                new Attribute[]{
-                        new Attribute.Default<String>(ColumnsParser.LABEL, label),
-                        new Attribute.Default<String>(ColumnsParser.DECIMAL_PLACES, "0"),
-                        new Attribute.Default<String>(ColumnsParser.WIDTH, "12")
-                }
-        );
-        writer.writeTag(SumStatisticParser.SUM_STATISTIC, new Attribute.Default<String>(XMLParser.IDREF, name), true);
-        writer.writeCloseTag(ColumnsParser.COLUMN);
-    }
-
-    /**
-     * Write the log
-     *
-     * @param writer the writer
-     */
-    private void writeScreenLog(XMLWriter writer) {
-        if (alignment != null) {
-            writer.writeOpenTag(ColumnsParser.COLUMN,
-                    new Attribute[]{
-                            new Attribute.Default<String>(ColumnsParser.LABEL, "Posterior"),
-                            new Attribute.Default<String>(ColumnsParser.DECIMAL_PLACES, "4"),
-                            new Attribute.Default<String>(ColumnsParser.WIDTH, "12")
-                    }
-            );
-            writer.writeTag(CompoundLikelihoodParser.POSTERIOR, new Attribute.Default<String>(XMLParser.IDREF, "posterior"), true);
-            writer.writeCloseTag(ColumnsParser.COLUMN);
-        }
-
-        writer.writeOpenTag(ColumnsParser.COLUMN,
-                new Attribute[]{
-                        new Attribute.Default<String>(ColumnsParser.LABEL, "Prior"),
-                        new Attribute.Default<String>(ColumnsParser.DECIMAL_PLACES, "4"),
-                        new Attribute.Default<String>(ColumnsParser.WIDTH, "12")
-                }
-        );
-        writer.writeTag(CompoundLikelihoodParser.PRIOR, new Attribute.Default<String>(XMLParser.IDREF, "prior"), true);
-        writer.writeCloseTag(ColumnsParser.COLUMN);
-
-        if (alignment != null) {
-            writer.writeOpenTag(ColumnsParser.COLUMN,
-                    new Attribute[]{
-                            new Attribute.Default<String>(ColumnsParser.LABEL, "Likelihood"),
-                            new Attribute.Default<String>(ColumnsParser.DECIMAL_PLACES, "4"),
-                            new Attribute.Default<String>(ColumnsParser.WIDTH, "12")
-                    }
-            );
-            writer.writeTag(CompoundLikelihoodParser.LIKELIHOOD, new Attribute.Default<String>(XMLParser.IDREF, "likelihood"), true);
-            writer.writeCloseTag(ColumnsParser.COLUMN);
-        }
-
-        writer.writeOpenTag(ColumnsParser.COLUMN,
-                new Attribute[]{
-                        new Attribute.Default<String>(ColumnsParser.LABEL, "Root Height"),
-                        new Attribute.Default<String>(ColumnsParser.SIGNIFICANT_FIGURES, "6"),
-                        new Attribute.Default<String>(ColumnsParser.WIDTH, "12")
-                }
-        );
-        writer.writeTag(ParameterParser.PARAMETER, new Attribute.Default<String>(XMLParser.IDREF, "treeModel.rootHeight"), true);
-        writer.writeCloseTag(ColumnsParser.COLUMN);
-
-        writer.writeOpenTag(ColumnsParser.COLUMN,
-                new Attribute[]{
-                        new Attribute.Default<String>(ColumnsParser.LABEL, "Rate"),
-                        new Attribute.Default<String>(ColumnsParser.SIGNIFICANT_FIGURES, "6"),
-                        new Attribute.Default<String>(ColumnsParser.WIDTH, "12")
-                }
-        );
-        if (clockModel == STRICT_CLOCK) {
-            writer.writeTag(ParameterParser.PARAMETER, new Attribute.Default<String>(XMLParser.IDREF, "clock.rate"), true);
-        } else {
-            writer.writeTag(RateStatisticParser.RATE_STATISTIC, new Attribute.Default<String>(XMLParser.IDREF, "meanRate"), true);
-        }
-        writer.writeCloseTag(ColumnsParser.COLUMN);
-
-        if (clockModel == RANDOM_LOCAL_CLOCK) {
-            writeSumStatisticColumn(writer, "rateChanges", "Rate Changes");
-        }
-
-        // I think this is too much info for the screen - it is all in the log file.
-//		if (alignment != null) {
-//			boolean nucs = alignment.getDataType() == Nucleotides.INSTANCE;
-//			if (nucs && codonHeteroPattern != null) {
-//				if (codonHeteroPattern.equals("112")) {
-//					writer.writeOpenTag(ColumnsParser.COLUMN,
-//							new Attribute[] {
-//									new Attribute.Default<String>(ColumnsParser.LABEL, "L(codon pos 1+2)"),
-//									new Attribute.Default<String>(ColumnsParser.DECIMAL_PLACES, "4"),
-//									new Attribute.Default<String>(ColumnsParser.WIDTH, "12")
-//							}
-//					);
-//					writer.writeTag(OldTreeLikelihoodParser.TREE_LIKELIHOOD, new Attribute.Default<String>(XMLParser.IDREF,"treeLikelihood1"), true);
-//					writer.writeCloseTag(ColumnsParser.COLUMN);
-//					writer.writeOpenTag(ColumnsParser.COLUMN,
-//							new Attribute[] {
-//									new Attribute.Default<String>(ColumnsParser.LABEL, "L(codon pos 3)"),
-//									new Attribute.Default<String>(ColumnsParser.DECIMAL_PLACES, "4"),
-//									new Attribute.Default<String>(ColumnsParser.WIDTH, "12")
-//							}
-//					);
-//					writer.writeTag(OldTreeLikelihoodParser.TREE_LIKELIHOOD, new Attribute.Default<String>(XMLParser.IDREF,"treeLikelihood2"), true);
-//					writer.writeCloseTag(ColumnsParser.COLUMN);
-//				} else if (codonHeteroPattern.equals("123")) {
-//					for (int i =1; i <= 3; i++) {
-//						writer.writeOpenTag(ColumnsParser.COLUMN,
-//								new Attribute[] {
-//										new Attribute.Default<String>(ColumnsParser.LABEL, "L(codon pos "+i+")"),
-//										new Attribute.Default<String>(ColumnsParser.DECIMAL_PLACES, "4"),
-//										new Attribute.Default<String>(ColumnsParser.WIDTH, "12")
-//								}
-//						);
-//						writer.writeTag(OldTreeLikelihoodParser.TREE_LIKELIHOOD, new Attribute.Default<String>(XMLParser.IDREF,OldTreeLikelihoodParser.TREE_LIKELIHOOD + i), true);
-//						writer.writeCloseTag(ColumnsParser.COLUMN);
-//					}
-//				}
-//			} else {
-//				writer.writeOpenTag(ColumnsParser.COLUMN,
-//						new Attribute[] {
-//								new Attribute.Default<String>(ColumnsParser.LABEL, "L(tree)"),
-//								new Attribute.Default<String>(ColumnsParser.DECIMAL_PLACES, "4"),
-//								new Attribute.Default<String>(ColumnsParser.WIDTH, "12")
-//						}
-//				);
-//				writer.writeTag(OldTreeLikelihoodParser.TREE_LIKELIHOOD, new Attribute.Default<String>(XMLParser.IDREF,OldTreeLikelihoodParser.TREE_LIKELIHOOD), true);
-//				writer.writeCloseTag(ColumnsParser.COLUMN);
-//			}
-//		}
-//		if (nodeHeightPrior == YULE || nodeHeightPrior == BIRTH_DEATH) {
-//			writer.writeOpenTag(ColumnsParser.COLUMN,
-//					new Attribute[] {
-//							new Attribute.Default<String>(ColumnsParser.LABEL, "L(speciation)"),
-//							new Attribute.Default<String>(ColumnsParser.DECIMAL_PLACES, "4"),
-//							new Attribute.Default<String>(ColumnsParser.WIDTH, "12")
-//					}
-//			);
-//			writer.writeTag(SpeciationLikelihoodParser.SPECIATION_LIKELIHOOD, new Attribute.Default<String>(XMLParser.IDREF, "speciation"), true);
-//		} else {
-//			writer.writeOpenTag(ColumnsParser.COLUMN,
-//					new Attribute[] {
-//							new Attribute.Default<String>(ColumnsParser.LABEL, "L(coalecent)"),
-//							new Attribute.Default<String>(ColumnsParser.DECIMAL_PLACES, "4"),
-//							new Attribute.Default<String>(ColumnsParser.WIDTH, "12")
-//					}
-//			);
-//			if (nodeHeightPrior == SKYLINE) {
-//				writer.writeTag(BayesianSkylineLikelihoodParser.SKYLINE_LIKELIHOOD, new Attribute.Default<String>(XMLParser.IDREF, "skyline"), true);
-//			} else {
-//				writer.writeTag(CoalescentLikelihoodParser.COALESCENT_LIKELIHOOD, new Attribute.Default<String>(XMLParser.IDREF,"coalescent"), true);
-//			}
-//		}
-//		writer.writeCloseTag(ColumnsParser.COLUMN);
-
-    }
-
-    /**
-     * Write the log
-     *
-     * @param writer the writer
-     */
-    private void writeLog(XMLWriter writer) {
-        if (alignment != null) {
-            writer.writeTag(CompoundLikelihoodParser.POSTERIOR, new Attribute.Default<String>(XMLParser.IDREF, "posterior"), true);
-        }
-        writer.writeTag(CompoundLikelihoodParser.PRIOR, new Attribute.Default<String>(XMLParser.IDREF, "prior"), true);
-        if (alignment != null) {
-            writer.writeTag(CompoundLikelihoodParser.LIKELIHOOD, new Attribute.Default<String>(XMLParser.IDREF, "likelihood"), true);
-        }
-
-        // As of v1.4.2, always write the rate parameter even if fixed...
-        //if (!fixedSubstitutionRate) {
-        if (clockModel == STRICT_CLOCK) {
-            writer.writeTag(ParameterParser.PARAMETER, new Attribute.Default<String>(XMLParser.IDREF, "clock.rate"), true);
-        } else {
-            writer.writeTag(RateStatisticParser.RATE_STATISTIC, new Attribute.Default<String>(XMLParser.IDREF, "meanRate"), true);
-        }
-        //}
-
-        writer.writeTag(ParameterParser.PARAMETER, new Attribute.Default<String>(XMLParser.IDREF, "treeModel.rootHeight"), true);
-
-        for (Taxa taxa : taxonSets) {
-            writer.writeTag("tmrcaStatistic", new Attribute[]{new Attribute.Default<String>(XMLParser.IDREF, "tmrca(" + taxa.getId() + ")")}, true);
-        }
-
-        if (nodeHeightPrior == CONSTANT) {
-            writer.writeTag(ParameterParser.PARAMETER, new Attribute.Default<String>(XMLParser.IDREF, "constant.popSize"), true);
-        } else if (nodeHeightPrior == EXPONENTIAL) {
-            writer.writeTag(ParameterParser.PARAMETER, new Attribute.Default<String>(XMLParser.IDREF, "exponential.popSize"), true);
-            if (parameterization == GROWTH_RATE) {
-                writer.writeTag(ParameterParser.PARAMETER, new Attribute.Default<String>(XMLParser.IDREF, "exponential.growthRate"), true);
-            } else {
-                writer.writeTag(ParameterParser.PARAMETER, new Attribute.Default<String>(XMLParser.IDREF, "exponential.doublingTime"), true);
-            }
-        } else if (nodeHeightPrior == LOGISTIC) {
-            writer.writeTag(ParameterParser.PARAMETER, new Attribute.Default<String>(XMLParser.IDREF, "logistic.popSize"), true);
-            if (parameterization == GROWTH_RATE) {
-                writer.writeTag(ParameterParser.PARAMETER, new Attribute.Default<String>(XMLParser.IDREF, "logistic.growthRate"), true);
-            } else {
-                writer.writeTag(ParameterParser.PARAMETER, new Attribute.Default<String>(XMLParser.IDREF, "logistic.doublingTime"), true);
-            }
-            writer.writeTag(ParameterParser.PARAMETER, new Attribute.Default<String>(XMLParser.IDREF, "logistic.t50"), true);
-        } else if (nodeHeightPrior == EXPANSION) {
-            writer.writeTag(ParameterParser.PARAMETER, new Attribute.Default<String>(XMLParser.IDREF, "expansion.popSize"), true);
-            if (parameterization == GROWTH_RATE) {
-                writer.writeTag(ParameterParser.PARAMETER, new Attribute.Default<String>(XMLParser.IDREF, "expansion.growthRate"), true);
-            } else {
-                writer.writeTag(ParameterParser.PARAMETER, new Attribute.Default<String>(XMLParser.IDREF, "expansion.doublingTime"), true);
-            }
-            writer.writeTag(ParameterParser.PARAMETER, new Attribute.Default<String>(XMLParser.IDREF, "expansion.ancestralProportion"), true);
-        } else if (nodeHeightPrior == SKYLINE) {
-            writer.writeTag(ParameterParser.PARAMETER, new Attribute.Default<String>(XMLParser.IDREF, "skyline.popSize"), true);
-            writer.writeTag(ParameterParser.PARAMETER, new Attribute.Default<String>(XMLParser.IDREF, "skyline.groupSize"), true);
-        } else if (nodeHeightPrior == EXTENDED_SKYLINE) {
-            writeSumStatisticColumn(writer, "demographic.populationSizeChanges", "popSize_changes");
-            writer.writeTag(ParameterParser.PARAMETER, new Attribute.Default<String>(XMLParser.IDREF, "demographic.populationMean"), true);
-            writer.writeTag(ParameterParser.PARAMETER, new Attribute.Default<String>(XMLParser.IDREF, "demographic.popSize"), true);
-            writer.writeTag(ParameterParser.PARAMETER, new Attribute.Default<String>(XMLParser.IDREF, "demographic.indicators"), true);
-        } else if (nodeHeightPrior == YULE) {
-            writer.writeTag(ParameterParser.PARAMETER, new Attribute.Default<String>(XMLParser.IDREF, "yule.birthRate"), true);
-        } else if (nodeHeightPrior == BIRTH_DEATH) {
-            writer.writeTag(ParameterParser.PARAMETER, new Attribute.Default<String>(XMLParser.IDREF, BirthDeathModelParser.MEAN_GROWTH_RATE_PARAM_NAME), true);
-            writer.writeTag(ParameterParser.PARAMETER, new Attribute.Default<String>(XMLParser.IDREF, BirthDeathModelParser.RELATIVE_DEATH_RATE_PARAM_NAME), true);
-        }
-
-        if (alignment != null) {
-            switch (dataType) {
-                case DataType.NUCLEOTIDES:
-                    if (partitionCount > 1) {
-                        for (int i = 1; i <= partitionCount; i++) {
-                            writer.writeTag(ParameterParser.PARAMETER,
-                                    new Attribute.Default<String>(XMLParser.IDREF, SiteModel.SITE_MODEL + i + ".mu"), true);
-                        }
-                    }
-                    switch (nucSubstitutionModel) {
-                        case HKY:
-                            if (partitionCount > 1 && unlinkedSubstitutionModel) {
-                                for (int i = 1; i <= partitionCount; i++) {
-                                    writer.writeTag(ParameterParser.PARAMETER, new Attribute.Default<String>(XMLParser.IDREF, "hky" + i + ".kappa"), true);
-                                }
-                            } else {
-                                writer.writeTag(ParameterParser.PARAMETER, new Attribute.Default<String>(XMLParser.IDREF, "hky.kappa"), true);
-                            }
-                            break;
-
-                        case GTR:
-                            if (partitionCount > 1 && unlinkedSubstitutionModel) {
-                                for (int i = 1; i <= partitionCount; i++) {
-                                    writer.writeTag(ParameterParser.PARAMETER, new Attribute.Default<String>(XMLParser.IDREF, "gtr" + i + ".ac"), true);
-                                    writer.writeTag(ParameterParser.PARAMETER, new Attribute.Default<String>(XMLParser.IDREF, "gtr" + i + ".ag"), true);
-                                    writer.writeTag(ParameterParser.PARAMETER, new Attribute.Default<String>(XMLParser.IDREF, "gtr" + i + ".at"), true);
-                                    writer.writeTag(ParameterParser.PARAMETER, new Attribute.Default<String>(XMLParser.IDREF, "gtr" + i + ".cg"), true);
-                                    writer.writeTag(ParameterParser.PARAMETER, new Attribute.Default<String>(XMLParser.IDREF, "gtr" + i + ".gt"), true);
-                                }
-                            } else {
-                                writer.writeTag(ParameterParser.PARAMETER, new Attribute.Default<String>(XMLParser.IDREF, "gtr.ac"), true);
-                                writer.writeTag(ParameterParser.PARAMETER, new Attribute.Default<String>(XMLParser.IDREF, "gtr.ag"), true);
-                                writer.writeTag(ParameterParser.PARAMETER, new Attribute.Default<String>(XMLParser.IDREF, "gtr.at"), true);
-                                writer.writeTag(ParameterParser.PARAMETER, new Attribute.Default<String>(XMLParser.IDREF, "gtr.cg"), true);
-                                writer.writeTag(ParameterParser.PARAMETER, new Attribute.Default<String>(XMLParser.IDREF, "gtr.gt"), true);
-                            }
-                            break;
-                    }
-                    break;//NUCLEOTIDES
-
-                case DataType.AMINO_ACIDS:
-                    break;//AMINO_ACIDS
-
-                case DataType.TWO_STATES:
-                case DataType.COVARION:
-
-                    switch (binarySubstitutionModel) {
-                        case BIN_SIMPLE:
-                            break;
-                        case BIN_COVARION:
-                            writer.writeTag(ParameterParser.PARAMETER, new Attribute.Default<String>(XMLParser.IDREF, "bcov.alpha"), true);
-                            writer.writeTag(ParameterParser.PARAMETER, new Attribute.Default<String>(XMLParser.IDREF, "bcov.s"), true);
-                            writer.writeTag(ParameterParser.PARAMETER, new Attribute.Default<String>(XMLParser.IDREF, "bcov.frequencies"), true);
-                            writer.writeTag(ParameterParser.PARAMETER, new Attribute.Default<String>(XMLParser.IDREF, "bcov.hfrequencies"), true);
-                            break;
-
-                    }
-                    break;//BINARY
-            }
-
-            if (gammaHetero) {
-                if (partitionCount > 1 && unlinkedHeterogeneityModel) {
-                    for (int i = 1; i <= partitionCount; i++) {
-                        writer.writeTag(ParameterParser.PARAMETER,
-                                new Attribute.Default<String>(XMLParser.IDREF, SiteModel.SITE_MODEL + i + ".alpha"), true);
-                    }
-                } else {
-                    writer.writeTag(ParameterParser.PARAMETER, new Attribute.Default<String>(XMLParser.IDREF, SiteModel.SITE_MODEL + "." + "alpha"), true);
-                }
-            }
-
-            if (invarHetero) {
-                if (partitionCount > 1 && unlinkedHeterogeneityModel) {
-                    for (int i = 1; i <= partitionCount; i++) {
-                        writer.writeTag(ParameterParser.PARAMETER,
-                                new Attribute.Default<String>(XMLParser.IDREF, SiteModel.SITE_MODEL + i + ".pInv"), true);
-                    }
-                } else {
-                    writer.writeTag(ParameterParser.PARAMETER, new Attribute.Default<String>(XMLParser.IDREF, SiteModel.SITE_MODEL + "." + "pInv"), true);
-                }
-            }
-        }
-
-        if (clockModel != STRICT_CLOCK) {
-//			if (!fixedSubstitutionRate) {
-            if (clockModel == UNCORRELATED_EXPONENTIAL) {
-                writer.writeTag(ParameterParser.PARAMETER, new Attribute.Default<String>(XMLParser.IDREF, UCED_MEAN), true);
-            } else if (clockModel == UNCORRELATED_LOGNORMAL) {
-                writer.writeTag(ParameterParser.PARAMETER, new Attribute.Default<String>(XMLParser.IDREF, UCLD_MEAN), true);
-                writer.writeTag(ParameterParser.PARAMETER, new Attribute.Default<String>(XMLParser.IDREF, UCLD_STDEV), true);
-            }
-//			}
-            writer.writeTag(RateStatisticParser.RATE_STATISTIC, new Attribute.Default<String>(XMLParser.IDREF, RateStatisticParser.COEFFICIENT_OF_VARIATION), true);
-            writer.writeTag(RateCovarianceStatisticParser.RATE_COVARIANCE_STATISTIC, new Attribute.Default<String>(XMLParser.IDREF, "covariance"), true);
-
-            if (clockModel == RANDOM_LOCAL_CLOCK) {
-                writer.writeTag(SumStatisticParser.SUM_STATISTIC, new Attribute.Default<String>(XMLParser.IDREF, "rateChanges"), true);
-            }
-        }
-
-        if (alignment != null) {
-            boolean nucs = alignment.getDataType() == Nucleotides.INSTANCE;
-            if (nucs && partitionCount > 1) {
-                for (int i = 1; i <= partitionCount; i++) {
-                    writer.writeTag(TreeLikelihoodParser.TREE_LIKELIHOOD, new Attribute.Default<String>(XMLParser.IDREF, TreeLikelihoodParser.TREE_LIKELIHOOD + i), true);
-                }
-            } else
-                writer.writeTag(TreeLikelihoodParser.TREE_LIKELIHOOD, new Attribute.Default<String>(XMLParser.IDREF, TreeLikelihoodParser.TREE_LIKELIHOOD), true);
-        }
-        if (nodeHeightPrior == YULE || nodeHeightPrior == BIRTH_DEATH) {
-            writer.writeTag(SpeciationLikelihoodParser.SPECIATION_LIKELIHOOD, new Attribute.Default<String>(XMLParser.IDREF, "speciation"), true);
-        } else if (nodeHeightPrior == SKYLINE) {
-            writer.writeTag(BayesianSkylineLikelihoodParser.SKYLINE_LIKELIHOOD, new Attribute.Default<String>(XMLParser.IDREF, "skyline"), true);
-        } else {
-            writer.writeTag(CoalescentLikelihoodParser.COALESCENT_LIKELIHOOD, new Attribute.Default<String>(XMLParser.IDREF, "coalescent"), true);
-        }
-
-
-    }
-
-    /**
-     * fix a parameter
-     *
-     * @param id    the id
-     * @param value the value
-     */
-    public void fixParameter(String id, double value) {
-        Parameter parameter = parameters.get(id);
-        if (parameter == null) {
-            throw new IllegalArgumentException("parameter with name, " + id + ", is unknown");
-        }
-        parameter.isFixed = true;
-        parameter.initial = value;
-    }
-
-
-    private String multiDimensionValue(int dimension, double value) {
-        String multi = "";
-
-        multi += value + "";
-        for (int i = 2; i <= dimension; i++)
-            multi += " " + value;
-
-        return multi;
-    }
-
-    /**
-     * write a parameter
-     *
-     * @param id     the id
-     * @param writer the writer
-     */
-    public void writeParameter(String id, XMLWriter writer) {
-        Parameter parameter = parameters.get(id);
-        if (parameter == null) {
-            throw new IllegalArgumentException("parameter with name, " + id + ", is unknown");
-        }
-        if (parameter.isFixed) {
-            writeParameter(id, 1, parameter.initial, Double.NaN, Double.NaN, writer);
-        } else {
-            if (parameter.priorType == PriorType.UNIFORM_PRIOR || parameter.priorType == PriorType.TRUNC_NORMAL_PRIOR) {
-                writeParameter(id, 1, parameter.initial, parameter.uniformLower, parameter.uniformUpper, writer);
-            } else {
-                writeParameter(id, 1, parameter.initial, parameter.lower, parameter.upper, writer);
-            }
-        }
-    }
-
-    /**
-     * write a parameter
-     *
-     * @param id        the id
-     * @param dimension the dimension
-     * @param writer    the writer
-     */
-    public void writeParameter(String id, int dimension, XMLWriter writer) {
-        Parameter parameter = parameters.get(id);
-        if (parameter == null) {
-            throw new IllegalArgumentException("parameter with name, " + id + ", is unknown");
-        }
-        if (parameter.isFixed) {
-            writeParameter(id, dimension, parameter.initial, Double.NaN, Double.NaN, writer);
-        } else if (parameter.priorType == PriorType.UNIFORM_PRIOR || parameter.priorType == PriorType.TRUNC_NORMAL_PRIOR) {
-            writeParameter(id, dimension, parameter.initial, parameter.uniformLower, parameter.uniformUpper, writer);
-        } else {
-            writeParameter(id, dimension, parameter.initial, parameter.lower, parameter.upper, writer);
-        }
-    }
-
-    /**
-     * write a parameter
-     *
-     * @param id        the id
-     * @param dimension the dimension
-     * @param value     the value
-     * @param lower     the lower bound
-     * @param upper     the upper bound
-     * @param writer    the writer
-     */
-    public void writeParameter(String id, int dimension, double value, double lower, double upper, XMLWriter writer) {
-        ArrayList<Attribute.Default> attributes = new ArrayList<Attribute.Default>();
-        attributes.add(new Attribute.Default<String>(XMLParser.ID, id));
-        if (dimension > 1) {
-            attributes.add(new Attribute.Default<String>("dimension", dimension + ""));
-        }
-        if (!Double.isNaN(value)) {
-            attributes.add(new Attribute.Default<String>("value", multiDimensionValue(dimension, value)));
-        }
-        if (!Double.isNaN(lower)) {
-            attributes.add(new Attribute.Default<String>("lower", multiDimensionValue(dimension, lower)));
-        }
-        if (!Double.isNaN(upper)) {
-            attributes.add(new Attribute.Default<String>("upper", multiDimensionValue(dimension, upper)));
-        }
-
-        Attribute[] attrArray = new Attribute[attributes.size()];
-        for (int i = 0; i < attrArray.length; i++) {
-            attrArray[i] = attributes.get(i);
-        }
-
-        writer.writeTag(ParameterParser.PARAMETER, attrArray, true);
-    }
-
-    /**
-     * Generate XML for the starting tree
-     *
-     * @param writer the writer
-     */
-    public void writeStartingTree(XMLWriter writer) {
-        if (userTree) {
-            writeUserTree(tree, writer);
-        } else if (upgmaStartingTree) {
-            // generate a upgma starting tree
-            writer.writeComment("Construct a rough-and-ready UPGMA tree as an starting tree");
-            Parameter rootHeight = getParameter("treeModel.rootHeight");
-            if (rootHeight.priorType != PriorType.NONE) {
-                writer.writeOpenTag(
-                        UPGMATreeParser.UPGMA_TREE,
-                        new Attribute[]{
-                                new Attribute.Default<String>(XMLParser.ID, InitialTreeGenerator.STARTING_TREE),
-                                new Attribute.Default<String>(UPGMATreeParser.ROOT_HEIGHT, "" + rootHeight.initial)
-                        }
-                );
-            } else {
-                writer.writeOpenTag(
-                        UPGMATreeParser.UPGMA_TREE,
-                        new Attribute[]{
-                                new Attribute.Default<String>(XMLParser.ID, InitialTreeGenerator.STARTING_TREE)
-                        }
-                );
-            }
-            writer.writeOpenTag(
-                    DistanceMatrixParser.DISTANCE_MATRIX,
-                    new Attribute[]{
-                            new Attribute.Default<String>(DistanceMatrixParser.CORRECTION, "JC")
-                    }
-            );
-            writer.writeOpenTag(SitePatternsParser.PATTERNS);
-            writer.writeTag(AlignmentParser.ALIGNMENT, new Attribute[]{new Attribute.Default<String>(XMLParser.IDREF, AlignmentParser.ALIGNMENT)}, true);
-            writer.writeCloseTag(SitePatternsParser.PATTERNS);
-            writer.writeCloseTag(DistanceMatrixParser.DISTANCE_MATRIX);
-            writer.writeCloseTag(UPGMATreeParser.UPGMA_TREE);
-        } else {
-            // generate a coalescent tree
-            writer.writeComment("Generate a random starting tree under the coalescent process");
-            Parameter rootHeight = getParameter("treeModel.rootHeight");
-            if (rootHeight.priorType != PriorType.NONE) {
-                writer.writeOpenTag(
-                        OldCoalescentSimulatorParser.COALESCENT_TREE,
-                        new Attribute[]{
-                                new Attribute.Default<String>(XMLParser.ID, InitialTreeGenerator.STARTING_TREE),
-                                new Attribute.Default<String>(TreeModelParser.ROOT_HEIGHT, "" + rootHeight.initial)
-                        }
-                );
-            } else {
-                writer.writeOpenTag(
-                        OldCoalescentSimulatorParser.COALESCENT_TREE,
-                        new Attribute[]{
-                                new Attribute.Default<String>(XMLParser.ID, InitialTreeGenerator.STARTING_TREE)
-                        }
-                );
-            }
-
-            Attribute[] taxaAttribute = {new Attribute.Default<String>(XMLParser.IDREF, TaxaParser.TAXA)};
-            if (taxonSets.size() > 0) {
-                writer.writeOpenTag(OldCoalescentSimulatorParser.CONSTRAINED_TAXA);
-                writer.writeTag(TaxaParser.TAXA, taxaAttribute, true);
-                for (Taxa taxonSet : taxonSets) {
-                    Parameter statistic = statistics.get(taxonSet);
-
-                    Attribute mono = new Attribute.Default<Boolean>(OldCoalescentSimulatorParser.IS_MONOPHYLETIC, taxonSetsMono.get(taxonSet));
-
-                    writer.writeOpenTag(OldCoalescentSimulatorParser.TMRCA_CONSTRAINT, mono);
-
-                    writer.writeTag(TaxaParser.TAXA,
-                            new Attribute[]{new Attribute.Default<String>(XMLParser.IDREF, taxonSet.getId())}, true);
-                    if (statistic.isNodeHeight) {
-                        if (statistic.priorType == PriorType.UNIFORM_PRIOR || statistic.priorType == PriorType.TRUNC_NORMAL_PRIOR) {
-                            writer.writeOpenTag(UniformDistributionModelParser.UNIFORM_DISTRIBUTION_MODEL);
-                            writer.writeTag(UniformDistributionModelParser.LOWER, new Attribute[]{}, "" + statistic.uniformLower, true);
-                            writer.writeTag(UniformDistributionModelParser.UPPER, new Attribute[]{}, "" + statistic.uniformUpper, true);
-                            writer.writeCloseTag(UniformDistributionModelParser.UNIFORM_DISTRIBUTION_MODEL);
-                        }
-                    }
-
-                    writer.writeCloseTag(OldCoalescentSimulatorParser.TMRCA_CONSTRAINT);
-                }
-                writer.writeCloseTag(OldCoalescentSimulatorParser.CONSTRAINED_TAXA);
-            } else {
-                writer.writeTag(TaxaParser.TAXA, taxaAttribute, true);
-            }
-
-            writeInitialDemoModelRef(writer);
-            writer.writeCloseTag(OldCoalescentSimulatorParser.COALESCENT_TREE);
-        }
-    }
-
-    public void writeInitialDemoModelRef(XMLWriter writer) {
-        if (nodeHeightPrior == CONSTANT) {
-            writer.writeTag(ConstantPopulationModelParser.CONSTANT_POPULATION_MODEL, new Attribute[]{new Attribute.Default<String>(XMLParser.IDREF, "constant")}, true);
-        } else if (nodeHeightPrior == EXPONENTIAL) {
-            writer.writeTag(ExponentialGrowthModelParser.EXPONENTIAL_GROWTH_MODEL, new Attribute[]{new Attribute.Default<String>(XMLParser.IDREF, "exponential")}, true);
-        } else {
-            writer.writeTag(ConstantPopulationModelParser.CONSTANT_POPULATION_MODEL, new Attribute[]{new Attribute.Default<String>(XMLParser.IDREF, "initialDemo")}, true);
-        }
-    }
-
-    public void writeNodeHeightPriorModelRef(XMLWriter writer) {
-        if (nodeHeightPrior == CONSTANT) {
-            writer.writeTag(ConstantPopulationModelParser.CONSTANT_POPULATION_MODEL, new Attribute[]{new Attribute.Default<String>(XMLParser.IDREF, "constant")}, true);
-        } else if (nodeHeightPrior == EXPONENTIAL) {
-            writer.writeTag(ExponentialGrowthModelParser.EXPONENTIAL_GROWTH_MODEL, new Attribute[]{new Attribute.Default<String>(XMLParser.IDREF, "exponential")}, true);
-        } else if (nodeHeightPrior == LOGISTIC) {
-            writer.writeTag(LogisticGrowthModelParser.LOGISTIC_GROWTH_MODEL, new Attribute[]{new Attribute.Default<String>(XMLParser.IDREF, "logistic")}, true);
-        } else if (nodeHeightPrior == EXPANSION) {
-            writer.writeTag(ExpansionModelParser.EXPANSION_MODEL, new Attribute[]{new Attribute.Default<String>(XMLParser.IDREF, "expansion")}, true);
-        } else if (nodeHeightPrior == SKYLINE) {
-            writer.writeTag(BayesianSkylineLikelihoodParser.SKYLINE_LIKELIHOOD, new Attribute[]{new Attribute.Default<String>(XMLParser.IDREF, "skyline")}, true);
-        } else if (nodeHeightPrior == YULE) {
-            writer.writeTag(YuleModelParser.YULE_MODEL, new Attribute[]{new Attribute.Default<String>(XMLParser.IDREF, "yule")}, true);
-        } else if (nodeHeightPrior == BIRTH_DEATH) {
-            writer.writeTag(BirthDeathGernhard08Model.BIRTH_DEATH_MODEL, new Attribute[]{new Attribute.Default<String>(XMLParser.IDREF, "birthDeath")}, true);
-        } else {
-            throw new RuntimeException("No coalescent model has been specified so cannot refer to it");
-        }
-    }
-
-    /**
-     * Generate XML for the user tree
-     *
-     * @param tree   the user tree
-     * @param writer the writer
-     */
-    private void writeUserTree(Tree tree, XMLWriter writer) {
-
-        writer.writeComment("The starting tree.");
-        writer.writeOpenTag(
-                "tree",
-                new Attribute[]{
-                        new Attribute.Default<String>("height", InitialTreeGenerator.STARTING_TREE),
-                        new Attribute.Default<String>("usingDates", (maximumTipHeight > 0 ? "true" : "false"))
-                }
-        );
-        writeNode(tree, tree.getRoot(), writer);
-        writer.writeCloseTag("tree");
-    }
-
-    /**
-     * Generate XML for the node of a user tree.
-     *
-     * @param tree   the user tree
-     * @param node   the current node
-     * @param writer the writer
-     */
-    private void writeNode(Tree tree, NodeRef node, XMLWriter writer) {
-
-        writer.writeOpenTag(
-                "node",
-                new Attribute[]{new Attribute.Default<String>("height", "" + tree.getNodeHeight(node))}
-        );
-
-        if (tree.getChildCount(node) == 0) {
-            writer.writeTag(TaxonParser.TAXON, new Attribute[]{new Attribute.Default<String>(XMLParser.IDREF, tree.getNodeTaxon(node).getId())}, true);
-        }
-        for (int i = 0; i < tree.getChildCount(node); i++) {
-            writeNode(tree, tree.getChild(node, i), writer);
-        }
-        writer.writeCloseTag("node");
-    }
-}
-
diff --git a/src/dr/app/oldbeauti/BeautiApp.java b/src/dr/app/oldbeauti/BeautiApp.java
deleted file mode 100644
index b411764..0000000
--- a/src/dr/app/oldbeauti/BeautiApp.java
+++ /dev/null
@@ -1,136 +0,0 @@
-/*
- * BeautiApp.java
- *
- * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
- *
- * This file is part of BEAST.
- * See the NOTICE file distributed with this work for additional
- * information regarding copyright ownership and licensing.
- *
- * BEAST is free software; you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as
- * published by the Free Software Foundation; either version 2
- * of the License, or (at your option) any later version.
- *
- *  BEAST is distributed in the hope that it will be useful,
- *  but WITHOUT ANY WARRANTY; without even the implied warranty of
- *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
- *  GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public
- * License along with BEAST; if not, write to the
- * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
- * Boston, MA  02110-1301  USA
- */
-
-package dr.app.oldbeauti;
-
-import dr.app.beast.BeastVersion;
-import dr.app.util.OSType;
-import dr.util.Version;
-import jam.framework.*;
-
-import javax.swing.*;
-import java.awt.*;
-
-/**
- * @author Andrew Rambaut
- * @author Alexei Drummond
- * @version $Id: BeautiApp.java,v 1.18 2006/09/09 16:07:05 rambaut Exp $
- */
-public class BeautiApp extends MultiDocApplication {
-    private final static Version version = new BeastVersion();
-
-    public BeautiApp(String nameString, String aboutString, Icon icon,
-                     String websiteURLString, String helpURLString) {
-        super(new BeautiMenuBarFactory(), nameString, aboutString, icon, websiteURLString, helpURLString);
-    }
-
-    /**
-     * In a departure from the standard UI, there is no "Open" command for this application
-     * Instead, the user can create a New window, Import a NEXUS file and Apply a Template file.
-     * None of these operations result in a file being associated with the DocumentFrame. All
-     * these actions are located in the BeautiFrame class. This overriden method should never
-     * be called and throw a RuntimeException if it is.
-     *
-     * @return the action
-     */
-    public Action getOpenAction() {
-        throw new UnsupportedOperationException("getOpenAction is not supported");
-    }
-
-    // Main entry point
-    static public void main(String[] args) {
-
-
-        if (args.length > 1) {
-
-            if (args.length != 3) {
-                System.err.println("Usage: beauti <input_file> <template_file> <output_file>");
-                return;
-            }
-
-            String inputFileName = args[0];
-            String templateFileName = args[1];
-            String outputFileName = args[2];
-
-            new CommandLineBeauti(inputFileName, templateFileName, outputFileName);
-
-        } else {
-
-            if (args.length == 1 && args[0].equalsIgnoreCase("-developer")) {
-                developer = true;
-            }
-
-	        if (OSType.isMac()) {
-		        System.setProperty("apple.laf.useScreenMenuBar","true");
-		        System.setProperty("apple.awt.showGrowBox","true");
-		        System.setProperty("apple.awt.graphics.UseQuartz","true");
-		        UIManager.put("SystemFont", new Font("Lucida Grande", Font.PLAIN, 13));
-		        UIManager.put("SmallSystemFont", new Font("Lucida Grande", Font.PLAIN, 11));
-	        }
-
-            try {
-
-                UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName());
-
-                java.net.URL url = BeautiApp.class.getResource("images/beauti.png");
-                Icon icon = null;
-
-                if (url != null) {
-                    icon = new ImageIcon(url);
-                }
-
-                final String nameString = "Old BEAUti";
-                final String versionString = version.getVersionString();
-                String aboutString = "<html><div style=\"font-family:sans-serif;\"><center>" +
-                        "<div style=\"font-size:12;\"><p>(Old) Bayesian Evolutionary Analysis Utility<br>" +
-                        "Version " + versionString + ", " + version.getDateString() + "</p>" +
-                        "<p>by Andrew Rambaut and Alexei J. Drummond</p></div>" +
-                                "<hr><div style=\"font-size:10;\">Part of the BEAST package:" +
-                        version.getHTMLCredits() +
-                        "</div></center></div></html>";
-
-                String websiteURLString = "http://beast.bio.ed.ac.uk/";
-                String helpURLString = "http://beast.bio.ed.ac.uk/BEAUti/";
-
-                BeautiApp app = new BeautiApp(nameString, aboutString, icon,
-                        websiteURLString, helpURLString);
-                app.setDocumentFrameFactory(new DocumentFrameFactory() {
-                    public DocumentFrame createDocumentFrame(Application app, MenuBarFactory menuBarFactory) {
-                        return new BeautiFrame(nameString);
-                    }
-                });
-                app.initialize();
-                app.doNew();
-            } catch (Exception e) {
-                JOptionPane.showMessageDialog(new JFrame(), "Fatal exception: " + e,
-                        "Please report this to the authors",
-                        JOptionPane.ERROR_MESSAGE);
-                e.printStackTrace();
-            }
-        }
-    }
-
-    public static boolean developer = false;
-}
diff --git a/src/dr/app/oldbeauti/BeautiFrame.java b/src/dr/app/oldbeauti/BeautiFrame.java
deleted file mode 100644
index 97dc373..0000000
--- a/src/dr/app/oldbeauti/BeautiFrame.java
+++ /dev/null
@@ -1,656 +0,0 @@
-/*
- * BeautiFrame.java
- *
- * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
- *
- * This file is part of BEAST.
- * See the NOTICE file distributed with this work for additional
- * information regarding copyright ownership and licensing.
- *
- * BEAST is free software; you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as
- * published by the Free Software Foundation; either version 2
- * of the License, or (at your option) any later version.
- *
- *  BEAST is distributed in the hope that it will be useful,
- *  but WITHOUT ANY WARRANTY; without even the implied warranty of
- *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
- *  GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public
- * License along with BEAST; if not, write to the
- * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
- * Boston, MA  02110-1301  USA
- */
-
-/*
- * BeautiFrame.java
- *
- * (c) 2002-2005 BEAST Development Core Team
- *
- * This package may be distributed under the
- * Lesser Gnu Public Licence (LGPL)
- */
-package dr.app.oldbeauti;
-
-import dr.evolution.alignment.Patterns;
-import dr.evolution.alignment.SimpleAlignment;
-import dr.evolution.distance.DistanceMatrix;
-import dr.evolution.distance.JukesCantorDistanceMatrix;
-import dr.evolution.io.Importer;
-import dr.evolution.io.NexusImporter;
-import dr.evolution.tree.Tree;
-import dr.evolution.util.Units;
-import org.jdom.Document;
-import org.jdom.JDOMException;
-import org.jdom.input.SAXBuilder;
-import org.jdom.output.XMLOutputter;
-import jam.framework.DocumentFrame;
-import jam.framework.Exportable;
-import jam.util.IconUtils;
-
-import javax.swing.*;
-import javax.swing.event.ChangeEvent;
-import javax.swing.event.ChangeListener;
-import javax.swing.plaf.BorderUIResource;
-import java.awt.*;
-import java.awt.event.ActionEvent;
-import java.io.*;
-
-/**
- * @author			Andrew Rambaut
- * @author			Alexei Drummond
- * @version			$Id: BeautiFrame.java,v 1.22 2006/09/09 16:07:06 rambaut Exp $
- */
-public class BeautiFrame extends DocumentFrame {
-
-    /**
-     *
-     */
-    private static final long serialVersionUID = 2114148696789612509L;
-
-    private final BeastGenerator beautiOptions = new BeastGenerator();
-
-    private final JTabbedPane tabbedPane = new JTabbedPane();
-    private final JLabel statusLabel = new JLabel("No data loaded");
-
-    private DataPanel dataPanel;
-    private TaxaPanel taxaPanel;
-    private ModelPanel modelPanel;
-    private PriorsPanel priorsPanel;
-    private OperatorsPanel operatorsPanel;
-    private MCMCPanel mcmcPanel;
-
-//	final Icon dataIcon = new ImageIcon(Utils.getImage(this, "/images/data-icon.gif"));
-//	final Icon modelIcon = new ImageIcon(Utils.getImage(this, "/images/model-icon.gif"));
-//	final Icon mcmcIcon = new ImageIcon(Utils.getImage(this, "/images/mcmc-icon.gif"));
-
-    final Icon gearIcon = IconUtils.getIcon(this.getClass(), "images/gear.png");
-
-    public BeautiFrame(String title) {
-        super();
-
-        setTitle(title);
-
-        // Prevent the application to close in requestClose()
-        // after a user cancel or a failure in beast file generation
-        setDefaultCloseOperation(DO_NOTHING_ON_CLOSE);
-
-        getOpenAction().setEnabled(false);
-        getSaveAction().setEnabled(false);
-
-        getFindAction().setEnabled(false);
-
-        getZoomWindowAction().setEnabled(false);
-    }
-
-    public void initializeComponents() {
-
-        dataPanel = new DataPanel(this);
-        taxaPanel = new TaxaPanel(this);
-        modelPanel = new ModelPanel(this);
-        priorsPanel = new PriorsPanel(this);
-        operatorsPanel = new OperatorsPanel(this);
-        mcmcPanel = new MCMCPanel(this);
-
-        tabbedPane.addTab("Data", dataPanel);
-        tabbedPane.addTab("Taxa", taxaPanel);
-        tabbedPane.addTab("Model", modelPanel);
-        tabbedPane.addTab("Priors", priorsPanel);
-        tabbedPane.addTab("Operators", operatorsPanel);
-        tabbedPane.addTab("MCMC", mcmcPanel);
-        tabbedPane.addChangeListener(new ChangeListener() {
-            public void stateChanged(ChangeEvent e) {
-                if (tabbedPane.getSelectedComponent() == dataPanel) {
-                    dataPanel.selectionChanged();
-                } else {
-                    getDeleteAction().setEnabled(false);
-                }
-            }
-        });
-
-        JPanel panel = new JPanel(new BorderLayout(6, 6));
-        panel.setBorder(new BorderUIResource.EmptyBorderUIResource(new java.awt.Insets(12, 12, 12, 12)));
-        panel.add(tabbedPane, BorderLayout.CENTER);
-
-        getExportAction().setEnabled(false);
-        JButton generateButton = new JButton(getExportAction());
-	    generateButton.putClientProperty("JButton.buttonType", "roundRect");
-
-        JPanel panel2 = new JPanel(new BorderLayout(6, 6));
-        panel2.add(statusLabel, BorderLayout.CENTER);
-        panel2.add(generateButton, BorderLayout.EAST);
-
-        panel.add(panel2, BorderLayout.SOUTH);
-
-        getContentPane().setLayout(new java.awt.BorderLayout(0, 0));
-        getContentPane().add(panel, BorderLayout.CENTER);
-
-        dataPanel.setOptions(beautiOptions);
-        taxaPanel.setOptions(beautiOptions);
-        modelPanel.setOptions(beautiOptions);
-        priorsPanel.setOptions(beautiOptions);
-        operatorsPanel.setOptions(beautiOptions);
-        mcmcPanel.setOptions(beautiOptions);
-
-        setSize(new java.awt.Dimension(800, 600));
-    }
-
-    public final void dataChanged() {
-        taxaPanel.setOptions(beautiOptions);
-        modelPanel.setOptions(beautiOptions);
-        priorsPanel.setOptions(beautiOptions);
-        operatorsPanel.setOptions(beautiOptions);
-        setDirty();
-    }
-
-    public final void dataSelectionChanged(boolean isSelected) {
-        if (isSelected) {
-            getDeleteAction().setEnabled(true);
-        } else {
-            getDeleteAction().setEnabled(false);
-        }
-    }
-
-    public void taxonSetsChanged() {
-        priorsPanel.setOptions(beautiOptions);
-        setDirty();
-    }
-
-
-    public void doDelete() {
-        if (tabbedPane.getSelectedComponent() == dataPanel) {
-            dataPanel.deleteSelection();
-        } else {
-            throw new RuntimeException("Delete should only be accessable from the Data panel");
-        }
-    }
-
-    public final void modelChanged() {
-        modelPanel.getOptions(beautiOptions);
-
-        priorsPanel.setOptions(beautiOptions);
-        operatorsPanel.setOptions(beautiOptions);
-        setDirty();
-    }
-
-    public final void operatorsChanged() {
-        setDirty();
-    }
-
-    public final void priorsChanged() {
-        priorsPanel.getOptions(beautiOptions);
-
-        operatorsPanel.setOptions(beautiOptions);
-
-        priorsPanel.setOptions(beautiOptions);
-
-        setDirty();
-    }
-
-    public final void mcmcChanged() {
-        setDirty();
-    }
-
-    public boolean requestClose() {
-        if (isDirty()) {
-            int option = JOptionPane.showConfirmDialog(this,
-                    "You have made changes but have not generated\n" +
-                            "a BEAST XML file. Do you wish to generate\n" +
-                            "before closing this window?",
-                    "Unused changes",
-                    JOptionPane.YES_NO_CANCEL_OPTION,
-                    JOptionPane.WARNING_MESSAGE);
-
-            if (option == JOptionPane.YES_OPTION) {
-                return !doGenerate();
-            } else if (option == JOptionPane.CANCEL_OPTION || option == JOptionPane.DEFAULT_OPTION) {
-                return false;
-            }
-            return true;
-        }
-        return true;
-    }
-
-    public void doApplyTemplate() {
-        FileDialog dialog = new FileDialog(this,
-                "Apply Template",
-                FileDialog.LOAD);
-        dialog.setVisible(true);
-        if (dialog.getFile() != null) {
-            File file = new File(dialog.getDirectory(), dialog.getFile());
-            try {
-                readFromFile(file);
-            } catch (FileNotFoundException fnfe) {
-                JOptionPane.showMessageDialog(this, "Unable to open template file: File not found",
-                        "Unable to open file",
-                        JOptionPane.ERROR_MESSAGE);
-            } catch (IOException ioe) {
-                JOptionPane.showMessageDialog(this, "Unable to read template file: " + ioe,
-                        "Unable to read file",
-                        JOptionPane.ERROR_MESSAGE);
-            }
-        }
-    }
-
-    protected boolean readFromFile(File file) throws IOException {
-        try {
-            SAXBuilder parser = new SAXBuilder();
-            Document doc = parser.build(file);
-            beautiOptions.parse(doc);
-
-            if (beautiOptions.guessDates) {
-                beautiOptions.guessDates();
-            }
-
-            dataPanel.setOptions(beautiOptions);
-            taxaPanel.setOptions(beautiOptions);
-            modelPanel.setOptions(beautiOptions);
-            priorsPanel.setOptions(beautiOptions);
-            operatorsPanel.setOptions(beautiOptions);
-            mcmcPanel.setOptions(beautiOptions);
-
-            getExportAction().setEnabled(beautiOptions.alignment != null);
-            getSaveAction().setEnabled(beautiOptions.alignment != null);
-            getSaveAsAction().setEnabled(beautiOptions.alignment != null);
-
-        } catch (dr.xml.XMLParseException xpe) {
-            JOptionPane.showMessageDialog(this, "Error reading file: This may not be a BEAUti file",
-                    "Error reading file",
-                    JOptionPane.ERROR_MESSAGE);
-            return false;
-        } catch (JDOMException e) {
-            JOptionPane.showMessageDialog(this, "Unable to open file: This may not be a BEAUti file",
-                    "Unable to open file",
-                    JOptionPane.ERROR_MESSAGE);
-            return false;
-        }
-        return true;
-    }
-
-    public String getDefaultFileName() { return beautiOptions.fileNameStem+".beauti"; }
-
-    protected boolean writeToFile(File file) throws IOException {
-        dataPanel.getOptions(beautiOptions);
-        taxaPanel.getOptions(beautiOptions);
-        modelPanel.getOptions(beautiOptions);
-        priorsPanel.getOptions(beautiOptions);
-        operatorsPanel.getOptions(beautiOptions);
-        mcmcPanel.getOptions(beautiOptions);
-
-        Document doc = beautiOptions.create(false, true);
-
-        FileWriter fw = new FileWriter(file);
-
-        XMLOutputter outputter = new XMLOutputter(org.jdom.output.Format.getPrettyFormat());
-
-        outputter.output(doc, fw);
-
-        fw.close();
-        return true;
-    }
-
-    public final void doImport() {
-
-        FileDialog dialog = new FileDialog(this,
-                "Import NEXUS File...",
-                FileDialog.LOAD);
-
-        dialog.setVisible(true);
-        if (dialog.getFile() != null) {
-            File file = new File(dialog.getDirectory(), dialog.getFile());
-
-            try {
-                importFromFile(file);
-
-                setDirty();
-            } catch (FileNotFoundException fnfe) {
-                JOptionPane.showMessageDialog(this, "Unable to open file: File not found",
-                        "Unable to open file",
-                        JOptionPane.ERROR_MESSAGE);
-            } catch (IOException ioe) {
-                JOptionPane.showMessageDialog(this, "Unable to read file: " + ioe,
-                        "Unable to read file",
-                        JOptionPane.ERROR_MESSAGE);
-            }
-        }
-
-    }
-
-    protected void importFromFile(File file) throws IOException {
-
-        try {
-            FileReader reader = new FileReader(file);
-
-            NexusApplicationImporter importer = new NexusApplicationImporter(reader);
-
-            boolean done = false;
-
-            beautiOptions.originalAlignment = null;
-            beautiOptions.alignment = null;
-            beautiOptions.tree = null;
-            beautiOptions.taxonList = null;
-
-            while (!done) {
-                try {
-
-                    NexusImporter.NexusBlock block = importer.findNextBlock();
-
-                    if (block == NexusImporter.TAXA_BLOCK) {
-
-                        if (beautiOptions.taxonList != null) {
-                            throw new NexusImporter.MissingBlockException("TAXA block already defined");
-                        }
-
-                        beautiOptions.taxonList = importer.parseTaxaBlock();
-
-                    } else if (block == NexusImporter.CALIBRATION_BLOCK) {
-                        if (beautiOptions.taxonList == null) {
-                            throw new NexusImporter.MissingBlockException("TAXA or DATA block must be defined before a CALIBRATION block");
-                        }
-
-                        importer.parseCalibrationBlock(beautiOptions.taxonList);
-
-                    } else if (block == NexusImporter.CHARACTERS_BLOCK) {
-
-                        if (beautiOptions.taxonList == null) {
-                            throw new NexusImporter.MissingBlockException("TAXA block must be defined before a CHARACTERS block");
-                        }
-
-                        if (beautiOptions.originalAlignment != null) {
-                            throw new NexusImporter.MissingBlockException("CHARACTERS or DATA block already defined");
-                        }
-
-                        beautiOptions.originalAlignment = (SimpleAlignment)importer.parseCharactersBlock(beautiOptions.taxonList);
-
-                    } else if (block == NexusImporter.DATA_BLOCK) {
-
-                        if (beautiOptions.originalAlignment != null) {
-                            throw new NexusImporter.MissingBlockException("CHARACTERS or DATA block already defined");
-                        }
-
-                        // A data block doesn't need a taxon block before it
-                        // but if one exists then it will use it.
-                        beautiOptions.originalAlignment = (SimpleAlignment)importer.parseDataBlock(beautiOptions.taxonList);
-                        if (beautiOptions.taxonList == null) {
-                            beautiOptions.taxonList = beautiOptions.originalAlignment;
-                        }
-
-                    } else if (block == NexusImporter.TREES_BLOCK) {
-
-                        if (beautiOptions.taxonList == null) {
-                            throw new NexusImporter.MissingBlockException("TAXA or DATA block must be defined before a TREES block");
-                        }
-
-                        if (beautiOptions.tree != null) {
-                            throw new NexusImporter.MissingBlockException("TREES block already defined");
-                        }
-
-                        Tree[] trees = importer.parseTreesBlock(beautiOptions.taxonList);
-                        if (trees.length > 0) {
-                            beautiOptions.tree = trees[0];
-                        }
-
-/*					} else if (block == NexusApplicationImporter.PAUP_BLOCK) {
-
-						importer.parsePAUPBlock(beautiOptions);
-
-					} else if (block == NexusApplicationImporter.MRBAYES_BLOCK) {
-
-						importer.parseMrBayesBlock(beautiOptions);
-
-					} else if (block == NexusApplicationImporter.RHINO_BLOCK) {
-
-						importer.parseRhinoBlock(beautiOptions);
-*/
-                    } else {
-                        // Ignore the block..
-                    }
-
-                } catch (EOFException ex) {
-                    done = true;
-                }
-            }
-
-            // Allow the user to load taxa only (perhaps from a tree file) so that they can sample from a prior...
-            if (beautiOptions.originalAlignment == null && beautiOptions.taxonList == null) {
-                throw new NexusImporter.MissingBlockException("TAXON, DATA or CHARACTERS block is missing");
-            }
-
-        } catch (Importer.ImportException ime) {
-            JOptionPane.showMessageDialog(this, "Error parsing imported file: " + ime,
-                    "Error reading file",
-                    JOptionPane.ERROR_MESSAGE);
-            return;
-        } catch (IOException ioex) {
-            JOptionPane.showMessageDialog(this, "File I/O Error: " + ioex,
-                    "File I/O Error",
-                    JOptionPane.ERROR_MESSAGE);
-            return;
-        } catch (Exception ex) {
-            JOptionPane.showMessageDialog(this, "Fatal exception: " + ex,
-                    "Error reading file",
-                    JOptionPane.ERROR_MESSAGE);
-            return;
-        }
-
-        // check the taxon names for invalid characters
-        boolean foundAmp = false;
-        for (int i = 0; i < beautiOptions.taxonList.getTaxonCount(); i++) {
-            String name = beautiOptions.taxonList.getTaxon(i).getId();
-            if (name.indexOf('&') >= 0) {
-                foundAmp = true;
-            }
-        }
-        if (foundAmp) {
-            JOptionPane.showMessageDialog(this, "One or more taxon names include an illegal character ('&').\n" +
-                    "These characters will prevent BEAST from reading the resulting XML file.\n\n" +
-                    "Please edit the taxon name(s) before generating the BEAST file.",
-                    "Illegal Taxon Name(s)",
-                    JOptionPane.WARNING_MESSAGE);
-        }
-
-
-        // make sure they all have dates...
-        for (int i = 0; i < beautiOptions.taxonList.getTaxonCount(); i++) {
-            if (beautiOptions.taxonList.getTaxonAttribute(i, "date") == null) {
-                java.util.Date origin = new java.util.Date(0);
-
-                dr.evolution.util.Date date = dr.evolution.util.Date.createTimeSinceOrigin(0.0, Units.Type.YEARS, origin);
-                beautiOptions.taxonList.getTaxon(i).setAttribute("date", date);
-            }
-        }
-
-        beautiOptions.fileNameStem = dr.app.util.Utils.trimExtensions(file.getName(),
-                new String[] {"nex", "NEX", "tre", "TRE", "nexus", "NEXUS"});
-
-        beautiOptions.alignment = beautiOptions.originalAlignment;
-        beautiOptions.alignmentReset = true;
-        if (beautiOptions.alignment != null) {
-            Patterns patterns = new Patterns(beautiOptions.alignment);
-            DistanceMatrix distances = new JukesCantorDistanceMatrix(patterns);
-            beautiOptions.meanDistance = distances.getMeanDistance();
-
-            statusLabel.setText("Alignment: " + beautiOptions.alignment.getTaxonCount() + " taxa, " +
-                    beautiOptions.alignment.getSiteCount() + " sites");
-            beautiOptions.dataType = beautiOptions.alignment.getDataType().getType();
-        } else {
-            statusLabel.setText("Taxa only: " + beautiOptions.taxonList.getTaxonCount() + " taxa");
-            beautiOptions.meanDistance = 0.0;
-        }
-
-        dataPanel.setOptions(beautiOptions);
-        taxaPanel.setOptions(beautiOptions);
-        modelPanel.setOptions(beautiOptions);
-        priorsPanel.setOptions(beautiOptions);
-        operatorsPanel.setOptions(beautiOptions);
-        mcmcPanel.setOptions(beautiOptions);
-
-
-        getOpenAction().setEnabled(true);
-        getSaveAction().setEnabled(true);
-        getExportAction().setEnabled(true);
-    }
-
-    public final boolean doGenerate() {
-
-        try {
-            beautiOptions.checkOptions();
-        } catch(IllegalArgumentException iae) {
-            JOptionPane.showMessageDialog(this, iae.getMessage(),
-                    "Unable to generate file",
-                    JOptionPane.ERROR_MESSAGE);
-            return false;
-        }
-
-        FileDialog dialog = new FileDialog(this,
-                "Generate BEAST File...",
-                FileDialog.SAVE);
-
-        dialog.setVisible(true);
-        dialog.setFile(beautiOptions.fileNameStem + ".xml");
-        if (dialog.getFile() != null) {
-            File file = new File(dialog.getDirectory(), dialog.getFile());
-
-            try {
-                generate(file);
-
-            } catch (IOException ioe) {
-                JOptionPane.showMessageDialog(this, "Unable to generate file: " + ioe.getMessage(),
-                        "Unable to generate file",
-                        JOptionPane.ERROR_MESSAGE);
-                return false;
-            }
-        }
-
-        clearDirty();
-        return true;
-    }
-
-    protected void generate(File file) throws IOException {
-        dataPanel.getOptions(beautiOptions);
-        taxaPanel.getOptions(beautiOptions);
-        modelPanel.getOptions(beautiOptions);
-        priorsPanel.getOptions(beautiOptions);
-        operatorsPanel.getOptions(beautiOptions);
-        mcmcPanel.getOptions(beautiOptions);
-
-        FileWriter fw = new FileWriter(file);
-        beautiOptions.generateXML(fw);
-        fw.close();
-    }
-
-    public JComponent getExportableComponent() {
-
-        JComponent exportable = null;
-        Component comp = tabbedPane.getSelectedComponent();
-
-        if (comp instanceof Exportable) {
-            exportable = ((Exportable)comp).getExportableComponent();
-        } else if (comp instanceof JComponent) {
-            exportable = (JComponent)comp;
-        }
-
-        return exportable;
-    }
-
-    public boolean doSave() {
-        return doSaveAs();
-    }
-
-    public boolean doSaveAs() {
-        FileDialog dialog = new FileDialog(this,
-                "Save Template As...",
-                FileDialog.SAVE);
-
-        dialog.setVisible(true);
-        if (dialog.getFile() == null) {
-            // the dialog was cancelled...
-            return false;
-        }
-
-        File file = new File(dialog.getDirectory(), dialog.getFile());
-
-        try {
-            if (writeToFile(file)) {
-
-                clearDirty();
-            }
-        } catch (IOException ioe) {
-            JOptionPane.showMessageDialog(this, "Unable to save file: " + ioe,
-                    "Unable to save file",
-                    JOptionPane.ERROR_MESSAGE);
-        }
-
-        return true;
-    }
-
-    public Action getOpenAction() {
-        return openTemplateAction;
-    }
-
-    private final AbstractAction openTemplateAction = new AbstractAction("Apply Template...") {
-        private static final long serialVersionUID = 2450459627280385426L;
-
-        public void actionPerformed(ActionEvent ae) {
-            doApplyTemplate();
-        }
-    };
-
-    public Action getSaveAction() {
-        return saveAsAction;
-    }
-
-    public Action getSaveAsAction() {
-        return saveAsAction;
-    }
-
-    private final AbstractAction saveAsAction = new AbstractAction("Save Template As...") {
-        private static final long serialVersionUID = 2424923366448459342L;
-
-        public void actionPerformed(ActionEvent ae) {
-            doSaveAs();
-        }
-    };
-
-    public Action getImportAction() { return importNexusAction; }
-
-    protected AbstractAction importNexusAction = new AbstractAction("Import NEXUS...") {
-        private static final long serialVersionUID = 3217702096314745005L;
-
-        public void actionPerformed(java.awt.event.ActionEvent ae) {
-            doImport();
-        }
-    };
-
-    public Action getExportAction() { return generateAction; }
-
-    protected AbstractAction generateAction = new AbstractAction("Generate BEAST File...", gearIcon) {
-        private static final long serialVersionUID = -5329102618630268783L;
-
-        public void actionPerformed(java.awt.event.ActionEvent ae) {
-            doGenerate();
-        }
-    };
-
-}
diff --git a/src/dr/app/oldbeauti/BeautiOptions.java b/src/dr/app/oldbeauti/BeautiOptions.java
deleted file mode 100644
index 74354fb..0000000
--- a/src/dr/app/oldbeauti/BeautiOptions.java
+++ /dev/null
@@ -1,1933 +0,0 @@
-/*
- * BeautiOptions.java
- *
- * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
- *
- * This file is part of BEAST.
- * See the NOTICE file distributed with this work for additional
- * information regarding copyright ownership and licensing.
- *
- * BEAST is free software; you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as
- * published by the Free Software Foundation; either version 2
- * of the License, or (at your option) any later version.
- *
- *  BEAST is distributed in the hope that it will be useful,
- *  but WITHOUT ANY WARRANTY; without even the implied warranty of
- *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
- *  GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public
- * License along with BEAST; if not, write to the
- * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
- * Boston, MA  02110-1301  USA
- */
-
-package dr.app.oldbeauti;
-
-import dr.evolution.alignment.Alignment;
-import dr.evolution.alignment.SimpleAlignment;
-import dr.evolution.datatype.AminoAcids;
-import dr.evolution.datatype.DataType;
-import dr.evolution.datatype.Nucleotides;
-import dr.evolution.datatype.TwoStates;
-import dr.evolution.sequence.Sequence;
-import dr.evolution.tree.Tree;
-import dr.evolution.util.*;
-import dr.evomodel.coalescent.VariableDemographicModel;
-import dr.evomodel.sitemodel.SiteModel;
-import dr.evomodelxml.speciation.BirthDeathModelParser;
-import dr.evomodelxml.tree.RateStatisticParser;
-import dr.evoxml.AlignmentParser;
-import dr.evoxml.TaxaParser;
-import dr.evoxml.TaxonParser;
-import dr.util.NumberFormatter;
-import dr.xml.XMLParseException;
-import dr.xml.XMLParser;
-import org.jdom.Document;
-import org.jdom.Element;
-
-import java.text.NumberFormat;
-import java.text.ParseException;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-/**
- * @author Andrew Rambaut
- * @author Alexei Drummond
- */
-public class BeautiOptions {
-    final public static String LOCAL_CLOCK = "localClock";
-    final public static String UCLD_MEAN = "ucld.mean";
-    final public static String UCLD_STDEV = "ucld.stdev";
-    final public static String UCED_MEAN = "uced.mean";
-
-
-    public BeautiOptions() {
-        double demoWeights = 3.0;
-        double substWeights = 1.0;
-        double rateWeights = 3.0;
-        double branchWeights = 30.0;
-        double treeWeights = 15.0;
-
-        createParameter("tree", "The tree");
-        createParameter("treeModel.internalNodeHeights", "internal node heights of the tree (except the root)");
-        createParameter("treeModel.allInternalNodeHeights", "internal node heights of the tree");
-        createParameter("treeModel.rootHeight", "root height of the tree", true, 1.0, 0.0, Double.POSITIVE_INFINITY);
-
-        createScaleParameter("constant.popSize", "coalescent population size parameter", TIME_SCALE, 1.0, 0.0, Double.POSITIVE_INFINITY);
-
-        createScaleParameter("exponential.popSize", "coalescent population size parameter", TIME_SCALE, 1.0, 0.0, Double.POSITIVE_INFINITY);
-        createParameter("exponential.growthRate", "coalescent growth rate parameter", GROWTH_RATE_SCALE, 0.0, Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY);
-        createParameter("exponential.doublingTime", "coalescent doubling time parameter", TIME_SCALE, 0.5, 0.0, Double.POSITIVE_INFINITY);
-        createScaleParameter("logistic.popSize", "coalescent population size parameter", TIME_SCALE, 1.0, 0.0, Double.POSITIVE_INFINITY);
-        createParameter("logistic.growthRate", "coalescent logistic growth rate parameter", GROWTH_RATE_SCALE, 0.001, 0.0, Double.POSITIVE_INFINITY);
-        createParameter("logistic.doublingTime", "coalescent doubling time parameter", TIME_SCALE, 0.5, 0.0, Double.POSITIVE_INFINITY);
-        createParameter("logistic.t50", "logistic shape parameter", T50_SCALE, 0.1, 0.0, Double.POSITIVE_INFINITY);
-        createScaleParameter("expansion.popSize", "coalescent population size parameter", TIME_SCALE, 1.0, 0.0, Double.POSITIVE_INFINITY);
-        createParameter("expansion.growthRate", "coalescent logistic growth rate parameter", GROWTH_RATE_SCALE, 0.001, 0.0, Double.POSITIVE_INFINITY);
-        createParameter("expansion.doublingTime", "coalescent doubling time parameter", TIME_SCALE, 0.5, 0.0, Double.POSITIVE_INFINITY);
-        createParameter("expansion.ancestralProportion", "ancestral population proportion", NONE, 0.1, 0.0, 1.0);
-        createParameter("skyline.popSize", "Bayesian Skyline population sizes", TIME_SCALE, 1.0, 0.0, Double.POSITIVE_INFINITY);
-        createParameter("skyline.groupSize", "Bayesian Skyline group sizes");
-
-        createParameter("demographic.popSize", "Extended Bayesian Skyline population sizes", TIME_SCALE, 1.0, 0.0, Double.POSITIVE_INFINITY);
-        createParameter("demographic.indicators", "Extended Bayesian Skyline population switch");
-        createScaleParameter("demographic.populationMean", "Extended Bayesian Skyline population prior mean", TIME_SCALE, 1, 0, Double.POSITIVE_INFINITY);
-        {
-            final Parameter p = createStatistic("demographic.populationSizeChanges", "Average number of population change points", true);
-            p.priorType = PriorType.POISSON_PRIOR;
-            p.poissonMean = Math.log(2);
-        }
-        createParameter("yule.birthRate", "Yule speciation process birth rate", BIRTH_RATE_SCALE, 1.0, 0.0, Double.POSITIVE_INFINITY);
-
-//        createParameter("birthDeath.birthRate", "Birth-Death speciation process birth rate", BIRTH_RATE_SCALE, 1.0, 0.0, Double.POSITIVE_INFINITY);
-//        createParameter("birthDeath.deathRate", "Birth-Death speciation process death rate", BIRTH_RATE_SCALE, 0.5, 0.0, Double.POSITIVE_INFINITY);
-        createParameter(BirthDeathModelParser.MEAN_GROWTH_RATE_PARAM_NAME, "Birth-Death speciation process rate", BIRTH_RATE_SCALE, 1.0, 0.0, Double.POSITIVE_INFINITY);
-        createParameter(BirthDeathModelParser.RELATIVE_DEATH_RATE_PARAM_NAME, "Death/Birth speciation process relative death rate", BIRTH_RATE_SCALE, 0.5, 0.0, 1.0);
-        //createParameter("birthDeath.samplingProportion", "Birth-Death speciation process sampling proportion", NONE, 1.0, 0.0, 1.0);
-
-        createParameter("clock.rate", "substitution rate", SUBSTITUTION_RATE_SCALE, 1.0, 0.0, Double.POSITIVE_INFINITY);
-        createParameter(UCED_MEAN, "uncorrelated exponential relaxed clock mean", SUBSTITUTION_RATE_SCALE, 1.0, 0.0, Double.POSITIVE_INFINITY);
-        createParameter(UCLD_MEAN, "uncorrelated lognormal relaxed clock mean", SUBSTITUTION_RATE_SCALE, 1.0, 0.0, Double.POSITIVE_INFINITY);
-        createParameter(UCLD_STDEV, "uncorrelated lognormal relaxed clock stdev", LOG_STDEV_SCALE, 0.1, 0.0, Double.POSITIVE_INFINITY);
-        createParameter("branchRates.categories", "relaxed clock branch rate categories");
-        createParameter(LOCAL_CLOCK + "." + "rates", "random local clock rates", SUBSTITUTION_RATE_SCALE, 1.0, 0.0, Double.POSITIVE_INFINITY);
-        createParameter(LOCAL_CLOCK + "." + "changes", "random local clock rate change indicator");
-
-        //Substitution model parameters
-        createParameter("hky.frequencies", "HKY base frequencies", UNITY_SCALE, 0.25, 0.0, 1.0);
-        createParameter("hky1.frequencies", "HKY base frequencies for codon position 1", UNITY_SCALE, 0.25, 0.0, 1.0);
-        createParameter("hky2.frequencies", "HKY base frequencies for codon position 2", UNITY_SCALE, 0.25, 0.0, 1.0);
-        createParameter("hky3.frequencies", "HKY base frequencies for codon position 3", UNITY_SCALE, 0.25, 0.0, 1.0);
-
-        createScaleParameter("hky.kappa", "HKY transition-transversion parameter", SUBSTITUTION_PARAMETER_SCALE, 1.0, 1.0E-8, Double.POSITIVE_INFINITY);
-        createScaleParameter("hky1.kappa", "HKY transition-transversion parameter for codon position 1", SUBSTITUTION_PARAMETER_SCALE, 1.0, 1.0E-8, Double.POSITIVE_INFINITY);
-        createScaleParameter("hky2.kappa", "HKY transition-transversion parameter for codon position 2", SUBSTITUTION_PARAMETER_SCALE, 1.0, 1.0E-8, Double.POSITIVE_INFINITY);
-        createScaleParameter("hky3.kappa", "HKY transition-transversion parameter for codon position 3", SUBSTITUTION_PARAMETER_SCALE, 1.0, 1.0E-8, Double.POSITIVE_INFINITY);
-
-        createParameter("gtr.frequencies", "GTR base frequencies", UNITY_SCALE, 0.25, 0.0, 1.0);
-        createParameter("gtr1.frequencies", "GTR base frequencies for codon position 1", UNITY_SCALE, 0.25, 0.0, 1.0);
-        createParameter("gtr2.frequencies", "GTR base frequencies for codon position 2", UNITY_SCALE, 0.25, 0.0, 1.0);
-        createParameter("gtr3.frequencies", "GTR base frequencies for codon position 3", UNITY_SCALE, 0.25, 0.0, 1.0);
-
-        createScaleParameter("gtr.ac", "GTR A-C substitution parameter", SUBSTITUTION_PARAMETER_SCALE, 1.0, 1.0E-8, Double.POSITIVE_INFINITY);
-        createScaleParameter("gtr.ag", "GTR A-G substitution parameter", SUBSTITUTION_PARAMETER_SCALE, 1.0, 1.0E-8, Double.POSITIVE_INFINITY);
-        createScaleParameter("gtr.at", "GTR A-T substitution parameter", SUBSTITUTION_PARAMETER_SCALE, 1.0, 1.0E-8, Double.POSITIVE_INFINITY);
-        createScaleParameter("gtr.cg", "GTR C-G substitution parameter", SUBSTITUTION_PARAMETER_SCALE, 1.0, 1.0E-8, Double.POSITIVE_INFINITY);
-        createScaleParameter("gtr.gt", "GTR G-T substitution parameter", SUBSTITUTION_PARAMETER_SCALE, 1.0, 1.0E-8, Double.POSITIVE_INFINITY);
-
-        createScaleParameter("gtr1.ac", "GTR A-C substitution parameter for codon position 1", SUBSTITUTION_PARAMETER_SCALE, 1.0, 1.0E-8, Double.POSITIVE_INFINITY);
-        createScaleParameter("gtr1.ag", "GTR A-G substitution parameter for codon position 1", SUBSTITUTION_PARAMETER_SCALE, 1.0, 1.0E-8, Double.POSITIVE_INFINITY);
-        createScaleParameter("gtr1.at", "GTR A-T substitution parameter for codon position 1", SUBSTITUTION_PARAMETER_SCALE, 1.0, 1.0E-8, Double.POSITIVE_INFINITY);
-        createScaleParameter("gtr1.cg", "GTR C-G substitution parameter for codon position 1", SUBSTITUTION_PARAMETER_SCALE, 1.0, 1.0E-8, Double.POSITIVE_INFINITY);
-        createScaleParameter("gtr1.gt", "GTR G-T substitution parameter for codon position 1", SUBSTITUTION_PARAMETER_SCALE, 1.0, 1.0E-8, Double.POSITIVE_INFINITY);
-
-        createScaleParameter("gtr2.ac", "GTR A-C substitution parameter for codon position 2", SUBSTITUTION_PARAMETER_SCALE, 1.0, 1.0E-8, Double.POSITIVE_INFINITY);
-        createScaleParameter("gtr2.ag", "GTR A-G substitution parameter for codon position 2", SUBSTITUTION_PARAMETER_SCALE, 1.0, 1.0E-8, Double.POSITIVE_INFINITY);
-        createScaleParameter("gtr2.at", "GTR A-T substitution parameter for codon position 2", SUBSTITUTION_PARAMETER_SCALE, 1.0, 1.0E-8, Double.POSITIVE_INFINITY);
-        createScaleParameter("gtr2.cg", "GTR C-G substitution parameter for codon position 2", SUBSTITUTION_PARAMETER_SCALE, 1.0, 1.0E-8, Double.POSITIVE_INFINITY);
-        createScaleParameter("gtr2.gt", "GTR G-T substitution parameter for codon position 2", SUBSTITUTION_PARAMETER_SCALE, 1.0, 1.0E-8, Double.POSITIVE_INFINITY);
-
-        createScaleParameter("gtr3.ac", "GTR A-C substitution parameter for codon position 3", SUBSTITUTION_PARAMETER_SCALE, 1.0, 1.0E-8, Double.POSITIVE_INFINITY);
-        createScaleParameter("gtr3.ag", "GTR A-G substitution parameter for codon position 3", SUBSTITUTION_PARAMETER_SCALE, 1.0, 1.0E-8, Double.POSITIVE_INFINITY);
-        createScaleParameter("gtr3.at", "GTR A-T substitution parameter for codon position 3", SUBSTITUTION_PARAMETER_SCALE, 1.0, 1.0E-8, Double.POSITIVE_INFINITY);
-        createScaleParameter("gtr3.cg", "GTR C-G substitution parameter for codon position 3", SUBSTITUTION_PARAMETER_SCALE, 1.0, 1.0E-8, Double.POSITIVE_INFINITY);
-        createScaleParameter("gtr3.gt", "GTR G-T substitution parameter for codon position 3", SUBSTITUTION_PARAMETER_SCALE, 1.0, 1.0E-8, Double.POSITIVE_INFINITY);
-
-        createParameter("bsimple.frequencies", "Binary Simple frequencies", UNITY_SCALE, 0.5, 0.0, 1.0);
-
-        createParameter("bcov.frequencies", "Binary Covarion frequencies of the visible states", UNITY_SCALE, 0.5, 0.0, 1.0);
-        createParameter("bcov.hfrequencies", "Binary Covarion frequencies of the hidden rates", UNITY_SCALE, 0.5, 0.0, 1.0);
-        createParameter("bcov.alpha", "Binary Covarion rate of evolution in slow mode", UNITY_SCALE, 0.5, 0.0, 1.0);
-        createParameter("bcov.s", "Binary Covarion rate of flipping between slow and fast modes", SUBSTITUTION_PARAMETER_SCALE, 0.5, 0.0, 100.0);
-
-        createParameter(SiteModel.SITE_MODEL + "." + "alpha", "gamma shape parameter", SUBSTITUTION_PARAMETER_SCALE, 0.5, 0.0, Double.POSITIVE_INFINITY);
-        createParameter("siteModel1.alpha", "gamma shape parameter for codon position 1", SUBSTITUTION_PARAMETER_SCALE, 0.5, 0.0, Double.POSITIVE_INFINITY);
-        createParameter("siteModel2.alpha", "gamma shape parameter for codon position 2", SUBSTITUTION_PARAMETER_SCALE, 0.5, 0.0, Double.POSITIVE_INFINITY);
-        createParameter("siteModel3.alpha", "gamma shape parameter for codon position 3", SUBSTITUTION_PARAMETER_SCALE, 0.5, 0.0, Double.POSITIVE_INFINITY);
-
-        createParameter(SiteModel.SITE_MODEL + "." + "pInv", "proportion of invariant sites parameter", NONE, 0.5, 0.0, 1.0);
-        createParameter("siteModel1.pInv", "proportion of invariant sites parameter for codon position 1", NONE, 0.5, 0.0, 1.0);
-        createParameter("siteModel2.pInv", "proportion of invariant sites parameter for codon position 2", NONE, 0.5, 0.0, 1.0);
-        createParameter("siteModel3.pInv", "proportion of invariant sites parameter for codon position 3", NONE, 0.5, 0.0, 1.0);
-
-        createParameter("siteModel1.mu", "relative rate parameter for codon position 1", SUBSTITUTION_PARAMETER_SCALE, 1.0, 0.0, Double.POSITIVE_INFINITY);
-        createParameter("siteModel2.mu", "relative rate parameter for codon position 2", SUBSTITUTION_PARAMETER_SCALE, 1.0, 0.0, Double.POSITIVE_INFINITY);
-        createParameter("siteModel3.mu", "relative rate parameter for codon position 3", SUBSTITUTION_PARAMETER_SCALE, 1.0, 0.0, Double.POSITIVE_INFINITY);
-        createParameter("allMus", "All the relative rates");
-
-        // These are statistics which could have priors on...
-        createStatistic("meanRate", "The mean rate of evolution over the whole tree", 0.0, Double.POSITIVE_INFINITY);
-        createStatistic(RateStatisticParser.COEFFICIENT_OF_VARIATION, "The variation in rate of evolution over the whole tree", 0.0, Double.POSITIVE_INFINITY);
-        createStatistic("covariance", "The covariance in rates of evolution on each lineage with their ancestral lineages", Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY);
-
-        createOperator("constant.popSize", SCALE, 0.75, demoWeights);
-        createOperator("exponential.popSize", SCALE, 0.75, demoWeights);
-        createOperator("exponential.growthRate", RANDOM_WALK, 1.0, demoWeights);
-        createOperator("exponential.doublingTime", SCALE, 0.75, demoWeights);
-        createOperator("logistic.popSize", SCALE, 0.75, demoWeights);
-        createOperator("logistic.growthRate", SCALE, 0.75, demoWeights);
-        createOperator("logistic.doublingTime", SCALE, 0.75, demoWeights);
-        createOperator("logistic.t50", SCALE, 0.75, demoWeights);
-        createOperator("expansion.popSize", SCALE, 0.75, demoWeights);
-        createOperator("expansion.growthRate", SCALE, 0.75, demoWeights);
-        createOperator("expansion.doublingTime", SCALE, 0.75, demoWeights);
-        createOperator("expansion.ancestralProportion", SCALE, 0.75, demoWeights);
-        createOperator("skyline.popSize", SCALE, 0.75, demoWeights * 5);
-        createOperator("skyline.groupSize", INTEGER_DELTA_EXCHANGE, 1.0, demoWeights * 2);
-
-        createOperator("demographic.populationMean", SCALE, 0.9, demoWeights);
-        createOperator("demographic.indicators", BITFLIP, 1, 2 * treeWeights);
-        // hack pass distribution in name
-        createOperator("demographic.popSize", "demographic.populationMeanDist", "", "demographic.popSize", "demographic.indicators", SAMPLE_NONACTIVE, 1, 5 * demoWeights);
-        createOperator("demographic.scaleActive", "demographic.scaleActive", "", "demographic.popSize", "demographic.indicators", SCALE_WITH_INDICATORS, 0.5, 2 * demoWeights);
-
-        createOperator("yule.birthRate", SCALE, 0.75, demoWeights);
-//        createOperator("birthDeath.birthRate", SCALE, 0.75, demoWeights);
-//        createOperator("birthDeath.deathRate", SCALE, 0.75, demoWeights);
-
-        createOperator(BirthDeathModelParser.MEAN_GROWTH_RATE_PARAM_NAME, SCALE, 0.75, demoWeights);
-        createOperator(BirthDeathModelParser.RELATIVE_DEATH_RATE_PARAM_NAME, SCALE, 0.75, demoWeights);
-        //createOperator("birthDeath.samplingProportion", RANDOM_WALK, 0.75, demoWeights);
-
-        createOperator("clock.rate", SCALE, 0.75, rateWeights);
-        createOperator(UCED_MEAN, SCALE, 0.75, rateWeights);
-        createOperator(UCLD_MEAN, SCALE, 0.75, rateWeights);
-        createOperator(UCLD_STDEV, SCALE, 0.75, rateWeights);
-//        createOperator("swapBranchRateCategories", "branchRates.categories", "Performs a swap of branch rate categories", "branchRates.categories", SWAP, 1, branchWeights);
-        createOperator("randomWalkBranchRateCategories", "branchRates.categories", "Performs an integer random walk of branch rate categories", "branchRates.categories", INTEGER_RANDOM_WALK, 1, branchWeights);
-        createOperator("unformBranchRateCategories", "branchRates.categories", "Performs an integer uniform draw of branch rate categories", "branchRates.categories", INTEGER_UNIFORM, 1, branchWeights);
-
-        createOperator(LOCAL_CLOCK + "." + "rates", SCALE, 0.75, treeWeights);
-        createOperator(LOCAL_CLOCK + "." + "changes", BITFLIP, 1, treeWeights);
-        createOperator("treeBitMove", "Tree", "Swaps the rates and change locations of local clocks", "tree", TREE_BIT_MOVE, -1.0, treeWeights);
-
-        createOperator("hky.kappa", SCALE, 0.75, substWeights);
-        createOperator("hky1.kappa", SCALE, 0.75, substWeights);
-        createOperator("hky2.kappa", SCALE, 0.75, substWeights);
-        createOperator("hky3.kappa", SCALE, 0.75, substWeights);
-        createOperator("hky.frequencies", DELTA_EXCHANGE, 0.01, substWeights);
-        createOperator("hky1.frequencies", DELTA_EXCHANGE, 0.01, substWeights);
-        createOperator("hky2.frequencies", DELTA_EXCHANGE, 0.01, substWeights);
-        createOperator("hky3.frequencies", DELTA_EXCHANGE, 0.01, substWeights);
-
-        createOperator("gtr.ac", SCALE, 0.75, substWeights);
-        createOperator("gtr.ag", SCALE, 0.75, substWeights);
-        createOperator("gtr.at", SCALE, 0.75, substWeights);
-        createOperator("gtr.cg", SCALE, 0.75, substWeights);
-        createOperator("gtr.gt", SCALE, 0.75, substWeights);
-
-        createOperator("gtr1.ac", SCALE, 0.75, substWeights);
-        createOperator("gtr1.ag", SCALE, 0.75, substWeights);
-        createOperator("gtr1.at", SCALE, 0.75, substWeights);
-        createOperator("gtr1.cg", SCALE, 0.75, substWeights);
-        createOperator("gtr1.gt", SCALE, 0.75, substWeights);
-
-        createOperator("gtr2.ac", SCALE, 0.75, substWeights);
-        createOperator("gtr2.ag", SCALE, 0.75, substWeights);
-        createOperator("gtr2.at", SCALE, 0.75, substWeights);
-        createOperator("gtr2.cg", SCALE, 0.75, substWeights);
-        createOperator("gtr2.gt", SCALE, 0.75, substWeights);
-
-        createOperator("gtr3.ac", SCALE, 0.75, substWeights);
-        createOperator("gtr3.ag", SCALE, 0.75, substWeights);
-        createOperator("gtr3.at", SCALE, 0.75, substWeights);
-        createOperator("gtr3.cg", SCALE, 0.75, substWeights);
-        createOperator("gtr3.gt", SCALE, 0.75, substWeights);
-
-        createOperator("gtr.frequencies", DELTA_EXCHANGE, 0.01, substWeights);
-        createOperator("gtr1.frequencies", DELTA_EXCHANGE, 0.01, substWeights);
-        createOperator("gtr2.frequencies", DELTA_EXCHANGE, 0.01, substWeights);
-        createOperator("gtr3.frequencies", DELTA_EXCHANGE, 0.01, substWeights);
-
-        createOperator("bcov.alpha", SCALE, 0.75, substWeights);
-        createOperator("bcov.s", SCALE, 0.75, substWeights);
-        createOperator("bcov.frequencies", DELTA_EXCHANGE, 0.01, substWeights);
-        createOperator("bcov.hfrequencies", DELTA_EXCHANGE, 0.01, substWeights);
-
-        createOperator(SiteModel.SITE_MODEL + "." + "alpha", SCALE, 0.75, substWeights);
-        createOperator("siteModel1.alpha", SCALE, 0.75, substWeights);
-        createOperator("siteModel2.alpha", SCALE, 0.75, substWeights);
-        createOperator("siteModel3.alpha", SCALE, 0.75, substWeights);
-
-        createOperator(SiteModel.SITE_MODEL + "." + "pInv", SCALE, 0.75, substWeights);
-        createOperator("siteModel1.pInv", SCALE, 0.75, substWeights);
-        createOperator("siteModel2.pInv", SCALE, 0.75, substWeights);
-        createOperator("siteModel3.pInv", SCALE, 0.75, substWeights);
-
-        createOperator("upDownRateHeights", "Substitution rate and heights", "Scales substitution rates inversely to node heights of the tree", "clock.rate", "treeModel.allInternalNodeHeights", UP_DOWN, 0.75, rateWeights);
-        createOperator("upDownUCEDMeanHeights", "UCED mean and heights", "Scales UCED mean inversely to node heights of the tree", UCED_MEAN, "treeModel.allInternalNodeHeights", UP_DOWN, 0.75, rateWeights);
-        createOperator("upDownUCLDMeanHeights", "UCLD mean and heights", "Scales UCLD mean inversely to node heights of the tree", UCLD_MEAN, "treeModel.allInternalNodeHeights", UP_DOWN, 0.75, rateWeights);
-        createOperator("centeredMu", "Relative rates", "Scales codon position rates relative to each other maintaining mean", "allMus", CENTERED_SCALE, 0.75, substWeights);
-        createOperator("deltaMu", "Relative rates", "Changes codon position rates relative to each other maintaining mean", "allMus", DELTA_EXCHANGE, 0.75, substWeights);
-
-        createOperator("treeModel.rootHeight", SCALE, 0.75, demoWeights);
-        createOperator("uniformHeights", "Internal node heights", "Draws new internal node heights uniformally", "treeModel.internalNodeHeights", UNIFORM, -1, branchWeights);
-
-        createOperator("subtreeSlide", "Tree", "Performs the subtree-slide rearrangement of the tree", "tree", SUBTREE_SLIDE, 1.0, treeWeights);
-        createOperator("narrowExchange", "Tree", "Performs local rearrangements of the tree", "tree", NARROW_EXCHANGE, -1, treeWeights);
-        createOperator("wideExchange", "Tree", "Performs global rearrangements of the tree", "tree", WIDE_EXCHANGE, -1, demoWeights);
-        createOperator("wilsonBalding", "Tree", "Performs the Wilson-Balding rearrangement of the tree", "tree", WILSON_BALDING, -1, demoWeights);
-    }
-
-    protected void createScaleParameter(String name, String description, int scale, double value, double lower, double upper) {
-        Parameter p = createParameter(name, description, scale, value, lower, upper);
-        p.priorType = PriorType.JEFFREYS_PRIOR;
-    }
-
-    protected Parameter createParameter(String name, String description, int scale, double value, double lower, double upper) {
-        final Parameter parameter = new Parameter(name, description, scale, value, lower, upper);
-        parameters.put(name, parameter);
-        return parameter;
-    }
-
-    protected Parameter createParameter(String name, String description) {
-        final Parameter parameter = new Parameter(name, description);
-        parameters.put(name, parameter);
-        return parameter;
-    }
-
-    protected Parameter createStatistic(String name, String description, boolean isDiscrete) {
-        final Parameter parameter = new Parameter(name, description, isDiscrete);
-        parameters.put(name, parameter);
-        return parameter;
-    }
-
-    protected void createStatistic(String name, String description, double lower, double upper) {
-        parameters.put(name, new Parameter(name, description, lower, upper));
-    }
-
-    protected void createParameter(String name, String description, boolean isNodeHeight, double value, double lower, double upper) {
-        parameters.put(name, new Parameter(name, description, isNodeHeight, value, lower, upper));
-    }
-
-    protected void createOperator(String parameterName, String type, double tuning, double weight) {
-        Parameter parameter = getParameter(parameterName);
-        operators.put(parameter.name, new Operator(parameterName, "", parameter, type, tuning, weight));
-    }
-
-    protected void createOperator(String key, String name, String description, String parameterName, String type, double tuning, double weight) {
-        Parameter parameter = getParameter(parameterName);
-        operators.put(key, new Operator(name, description, parameter, type, tuning, weight));
-    }
-
-    protected void createOperator(String key, String name, String description, String parameterName1, String parameterName2, String type, double tuning, double weight) {
-        Parameter parameter1 = getParameter(parameterName1);
-        Parameter parameter2 = getParameter(parameterName2);
-        operators.put(key, new Operator(name, description, parameter1, parameter2, type, tuning, weight));
-    }
-
-    private double round(double value, int sf) {
-        NumberFormatter formatter = new NumberFormatter(sf);
-        try {
-            return NumberFormat.getInstance().parse(formatter.format(value)).doubleValue();
-        } catch (ParseException e) {
-            return value;
-        }
-    }
-
-    /**
-     * return an list of operators that are required
-     *
-     * @return the parameter list
-     */
-    public ArrayList<Parameter> selectParameters() {
-
-        ArrayList<Parameter> ops = new ArrayList<Parameter>();
-
-        selectParameters(ops);
-        selectStatistics(ops);
-
-        double growthRateMaximum = 1E6;
-        double birthRateMaximum = 1E6;
-        double substitutionRateMaximum = 100;
-        double logStdevMaximum = 10;
-        double substitutionParameterMaximum = 100;
-        double initialRootHeight = 1;
-        double initialRate = 1;
-
-
-        if (fixedSubstitutionRate) {
-            double rate = meanSubstitutionRate;
-
-            growthRateMaximum = 1E6 * rate;
-            birthRateMaximum = 1E6 * rate;
-
-            if (alignment != null) {
-                initialRootHeight = meanDistance / rate;
-
-                initialRootHeight = round(initialRootHeight, 2);
-            }
-
-        } else {
-            if (maximumTipHeight > 0) {
-                initialRootHeight = maximumTipHeight * 10.0;
-            }
-
-            initialRate = round((meanDistance * 0.2) / initialRootHeight, 2);
-        }
-
-        double timeScaleMaximum = round(initialRootHeight * 1000.0, 2);
-
-        for (Parameter param : ops) {
-            if (alignmentReset) param.priorEdited = false;
-
-            if (!param.priorEdited) {
-                switch (param.scale) {
-                    case TIME_SCALE:
-                        param.uniformLower = Math.max(0.0, param.lower);
-                        param.uniformUpper = Math.min(timeScaleMaximum, param.upper);
-                        param.initial = initialRootHeight;
-                        break;
-                    case T50_SCALE:
-                        param.uniformLower = Math.max(0.0, param.lower);
-                        param.uniformUpper = Math.min(timeScaleMaximum, param.upper);
-                        param.initial = initialRootHeight / 5.0;
-                        break;
-                    case GROWTH_RATE_SCALE:
-                        param.uniformLower = Math.max(-growthRateMaximum, param.lower);
-                        param.uniformUpper = Math.min(growthRateMaximum, param.upper);
-                        break;
-                    case BIRTH_RATE_SCALE:
-                        param.uniformLower = Math.max(0.0, param.lower);
-                        param.uniformUpper = Math.min(birthRateMaximum, param.upper);
-                        break;
-                    case SUBSTITUTION_RATE_SCALE:
-                        param.uniformLower = Math.max(0.0, param.lower);
-                        param.uniformUpper = Math.min(substitutionRateMaximum, param.upper);
-                        param.initial = initialRate;
-                        break;
-                    case LOG_STDEV_SCALE:
-                        param.uniformLower = Math.max(0.0, param.lower);
-                        param.uniformUpper = Math.min(logStdevMaximum, param.upper);
-                        break;
-                    case SUBSTITUTION_PARAMETER_SCALE:
-                        param.uniformLower = Math.max(0.0, param.lower);
-                        param.uniformUpper = Math.min(substitutionParameterMaximum, param.upper);
-                        break;
-
-                    case UNITY_SCALE:
-                        param.uniformLower = 0.0;
-                        param.uniformUpper = 1.0;
-                        break;
-
-                }
-                if (param.isNodeHeight) {
-                    param.lower = maximumTipHeight;
-                    param.uniformLower = maximumTipHeight;
-                    param.uniformUpper = timeScaleMaximum;
-                    param.initial = initialRootHeight;
-                }
-            }
-        }
-
-        alignmentReset = false;
-
-        return ops;
-    }
-
-    /**
-     * return an list of operators that are required
-     *
-     * @return the operator list
-     */
-    public ArrayList<Operator> selectOperators() {
-
-        ArrayList<Operator> ops = new ArrayList<Operator>();
-
-        selectOperators(ops);
-
-        double initialRootHeight = 1;
-
-        if (fixedSubstitutionRate) {
-            double rate = meanSubstitutionRate;
-
-            if (alignment != null) {
-                initialRootHeight = meanDistance / rate;
-                initialRootHeight = round(initialRootHeight, 2);
-            }
-
-        } else {
-            if (maximumTipHeight > 0) {
-                initialRootHeight = maximumTipHeight * 10.0;
-            }
-        }
-
-        Operator op = getOperator("subtreeSlide");
-        if (!op.tuningEdited) {
-            op.tuning = initialRootHeight / 10.0;
-        }
-
-        return ops;
-    }
-
-    /**
-     * return a list of parameters that are required
-     *
-     * @param params the parameter list
-     */
-    private void selectParameters(ArrayList<Parameter> params) {
-
-        if (alignment != null) {
-
-            if (partitionCount > 1) {
-                for (int i = 1; i <= partitionCount; i++) {
-                    params.add(getParameter(SiteModel.SITE_MODEL + i + ".mu"));
-                }
-            }
-            switch (dataType) {
-                case DataType.NUCLEOTIDES:
-                    switch (nucSubstitutionModel) {
-                        case HKY:
-                            if (partitionCount > 1 && unlinkedSubstitutionModel) {
-                                for (int i = 1; i <= partitionCount; i++) {
-                                    params.add(getParameter("hky" + i + ".kappa"));
-                                }
-                            } else {
-                                params.add(getParameter("hky.kappa"));
-                            }
-                            break;
-                        case GTR:
-                            if (partitionCount > 1 && unlinkedSubstitutionModel) {
-                                for (int i = 1; i <= partitionCount; i++) {
-                                    params.add(getParameter("gtr" + i + ".ac"));
-                                    params.add(getParameter("gtr" + i + ".ag"));
-                                    params.add(getParameter("gtr" + i + ".at"));
-                                    params.add(getParameter("gtr" + i + ".cg"));
-                                    params.add(getParameter("gtr" + i + ".gt"));
-                                }
-                            } else {
-                                params.add(getParameter("gtr.ac"));
-                                params.add(getParameter("gtr.ag"));
-                                params.add(getParameter("gtr.at"));
-                                params.add(getParameter("gtr.cg"));
-                                params.add(getParameter("gtr.gt"));
-                            }
-                            break;
-
-                        default:
-                            throw new IllegalArgumentException("Unknown nucleotides substitution model");
-                    }
-                    break;
-
-                case DataType.AMINO_ACIDS:
-                    break;
-
-                case DataType.TWO_STATES:
-                case DataType.COVARION:
-                    switch (binarySubstitutionModel) {
-                        case BIN_SIMPLE:
-                            break;
-
-                        case BIN_COVARION:
-                            params.add(getParameter("bcov.alpha"));
-                            params.add(getParameter("bcov.s"));
-                            break;
-
-                        default:
-                            throw new IllegalArgumentException("Unknown binary substitution model");
-                    }
-                    break;
-
-                default:
-                    throw new IllegalArgumentException("Unknown data type");
-            }
-
-            // if gamma do shape move
-            if (gammaHetero) {
-                if (partitionCount > 1 && unlinkedHeterogeneityModel) {
-                    for (int i = 1; i <= partitionCount; i++) {
-                        params.add(getParameter(SiteModel.SITE_MODEL + i + ".alpha"));
-                    }
-                } else {
-                    params.add(getParameter(SiteModel.SITE_MODEL + "." + "alpha"));
-                }
-            }
-            // if pinv do pinv move
-            if (invarHetero) {
-                if (partitionCount > 1 && unlinkedHeterogeneityModel) {
-                    for (int i = 1; i <= partitionCount; i++) {
-                        params.add(getParameter(SiteModel.SITE_MODEL + i + ".pInv"));
-                    }
-                } else {
-                    params.add(getParameter(SiteModel.SITE_MODEL + "." + "pInv"));
-                }
-            }
-
-            // if not fixed then do mutation rate move and up/down move
-            if (!fixedSubstitutionRate) {
-                Parameter rateParam;
-
-                if (clockModel == STRICT_CLOCK || clockModel == RANDOM_LOCAL_CLOCK) {
-                    rateParam = getParameter("clock.rate");
-                    params.add(rateParam);
-                } else {
-                    if (clockModel == UNCORRELATED_EXPONENTIAL) {
-                        rateParam = getParameter(UCED_MEAN);
-                        params.add(rateParam);
-                    } else if (clockModel == UNCORRELATED_LOGNORMAL) {
-                        rateParam = getParameter(UCLD_MEAN);
-                        params.add(rateParam);
-                        params.add(getParameter(UCLD_STDEV));
-                    } else {
-                        throw new IllegalArgumentException("Unknown clock model");
-                    }
-                }
-
-                rateParam.isFixed = false;
-            } else {
-                Parameter rateParam;
-                if (clockModel == STRICT_CLOCK || clockModel == RANDOM_LOCAL_CLOCK) {
-                    rateParam = getParameter("clock.rate");
-                } else {
-                    if (clockModel == UNCORRELATED_EXPONENTIAL) {
-                        rateParam = getParameter(UCED_MEAN);
-                    } else if (clockModel == UNCORRELATED_LOGNORMAL) {
-                        rateParam = getParameter(UCLD_MEAN);
-                        params.add(getParameter(UCLD_STDEV));
-                    } else {
-                        throw new IllegalArgumentException("Unknown clock model");
-                    }
-                }
-                rateParam.isFixed = true;
-            }
-        }
-
-        if (nodeHeightPrior == CONSTANT) {
-            params.add(getParameter("constant.popSize"));
-        } else if (nodeHeightPrior == EXPONENTIAL) {
-            params.add(getParameter("exponential.popSize"));
-            if (parameterization == GROWTH_RATE) {
-                params.add(getParameter("exponential.growthRate"));
-            } else {
-                params.add(getParameter("exponential.doublingTime"));
-            }
-        } else if (nodeHeightPrior == LOGISTIC) {
-            params.add(getParameter("logistic.popSize"));
-            if (parameterization == GROWTH_RATE) {
-                params.add(getParameter("logistic.growthRate"));
-            } else {
-                params.add(getParameter("logistic.doublingTime"));
-            }
-            params.add(getParameter("logistic.t50"));
-        } else if (nodeHeightPrior == EXPANSION) {
-            params.add(getParameter("expansion.popSize"));
-            if (parameterization == GROWTH_RATE) {
-                params.add(getParameter("expansion.growthRate"));
-            } else {
-                params.add(getParameter("expansion.doublingTime"));
-            }
-            params.add(getParameter("expansion.ancestralProportion"));
-        } else if (nodeHeightPrior == SKYLINE) {
-            params.add(getParameter("skyline.popSize"));
-        } else if (nodeHeightPrior == EXTENDED_SKYLINE) {
-            params.add(getParameter("demographic.populationSizeChanges"));
-            params.add(getParameter("demographic.populationMean"));
-        } else if (nodeHeightPrior == YULE) {
-            params.add(getParameter("yule.birthRate"));
-        } else if (nodeHeightPrior == BIRTH_DEATH) {
-//            params.add(getParameter("birthDeath.birthRate"));
-//            params.add(getParameter("birthDeath.deathRate"));
-            params.add(getParameter(BirthDeathModelParser.MEAN_GROWTH_RATE_PARAM_NAME));
-            params.add(getParameter(BirthDeathModelParser.RELATIVE_DEATH_RATE_PARAM_NAME));
-            // at present we are not allowing the sampling of samplingProportion
-        }
-
-        params.add(getParameter("treeModel.rootHeight"));
-    }
-
-    private void selectStatistics(ArrayList<Parameter> params) {
-
-        if (taxonSets != null) {
-            for (Taxa taxonSet : taxonSets) {
-                Parameter statistic = statistics.get(taxonSet);
-                if (statistic == null) {
-                    statistic = new Parameter(taxonSet, "tMRCA for taxon set ");
-                    statistics.put(taxonSet, statistic);
-                }
-                params.add(statistic);
-            }
-        }
-
-        if (clockModel == RANDOM_LOCAL_CLOCK) {
-            if (localClockRateChangesStatistic == null) {
-                localClockRateChangesStatistic = new Parameter("rateChanges", "number of random local clocks", true);
-                localClockRateChangesStatistic.priorType = PriorType.POISSON_PRIOR;
-                localClockRateChangesStatistic.poissonMean = 1.0;
-                localClockRateChangesStatistic.poissonOffset = 0.0;
-            }
-            if (localClockRatesStatistic == null) {
-                localClockRatesStatistic = new Parameter(LOCAL_CLOCK + "." + "rates", "random local clock rates", false);
-
-                localClockRatesStatistic.priorType = PriorType.GAMMA_PRIOR;
-                localClockRatesStatistic.gammaAlpha = 0.5;
-                localClockRatesStatistic.gammaBeta = 2.0;
-            }
-            params.add(localClockRatesStatistic);
-            params.add(localClockRateChangesStatistic);
-        }
-
-        if (clockModel != STRICT_CLOCK) {
-            params.add(getParameter("meanRate"));
-            params.add(getParameter(RateStatisticParser.COEFFICIENT_OF_VARIATION));
-            params.add(getParameter("covariance"));
-        }
-    }
-
-    protected Parameter getParameter(String name) {
-        Parameter parameter = parameters.get(name);
-        if (parameter == null) throw new IllegalArgumentException("Parameter with name, " + name + ", is unknown");
-        return parameter;
-    }
-
-    /**
-     * return a list of operators that are required
-     *
-     * @param ops the operator list
-     */
-    private void selectOperators(ArrayList<Operator> ops) {
-
-        if (alignment != null) {
-            switch (dataType) {
-                case DataType.NUCLEOTIDES:
-
-                    switch (nucSubstitutionModel) {
-                        case HKY:
-                            // if (frequencyPolicy == BeautiOptions.ESTIMATED || frequencyPolicy == BeautiOptions.EMPIRICAL){
-                            if (partitionCount > 1 && unlinkedSubstitutionModel) {
-                                for (int i = 1; i <= partitionCount; i++) {
-                                    ops.add(getOperator("hky" + i + ".kappa"));
-                                }
-                            } else {
-                                ops.add(getOperator("hky.kappa"));
-                            }
-                            //}
-                            if (frequencyPolicy == BeautiOptions.ESTIMATED) {
-                                if (partitionCount > 1 && unlinkedSubstitutionModel) {
-                                    for (int i = 1; i <= partitionCount; i++) {
-                                        ops.add(getOperator("hky" + i + ".frequencies"));
-                                    }
-                                } else {
-                                    ops.add(getOperator("hky.frequencies"));
-                                }
-                            }
-                            break;
-
-                        case GTR:
-                            //if (frequencyPolicy == BeautiOptions.ESTIMATED || frequencyPolicy == BeautiOptions.EMPIRICAL){
-                            if (partitionCount > 1 && unlinkedSubstitutionModel) {
-                                for (int i = 1; i <= partitionCount; i++) {
-                                    ops.add(getOperator("gtr" + i + ".ac"));
-                                    ops.add(getOperator("gtr" + i + ".ag"));
-                                    ops.add(getOperator("gtr" + i + ".at"));
-                                    ops.add(getOperator("gtr" + i + ".cg"));
-                                    ops.add(getOperator("gtr" + i + ".gt"));
-                                }
-                            } else {
-                                ops.add(getOperator("gtr.ac"));
-                                ops.add(getOperator("gtr.ag"));
-                                ops.add(getOperator("gtr.at"));
-                                ops.add(getOperator("gtr.cg"));
-                                ops.add(getOperator("gtr.gt"));
-                            }
-                            //}
-
-                            if (frequencyPolicy == BeautiOptions.ESTIMATED) {
-                                if (partitionCount > 1 && unlinkedSubstitutionModel) {
-                                    for (int i = 1; i <= partitionCount; i++) {
-                                        ops.add(getOperator("gtr" + i + ".frequencies"));
-                                    }
-                                } else {
-                                    ops.add(getOperator("gtr.frequencies"));
-                                }
-                            }
-                            break;
-
-                        default:
-                            throw new IllegalArgumentException("Unknown nucleotides substitution model");
-                    }
-
-                    break;
-
-                case DataType.AMINO_ACIDS:
-                    break;
-
-                case DataType.TWO_STATES:
-                case DataType.COVARION:
-                    switch (binarySubstitutionModel) {
-                        case BIN_SIMPLE:
-                            break;
-
-                        case BIN_COVARION:
-                            ops.add(getOperator("bcov.alpha"));
-                            ops.add(getOperator("bcov.s"));
-                            ops.add(getOperator("bcov.frequencies"));
-                            ops.add(getOperator("bcov.hfrequencies"));
-                            break;
-
-                        default:
-                            throw new IllegalArgumentException("Unknown binary substitution model");
-                    }
-                    break;
-
-                default:
-                    throw new IllegalArgumentException("Unknown data type");
-            }
-
-            // if gamma do shape move
-            if (gammaHetero) {
-                if (partitionCount > 1 && unlinkedHeterogeneityModel) {
-                    for (int i = 1; i <= partitionCount; i++) {
-                        ops.add(getOperator(SiteModel.SITE_MODEL + i + ".alpha"));
-                    }
-                } else {
-                    ops.add(getOperator(SiteModel.SITE_MODEL + "." + "alpha"));
-                }
-            }
-            // if pinv do pinv move
-            if (invarHetero) {
-                if (partitionCount > 1 && unlinkedHeterogeneityModel) {
-                    for (int i = 1; i <= partitionCount; i++) {
-                        ops.add(getOperator(SiteModel.SITE_MODEL + i + ".pInv"));
-                    }
-                } else {
-                    ops.add(getOperator(SiteModel.SITE_MODEL + "." + "pInv"));
-                }
-            }
-
-            if (partitionCount > 1) {
-                if (!codonHeteroPattern.equals("112")) {
-                    ops.add(getOperator("centeredMu"));
-                }
-                ops.add(getOperator("deltaMu"));
-            }
-
-            // if not fixed then do mutation rate move and up/down move
-            if (!fixedSubstitutionRate) {
-                if (clockModel == STRICT_CLOCK) {
-                    ops.add(getOperator("clock.rate"));
-                    ops.add(getOperator("upDownRateHeights"));
-                } else if (clockModel == RANDOM_LOCAL_CLOCK) {
-                    ops.add(getOperator("clock.rate"));
-                    ops.add(getOperator("upDownRateHeights"));
-                    ops.add(getOperator(LOCAL_CLOCK + "." + "rates"));
-                    ops.add(getOperator(LOCAL_CLOCK + "." + "changes"));
-                    ops.add(getOperator("treeBitMove"));
-                } else {
-                    if (clockModel == UNCORRELATED_EXPONENTIAL) {
-                        ops.add(getOperator(UCED_MEAN));
-                        ops.add(getOperator("upDownUCEDMeanHeights"));
-                    } else if (clockModel == UNCORRELATED_LOGNORMAL) {
-                        ops.add(getOperator(UCLD_MEAN));
-                        ops.add(getOperator(UCLD_STDEV));
-                        ops.add(getOperator("upDownUCLDMeanHeights"));
-                    } else {
-                        throw new IllegalArgumentException("Unknown clock model");
-                    }
-//                    ops.add(getOperator("swapBranchRateCategories"));
-                    ops.add(getOperator("randomWalkBranchRateCategories"));
-                    ops.add(getOperator("unformBranchRateCategories"));
-                }
-            } else {
-                if (clockModel == STRICT_CLOCK) {
-                    // no parameter to operator on
-                } else if (clockModel == RANDOM_LOCAL_CLOCK) {
-                    ops.add(getOperator(LOCAL_CLOCK + "." + "rates"));
-                    ops.add(getOperator(LOCAL_CLOCK + "." + "changes"));
-                    ops.add(getOperator("treeBitMove"));
-                } else {
-                    if (clockModel == UNCORRELATED_EXPONENTIAL) {
-                        // no parameter to operator on
-                    } else if (clockModel == UNCORRELATED_LOGNORMAL) {
-                        ops.add(getOperator(UCLD_STDEV));
-                    } else {
-                        throw new IllegalArgumentException("Unknown clock model");
-                    }
- //                   ops.add(getOperator("swapBranchRateCategories"));
-                    ops.add(getOperator("randomWalkBranchRateCategories"));
-                    ops.add(getOperator("unformBranchRateCategories"));
-                }
-            }
-        }
-
-        if (nodeHeightPrior == CONSTANT) {
-            ops.add(getOperator("constant.popSize"));
-        } else if (nodeHeightPrior == EXPONENTIAL) {
-            ops.add(getOperator("exponential.popSize"));
-            if (parameterization == GROWTH_RATE) {
-                ops.add(getOperator("exponential.growthRate"));
-            } else {
-                ops.add(getOperator("exponential.doublingTime"));
-            }
-        } else if (nodeHeightPrior == LOGISTIC) {
-            ops.add(getOperator("logistic.popSize"));
-            if (parameterization == GROWTH_RATE) {
-                ops.add(getOperator("logistic.growthRate"));
-            } else {
-                ops.add(getOperator("logistic.doublingTime"));
-            }
-            ops.add(getOperator("logistic.t50"));
-        } else if (nodeHeightPrior == EXPANSION) {
-            ops.add(getOperator("expansion.popSize"));
-            if (parameterization == GROWTH_RATE) {
-                ops.add(getOperator("expansion.growthRate"));
-            } else {
-                ops.add(getOperator("expansion.doublingTime"));
-            }
-            ops.add(getOperator("expansion.ancestralProportion"));
-        } else if (nodeHeightPrior == SKYLINE) {
-            ops.add(getOperator("skyline.popSize"));
-            ops.add(getOperator("skyline.groupSize"));
-        } else if (nodeHeightPrior == EXTENDED_SKYLINE) {
-            ops.add(getOperator("demographic.populationMean"));
-            ops.add(getOperator("demographic.popSize"));
-            ops.add(getOperator("demographic.indicators"));
-            ops.add(getOperator("demographic.scaleActive"));
-        } else if (nodeHeightPrior == YULE) {
-            ops.add(getOperator("yule.birthRate"));
-        } else if (nodeHeightPrior == BIRTH_DEATH) {
-//            ops.add(getOperator("birthDeath.birthRate"));
-//            ops.add(getOperator("birthDeath.deathRate"));
-            ops.add(getOperator(BirthDeathModelParser.MEAN_GROWTH_RATE_PARAM_NAME));
-            ops.add(getOperator(BirthDeathModelParser.RELATIVE_DEATH_RATE_PARAM_NAME));
-            // at present we are not allowing the sampling of samplingProportion
-        }
-
-        ops.add(getOperator("treeModel.rootHeight"));
-        ops.add(getOperator("uniformHeights"));
-
-        // if not a fixed tree then sample tree space
-        if (!fixedTree) {
-            ops.add(getOperator("subtreeSlide"));
-            ops.add(getOperator("narrowExchange"));
-            ops.add(getOperator("wideExchange"));
-            ops.add(getOperator("wilsonBalding"));
-        }
-    }
-
-    private Operator getOperator(String name) {
-        Operator operator = operators.get(name);
-        if (operator == null) throw new IllegalArgumentException("Operator with name, " + name + ", is unknown");
-        return operator;
-    }
-
-    /**
-     * Read options from a file
-     *
-     * @param includeData include a data block?
-     * @param guessDates  guess dates?
-     * @return the Document
-     */
-    public Document create(boolean includeData, boolean guessDates) {
-
-        Element root = new Element("beauti");
-        root.setAttribute("version", version);
-
-        Element dataElement = new Element("data");
-
-        //dataElement.addContent(createChild("fileNameStem", fileNameStem));
-
-        dataElement.addContent(createChild("datesUnits", datesUnits));
-        dataElement.addContent(createChild("datesDirection", datesDirection));
-        dataElement.addContent(createChild("translation", translation));
-        dataElement.addContent(createChild("userTree", userTree));
-
-        if (includeData && originalAlignment != null) {
-            Element alignmentElement = new Element(AlignmentParser.ALIGNMENT);
-            alignmentElement.addContent(createChild("dataType", originalAlignment.getDataType().getType()));
-            for (int i = 0; i < originalAlignment.getTaxonCount(); i++) {
-                Element taxonElement = new Element(TaxonParser.TAXON);
-                taxonElement.addContent(createChild(XMLParser.ID, originalAlignment.getTaxonId(i)));
-                dr.evolution.util.Date date = originalAlignment.getTaxon(i).getDate();
-                if (date != null) {
-                    taxonElement.addContent(createChild("date", date.getTimeValue()));
-                }
-                Sequence sequence = originalAlignment.getSequence(i);
-                taxonElement.addContent(createChild("sequence", sequence.getSequenceString()));
-                alignmentElement.addContent(taxonElement);
-            }
-            dataElement.addContent(alignmentElement);
-        }
-
-        dataElement.addContent(createChild("guessDates", guessDates));
-        dataElement.addContent(createChild("guessDateFromOrder", guessDateFromOrder));
-        dataElement.addContent(createChild("fromLast", fromLast));
-        dataElement.addContent(createChild("order", order));
-        dataElement.addContent(createChild("prefix", prefix));
-        dataElement.addContent(createChild("offset", offset));
-        dataElement.addContent(createChild("unlessLessThan", unlessLessThan));
-        dataElement.addContent(createChild("offset2", offset2));
-
-        root.addContent(dataElement);
-
-        Element taxaElement = new Element(TaxaParser.TAXA);
-
-        for (Taxa taxonSet : taxonSets) {
-            Element taxonSetElement = new Element("taxonSet");
-            taxonSetElement.addContent(createChild(XMLParser.ID, taxonSet.getId()));
-            taxonSetElement.addContent(createChild("enforceMonophyly",
-                    taxonSetsMono.get(taxonSet) ? "true" : "false"));
-            for (int j = 0; j < taxonSet.getTaxonCount(); j++) {
-                Element taxonElement = new Element(TaxonParser.TAXON);
-                taxonElement.addContent(createChild(XMLParser.ID, taxonSet.getTaxon(j).getId()));
-                taxonSetElement.addContent(taxonElement);
-            }
-            taxaElement.addContent(taxonSetElement);
-        }
-
-        root.addContent(taxaElement);
-
-        Element modelElement = new Element("model");
-
-        modelElement.addContent(createChild("nucSubstitutionModel", nucSubstitutionModel));
-        modelElement.addContent(createChild("aaSubstitutionModel", aaSubstitutionModel));
-        modelElement.addContent(createChild("binarySubstitutionModel", binarySubstitutionModel));
-        modelElement.addContent(createChild("frequencyPolicy", frequencyPolicy));
-        modelElement.addContent(createChild("gammaHetero", gammaHetero));
-        modelElement.addContent(createChild("gammaCategories", gammaCategories));
-        modelElement.addContent(createChild("invarHetero", invarHetero));
-        modelElement.addContent(createChild("codonHeteroPattern", codonHeteroPattern));
-        modelElement.addContent(createChild("maximumTipHeight", maximumTipHeight));
-        modelElement.addContent(createChild("hasSetFixedSubstitutionRate", hasSetFixedSubstitutionRate));
-        modelElement.addContent(createChild("meanSubstitutionRate", meanSubstitutionRate));
-        modelElement.addContent(createChild("fixedSubstitutionRate", fixedSubstitutionRate));
-        modelElement.addContent(createChild("unlinkedSubstitutionModel", unlinkedSubstitutionModel));
-        modelElement.addContent(createChild("unlinkedHeterogeneityModel", unlinkedHeterogeneityModel));
-        modelElement.addContent(createChild("unlinkedFrequencyModel", unlinkedFrequencyModel));
-        modelElement.addContent(createChild("clockModel", clockModel));
-        modelElement.addContent(createChild("nodeHeightPrior", nodeHeightPrior));
-        modelElement.addContent(createChild("parameterization", parameterization));
-        modelElement.addContent(createChild("skylineGroupCount", skylineGroupCount));
-        modelElement.addContent(createChild("skylineModel", skylineModel));
-        modelElement.addContent(createChild("fixedTree", fixedTree));
-
-        root.addContent(modelElement);
-
-        Element priorsElement = new Element("priors");
-
-        for (String name : parameters.keySet()) {
-            Parameter parameter = parameters.get(name);
-            Element e = new Element(name);
-            e.addContent(createChild("initial", parameter.initial));
-            e.addContent(createChild("priorType", parameter.priorType));
-            e.addContent(createChild("priorEdited", parameter.priorEdited));
-            e.addContent(createChild("uniformLower", parameter.uniformLower));
-            e.addContent(createChild("uniformUpper", parameter.uniformUpper));
-            e.addContent(createChild("exponentialMean", parameter.exponentialMean));
-            e.addContent(createChild("exponentialOffset", parameter.exponentialOffset));
-            e.addContent(createChild("normalMean", parameter.normalMean));
-            e.addContent(createChild("normalStdev", parameter.normalStdev));
-            e.addContent(createChild("logNormalMean", parameter.logNormalMean));
-            e.addContent(createChild("logNormalStdev", parameter.logNormalStdev));
-            e.addContent(createChild("logNormalOffset", parameter.logNormalOffset));
-            e.addContent(createChild("gammaAlpha", parameter.gammaAlpha));
-            e.addContent(createChild("gammaBeta", parameter.gammaBeta));
-            e.addContent(createChild("gammaOffset", parameter.gammaOffset));
-            priorsElement.addContent(e);
-        }
-
-        for (Taxa taxonSet : taxonSets) {
-            Parameter statistic = statistics.get(taxonSet);
-            Element e = new Element(statistic.getXMLName());
-            e.addContent(createChild("initial", statistic.initial));
-            e.addContent(createChild("priorType", statistic.priorType));
-            e.addContent(createChild("priorEdited", statistic.priorEdited));
-            e.addContent(createChild("uniformLower", statistic.uniformLower));
-            e.addContent(createChild("uniformUpper", statistic.uniformUpper));
-            e.addContent(createChild("exponentialMean", statistic.exponentialMean));
-            e.addContent(createChild("exponentialOffset", statistic.exponentialOffset));
-            e.addContent(createChild("normalMean", statistic.normalMean));
-            e.addContent(createChild("normalStdev", statistic.normalStdev));
-            e.addContent(createChild("logNormalMean", statistic.logNormalMean));
-            e.addContent(createChild("logNormalStdev", statistic.logNormalStdev));
-            e.addContent(createChild("logNormalOffset", statistic.logNormalOffset));
-            e.addContent(createChild("gammaAlpha", statistic.gammaAlpha));
-            e.addContent(createChild("gammaBeta", statistic.gammaBeta));
-            e.addContent(createChild("gammaOffset", statistic.gammaOffset));
-            priorsElement.addContent(e);
-        }
-
-        root.addContent(priorsElement);
-
-        Element operatorsElement = new Element("operators");
-
-        operatorsElement.addContent(createChild("autoOptimize", autoOptimize));
-        for (String name : operators.keySet()) {
-            Operator operator = operators.get(name);
-            Element e = new Element(name);
-            e.addContent(createChild("tuning", operator.tuning));
-            e.addContent(createChild("tuningEdited", operator.tuningEdited));
-            e.addContent(createChild("weight", operator.weight));
-            e.addContent(createChild("inUse", operator.inUse));
-            operatorsElement.addContent(e);
-        }
-
-        root.addContent(operatorsElement);
-
-        Element mcmcElement = new Element("mcmc");
-
-        mcmcElement.addContent(createChild("upgmaStartingTree", upgmaStartingTree));
-        mcmcElement.addContent(createChild("chainLength", chainLength));
-        mcmcElement.addContent(createChild("logEvery", logEvery));
-        mcmcElement.addContent(createChild("echoEvery", echoEvery));
-        //if (logFileName != null) mcmcElement.addContent(createChild("logFileName", logFileName));
-        //if (treeFileName != null) mcmcElement.addContent(createChild("treeFileName", treeFileName));
-        //mcmcElement.addContent(createChild("mapTreeLog", mapTreeLog));
-        //if (mapTreeFileName != null) mcmcElement.addContent(createChild("mapTreeFileName", mapTreeFileName));
-        mcmcElement.addContent(createChild("substTreeLog", substTreeLog));
-        //if (substTreeFileName != null) mcmcElement.addContent(createChild("substTreeFileName", substTreeFileName));
-
-        root.addContent(mcmcElement);
-
-        return new Document(root);
-    }
-
-    private Element createChild(String name, String value) {
-        Element e = new Element(name);
-        if (value != null) {
-            e.setText(value);
-        }
-        return e;
-    }
-
-    private Element createChild(String name, int value) {
-        Element e = new Element(name);
-        e.setText(Integer.toString(value));
-        return e;
-    }
-
-    private Element createChild(String name, PriorType value) {
-        Element e = new Element(name);
-        e.setText(value.name());
-        return e;
-    }
-
-    private Element createChild(String name, double value) {
-        Element e = new Element(name);
-        e.setText(Double.toString(value));
-        return e;
-    }
-
-    private Element createChild(String name, boolean value) {
-        Element e = new Element(name);
-        e.setText(value ? "true" : "false");
-        return e;
-    }
-
-    /**
-     * Read options from a file
-     *
-     * @param document the Document
-     * @throws dr.xml.XMLParseException if there is a problem with XML parsing
-     */
-    public void parse(Document document) throws dr.xml.XMLParseException {
-
-        Element root = document.getRootElement();
-        if (!root.getName().equals("beauti")) {
-            throw new dr.xml.XMLParseException("This document does not appear to be a BEAUti file");
-        }
-
-        Element dataElement = root.getChild("data");
-        Element taxaElement = root.getChild(TaxaParser.TAXA);
-        Element modelElement = root.getChild("model");
-        Element priorsElement = root.getChild("priors");
-        Element operatorsElement = root.getChild("operators");
-        Element mcmcElement = root.getChild("mcmc");
-
-        if (dataElement != null) {
-            //fileNameStem = getStringChild(dataElement, "fileNameStem", "untitled");
-
-            datesUnits = getIntegerChild(dataElement, "datesUnits", YEARS);
-            datesDirection = getIntegerChild(dataElement, "datesDirection", FORWARDS);
-            translation = getIntegerChild(dataElement, "translation", NONE);
-            userTree = getBooleanChild(dataElement, "userTree", false);
-
-            Units.Type theUnits = Units.Type.SUBSTITUTIONS;
-            if (datesUnits == YEARS) theUnits = Units.Type.YEARS;
-            if (datesUnits == MONTHS) theUnits = Units.Type.MONTHS;
-            if (datesUnits == DAYS) theUnits = Units.Type.DAYS;
-
-            Element alignmentElement = dataElement.getChild(AlignmentParser.ALIGNMENT);
-            if (alignmentElement != null) {
-                originalAlignment = new SimpleAlignment();
-
-                int dataType = getIntegerChild(alignmentElement, "dataType", DataType.NUCLEOTIDES);
-                switch (dataType) {
-                    case DataType.NUCLEOTIDES:
-                        originalAlignment.setDataType(Nucleotides.INSTANCE);
-                        break;
-                    case DataType.AMINO_ACIDS:
-                        originalAlignment.setDataType(AminoAcids.INSTANCE);
-                        break;
-                    case DataType.TWO_STATES:
-                        originalAlignment.setDataType(TwoStates.INSTANCE);
-                        break;
-                    default:
-                        originalAlignment.setDataType(Nucleotides.INSTANCE);
-                }
-
-                for (Object o : alignmentElement.getChildren(TaxonParser.TAXON)) {
-                    Element taxonElement = (Element) o;
-
-                    String id = getStringChild(taxonElement, XMLParser.ID, "");
-                    Taxon taxon = new Taxon(id);
-
-                    if (taxonElement.getChild("date") != null) {
-                        double dateValue = getDoubleChild(taxonElement, "date", 0.0);
-
-                        if (datesDirection == FORWARDS) {
-                            taxon.setDate(Date.createTimeSinceOrigin(dateValue, theUnits, 0.0));
-                        } else {
-                            taxon.setDate(Date.createTimeAgoFromOrigin(dateValue, theUnits, 0.0));
-                        }
-                    }
-                    String seqString = getStringChild(taxonElement, "sequence", "");
-                    Sequence sequence = new Sequence(taxon, seqString);
-
-                    originalAlignment.addSequence(sequence);
-                }
-                taxonList = originalAlignment;
-                alignment = originalAlignment;
-            }
-
-            guessDates = getBooleanChild(dataElement, "guessDates", false);
-            guessDateFromOrder = getBooleanChild(dataElement, "guessDateFromOrder", false);
-            fromLast = getBooleanChild(dataElement, "fromLast", false);
-            order = getIntegerChild(dataElement, "order", 0);
-            prefix = getStringChild(dataElement, "prefix", "");
-            offset = getDoubleChild(dataElement, "offset", 0);
-            unlessLessThan = getDoubleChild(dataElement, "unlessLessThan", 0);
-            offset2 = getDoubleChild(dataElement, "offset2", 0);
-        }
-
-        if (taxaElement != null) {
-            for (Object ts : taxaElement.getChildren("taxonSet")) {
-                Element taxonSetElement = (Element) ts;
-
-                String id = getStringChild(taxonSetElement, XMLParser.ID, "");
-                final Taxa taxonSet = new Taxa(id);
-
-                Boolean enforceMonophyly = Boolean.valueOf(getStringChild(taxonSetElement, "enforceMonophyly", "false"));
-                for (Object o : taxonSetElement.getChildren(TaxonParser.TAXON)) {
-                    Element taxonElement = (Element) o;
-                    String taxonId = getStringChild(taxonElement, XMLParser.ID, "");
-                    int index = taxonList.getTaxonIndex(taxonId);
-                    if (index != -1) {
-                        taxonSet.addTaxon(taxonList.getTaxon(index));
-                    }
-                }
-                taxonSets.add(taxonSet);
-                taxonSetsMono.put(taxonSet, enforceMonophyly);
-            }
-        }
-
-        if (modelElement != null) {
-            nucSubstitutionModel = getIntegerChild(modelElement, "nucSubstitutionModel", HKY);
-            aaSubstitutionModel = getIntegerChild(modelElement, "aaSubstitutionModel", BLOSUM_62);
-            binarySubstitutionModel = getIntegerChild(modelElement, "binarySubstitutionModel", BIN_SIMPLE);
-            frequencyPolicy = getIntegerChild(modelElement, "frequencyPolicy", ESTIMATED);
-            gammaHetero = getBooleanChild(modelElement, "gammaHetero", false);
-            gammaCategories = getIntegerChild(modelElement, "gammaCategories", 5);
-            invarHetero = getBooleanChild(modelElement, "invarHetero", false);
-            codonHeteroPattern = (getBooleanChild(modelElement, "codonHetero", false) ? "123" : null);
-            codonHeteroPattern = getStringChild(modelElement, "codonHeteroPattern", null);
-            maximumTipHeight = getDoubleChild(modelElement, "maximumTipHeight", 0.0);
-            fixedSubstitutionRate = getBooleanChild(modelElement, "fixedSubstitutionRate", false);
-            hasSetFixedSubstitutionRate = getBooleanChild(modelElement, "hasSetFixedSubstitutionRate", false);
-            meanSubstitutionRate = getDoubleChild(modelElement, "meanSubstitutionRate", 1.0);
-            unlinkedSubstitutionModel = getBooleanChild(modelElement, "unlinkedSubstitutionModel", false);
-            unlinkedHeterogeneityModel = getBooleanChild(modelElement, "unlinkedHeterogeneityModel", false);
-            unlinkedFrequencyModel = getBooleanChild(modelElement, "unlinkedFrequencyModel", false);
-
-            clockModel = getIntegerChild(modelElement, "clockModel", clockModel);
-
-            // the old name was "coalescentModel" so try to read this first
-            nodeHeightPrior = getIntegerChild(modelElement, "coalescentModel", CONSTANT);
-            nodeHeightPrior = getIntegerChild(modelElement, "nodeHeightPrior", nodeHeightPrior);
-            // we don't allow no nodeHeightPrior in BEAUti so switch it to Yule:
-            if (nodeHeightPrior == NONE) nodeHeightPrior = YULE;
-
-            parameterization = getIntegerChild(modelElement, "parameterization", GROWTH_RATE);
-            skylineGroupCount = getIntegerChild(modelElement, "skylineGroupCount", 10);
-            skylineModel = getIntegerChild(modelElement, "skylineModel", CONSTANT_SKYLINE);
-            fixedTree = getBooleanChild(modelElement, "fixedTree", false);
-        }
-
-        if (operatorsElement != null) {
-            autoOptimize = getBooleanChild(operatorsElement, "autoOptimize", true);
-            for (String name : operators.keySet()) {
-                Operator operator = operators.get(name);
-                Element e = operatorsElement.getChild(name);
-                if (e == null) {
-                    throw new XMLParseException("Operators element, " + name + " missing");
-                }
-
-                operator.tuning = getDoubleChild(e, "tuning", 1.0);
-                operator.tuningEdited = getBooleanChild(e, "tuningEdited", false);
-                operator.weight = getDoubleChild(e, "weight", 1);
-                operator.inUse = getBooleanChild(e, "inUse", true);
-            }
-        }
-
-        if (priorsElement != null) {
-            for (String name : parameters.keySet()) {
-                Parameter parameter = parameters.get(name);
-                Element e = priorsElement.getChild(name);
-                if (e == null) {
-                    throw new XMLParseException("Priors element, " + name + " missing");
-                }
-
-                parameter.initial = getDoubleChild(e, "initial", 1.0);
-                parameter.priorType = PriorType.valueOf(getStringChild(e, "priorType", PriorType.UNIFORM_PRIOR.name()));
-                parameter.priorEdited = getBooleanChild(e, "priorEdited", false);
-                parameter.uniformLower = Math.max(getDoubleChild(e, "uniformLower", parameter.uniformLower), parameter.lower);
-                parameter.uniformUpper = Math.min(getDoubleChild(e, "uniformUpper", parameter.uniformUpper), parameter.upper);
-                parameter.exponentialMean = getDoubleChild(e, "exponentialMean", parameter.exponentialMean);
-                parameter.exponentialOffset = getDoubleChild(e, "exponentialOffset", parameter.exponentialOffset);
-                parameter.normalMean = getDoubleChild(e, "normalMean", parameter.normalMean);
-                parameter.normalStdev = getDoubleChild(e, "normalStdev", parameter.normalStdev);
-                parameter.logNormalMean = getDoubleChild(e, "logNormalMean", parameter.logNormalMean);
-                parameter.logNormalStdev = getDoubleChild(e, "logNormalStdev", parameter.logNormalStdev);
-                parameter.logNormalOffset = getDoubleChild(e, "logNormalOffset", parameter.logNormalOffset);
-                parameter.gammaAlpha = getDoubleChild(e, "gammaAlpha", parameter.gammaAlpha);
-                parameter.gammaBeta = getDoubleChild(e, "gammaBeta", parameter.gammaBeta);
-                parameter.gammaOffset = getDoubleChild(e, "gammaOffset", parameter.gammaOffset);
-            }
-
-            for (Taxa taxonSet : taxonSets) {
-                Parameter statistic = statistics.get(taxonSet);
-                if (statistic == null) {
-                    statistic = new Parameter(taxonSet, "tMRCA for taxon set ");
-                    statistics.put(taxonSet, statistic);
-                }
-                Element e = priorsElement.getChild(statistic.getXMLName());
-                statistic.initial = getDoubleChild(e, "initial", 1.0);
-                statistic.priorType = PriorType.valueOf(getStringChild(e, "priorType", PriorType.UNIFORM_PRIOR.name()));
-                statistic.priorEdited = getBooleanChild(e, "priorEdited", false);
-                statistic.uniformLower = getDoubleChild(e, "uniformLower", statistic.uniformLower);
-                statistic.uniformUpper = getDoubleChild(e, "uniformUpper", statistic.uniformUpper);
-                statistic.exponentialMean = getDoubleChild(e, "exponentialMean", statistic.exponentialMean);
-                statistic.exponentialOffset = getDoubleChild(e, "exponentialOffset", statistic.exponentialOffset);
-                statistic.normalMean = getDoubleChild(e, "normalMean", statistic.normalMean);
-                statistic.normalStdev = getDoubleChild(e, "normalStdev", statistic.normalStdev);
-                statistic.logNormalMean = getDoubleChild(e, "logNormalMean", statistic.logNormalMean);
-                statistic.logNormalStdev = getDoubleChild(e, "logNormalStdev", statistic.logNormalStdev);
-                statistic.logNormalOffset = getDoubleChild(e, "logNormalOffset", statistic.logNormalOffset);
-                statistic.gammaAlpha = getDoubleChild(e, "gammaAlpha", statistic.gammaAlpha);
-                statistic.gammaBeta = getDoubleChild(e, "gammaBeta", statistic.gammaBeta);
-                statistic.gammaOffset = getDoubleChild(e, "gammaOffset", statistic.gammaOffset);
-            }
-
-        }
-
-
-        if (mcmcElement != null) {
-            upgmaStartingTree = getBooleanChild(mcmcElement, "upgmaStartingTree", true);
-            chainLength = getIntegerChild(mcmcElement, "chainLength", 100000000);
-            logEvery = getIntegerChild(mcmcElement, "logEvery", 1000);
-            echoEvery = getIntegerChild(mcmcElement, "echoEvery", 1000);
-            logFileName = getStringChild(mcmcElement, "logFileName", null);
-            treeFileName = getStringChild(mcmcElement, "treeFileName", null);
-            mapTreeLog = getBooleanChild(mcmcElement, "mapTreeLog", false);
-            mapTreeFileName = getStringChild(mcmcElement, "mapTreeFileName", null);
-            substTreeLog = getBooleanChild(mcmcElement, "substTreeLog", false);
-            substTreeFileName = getStringChild(mcmcElement, "substTreeFileName", null);
-        }
-    }
-
-    private String getStringChild(Element element, String childName, String defaultValue) {
-        String value = element.getChildTextTrim(childName);
-        if (value == null || value.length() == 0) return defaultValue;
-        return value;
-    }
-
-    private int getIntegerChild(Element element, String childName, int defaultValue) {
-        String value = element.getChildTextTrim(childName);
-        if (value == null) return defaultValue;
-        return Integer.parseInt(value);
-    }
-
-    private double getDoubleChild(Element element, String childName, double defaultValue) {
-        String value = element.getChildTextTrim(childName);
-        if (value == null) return defaultValue;
-        return Double.parseDouble(value);
-    }
-
-    private boolean getBooleanChild(Element element, String childName, boolean defaultValue) {
-        String value = element.getChildTextTrim(childName);
-        if (value == null) return defaultValue;
-        return value.equals("true");
-    }
-
-    public void guessDates() {
-
-        for (int i = 0; i < originalAlignment.getTaxonCount(); i++) {
-            java.util.Date origin = new java.util.Date(0);
-
-            double d = 0.0;
-
-            try {
-                if (guessDateFromOrder) {
-                    d = guessDateFromOrder(originalAlignment.getTaxonId(i), order, fromLast);
-                } else {
-                    d = guessDateFromPrefix(originalAlignment.getTaxonId(i), prefix);
-                }
-
-            } catch (GuessDatesException gfe) {
-                //
-            }
-
-            if (offset > 0) {
-                if (unlessLessThan > 0) {
-                    if (d < unlessLessThan) {
-                        d += offset2;
-                    } else {
-                        d += offset;
-                    }
-                } else {
-                    d += offset;
-                }
-            }
-
-            Date date = Date.createTimeSinceOrigin(d, Units.Type.YEARS, origin);
-            originalAlignment.getTaxon(i).setAttribute("date", date);
-        }
-
-        // adjust the dates to the current timescale...
-        timeScaleChanged();
-    }
-
-    public double guessDateFromOrder(String label, int order, boolean fromLast) throws GuessDatesException {
-
-        String field;
-
-        if (fromLast) {
-            int count = 0;
-            int i = label.length() - 1;
-
-            char c = label.charAt(i);
-
-            do {
-                // first find a part of a number
-                while (!Character.isDigit(c) && c != '.') {
-                    i--;
-                    if (i < 0) break;
-                    c = label.charAt(i);
-                }
-
-                if (i < 0) throw new GuessDatesException("Missing number field in taxon label, " + label);
-
-                int j = i + 1;
-
-                // now find the beginning of the number
-                while (Character.isDigit(c) || c == '.') {
-                    i--;
-                    if (i < 0) break;
-                    c = label.charAt(i);
-                }
-
-                field = label.substring(i + 1, j);
-
-                count++;
-
-            } while (count <= order);
-
-        } else {
-            int count = 0;
-            int i = 0;
-
-            char c = label.charAt(i);
-
-            do {
-                // first find a part of a number
-                while (!Character.isDigit(c) && c != '.') {
-                    i++;
-                    if (i == label.length()) break;
-                    c = label.charAt(i);
-                }
-                int j = i;
-
-                if (i == label.length()) throw new GuessDatesException("Missing number field in taxon label, " + label);
-
-                // now find the beginning of the number
-                while (Character.isDigit(c) || c == '.') {
-                    i++;
-                    if (i == label.length()) break;
-                    c = label.charAt(i);
-                }
-
-                field = label.substring(j, i);
-
-                count++;
-
-            } while (count <= order);
-        }
-
-        return Double.parseDouble(field);
-    }
-
-    public double guessDateFromPrefix(String label, String prefix) throws GuessDatesException {
-
-        int i = label.indexOf(prefix);
-
-        if (i == -1) throw new GuessDatesException("Missing prefix in taxon label, " + label);
-
-        i += prefix.length();
-        int j = i;
-
-        // now find the beginning of the number
-        char c = label.charAt(i);
-        while (i < label.length() - 1 && (Character.isDigit(c) || c == '.')) {
-            i++;
-            c = label.charAt(i);
-        }
-
-        if (i == j) throw new GuessDatesException("Missing field after prefix in taxon label, " + label);
-
-        String field = label.substring(j, i + 1);
-
-        double d;
-
-        try {
-            d = Double.parseDouble(field);
-        } catch (NumberFormatException nfe) {
-            throw new GuessDatesException("Badly formated date in taxon label, " + label);
-        }
-
-        return d;
-    }
-
-    private void timeScaleChanged() {
-
-        for (int i = 0; i < alignment.getTaxonCount(); i++) {
-            Date date = alignment.getTaxon(i).getDate();
-            double d = date.getTimeValue();
-
-            Date newDate = createDate(d, units, datesDirection == BACKWARDS, 0.0);
-
-            alignment.getTaxon(i).setDate(newDate);
-        }
-
-    }
-
-    private Date createDate(double timeValue, Units.Type units, boolean backwards, double origin) {
-        if (backwards) {
-            return Date.createTimeAgoFromOrigin(timeValue, units, origin);
-        } else {
-            return Date.createTimeSinceOrigin(timeValue, units, origin);
-        }
-    }
-
-    public class Parameter {
-
-        /**
-         * A constructor for "special" parameters which are not user-configurable
-         *
-         * @param name        the name
-         * @param description the description
-         */
-        public Parameter(String name, String description) {
-            this.name = name;
-            this.description = description;
-            this.scale = NONE;
-            this.isNodeHeight = false;
-            this.isStatistic = false;
-            this.taxa = null;
-            this.priorType = PriorType.NONE;
-            this.initial = Double.NaN;
-            this.lower = Double.NaN;
-            this.upper = Double.NaN;
-        }
-
-        public Parameter(String name, String description, int scale,
-                         double initial, double lower, double upper) {
-            this.name = name;
-            this.description = description;
-            this.initial = initial;
-            this.isNodeHeight = false;
-            this.isStatistic = false;
-
-            this.taxa = null;
-
-            this.priorType = PriorType.UNIFORM_PRIOR;
-            this.scale = scale;
-            this.priorEdited = false;
-            this.lower = lower;
-            this.upper = upper;
-
-            uniformLower = lower;
-            uniformUpper = upper;
-        }
-
-        public Parameter(TaxonList taxa, String description) {
-            this.taxa = taxa;
-            this.name = null;
-            this.description = description;
-
-            this.isNodeHeight = true;
-            this.isStatistic = true;
-            this.priorType = PriorType.NONE;
-            this.scale = TIME_SCALE;
-            this.priorEdited = false;
-            this.lower = 0.0;
-            this.upper = Double.MAX_VALUE;
-
-            uniformLower = lower;
-            uniformUpper = upper;
-        }
-
-        public Parameter(String name, String description, boolean isDiscrete) {
-            this.taxa = null;
-
-            this.name = name;
-            this.description = description;
-
-            this.isNodeHeight = false;
-            this.isStatistic = true;
-            this.isDiscrete = isDiscrete;
-            this.priorType = PriorType.UNIFORM_PRIOR;
-            this.scale = NONE;
-            this.priorEdited = false;
-            this.initial = Double.NaN;
-            this.lower = Double.NaN;
-            this.upper = Double.NaN;
-        }
-
-        public Parameter(String name, String description, double lower, double upper) {
-            this.taxa = null;
-
-            this.name = name;
-            this.description = description;
-
-            this.isNodeHeight = false;
-            this.isStatistic = true;
-            this.isDiscrete = false;
-            this.priorType = PriorType.UNIFORM_PRIOR;
-            this.scale = NONE;
-            this.priorEdited = false;
-            this.initial = Double.NaN;
-            this.lower = lower;
-            this.upper = upper;
-
-            uniformLower = lower;
-            uniformUpper = upper;
-        }
-
-        public Parameter(String name, String description, boolean isNodeHeight,
-                         double initial, double lower, double upper) {
-            this.name = name;
-            this.description = description;
-            this.initial = initial;
-
-            this.taxa = null;
-
-            this.isNodeHeight = isNodeHeight;
-            this.isStatistic = false;
-            this.priorType = PriorType.NONE;
-            this.scale = TIME_SCALE;
-            this.priorEdited = false;
-            this.lower = lower;
-            this.upper = upper;
-
-            uniformLower = lower;
-            uniformUpper = upper;
-        }
-
-        public String getName() {
-            if (taxa != null) {
-                return "tmrca(" + taxa.getId() + ")";
-            } else {
-                return name;
-            }
-        }
-
-        public String getXMLName() {
-            if (taxa != null) {
-                return "tmrca_" + taxa.getId();
-            } else {
-                return name;
-            }
-        }
-
-        public String getDescription() {
-            if (taxa != null) {
-                return description + taxa.getId();
-            } else {
-                return description;
-            }
-        }
-
-        private final String name;
-        private final String description;
-        public double initial;
-
-        public final TaxonList taxa;
-
-        public boolean isDiscrete = false;
-
-        public boolean isFixed = false;
-        public final boolean isNodeHeight;
-        public final boolean isStatistic;
-
-        public PriorType priorType;
-        public boolean priorEdited;
-        public final int scale;
-        public double lower;
-        public double upper;
-
-        public double uniformUpper = 0.0;
-        public double uniformLower = 0.0;
-        public double exponentialMean = 1.0;
-        public double exponentialOffset = 0.0;
-        public double normalMean = 1.0;
-        public double normalStdev = 1.0;
-        public double logNormalMean = 0.0;
-        public double logNormalStdev = 1.0;
-        public double logNormalOffset = 0.0;
-        public double gammaAlpha = 1.0;
-        public double gammaBeta = 1.0;
-        public double gammaOffset = 0.0;
-        public double poissonMean = 1.0;
-        public double poissonOffset = 0.0;
-    }
-
-    public class Operator {
-        public Operator(String name, String description, Parameter parameter, String operatorType, double tuning, double weight) {
-            this.name = name;
-            this.description = description;
-            this.parameter1 = parameter;
-            this.parameter2 = null;
-
-            this.type = operatorType;
-            this.tuningEdited = false;
-            this.tuning = tuning;
-            this.weight = weight;
-
-            this.inUse = true;
-        }
-
-        public Operator(String name, String description,
-                        Parameter parameter1, Parameter parameter2,
-                        String operatorType, double tuning, double weight) {
-            this.name = name;
-            this.description = description;
-            this.parameter1 = parameter1;
-            this.parameter2 = parameter2;
-
-            this.type = operatorType;
-            this.tuningEdited = false;
-            this.tuning = tuning;
-            this.weight = weight;
-
-            this.inUse = true;
-        }
-
-        public String getDescription() {
-            if (description == null || description.length() == 0) {
-                String prefix = "";
-                if (type.equals(SCALE)) {
-                    prefix = "Scales the ";
-                } else if (type.equals(RANDOM_WALK)) {
-                    prefix = "A random-walk on the ";
-                }
-                return prefix + parameter1.getDescription();
-            } else {
-                return description;
-            }
-        }
-
-        public boolean isTunable() {
-            return tuning > 0;
-        }
-
-        public final String name;
-        public final String description;
-
-        public final String type;
-        public boolean tuningEdited;
-        public double tuning;
-        public double weight;
-        public boolean inUse;
-
-        public final Parameter parameter1;
-        public final Parameter parameter2;
-
-    }
-
-    public static final String version = "1.4";
-    public static final int YEARS = 0;
-    public static final int MONTHS = 1;
-    public static final int DAYS = 2;
-    public static final int FORWARDS = 0;
-    public static final int BACKWARDS = 1;
-    public static final int NONE = -1;
-
-    public static final int JC = 0;
-    public static final int HKY = 1;
-    public static final int GTR = 2;
-
-    public static final int BLOSUM_62 = 0;
-    public static final int DAYHOFF = 1;
-    public static final int JTT = 2;
-    public static final int MT_REV_24 = 3;
-    public static final int CP_REV_45 = 4;
-    public static final int WAG = 5;
-
-    public static final int BIN_SIMPLE = 0;
-    public static final int BIN_COVARION = 1;
-
-    public static final int ESTIMATED = 0;
-    public static final int EMPIRICAL = 1;
-    public static final int ALLEQUAL = 2;
-
-    public static final int CONSTANT = 0;
-    public static final int EXPONENTIAL = 1;
-    public static final int LOGISTIC = 2;
-    public static final int EXPANSION = 3;
-    public static final int SKYLINE = 4;
-    public static final int EXTENDED_SKYLINE = 5;
-    public static final int YULE = 6;
-    public static final int BIRTH_DEATH = 7;
-
-    public static final int STRICT_CLOCK = 0;
-    public static final int UNCORRELATED_EXPONENTIAL = 1;
-    public static final int UNCORRELATED_LOGNORMAL = 2;
-    public static final int RANDOM_LOCAL_CLOCK = 3;
-
-    public static final int GROWTH_RATE = 0;
-    public static final int DOUBLING_TIME = 1;
-    public static final int CONSTANT_SKYLINE = 0;
-    public static final int LINEAR_SKYLINE = 1;
-
-    public static final int TIME_SCALE = 0;
-    public static final int GROWTH_RATE_SCALE = 1;
-    public static final int BIRTH_RATE_SCALE = 2;
-    public static final int SUBSTITUTION_RATE_SCALE = 3;
-    public static final int LOG_STDEV_SCALE = 4;
-    public static final int SUBSTITUTION_PARAMETER_SCALE = 5;
-    public static final int T50_SCALE = 6;
-    public static final int UNITY_SCALE = 7;
-
-    public static final String SCALE = "scale";
-    public static final String RANDOM_WALK = "randomWalk";
-    public static final String INTEGER_RANDOM_WALK = "integerRandomWalk";
-    public static final String UP_DOWN = "upDown";
-    public static final String SCALE_ALL = "scaleAll";
-    public static final String CENTERED_SCALE = "centeredScale";
-    public static final String DELTA_EXCHANGE = "deltaExchange";
-    public static final String INTEGER_DELTA_EXCHANGE = "integerDeltaExchange";
-    public static final String SWAP = "swap";
-    public static final String BITFLIP = "bitFlip";
-    public static final String TREE_BIT_MOVE = "treeBitMove";
-    public static final String SAMPLE_NONACTIVE = "sampleNoneActiveOperator";
-    public static final String SCALE_WITH_INDICATORS = "scaleWithIndicators";
-
-    public static final String UNIFORM = "uniform";
-    public static final String INTEGER_UNIFORM = "integerUniform";
-    public static final String SUBTREE_SLIDE = "subtreeSlide";
-    public static final String NARROW_EXCHANGE = "narrowExchange";
-    public static final String WIDE_EXCHANGE = "wideExchange";
-    public static final String WILSON_BALDING = "wilsonBalding";
-    public String fileNameStem = "untitled";
-    public String logFileName = null;
-    public String treeFileName = null;
-    public boolean mapTreeLog = false;
-    public String mapTreeFileName = null;
-    public boolean substTreeLog = false;
-    public String substTreeFileName = null;
-
-    // Data options
-    public int dataType = DataType.NUCLEOTIDES;
-
-    public TaxonList taxonList = null;
-    public SimpleAlignment originalAlignment = null;
-    public List<Taxa> taxonSets = new ArrayList<Taxa>();
-    public Map<Taxa, Boolean> taxonSetsMono = new HashMap<Taxa, Boolean>();
-    public Alignment alignment = null;
-    public Tree tree = null;
-    public boolean alignmentReset = true;
-    public double meanDistance = 1.0;
-    public int datesUnits = YEARS;
-    public int datesDirection = FORWARDS;
-    public double maximumTipHeight = 0.0;
-    public int translation = 0;
-    public boolean userTree = false;
-
-    public boolean guessDates = false;
-    public boolean guessDateFromOrder = true;
-    public boolean fromLast = false;
-    public int order = 0;
-    public String prefix;
-    public double offset = 0.0;
-    public double unlessLessThan = 0.0;
-    public double offset2 = 0.0;
-
-    // Model options
-    public int partitionCount = 1;
-    public int nucSubstitutionModel = HKY;
-    public int aaSubstitutionModel = BLOSUM_62;
-    public int binarySubstitutionModel = BIN_SIMPLE;
-
-    public int frequencyPolicy = ESTIMATED;
-
-    public boolean gammaHetero = false;
-    public int gammaCategories = 4;
-    public boolean invarHetero = false;
-    public String codonHeteroPattern = null;
-    public double meanSubstitutionRate = 1.0;
-    public boolean unlinkedSubstitutionModel = false;
-    public boolean unlinkedHeterogeneityModel = false;
-    public boolean unlinkedFrequencyModel = false;
-    public int nodeHeightPrior = CONSTANT;
-    public int parameterization = GROWTH_RATE;
-    public int skylineGroupCount = 10;
-    public int skylineModel = CONSTANT_SKYLINE;
-    public String extendedSkylineModel = VariableDemographicModel.Type.LINEAR.toString();
-    public double birthDeathSamplingProportion = 1.0;
-    public boolean fixedTree = false;
-    public Units.Type units = Units.Type.SUBSTITUTIONS;
-    public boolean fixedSubstitutionRate = false;
-    public boolean hasSetFixedSubstitutionRate = false;
-    public int clockModel = STRICT_CLOCK;
-
-    // MCMC options
-    public boolean upgmaStartingTree = false;
-    public int chainLength = 10000000;
-    public int logEvery = 1000;
-    public int echoEvery = 1000;
-    public int burnIn = 100000;
-    public String fileName = null;
-    public boolean autoOptimize = true;
-    public boolean performTraceAnalysis = false;
-    public boolean generateCSV = true;  // until/if a button
-    public boolean samplePriorOnly = false;
-
-    public HashMap<String, Parameter> parameters = new HashMap<String, Parameter>();
-    public HashMap<TaxonList, Parameter> statistics = new HashMap<TaxonList, Parameter>();
-    public HashMap<String, Operator> operators = new HashMap<String, Operator>();
-    public Parameter localClockRateChangesStatistic = null;
-    public Parameter localClockRatesStatistic = null;
-}
diff --git a/src/dr/app/oldbeauti/BeautiTester.java b/src/dr/app/oldbeauti/BeautiTester.java
deleted file mode 100644
index df029e4..0000000
--- a/src/dr/app/oldbeauti/BeautiTester.java
+++ /dev/null
@@ -1,417 +0,0 @@
-/*
- * BeautiTester.java
- *
- * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
- *
- * This file is part of BEAST.
- * See the NOTICE file distributed with this work for additional
- * information regarding copyright ownership and licensing.
- *
- * BEAST is free software; you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as
- * published by the Free Software Foundation; either version 2
- * of the License, or (at your option) any later version.
- *
- *  BEAST is distributed in the hope that it will be useful,
- *  but WITHOUT ANY WARRANTY; without even the implied warranty of
- *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
- *  GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public
- * License along with BEAST; if not, write to the
- * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
- * Boston, MA  02110-1301  USA
- */
-
-package dr.app.oldbeauti;
-
-import dr.evolution.alignment.ConvertAlignment;
-import dr.evolution.alignment.SimpleAlignment;
-import dr.evolution.datatype.AminoAcids;
-import dr.evolution.datatype.GeneticCode;
-import dr.evolution.io.Importer;
-import dr.evolution.io.NexusImporter;
-import dr.evolution.tree.Tree;
-import dr.evolution.util.Date;
-import dr.evolution.util.TimeScale;
-import dr.evolution.util.Units;
-
-import java.io.*;
-
-/**
- * @author			Andrew Rambaut
- * @author			Alexei Drummond
- * @version			$Id: BeautiTester.java,v 1.2 2005/07/11 14:07:25 rambaut Exp $
- */
-public class BeautiTester {
-
-    PrintWriter scriptWriter;
-
-    public BeautiTester() {
-        BeastGenerator beautiOptions = createOptions();
-
-        try {
-            scriptWriter = new PrintWriter(new FileWriter("tests/run_script.sh"));
-        } catch (IOException e) {
-            e.printStackTrace();
-        }
-        importFromFile("examples/Primates.nex", beautiOptions);
-
-        buildNucModels("tests/pri_", beautiOptions);
-        buildAAModels("tests/pri_", beautiOptions);
-
-        importFromFile("examples/Dengue4.env.nex", beautiOptions);
-        beautiOptions.fixedSubstitutionRate = false;
-
-        buildNucModels("tests/den_", beautiOptions);
-        buildAAModels("tests/den_", beautiOptions);
-
-        scriptWriter.close();
-    }
-
-    public BeastGenerator createOptions() {
-
-        BeastGenerator beautiOptions = new BeastGenerator();
-
-        beautiOptions.fileNameStem = "";
-        beautiOptions.substTreeLog = false;
-        beautiOptions.substTreeFileName = null;
-
-        // MCMC options
-        beautiOptions.chainLength = 100;
-        beautiOptions.logEvery = 100;
-        beautiOptions.echoEvery = 100;
-        beautiOptions.burnIn = 10;
-        beautiOptions.fileName = null;
-        beautiOptions.autoOptimize = true;
-
-        // Data options
-        beautiOptions.taxonList = null;
-        beautiOptions.originalAlignment = null;
-        beautiOptions.alignment = null;
-        beautiOptions.tree = null;
-
-        beautiOptions.datesUnits = BeautiOptions.YEARS;
-        beautiOptions.datesDirection = BeautiOptions.FORWARDS;
-
-        beautiOptions.userTree = false;
-        beautiOptions.fixedTree = false;
-
-        beautiOptions.performTraceAnalysis = false;
-        beautiOptions.generateCSV = true;  // until beuati button
-
-        beautiOptions.units = Units.Type.SUBSTITUTIONS;
-        beautiOptions.maximumTipHeight = 0.0;
-
-        beautiOptions.meanSubstitutionRate = 1.0;
-        beautiOptions.fixedSubstitutionRate = true;
-        return beautiOptions;
-    }
-
-    public void buildNucModels(String key, BeastGenerator beautiOptions) {
-        beautiOptions.alignment = beautiOptions.originalAlignment;
-
-        beautiOptions.nucSubstitutionModel = BeautiOptions.HKY;
-        buildCodonModels(key+"HKY", beautiOptions);
-        beautiOptions.nucSubstitutionModel = BeautiOptions.GTR;
-        buildCodonModels(key+"GTR", beautiOptions);
-    }
-
-    public void buildCodonModels(String key, BeastGenerator beautiOptions) {
-        beautiOptions.codonHeteroPattern = null;
-        beautiOptions.unlinkedSubstitutionModel = false;
-        beautiOptions.unlinkedHeterogeneityModel = false;
-        buildHeteroModels(key+"", beautiOptions);
-
-        beautiOptions.codonHeteroPattern = "123";
-        buildHeteroModels(key+"+C123", beautiOptions);
-
-        beautiOptions.unlinkedSubstitutionModel = true;
-        beautiOptions.unlinkedHeterogeneityModel = false;
-        buildHeteroModels(key+"+C123^S", beautiOptions);
-
-        beautiOptions.unlinkedSubstitutionModel = false;
-        beautiOptions.unlinkedHeterogeneityModel = true;
-        buildHeteroModels(key+"+C123^H", beautiOptions);
-
-        beautiOptions.unlinkedSubstitutionModel = true;
-        beautiOptions.unlinkedHeterogeneityModel = true;
-        buildHeteroModels(key+"+C123^SH", beautiOptions);
-
-        beautiOptions.codonHeteroPattern = "112";
-	    buildHeteroModels(key+"+C112", beautiOptions);
-
-	    beautiOptions.unlinkedSubstitutionModel = true;
-	    beautiOptions.unlinkedHeterogeneityModel = false;
-	    buildHeteroModels(key+"+C112^S", beautiOptions);
-
-	    beautiOptions.unlinkedSubstitutionModel = false;
-	    beautiOptions.unlinkedHeterogeneityModel = true;
-	    buildHeteroModels(key+"+C112^H", beautiOptions);
-
-	    beautiOptions.unlinkedSubstitutionModel = true;
-	    beautiOptions.unlinkedHeterogeneityModel = true;
-	    buildHeteroModels(key+"+C112^SH", beautiOptions);
-
-    }
-
-    public void buildHeteroModels(String key, BeastGenerator beautiOptions) {
-
-        beautiOptions.gammaHetero = false;
-        beautiOptions.gammaCategories = 4;
-        beautiOptions.invarHetero = false;
-        buildTreePriorModels(key+"", beautiOptions);
-
-        beautiOptions.gammaHetero = true;
-        beautiOptions.invarHetero = false;
-        buildTreePriorModels(key+"+G", beautiOptions);
-
-        beautiOptions.gammaHetero = false;
-        beautiOptions.invarHetero = true;
-        buildTreePriorModels(key+"+I", beautiOptions);
-
-        beautiOptions.gammaHetero = true;
-        beautiOptions.invarHetero = true;
-        buildTreePriorModels(key+"+GI", beautiOptions);
-    }
-
-    public void buildAAModels(String key, BeastGenerator beautiOptions) {
-
-        beautiOptions.alignment = new ConvertAlignment(AminoAcids.INSTANCE, GeneticCode.UNIVERSAL, beautiOptions.originalAlignment);
-        /*
-        beautiOptions.aaSubstitutionModel = BeautiOptions.BLOSUM_62;
-        buildHeteroModels(key+"BLOSUM62", beautiOptions);
-
-        beautiOptions.aaSubstitutionModel = BeautiOptions.CP_REV_45;
-        buildHeteroModels(key+"CPREV45", beautiOptions);
-
-        beautiOptions.aaSubstitutionModel = BeautiOptions.DAYHOFF;
-        buildHeteroModels(key+"DAYHOFF", beautiOptions);
-
-        beautiOptions.aaSubstitutionModel = BeautiOptions.JTT;
-        buildHeteroModels(key+"JTT", beautiOptions);
-
-        beautiOptions.aaSubstitutionModel = BeautiOptions.MT_REV_24;
-        buildHeteroModels(key+"MTREV24", beautiOptions);
-        */
-        beautiOptions.aaSubstitutionModel = BeautiOptions.WAG;
-        buildHeteroModels(key+"WAG", beautiOptions);
-    }
-
-    public void buildTreePriorModels(String key, BeastGenerator beautiOptions) {
-
-        beautiOptions.nodeHeightPrior = BeautiOptions.CONSTANT;
-        buildClockModels(key+"+CP", beautiOptions);
-
-        beautiOptions.nodeHeightPrior = BeautiOptions.EXPONENTIAL;
-        beautiOptions.parameterization = BeautiOptions.GROWTH_RATE;
-        buildClockModels(key+"+EG", beautiOptions);
-
-        beautiOptions.nodeHeightPrior = BeautiOptions.LOGISTIC;
-        beautiOptions.parameterization = BeautiOptions.GROWTH_RATE;
-        buildClockModels(key+"+LG", beautiOptions);
-
-        beautiOptions.nodeHeightPrior = BeautiOptions.EXPANSION;
-        beautiOptions.parameterization = BeautiOptions.GROWTH_RATE;
-        buildClockModels(key+"+XG", beautiOptions);
-
-        beautiOptions.nodeHeightPrior = BeautiOptions.SKYLINE;
-        beautiOptions.skylineGroupCount = 3;
-        beautiOptions.skylineModel = BeautiOptions.CONSTANT_SKYLINE;
-        buildClockModels(key+"+SKC", beautiOptions);
-
-        beautiOptions.skylineModel = BeautiOptions.LINEAR_SKYLINE;
-        buildClockModels(key+"+SKL", beautiOptions);
-
-    }
-
-    public void buildClockModels(String key, BeastGenerator beautiOptions) {
-        beautiOptions.clockModel = BeautiOptions.STRICT_CLOCK;
-        generate(key+"+CLOC", beautiOptions);
-        beautiOptions.clockModel = BeautiOptions.UNCORRELATED_EXPONENTIAL;
-        generate(key+"+UCED", beautiOptions);
-        beautiOptions.clockModel = BeautiOptions.UNCORRELATED_LOGNORMAL;
-        generate(key+"+UCLD", beautiOptions);
-    }
-
-    public void generate(String name, BeastGenerator beautiOptions) {
-        beautiOptions.logFileName = name + ".log";
-        beautiOptions.treeFileName = name + ".trees";
-
-        System.out.println("Generating: " + name);
-        String fileName = name + ".xml";
-        try {
-            FileWriter fw = new FileWriter(fileName);
-            beautiOptions.generateXML(fw);
-            fw.close();
-        } catch (IOException e) {
-            e.printStackTrace();
-        }
-
-        scriptWriter.println("beast " + fileName);
-    }
-
-    protected void importFromFile(String fileName, BeastGenerator beautiOptions) {
-
-        try {
-            FileReader reader = new FileReader(fileName);
-
-            NexusApplicationImporter importer = new NexusApplicationImporter(reader);
-
-            boolean done = false;
-
-            beautiOptions.originalAlignment = null;
-            beautiOptions.alignment = null;
-            beautiOptions.tree = null;
-            beautiOptions.taxonList = null;
-
-            while (!done) {
-                try {
-
-                    NexusImporter.NexusBlock block = importer.findNextBlock();
-
-                    if (block == NexusImporter.TAXA_BLOCK) {
-
-                        if (beautiOptions.taxonList != null) {
-                            throw new NexusImporter.MissingBlockException("TAXA block already defined");
-                        }
-
-                        beautiOptions.taxonList = importer.parseTaxaBlock();
-
-                    } else if (block == NexusImporter.CALIBRATION_BLOCK) {
-                        if (beautiOptions.taxonList == null) {
-                            throw new NexusImporter.MissingBlockException("TAXA or DATA block must be defined before a CALIBRATION block");
-                        }
-
-                        importer.parseCalibrationBlock(beautiOptions.taxonList);
-
-                    } else if (block == NexusImporter.CHARACTERS_BLOCK) {
-
-                        if (beautiOptions.taxonList == null) {
-                            throw new NexusImporter.MissingBlockException("TAXA block must be defined before a CHARACTERS block");
-                        }
-
-                        if (beautiOptions.originalAlignment != null) {
-                            throw new NexusImporter.MissingBlockException("CHARACTERS or DATA block already defined");
-                        }
-
-                        beautiOptions.originalAlignment = (SimpleAlignment)importer.parseCharactersBlock(beautiOptions.taxonList);
-
-                    } else if (block == NexusImporter.DATA_BLOCK) {
-
-                        if (beautiOptions.originalAlignment != null) {
-                            throw new NexusImporter.MissingBlockException("CHARACTERS or DATA block already defined");
-                        }
-
-                        // A data block doesn't need a taxon block before it
-                        // but if one exists then it will use it.
-                        beautiOptions.originalAlignment = (SimpleAlignment)importer.parseDataBlock(beautiOptions.taxonList);
-                        if (beautiOptions.taxonList == null) {
-                            beautiOptions.taxonList = beautiOptions.originalAlignment;
-                        }
-
-                    } else if (block == NexusImporter.TREES_BLOCK) {
-
-                        if (beautiOptions.taxonList == null) {
-                            throw new NexusImporter.MissingBlockException("TAXA or DATA block must be defined before a TREES block");
-                        }
-
-                        if (beautiOptions.tree != null) {
-                            throw new NexusImporter.MissingBlockException("TREES block already defined");
-                        }
-
-                        Tree[] trees = importer.parseTreesBlock(beautiOptions.taxonList);
-                        if (trees.length > 0) {
-                            beautiOptions.tree = trees[0];
-                        }
-
-/*					} else if (block == NexusApplicationImporter.PAUP_BLOCK) {
-
-						importer.parsePAUPBlock(beautiOptions);
-
-					} else if (block == NexusApplicationImporter.MRBAYES_BLOCK) {
-
-						importer.parseMrBayesBlock(beautiOptions);
-
-					} else if (block == NexusApplicationImporter.RHINO_BLOCK) {
-
-						importer.parseRhinoBlock(beautiOptions);
-*/
-                    } else {
-                        // Ignore the block..
-                    }
-
-                } catch (EOFException ex) {
-                    done = true;
-                }
-            }
-
-            if (beautiOptions.originalAlignment == null) {
-                throw new NexusImporter.MissingBlockException("DATA or CHARACTERS block is missing");
-            }
-
-        } catch (FileNotFoundException fnfe) {
-            System.err.println("File not found: " + fnfe);
-            System.exit(1);
-
-        } catch (Importer.ImportException ime) {
-            System.err.println("Error parsing imported file: " + ime);
-            System.exit(1);
-        } catch (IOException ioex) {
-            System.err.println("File I/O Error: " + ioex);
-            System.exit(1);
-        } catch (Exception ex) {
-            System.err.println("Fatal exception: " + ex);
-            System.exit(1);
-        }
-
-        // make sure they all have dates...
-        for (int i = 0; i < beautiOptions.originalAlignment.getTaxonCount(); i++) {
-            if (beautiOptions.originalAlignment.getTaxonAttribute(i, "date") == null) {
-                java.util.Date origin = new java.util.Date(0);
-
-                dr.evolution.util.Date date = dr.evolution.util.Date.createTimeSinceOrigin(0.0, Units.Type.YEARS, origin);
-                beautiOptions.originalAlignment.getTaxon(i).setAttribute("date", date);
-            }
-        }
-
-        beautiOptions.alignment = beautiOptions.originalAlignment;
-        beautiOptions.taxonList = beautiOptions.originalAlignment;
-
-        calculateHeights(beautiOptions);
-    }
-
-    private void calculateHeights(BeautiOptions options) {
-
-        options.maximumTipHeight = 0.0;
-        if (options.alignment == null) return;
-
-        dr.evolution.util.Date mostRecent = null;
-        for (int i = 0; i < options.alignment.getSequenceCount(); i++) {
-            Date date = options.alignment.getTaxon(i).getDate();
-            if ((date != null) && (mostRecent == null || date.after(mostRecent))) {
-                mostRecent = date;
-            }
-        }
-
-        if (mostRecent != null) {
-            TimeScale timeScale = new TimeScale(mostRecent.getUnits(), true, mostRecent.getAbsoluteTimeValue());
-            double time0 = timeScale.convertTime(mostRecent.getTimeValue(), mostRecent);
-
-            for (int i = 0; i < options.alignment.getSequenceCount(); i++) {
-                Date date = options.alignment.getTaxon(i).getDate();
-                if (date != null) {
-                    double height = timeScale.convertTime(date.getTimeValue(), date) - time0;
-                    if (height > options.maximumTipHeight) options.maximumTipHeight = height;
-                }
-            }
-        }
-    }
-
-	//Main method
-	public static void main(String[] args) {
-
-		new BeautiTester();
-	}
-}
-
diff --git a/src/dr/app/oldbeauti/CommandLineBeauti.java b/src/dr/app/oldbeauti/CommandLineBeauti.java
deleted file mode 100755
index f2246c3..0000000
--- a/src/dr/app/oldbeauti/CommandLineBeauti.java
+++ /dev/null
@@ -1,261 +0,0 @@
-/*
- * CommandLineBeauti.java
- *
- * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
- *
- * This file is part of BEAST.
- * See the NOTICE file distributed with this work for additional
- * information regarding copyright ownership and licensing.
- *
- * BEAST is free software; you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as
- * published by the Free Software Foundation; either version 2
- * of the License, or (at your option) any later version.
- *
- *  BEAST is distributed in the hope that it will be useful,
- *  but WITHOUT ANY WARRANTY; without even the implied warranty of
- *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
- *  GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public
- * License along with BEAST; if not, write to the
- * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
- * Boston, MA  02110-1301  USA
- */
-package dr.app.oldbeauti;
-
-import dr.evolution.alignment.Patterns;
-import dr.evolution.alignment.SimpleAlignment;
-import dr.evolution.distance.DistanceMatrix;
-import dr.evolution.distance.JukesCantorDistanceMatrix;
-import dr.evolution.io.Importer;
-import dr.evolution.io.NexusImporter;
-import dr.evolution.tree.Tree;
-import dr.evolution.util.Units;
-import org.jdom.Document;
-import org.jdom.JDOMException;
-import org.jdom.input.SAXBuilder;
-
-import java.io.*;
-
-/**
- * @author			Andrew Rambaut
- * @author			Alexei Drummond
- * @version			$Id: BeautiFrame.java,v 1.22 2006/09/09 16:07:06 rambaut Exp $
- */
-public class CommandLineBeauti {
-	private BeastGenerator beautiOptions = new BeastGenerator();
-
-	public CommandLineBeauti(String inputFileName, String templateFileName, String outputFileName) {
-
-		try {
-			if (!importFromFile(new File(inputFileName))) {
-				return;
-			}
-		} catch (FileNotFoundException fnfe) {
-			System.err.println("Error: Input file not found");
-			return;
-		} catch (IOException ioe) {
-			System.err.println("Error reading input file: " + ioe.getMessage());
-			return;
-		}
-
-		try {
-			if (!readFromFile(new File(templateFileName))) {
-				return;
-			}
-		} catch (FileNotFoundException fnfe) {
-			System.err.println("Error: Template file not found");
-			return;
-		} catch (IOException ioe) {
-			System.err.println("Error reading template file: " + ioe.getMessage());
-			return;
-		}
-
-		beautiOptions.guessDates();
-
-		try {
-			generate(new File(outputFileName));
-
-		} catch (IOException ioe) {
-			System.err.println("Unable to generate file: " + ioe.getMessage());
-			return;
-		}
-	}
-
-	private boolean readFromFile(File file) throws FileNotFoundException, IOException {
-		try {
-			SAXBuilder parser = new SAXBuilder();
-			Document doc = parser.build(file);
-			beautiOptions.parse(doc);
-
-		} catch (dr.xml.XMLParseException xpe) {
-			System.err.println("Error reading file: This may not be a BEAUti Template file");
-			System.err.println(xpe.getMessage());
-			return false;
-		} catch (JDOMException e) {
-			System.err.println("Unable to open file: This may not be a BEAUti Template file");
-			System.err.println(e.getMessage());
-			return false;
-		}
-		return true;
-	}
-
-	private boolean importFromFile(File file) throws FileNotFoundException, IOException {
-
-		try {
-			FileReader reader = new FileReader(file);
-
-			NexusApplicationImporter importer = new NexusApplicationImporter(reader);
-
-			boolean done = false;
-
-			beautiOptions.originalAlignment = null;
-			beautiOptions.alignment = null;
-			beautiOptions.tree = null;
-			beautiOptions.taxonList = null;
-
-			while (!done) {
-				try {
-
-					NexusImporter.NexusBlock block = importer.findNextBlock();
-
-					if (block == NexusImporter.TAXA_BLOCK) {
-
-						if (beautiOptions.taxonList != null) {
-							throw new NexusImporter.MissingBlockException("TAXA block already defined");
-						}
-
-						beautiOptions.taxonList = importer.parseTaxaBlock();
-
-					} else if (block == NexusImporter.CALIBRATION_BLOCK) {
-						if (beautiOptions.taxonList == null) {
-							throw new NexusImporter.MissingBlockException("TAXA or DATA block must be defined before a CALIBRATION block");
-						}
-
-						importer.parseCalibrationBlock(beautiOptions.taxonList);
-
-					} else if (block == NexusImporter.CHARACTERS_BLOCK) {
-
-						if (beautiOptions.taxonList == null) {
-							throw new NexusImporter.MissingBlockException("TAXA block must be defined before a CHARACTERS block");
-						}
-
-						if (beautiOptions.originalAlignment != null) {
-							throw new NexusImporter.MissingBlockException("CHARACTERS or DATA block already defined");
-						}
-
-						beautiOptions.originalAlignment = (SimpleAlignment)importer.parseCharactersBlock(beautiOptions.taxonList);
-
-					} else if (block == NexusImporter.DATA_BLOCK) {
-
-						if (beautiOptions.originalAlignment != null) {
-							throw new NexusImporter.MissingBlockException("CHARACTERS or DATA block already defined");
-						}
-
-						// A data block doesn't need a taxon block before it
-						// but if one exists then it will use it.
-						beautiOptions.originalAlignment = (SimpleAlignment)importer.parseDataBlock(beautiOptions.taxonList);
-						if (beautiOptions.taxonList == null) {
-							beautiOptions.taxonList = beautiOptions.originalAlignment;
-						}
-
-					} else if (block == NexusImporter.TREES_BLOCK) {
-
-						if (beautiOptions.taxonList == null) {
-							throw new NexusImporter.MissingBlockException("TAXA or DATA block must be defined before a TREES block");
-						}
-
-						if (beautiOptions.tree != null) {
-							throw new NexusImporter.MissingBlockException("TREES block already defined");
-						}
-
-						Tree[] trees = importer.parseTreesBlock(beautiOptions.taxonList);
-						if (trees.length > 0) {
-							beautiOptions.tree = trees[0];
-						}
-
-/*					} else if (block == NexusApplicationImporter.PAUP_BLOCK) {
-
-						importer.parsePAUPBlock(beautiOptions);
-
-					} else if (block == NexusApplicationImporter.MRBAYES_BLOCK) {
-
-						importer.parseMrBayesBlock(beautiOptions);
-
-					} else if (block == NexusApplicationImporter.RHINO_BLOCK) {
-
-						importer.parseRhinoBlock(beautiOptions);
-*/
-					} else {
-						// Ignore the block..
-					}
-
-				} catch (EOFException ex) {
-					done = true;
-				}
-			}
-
-			if (beautiOptions.originalAlignment == null) {
-				throw new NexusImporter.MissingBlockException("DATA or CHARACTERS block is missing");
-			}
-
-		} catch (Importer.ImportException ime) {
-			System.err.println("Error parsing imported file: " + ime);
-			return false;
-		} catch (IOException ioex) {
-			System.err.println("File I/O Error: " + ioex);
-			return false;
-		} catch (Exception ex) {
-			System.err.println("Fatal exception: " + ex);
-			return false;
-		}
-
-		// check the taxon names for invalid characters
-		boolean foundAmp = false;
-		for (int i = 0; i < beautiOptions.originalAlignment.getTaxonCount(); i++) {
-			String name = beautiOptions.originalAlignment.getTaxon(i).getId();
-			if (name.indexOf('&') >= 0) {
-				foundAmp = true;
-			}
-		}
-		if (foundAmp) {
-			System.err.println("One or more taxon names include an illegal character ('&').\n" +
-					"These characters will prevent BEAST from reading the resulting XML file.\n\n" +
-					"Please edit the taxon name(s) before generating the BEAST file.");
-		}
-
-
-		// make sure they all have dates...
-		for (int i = 0; i < beautiOptions.originalAlignment.getTaxonCount(); i++) {
-			if (beautiOptions.originalAlignment.getTaxonAttribute(i, "date") == null) {
-				java.util.Date origin = new java.util.Date(0);
-
-				dr.evolution.util.Date date = dr.evolution.util.Date.createTimeSinceOrigin(0.0, Units.Type.YEARS, origin);
-				beautiOptions.originalAlignment.getTaxon(i).setAttribute("date", date);
-			}
-		}
-
-		beautiOptions.fileNameStem = dr.app.util.Utils.trimExtensions(file.getName(),
-				new String[] {"nex", "NEX", "tre", "TRE", "nexus", "NEXUS"});
-
-		beautiOptions.alignment = beautiOptions.originalAlignment;
-		beautiOptions.alignmentReset = true;
-		if (beautiOptions.alignment != null) {
-			Patterns patterns = new Patterns(beautiOptions.alignment);
-			DistanceMatrix distances = new JukesCantorDistanceMatrix(patterns);
-			beautiOptions.meanDistance = distances.getMeanDistance();
-		}
-
-		return true;
-	}
-
-	private void generate(File file) throws IOException {
-		FileWriter fw = new FileWriter(file);
-		beautiOptions.generateXML(fw);
-		fw.close();
-	}
-
-
-
-}
diff --git a/src/dr/app/oldbeauti/DataPanel.java b/src/dr/app/oldbeauti/DataPanel.java
deleted file mode 100644
index e58e548..0000000
--- a/src/dr/app/oldbeauti/DataPanel.java
+++ /dev/null
@@ -1,699 +0,0 @@
-/*
- * DataPanel.java
- *
- * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
- *
- * This file is part of BEAST.
- * See the NOTICE file distributed with this work for additional
- * information regarding copyright ownership and licensing.
- *
- * BEAST is free software; you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as
- * published by the Free Software Foundation; either version 2
- * of the License, or (at your option) any later version.
- *
- *  BEAST is distributed in the hope that it will be useful,
- *  but WITHOUT ANY WARRANTY; without even the implied warranty of
- *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
- *  GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public
- * License along with BEAST; if not, write to the
- * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
- * Boston, MA  02110-1301  USA
- */
-
-package dr.app.oldbeauti;
-
-import dr.app.gui.components.RealNumberField;
-import dr.evolution.alignment.ConvertAlignment;
-import dr.evolution.datatype.*;
-import dr.evolution.util.*;
-import dr.app.gui.table.*;
-import jam.framework.Exportable;
-import jam.panels.OptionsPanel;
-import jam.table.*;
-
-import javax.swing.*;
-import javax.swing.border.EmptyBorder;
-import javax.swing.event.ListSelectionEvent;
-import javax.swing.event.ListSelectionListener;
-import javax.swing.plaf.BorderUIResource;
-import javax.swing.table.AbstractTableModel;
-import javax.swing.table.TableModel;
-import java.awt.*;
-import java.awt.event.*;
-import java.util.Calendar;
-import java.util.GregorianCalendar;
-
-/**
- * @author			Andrew Rambaut
- * @author			Alexei Drummond
- * @version			$Id: DataPanel.java,v 1.17 2006/09/05 13:29:34 rambaut Exp $
- */
-public class DataPanel extends JPanel implements Exportable {
-
-    /**
-     *
-     */
-    private static final long serialVersionUID = 5283922195494563924L;
-    JScrollPane scrollPane = new JScrollPane();
-    JTable dataTable = null;
-    DataTableModel dataTableModel = null;
-
-    ClearDatesAction clearDatesAction = new ClearDatesAction();
-    GuessDatesAction guessDatesAction = new GuessDatesAction();
-
-    JComboBox unitsCombo = new JComboBox(new String[] {"Years", "Months", "Days"});
-    JComboBox directionCombo = new JComboBox(new String[] {"Since some time in the past", "Before the present"});
-    //RealNumberField originField = new RealNumberField(0.0, Double.POSITIVE_INFINITY);
-
-    JComboBox translationCombo = new JComboBox();
-
-    TableRenderer sequenceRenderer = null;
-
-    BeautiFrame frame = null;
-
-    BeautiOptions options = null;
-
-    double[] heights = null;
-
-    public DataPanel(BeautiFrame parent) {
-
-        this.frame = parent;
-
-        dataTableModel = new DataTableModel();
-        TableSorter sorter = new TableSorter(dataTableModel);
-        dataTable = new JTable(sorter);
-
-        sorter.setTableHeader(dataTable.getTableHeader());
-
-        dataTable.getTableHeader().setReorderingAllowed(false);
-        dataTable.getTableHeader().setDefaultRenderer(
-                new HeaderRenderer(SwingConstants.LEFT, new Insets(0, 4, 0, 4)));
-
-        dataTable.getColumnModel().getColumn(0).setCellRenderer(
-                new TableRenderer(SwingConstants.LEFT, new Insets(0, 4, 0, 4)));
-        dataTable.getColumnModel().getColumn(0).setPreferredWidth(80);
-
-        dataTable.getColumnModel().getColumn(1).setCellRenderer(
-                new TableRenderer(SwingConstants.LEFT, new Insets(0, 4, 0, 4)));
-        dataTable.getColumnModel().getColumn(1).setPreferredWidth(80);
-        dataTable.getColumnModel().getColumn(1).setCellEditor(
-                new DateCellEditor());
-
-        dataTable.getColumnModel().getColumn(2).setCellRenderer(
-                new TableRenderer(SwingConstants.LEFT, new Insets(0, 4, 0, 4)));
-        dataTable.getColumnModel().getColumn(2).setPreferredWidth(80);
-
-        TableEditorStopper.ensureEditingStopWhenTableLosesFocus(dataTable);
-
-        sequenceRenderer = new TableRenderer(SwingConstants.LEFT, new Insets(0, 4, 0, 4));
-        sequenceRenderer.setFont(new Font("Courier", Font.PLAIN, 12));
-
-        dataTable.getSelectionModel().addListSelectionListener(new ListSelectionListener() {
-            public void valueChanged(ListSelectionEvent evt) { selectionChanged(); }
-        });
-
-        scrollPane = new JScrollPane(dataTable,
-                JScrollPane.VERTICAL_SCROLLBAR_ALWAYS,
-                JScrollPane.HORIZONTAL_SCROLLBAR_ALWAYS);
-        scrollPane.setOpaque(false);
-
-        clearDatesAction.setEnabled(false);
-
-        guessDatesAction.setEnabled(false);
-	    setupComponent(unitsCombo);
-        unitsCombo.setEnabled(false);
-	    setupComponent(directionCombo);
-        directionCombo.setEnabled(false);
-        //originField.setEnabled(false);
-        //originField.setValue(0.0);
-        //originField.setColumns(12);
-
-        JToolBar toolBar1 = new JToolBar();
-        toolBar1.setFloatable(false);
-        toolBar1.setOpaque(false);
-//		toolBar1.setLayout(new BoxLayout(toolBar1, javax.swing.BoxLayout.X_AXIS));
-        toolBar1.setLayout(new FlowLayout(java.awt.FlowLayout.LEFT, 0, 0));
-	    JButton button = new JButton(clearDatesAction);
-	    setupComponent(button);
-        toolBar1.add(button);
-	    button = new JButton(guessDatesAction);
-	    setupComponent(button);
-        toolBar1.add(button);
-        toolBar1.add(new JToolBar.Separator(new Dimension(12, 12)));
-        toolBar1.add(new JLabel("Dates specified as "));
-        toolBar1.add(unitsCombo);
-        toolBar1.add(directionCombo);
-        //toolBar.add(originField);
-
-
-        translationCombo.setOpaque(false);
-	    setupComponent(translationCombo);
-        translationCombo.addItem("None");
-        for (int i = 0; i < GeneticCode.GENETIC_CODE_DESCRIPTIONS.length; i++) {
-            translationCombo.addItem(GeneticCode.GENETIC_CODE_DESCRIPTIONS[i]);
-        }
-        translationCombo.setEnabled(false);
-        translationCombo.addItemListener(new ItemListener() {
-            public void itemStateChanged(ItemEvent e) {
-                translationChanged();
-            }
-        });
-
-        JToolBar toolBar2 = new JToolBar();
-        toolBar2.setOpaque(false);
-        toolBar2.setFloatable(false);
-        toolBar2.setLayout(new FlowLayout(java.awt.FlowLayout.LEFT, 0, 0));
-        toolBar2.add(new JLabel("Translation:"));
-        toolBar2.add(translationCombo);
-
-        setOpaque(false);
-        setBorder(new BorderUIResource.EmptyBorderUIResource(new java.awt.Insets(12, 12, 12, 12)));
-        setLayout(new BorderLayout(0,0));
-        add(toolBar1, "North");
-        add(scrollPane, "Center");
-        add(toolBar2, "South");
-
-        ItemListener listener =	new ItemListener() {
-            public void itemStateChanged(ItemEvent ev) { timeScaleChanged(); }
-        };
-        unitsCombo.addItemListener(listener);
-        directionCombo.addItemListener(listener);
-        //originField.addKeyListener(new java.awt.event.KeyAdapter() {
-        //	public void keyTyped(java.awt.event.KeyEvent ev) {
-        //		timeScaleChanged();
-        //	}});
-
-    }
-
-	private void setupComponent(JComponent comp) {
-		comp.setOpaque(false);
-
-		//comp.setFont(UIManager.getFont("SmallSystemFont"));
-		//comp.putClientProperty("JComponent.sizeVariant", "small");
-		if (comp instanceof JButton) {
-			comp.putClientProperty("JButton.buttonType", "roundRect");
-		}
-		if (comp instanceof JComboBox) {
-			comp.putClientProperty("JComboBox.isSquare", Boolean.TRUE);
-		}
-
-	}
-
-    public final void dataChanged() {
-        calculateHeights();
-        frame.dataChanged();
-    }
-
-    public final void timeScaleChanged() {
-        Units.Type units = Units.Type.YEARS;
-        switch (unitsCombo.getSelectedIndex()) {
-            case 0: units = Units.Type.YEARS; break;
-            case 1: units = Units.Type.MONTHS; break;
-            case 2: units = Units.Type.DAYS; break;
-        }
-
-        boolean backwards = directionCombo.getSelectedIndex() == 1;
-
-        //double origin = originField.getValue().doubleValue();
-
-        for (int i = 0; i < options.taxonList.getTaxonCount(); i++) {
-            Date date = options.taxonList.getTaxon(i).getDate();
-            double d = date.getTimeValue();
-
-            Date newDate = createDate(d, units, backwards, 0.0);
-
-            options.taxonList.getTaxon(i).setDate(newDate);
-        }
-
-        calculateHeights();
-
-        dataTableModel.fireTableDataChanged();
-        frame.dataChanged();
-    }
-
-    private Date createDate(double timeValue, Units.Type units, boolean backwards, double origin) {
-        if (backwards) {
-            return Date.createTimeAgoFromOrigin(timeValue, units, origin);
-        } else {
-            return Date.createTimeSinceOrigin(timeValue, units, origin);
-        }
-    }
-
-    public final void translationChanged() {
-        int index = translationCombo.getSelectedIndex() - 1;
-
-        if (index < 0) {
-            options.alignment = options.originalAlignment;
-        } else {
-            options.alignment = new ConvertAlignment(AminoAcids.INSTANCE, GeneticCode.GENETIC_CODES[index],
-                    options.originalAlignment);
-        }
-
-        setupTable();
-
-        frame.dataChanged();
-    }
-
-    public void setOptions(BeautiOptions options) {
-
-        this.options = options;
-
-        if (options.taxonList != null) {
-            clearDatesAction.setEnabled(true);
-            guessDatesAction.setEnabled(true);
-            unitsCombo.setEnabled(true);
-            directionCombo.setEnabled(true);
-
-            //originField.setEnabled(true);
-
-            if (options.originalAlignment != null && options.originalAlignment.getDataType() == Nucleotides.INSTANCE) {
-                translationCombo.setEnabled(true);
-                translationCombo.setSelectedIndex(options.translation);
-            } else {
-                translationCombo.setEnabled(false);
-                translationCombo.setSelectedIndex(0);
-            }
-
-        }
-
-        setupTable();
-
-        unitsCombo.setSelectedIndex(options.datesUnits);
-        directionCombo.setSelectedIndex(options.datesDirection);
-
-        calculateHeights();
-
-        dataTableModel.fireTableDataChanged();
-    }
-
-    private void setupTable() {
-
-        dataTableModel.fireTableStructureChanged();
-        if (options.alignment != null) {
-
-            dataTable.getColumnModel().getColumn(3).setCellRenderer(sequenceRenderer);
-            dataTable.setAutoResizeMode(javax.swing.JTable.AUTO_RESIZE_OFF);
-
-            sequenceRenderer.setText(options.alignment.getSequence(0).getSequenceString());
-            int w = sequenceRenderer.getPreferredSize().width + 8;
-            dataTable.getColumnModel().getColumn(3).setPreferredWidth(w);
-        }
-    }
-
-    public void getOptions(BeautiOptions options) {
-        options.datesUnits = unitsCombo.getSelectedIndex();
-        options.datesDirection = directionCombo.getSelectedIndex();
-        options.translation = translationCombo.getSelectedIndex();
-    }
-
-    public JComponent getExportableComponent() {
-        return dataTable;
-    }
-
-    public void selectionChanged() {
-
-        int[] selRows = dataTable.getSelectedRows();
-        if (selRows == null || selRows.length == 0) {
-            frame.dataSelectionChanged(false);
-        } else {
-            frame.dataSelectionChanged(true);
-        }
-    }
-
-    public void deleteSelection() {
-        int option = JOptionPane.showConfirmDialog(this, "Are you sure you wish to delete\n"+
-                "the selected taxa?\n"+
-                "This operation cannot be undone.",
-                "Warning",
-                JOptionPane.YES_NO_OPTION,
-                JOptionPane.WARNING_MESSAGE);
-
-        if (option == JOptionPane.YES_OPTION) {
-            int[] selRows = dataTable.getSelectedRows();
-            String[] names = new String[selRows.length];
-
-            TableModel model = dataTable.getModel();
-
-            for (int i = 0; i < names.length; i++) {
-                names[i] = (String)model.getValueAt(selRows[i], 0);
-            }
-
-            for (int i = 0; i < names.length; i++) {
-                if (options.originalAlignment != null) {
-                    int index = options.originalAlignment.getTaxonIndex(names[i]);
-                    options.originalAlignment.removeSequence(index);
-                } else {
-                    // there is no alignment so options.taxonList must be a Taxa object:
-                    int index = options.taxonList.getTaxonIndex(names[i]);
-                    ((Taxa)options.taxonList).removeTaxon(options.taxonList.getTaxon(index));
-                }
-            }
-
-            if (options.taxonList.getTaxonCount() == 0) {
-                // if all the sequences are deleted we may as well throw
-                // away the alignment...
-
-                options.originalAlignment = null;
-                options.alignment = null;
-                options.taxonList = null;
-            }
-
-            dataTableModel.fireTableDataChanged();
-            frame.dataChanged();
-        }
-
-    }
-
-    public void clearDates() {
-        for (int i = 0; i < options.taxonList.getTaxonCount(); i++) {
-            java.util.Date origin = new java.util.Date(0);
-
-            double d = 0.0;
-
-            Date date = Date.createTimeSinceOrigin(d, Units.Type.YEARS, origin);
-            options.taxonList.getTaxon(i).setAttribute("date", date);
-        }
-
-        // adjust the dates to the current timescale...
-        timeScaleChanged();
-
-        dataTableModel.fireTableDataChanged();
-        frame.dataChanged();
-    }
-
-    public void guessDates() {
-
-        OptionsPanel optionPanel = new OptionsPanel(12, 12);
-
-        optionPanel.addLabel("The date is given by a numerical field in the taxon label that is:");
-
-        final JRadioButton orderRadio = new JRadioButton("Defined by its order", true);
-        final JComboBox orderCombo = new JComboBox(new String[] {"first", "second", "third",
-                "fourth", "fourth from last",
-                "third from last", "second from last", "last"});
-
-        optionPanel.addComponents(orderRadio, orderCombo);
-        optionPanel.addSeparator();
-
-        final JRadioButton prefixRadio = new JRadioButton("Defined by a prefix", false);
-        final JTextField prefixText = new JTextField(16);
-        prefixText.setEnabled(false);
-        optionPanel.addComponents(prefixRadio, prefixText);
-        optionPanel.addSeparator();
-
-        final JCheckBox offsetCheck = new JCheckBox("Add the following value to each: ", false);
-        final RealNumberField offsetText = new RealNumberField();
-        offsetText.setValue(1900);
-        offsetText.setColumns(16);
-        offsetText.setEnabled(false);
-        offsetCheck.addItemListener(new ItemListener() {
-            public void itemStateChanged(ItemEvent e) {
-                offsetText.setEnabled(offsetCheck.isSelected());
-            }
-        });
-        optionPanel.addComponents(offsetCheck, offsetText);
-
-        final JCheckBox unlessCheck = new JCheckBox("...unless less than:", false);
-        final RealNumberField unlessText = new RealNumberField();
-        Calendar calendar = GregorianCalendar.getInstance();
-
-        int year = calendar.get(Calendar.YEAR) - 1999;
-        unlessText.setValue(year);
-        unlessText.setColumns(16);
-        unlessText.setEnabled(false);
-        optionPanel.addComponents(unlessCheck, unlessText);
-
-        final RealNumberField offset2Text = new RealNumberField();
-        offset2Text.setValue(2000);
-        offset2Text.setColumns(16);
-        offset2Text.setEnabled(false);
-        optionPanel.addComponentWithLabel("...in which case add:", offset2Text);
-
-        unlessCheck.addItemListener(new ItemListener() {
-            public void itemStateChanged(ItemEvent e) {
-                unlessText.setEnabled(unlessCheck.isSelected());
-                offset2Text.setEnabled(unlessCheck.isSelected());
-            }
-        });
-
-        ButtonGroup group = new ButtonGroup();
-        group.add(orderRadio);
-        group.add(prefixRadio);
-        ItemListener listener = new ItemListener() {
-            public void itemStateChanged(ItemEvent e) {
-                orderCombo.setEnabled(orderRadio.isSelected());
-                prefixText.setEnabled(prefixRadio.isSelected());
-            }
-        };
-        orderRadio.addItemListener(listener);
-        prefixRadio.addItemListener(listener);
-
-        JOptionPane optionPane = new JOptionPane(optionPanel,
-                JOptionPane.QUESTION_MESSAGE,
-                JOptionPane.OK_CANCEL_OPTION,
-                null,
-                null,
-                null);
-        optionPane.setBorder(new EmptyBorder(12, 12, 12, 12));
-
-        JDialog dialog = optionPane.createDialog(frame, "Guess Dates");
-//		dialog.setDefaultCloseOperation(JDialog.DO_NOTHING_ON_CLOSE);
-        dialog.setVisible(true);
-
-        if (optionPane.getValue() == null) {
-            return;
-        }
-
-        int value = ((Integer)optionPane.getValue()).intValue();
-        if (value == -1 || value == JOptionPane.CANCEL_OPTION) {
-            return;
-        }
-
-        options.guessDates = true;
-
-        String warningMessage = null;
-
-        for (int i = 0; i < options.taxonList.getTaxonCount(); i++) {
-            java.util.Date origin = new java.util.Date(0);
-
-            double d = 0.0;
-
-            try {
-                if (orderRadio.isSelected()) {
-                    options.guessDateFromOrder = true;
-                    options.order = orderCombo.getSelectedIndex();
-                    options.fromLast = false;
-                    if (options.order > 3) {
-                        options.fromLast = true;
-                        options.order = 8 - options.order - 1;
-                    }
-
-                    d = options.guessDateFromOrder(options.taxonList.getTaxonId(i), options.order, options.fromLast);
-                } else {
-                    options.guessDateFromOrder = false;
-                    options.prefix = prefixText.getText();
-                    d = options.guessDateFromPrefix(options.taxonList.getTaxonId(i), options.prefix);
-                }
-
-            } catch (GuessDatesException gfe) {
-                warningMessage = gfe.getMessage();
-            }
-
-            options.offset = 0.0;
-            options.unlessLessThan = 0.0;
-            if (offsetCheck.isSelected()) {
-                options.offset = offsetText.getValue().doubleValue();
-                if (unlessCheck.isSelected()) {
-                    options.unlessLessThan = unlessText.getValue().doubleValue();
-                    options.offset2 = offset2Text.getValue().doubleValue();
-                    if (d < options.unlessLessThan) {
-                        d += options.offset2;
-                    } else {
-                        d += options.offset;
-                    }
-                } else {
-                    d += options.offset;
-                }
-            }
-
-            Date date = Date.createTimeSinceOrigin(d, Units.Type.YEARS, origin);
-            options.taxonList.getTaxon(i).setAttribute("date", date);
-        }
-
-        if (warningMessage != null) {
-            JOptionPane.showMessageDialog(this, "Warning: some dates may not be set correctly - \n" + warningMessage,
-                    "Error guessing dates",
-                    JOptionPane.WARNING_MESSAGE);
-        }
-
-        // adjust the dates to the current timescale...
-        timeScaleChanged();
-
-        dataTableModel.fireTableDataChanged();
-        frame.dataChanged();
-    }
-
-    public class ClearDatesAction extends AbstractAction {
-        /**
-         *
-         */
-        private static final long serialVersionUID = -7281309694753868635L;
-
-        public ClearDatesAction() {
-            super("Clear Dates");
-            setToolTipText("Use this tool to remove sampling dates from each taxon");
-        }
-
-        public void actionPerformed(ActionEvent ae) { clearDates(); }
-    };
-
-    public class GuessDatesAction extends AbstractAction {
-        /**
-         *
-         */
-        private static final long serialVersionUID = 8514706149822252033L;
-
-        public GuessDatesAction() {
-            super("Guess Dates");
-            setToolTipText("Use this tool to guess the sampling dates from the taxon labels");
-        }
-
-        public void actionPerformed(ActionEvent ae) { guessDates(); }
-    };
-
-    private void calculateHeights() {
-
-        options.maximumTipHeight = 0.0;
-        if (options.alignment == null) return;
-
-        heights = null;
-
-        dr.evolution.util.Date mostRecent = null;
-        for (int i = 0; i < options.taxonList.getTaxonCount(); i++) {
-            Date date = options.taxonList.getTaxon(i).getDate();
-            if ((date != null) && (mostRecent == null || date.after(mostRecent))) {
-                mostRecent = date;
-            }
-        }
-
-        if (mostRecent != null) {
-            heights = new double[options.taxonList.getTaxonCount()];
-
-            TimeScale timeScale = new TimeScale(mostRecent.getUnits(), true, mostRecent.getAbsoluteTimeValue());
-            double time0 = timeScale.convertTime(mostRecent.getTimeValue(), mostRecent);
-
-            for (int i = 0; i < options.taxonList.getTaxonCount(); i++) {
-                Date date = options.taxonList.getTaxon(i).getDate();
-                if (date != null) {
-                    heights[i] = timeScale.convertTime(date.getTimeValue(), date) - time0;
-                    if (heights[i] > options.maximumTipHeight) options.maximumTipHeight = heights[i];
-                }
-            }
-        }
-    }
-
-    class DataTableModel extends AbstractTableModel {
-
-        /**
-         *
-         */
-        private static final long serialVersionUID = -6707994233020715574L;
-        String[] columnNames1 = { "Name", "Date", "Height", "Sequence" };
-        String[] columnNames2 = { "Name", "Date", "Height" };
-
-        public DataTableModel() {
-        }
-
-        public int getColumnCount() {
-            if (options != null && options.alignment != null) {
-                return columnNames1.length;
-            } else {
-                return columnNames2.length;
-            }
-        }
-
-        public int getRowCount() {
-            if (options == null) return 0;
-            if (options.taxonList == null) return 0;
-
-            return options.taxonList.getTaxonCount();
-        }
-
-        public Object getValueAt(int row, int col) {
-            switch (col) {
-                case 0: return options.taxonList.getTaxonId(row);
-                case 1:
-                    Date date = options.taxonList.getTaxon(row).getDate();
-                    if (date != null) {
-                        return new Double(date.getTimeValue());
-                    } else {
-                        return "-";
-                    }
-                case 2:
-                    if (heights != null) {
-                        return new Double(heights[row]);
-                    } else {
-                        return "0.0";
-                    }
-                case 3: return options.alignment.getAlignedSequenceString(row);
-            }
-            return null;
-        }
-
-        public void setValueAt(Object aValue, int row, int col) {
-            if (col == 0) {
-                options.taxonList.getTaxon(row).setId(aValue.toString());
-            } else if (col == 1) {
-                Date date = options.taxonList.getTaxon(row).getDate();
-                if (date != null) {
-                    double d = ((Double)aValue).doubleValue();
-                    Date newDate = createDate(d, date.getUnits(), date.isBackwards(), date.getOrigin());
-                    options.taxonList.getTaxon(row).setDate(newDate);
-                }
-            }
-
-            dataChanged();
-        }
-
-        public boolean isCellEditable(int row, int col) {
-            if (col == 0) return true;
-            if (col == 1) {
-                Date date = options.taxonList.getTaxon(row).getDate();
-                return (date != null);
-            }
-            return false;
-        }
-
-        public String getColumnName(int column) {
-            return columnNames1[column];
-        }
-
-        public Class getColumnClass(int c) {return getValueAt(0, c).getClass();}
-
-        public String toString() {
-            StringBuffer buffer = new StringBuffer();
-
-            buffer.append(getColumnName(0));
-            for (int j = 1; j < getColumnCount(); j++) {
-                buffer.append("\t");
-                buffer.append(getColumnName(j));
-            }
-            buffer.append("\n");
-
-            for (int i = 0; i < getRowCount(); i++) {
-                buffer.append(getValueAt(i, 0));
-                for (int j = 1; j < getColumnCount(); j++) {
-                    buffer.append("\t");
-                    buffer.append(getValueAt(i, j));
-                }
-                buffer.append("\n");
-            }
-
-            return buffer.toString();
-        }
-    };
-
-}
diff --git a/src/dr/app/oldbeauti/DiscretePriorDialog.java b/src/dr/app/oldbeauti/DiscretePriorDialog.java
deleted file mode 100644
index 9200a17..0000000
--- a/src/dr/app/oldbeauti/DiscretePriorDialog.java
+++ /dev/null
@@ -1,169 +0,0 @@
-/*
- * DiscretePriorDialog.java
- *
- * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
- *
- * This file is part of BEAST.
- * See the NOTICE file distributed with this work for additional
- * information regarding copyright ownership and licensing.
- *
- * BEAST is free software; you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as
- * published by the Free Software Foundation; either version 2
- * of the License, or (at your option) any later version.
- *
- *  BEAST is distributed in the hope that it will be useful,
- *  but WITHOUT ANY WARRANTY; without even the implied warranty of
- *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
- *  GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public
- * License along with BEAST; if not, write to the
- * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
- * Boston, MA  02110-1301  USA
- */
-
-package dr.app.oldbeauti;
-
-import dr.app.gui.components.RealNumberField;
-import jam.panels.OptionsPanel;
-
-import javax.swing.*;
-import javax.swing.border.EmptyBorder;
-import java.awt.event.ItemEvent;
-import java.awt.event.ItemListener;
-
-/**
- * @author			Andrew Rambaut
- * @author			Alexei Drummond
- * @version			$Id: PriorDialog.java,v 1.4 2006/09/05 13:29:34 rambaut Exp $
- */
-public class DiscretePriorDialog {
-
-	private JFrame frame;
-
-	public static String[] priors = {
-			"Uniform",
-			"Poisson"};
-
-	private String[] argumentNames = new String[] {
-			"Poisson Mean", "Zero Offset"
-	};
-
-	private JComboBox priorCombo;
-	private int[][] argumentIndices = { {}, {0, 1} };
-	private RealNumberField initialField = new RealNumberField();
-	private RealNumberField[] argumentFields = new RealNumberField[argumentNames.length];
-	private OptionsPanel optionPanel;
-
-	private BeautiOptions.Parameter parameter;
-
-	public DiscretePriorDialog(JFrame frame) {
-		this.frame = frame;
-
-		priorCombo = new JComboBox(priors);
-
-		initialField.setColumns(8);
-		for (int i = 0; i < argumentNames.length; i++) {
-			argumentFields[i] = new RealNumberField();
-			argumentFields[i].setColumns(8);
-		}
-
-		optionPanel = new OptionsPanel(12,12);
-	}
-
-	public int showDialog(final BeautiOptions.Parameter parameter) {
-
-		this.parameter = parameter;
-
-		priorCombo.setSelectedIndex(parameter.priorType == PriorType.POISSON_PRIOR ? 1 : 0);
-
-		if (!parameter.isStatistic) {
-			initialField.setRange(parameter.lower, parameter.upper);
-			initialField.setValue(parameter.initial);
-		}
-
-		setArguments();
-		setupComponents();
-
-		JOptionPane optionPane = new JOptionPane(optionPanel,
-				JOptionPane.QUESTION_MESSAGE,
-				JOptionPane.OK_CANCEL_OPTION,
-				null,
-				null,
-				null);
-		optionPane.setBorder(new EmptyBorder(12, 12, 12, 12));
-
-		final JDialog dialog = optionPane.createDialog(frame, "Prior for Parameter");
-		dialog.pack();
-
-		priorCombo.addItemListener(new ItemListener() {
-			public void itemStateChanged(ItemEvent e) {
-				setupComponents();
-				dialog.pack();
-				dialog.repaint();
-			}});
-
-		dialog.setVisible(true);
-
-		int result = JOptionPane.CANCEL_OPTION;
-		Integer value = (Integer)optionPane.getValue();
-		if (value != null && value != -1) {
-			result = value;
-		}
-
-		if (result == JOptionPane.OK_OPTION) {
-			getArguments();
-		}
-
-		return result;
-	}
-
-	private void setArguments() {
-		argumentFields[0].setRange(0.0, Double.MAX_VALUE);
-		argumentFields[0].setValue(parameter.poissonMean);
-		argumentFields[1].setValue(parameter.poissonOffset);
-
-	}
-
-	private void getArguments() {
-		parameter.priorType = priorCombo.getSelectedIndex() == 0 ? PriorType.UNIFORM_PRIOR : PriorType.POISSON_PRIOR;
-
-		if (initialField.getValue() != null) parameter.initial = initialField.getValue();
-
-		switch (parameter.priorType) {
-			case UNIFORM_PRIOR:
-				if (argumentFields[0].getValue() != null) parameter.uniformLower = argumentFields[0].getValue();
-				if (argumentFields[1].getValue() != null) parameter.uniformUpper = argumentFields[1].getValue();
-				break;
-			case POISSON_PRIOR:
-				if (argumentFields[0].getValue() != null) parameter.poissonMean = argumentFields[0].getValue();
-				if (argumentFields[1].getValue() != null) parameter.poissonOffset = argumentFields[1].getValue();
-				break;
-			default: throw new IllegalArgumentException("Unknown prior index");
-		}
-	}
-
-	private void setupComponents() {
-		optionPanel.removeAll();
-
-		optionPanel.addSpanningComponent(new JLabel("Select prior distribution for " + parameter.getName()));
-
-		optionPanel.addComponents(new JLabel("Prior Distribution:"), priorCombo);
-		int priorType = priorCombo.getSelectedIndex();
-
-		optionPanel.addSeparator();
-
-		for (int i = 0; i < argumentIndices[priorType].length; i++) {
-			int k = argumentIndices[priorType][i];
-			optionPanel.addComponentWithLabel(argumentNames[k] + ":", argumentFields[k]);
-		}
-
-
-		if (!parameter.isStatistic) {
-			optionPanel.addSeparator();
-			optionPanel.addComponents(new JLabel("Initial Value:"), initialField);
-		}
-	}
-
-}
diff --git a/src/dr/app/oldbeauti/MCMCPanel.java b/src/dr/app/oldbeauti/MCMCPanel.java
deleted file mode 100644
index 9fda70e..0000000
--- a/src/dr/app/oldbeauti/MCMCPanel.java
+++ /dev/null
@@ -1,219 +0,0 @@
-/*
- * MCMCPanel.java
- *
- * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
- *
- * This file is part of BEAST.
- * See the NOTICE file distributed with this work for additional
- * information regarding copyright ownership and licensing.
- *
- * BEAST is free software; you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as
- * published by the Free Software Foundation; either version 2
- * of the License, or (at your option) any later version.
- *
- *  BEAST is distributed in the hope that it will be useful,
- *  but WITHOUT ANY WARRANTY; without even the implied warranty of
- *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
- *  GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public
- * License along with BEAST; if not, write to the
- * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
- * Boston, MA  02110-1301  USA
- */
-
-package dr.app.oldbeauti;
-
-import dr.app.gui.components.WholeNumberField;
-import jam.framework.Exportable;
-import jam.panels.OptionsPanel;
-
-import javax.swing.*;
-import javax.swing.event.ChangeListener;
-import javax.swing.event.ChangeEvent;
-import java.awt.event.ActionEvent;
-
-/**
- * @author			Andrew Rambaut
- * @author			Alexei Drummond
- * @version			$Id: MCMCPanel.java,v 1.16 2006/09/05 13:29:34 rambaut Exp $
- */
-public class MCMCPanel extends OptionsPanel implements Exportable {
-
-	/**
-	 *
-	 */
-	private static final long serialVersionUID = -3710586474593827540L;
-	WholeNumberField chainLengthField = new WholeNumberField(1, Integer.MAX_VALUE);
-	WholeNumberField echoEveryField = new WholeNumberField(1, Integer.MAX_VALUE);
-	WholeNumberField logEveryField = new WholeNumberField(1, Integer.MAX_VALUE);
-
-	JCheckBox samplePriorCheckBox = new JCheckBox("Sample from prior only - create empty alignment");
-
-	JTextField logFileNameField = new JTextField("untitled.log");
-	JTextField treeFileNameField = new JTextField("untitled.trees");
-    JCheckBox mapTreeLogCheck = new JCheckBox("Create tree file containing the MAP tree:");
-    JTextField mapTreeFileNameField = new JTextField("untitled.MAP.tree");
-    JCheckBox substTreeLogCheck = new JCheckBox("Create tree log file with branch length in substitutions:");
-    JTextField substTreeFileNameField = new JTextField("untitled(subst).trees");
-
-	BeautiFrame frame = null;
-
-	public MCMCPanel(BeautiFrame parent) {
-
-		super(12, 24);
-
-		this.frame = parent;
-
-		setOpaque(false);
-
-		chainLengthField.setValue(100000);
-		chainLengthField.setColumns(10);
-		addComponentWithLabel("Length of chain:", chainLengthField);
-
-		addSeparator();
-
-		echoEveryField.setValue(1000);
-		echoEveryField.setColumns(10);
-		addComponentWithLabel("Echo state to screen every:", echoEveryField);
-
-		logEveryField.setValue(100);
-		logEveryField.setColumns(10);
-		addComponentWithLabel("Log parameters every:", logEveryField);
-
-		addSeparator();
-
-		logFileNameField.setColumns(32);
-		addComponentWithLabel("Log file name:", logFileNameField);
-		treeFileNameField.setColumns(32);
-		addComponentWithLabel("Trees file name:", treeFileNameField);
-
-//        addComponent(mapTreeLogCheck);
-//        mapTreeLogCheck.setOpaque(false);
-//        mapTreeLogCheck.addActionListener(new java.awt.event.ActionListener() {
-//            public void actionPerformed(ActionEvent e) {
-//                mapTreeFileNameField.setEnabled(mapTreeLogCheck.isSelected());
-//            }
-//        });
-//
-//        mapTreeFileNameField.setColumns(32);
-//        addComponentWithLabel("MAP tree file name:", mapTreeFileNameField);
-
-        addComponent(substTreeLogCheck);
-        substTreeLogCheck.setOpaque(false);
-        substTreeLogCheck.addActionListener(new java.awt.event.ActionListener() {
-            public void actionPerformed(ActionEvent e) {
-                substTreeFileNameField.setEnabled(substTreeLogCheck.isSelected());
-	            frame.mcmcChanged();
-           }
-        });
-
-        substTreeFileNameField.setColumns(32);
-        addComponentWithLabel("Substitutions trees file name:", substTreeFileNameField);
-
-		java.awt.event.KeyListener listener = new java.awt.event.KeyAdapter() {
-			public void keyTyped(java.awt.event.KeyEvent ev) {
-				frame.mcmcChanged();
-			}
-		};
-
-		addSeparator();
-
-		addComponent(samplePriorCheckBox);
-		samplePriorCheckBox.setOpaque(false);
-		samplePriorCheckBox.addChangeListener(new ChangeListener() {
-		    public void stateChanged(ChangeEvent changeEvent) {
-			    frame.mcmcChanged();
-		    }
-		});
-
-		chainLengthField.addKeyListener(listener);
-		echoEveryField.addKeyListener(listener);
-		logEveryField.addKeyListener(listener);
-		logFileNameField.addKeyListener(listener);
-		treeFileNameField.addKeyListener(listener);
-        //mapTreeFileNameField.addKeyListener(listener);
-        substTreeFileNameField.addKeyListener(listener);
-	}
-
-	public void setOptions(BeautiOptions options) {
-
-		chainLengthField.setValue(options.chainLength);
-
-		echoEveryField.setValue(options.echoEvery);
-		logEveryField.setValue(options.logEvery);
-
-		if (options.fileNameStem != null) {
-            if (options.logFileName == null) {
-			logFileNameField.setText(options.fileNameStem + ".log");
-            } else {
-                logFileNameField.setText(options.logFileName);
-            }
-            if (options.treeFileName == null) {
-			    treeFileNameField.setText(options.fileNameStem + ".trees");
-            } else {
-                treeFileNameField.setText(options.treeFileName);
-            }
-//            if (options.mapTreeFileName == null) {
-//			    mapTreeFileNameField.setText(options.fileNameStem + ".MAP.tree");
-//            } else {
-//                mapTreeFileNameField.setText(options.mapTreeFileName);
-//            }
-            if (options.substTreeFileName == null) {
-			    substTreeFileNameField.setText(options.fileNameStem + "(subst).trees");
-            } else {
-                substTreeFileNameField.setText(options.substTreeFileName);
-            }
-			logFileNameField.setEnabled(true);
-			treeFileNameField.setEnabled(true);
-
-//            mapTreeLogCheck.setEnabled(true);
-//            mapTreeLogCheck.setSelected(options.mapTreeLog);
-//            mapTreeFileNameField.setEnabled(options.mapTreeLog);
-
-            substTreeLogCheck.setEnabled(true);
-            substTreeLogCheck.setSelected(options.substTreeLog);
-            substTreeFileNameField.setEnabled(options.substTreeLog);
-		} else {
-			logFileNameField.setText("untitled");
-			logFileNameField.setEnabled(false);
-			treeFileNameField.setText("untitled");
-			treeFileNameField.setEnabled(false);
-//            mapTreeLogCheck.setEnabled(false);
-//            mapTreeFileNameField.setEnabled(false);
-//            mapTreeFileNameField.setText("untitled");
-            substTreeLogCheck.setEnabled(false);
-            substTreeFileNameField.setEnabled(false);
-            substTreeFileNameField.setText("untitled");
-		}
-
-		samplePriorCheckBox.setSelected(options.samplePriorOnly);
-
-		validate();
-		repaint();
-	}
-
-	public void getOptions(BeautiOptions options) {
-		options.chainLength = chainLengthField.getValue().intValue();
-
-		options.echoEvery = echoEveryField.getValue().intValue();
-		options.logEvery = logEveryField.getValue().intValue();
-
-		options.logFileName = logFileNameField.getText();
-		options.treeFileName = treeFileNameField.getText();
-
-//        options.mapTreeLog = mapTreeLogCheck.isSelected();
-//        options.mapTreeFileName = mapTreeFileNameField.getText();
-
-        options.substTreeLog = substTreeLogCheck.isSelected();
-        options.substTreeFileName = substTreeFileNameField.getText();
-
-		options.samplePriorOnly = samplePriorCheckBox.isSelected();
-	}
-
-    public JComponent getExportableComponent() {
-		return this;
-	}
-
-}
diff --git a/src/dr/app/oldbeauti/ModelPanel.java b/src/dr/app/oldbeauti/ModelPanel.java
deleted file mode 100644
index 4c1be1e..0000000
--- a/src/dr/app/oldbeauti/ModelPanel.java
+++ /dev/null
@@ -1,484 +0,0 @@
-/*
- * ModelPanel.java
- *
- * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
- *
- * This file is part of BEAST.
- * See the NOTICE file distributed with this work for additional
- * information regarding copyright ownership and licensing.
- *
- * BEAST is free software; you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as
- * published by the Free Software Foundation; either version 2
- * of the License, or (at your option) any later version.
- *
- *  BEAST is distributed in the hope that it will be useful,
- *  but WITHOUT ANY WARRANTY; without even the implied warranty of
- *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
- *  GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public
- * License along with BEAST; if not, write to the
- * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
- * Boston, MA  02110-1301  USA
- */
-
-package dr.app.oldbeauti;
-
-import dr.app.gui.components.RealNumberField;
-import dr.evolution.datatype.DataType;
-import jam.framework.Exportable;
-import jam.panels.OptionsPanel;
-
-import javax.swing.*;
-import java.awt.*;
-import java.awt.event.ActionEvent;
-
-/**
- * @author			Andrew Rambaut
- * @author			Alexei Drummond
- * @version			$Id: ModelPanel.java,v 1.17 2006/09/05 13:29:34 rambaut Exp $
- */
-public class ModelPanel extends OptionsPanel implements Exportable {
-
-    /**
-     *
-     */
-    private static final long serialVersionUID = 2778103564318492601L;
-
-    JComboBox nucSubstCombo = new JComboBox(new String[] {"HKY", "GTR"});
-    JComboBox aaSubstCombo = new JComboBox(new String[] {"Blosum62", "Dayhoff", "JTT", "mtREV", "cpREV", "WAG"});
-    JComboBox binarySubstCombo = new JComboBox(new String[] {"Simple", "Covarion"});
-
-    JComboBox frequencyCombo =new JComboBox(new String[] {"Estimated", "Empirical", "All equal"});
-
-    JComboBox heteroCombo = new JComboBox(new String[] {"None", "Gamma", "Invariant Sites", "Gamma + Invariant Sites"});
-
-    JComboBox gammaCatCombo = new JComboBox(new String[] {"4", "5", "6", "7", "8", "9", "10"});
-    JLabel gammaCatLabel;
-
-    JComboBox codingCombo = new JComboBox(new String[] {
-            "Off",
-            "2 partitions: codon positions (1 + 2), 3",
-            "3 partitions: codon positions 1, 2, 3"});
-    JCheckBox substUnlinkCheck = new JCheckBox("Unlink substitution model across codon positions");
-    JCheckBox heteroUnlinkCheck = new JCheckBox("Unlink rate heterogeneity model across codon positions");
-    JCheckBox freqsUnlinkCheck = new JCheckBox("Unlink base frequencies across codon positions");
-
-    JButton setSRD06Button;
-
-    JCheckBox fixedSubstitutionRateCheck = new JCheckBox("Fix mean substitution rate:");
-    JLabel substitutionRateLabel = new JLabel("Mean substitution rate:");
-    RealNumberField substitutionRateField = new RealNumberField(Double.MIN_VALUE, Double.POSITIVE_INFINITY);
-
-    JComboBox clockModelCombo = new JComboBox(new String[] {
-            "Strict Clock",
-            "Random Local Clock",
-            "Relaxed Clock: Uncorrelated Lognormal",
-            "Relaxed Clock: Uncorrelated Exponential" } );
-
-    BeautiFrame frame = null;
-
-    boolean warningShown = false;
-    boolean hasSetFixedSubstitutionRate = false;
-
-    boolean settingOptions = false;
-
-    boolean hasAlignment = false;
-
-    int dataType = DataType.NUCLEOTIDES;
-
-    public ModelPanel(BeautiFrame parent) {
-
-        super(12, 18);
-
-        this.frame = parent;
-
-        setOpaque(false);
-
-        setupComponent(substUnlinkCheck);
-        substUnlinkCheck.setEnabled(false);
-        substUnlinkCheck.setToolTipText("" +
-                "<html>Gives each codon position partition different<br>" +
-                "substitution model parameters.</html>");
-
-        setupComponent(heteroUnlinkCheck);
-        heteroUnlinkCheck.setEnabled(false);
-        heteroUnlinkCheck.setToolTipText("<html>Gives each codon position partition different<br>rate heterogeneity model parameters.</html>");
-
-        setupComponent(freqsUnlinkCheck);
-        freqsUnlinkCheck.setEnabled(false);
-        freqsUnlinkCheck.setToolTipText("<html>Gives each codon position partition different<br>nucleotide frequency parameters.</html>");
-
-        java.awt.event.ItemListener listener = new java.awt.event.ItemListener() {
-            public void itemStateChanged(java.awt.event.ItemEvent ev) {
-                frame.modelChanged();
-            }
-        };
-
-        setupComponent(nucSubstCombo);
-        nucSubstCombo.addItemListener(listener);
-        nucSubstCombo.setToolTipText("<html>Select the type of nucleotide substitution model.</html>");
-
-        setupComponent(aaSubstCombo);
-        aaSubstCombo.addItemListener(listener);
-        aaSubstCombo.setToolTipText("<html>Select the type of amino acid substitution model.</html>");
-
-        setupComponent(binarySubstCombo);
-        binarySubstCombo.addItemListener(listener);
-        binarySubstCombo.setToolTipText("<html>Select the type of binay substitution model.</html>");
-
-        setupComponent(frequencyCombo);
-        frequencyCombo.addItemListener(listener);
-        frequencyCombo.setToolTipText("<html>Select the policy for determining the base frequencies.</html>");
-
-        setupComponent(heteroCombo);
-        heteroCombo.setToolTipText("<html>Select the type of site-specific rate<br>heterogeneity model.</html>");
-        heteroCombo.addItemListener(
-                new java.awt.event.ItemListener() {
-                    public void itemStateChanged(java.awt.event.ItemEvent ev) {
-
-                        frame.modelChanged();
-
-                        if (heteroCombo.getSelectedIndex() == 1 || heteroCombo.getSelectedIndex() == 3) {
-                            gammaCatLabel.setEnabled(true);
-                            gammaCatCombo.setEnabled(true);
-                        } else {
-                            gammaCatLabel.setEnabled(false);
-                            gammaCatCombo.setEnabled(false);
-                        }
-                    }
-                }
-        );
-
-        setupComponent(gammaCatCombo);
-        gammaCatCombo.setToolTipText("<html>Select the number of categories to use for<br>the discrete gamma rate heterogeneity model.</html>");
-        gammaCatCombo.addItemListener(listener);
-
-        setupComponent(codingCombo);
-        codingCombo.setToolTipText("<html>Select how to partition the codon positions.</html>");
-        codingCombo.addItemListener(
-                new java.awt.event.ItemListener() {
-                    public void itemStateChanged(java.awt.event.ItemEvent ev) {
-
-                        frame.modelChanged();
-
-                        if (codingCombo.getSelectedIndex() != 0) { // codon position partitioning
-                            substUnlinkCheck.setEnabled(true);
-                            heteroUnlinkCheck.setEnabled(true);
-                            freqsUnlinkCheck.setEnabled(true);
-                        } else {
-                            substUnlinkCheck.setEnabled(false);
-                            substUnlinkCheck.setSelected(false);
-                            heteroUnlinkCheck.setEnabled(false);
-                            heteroUnlinkCheck.setSelected(false);
-                            freqsUnlinkCheck.setEnabled(false);
-                            freqsUnlinkCheck.setSelected(false);
-                        }
-                    }
-                }
-        );
-
-        substUnlinkCheck.addItemListener(listener);
-        heteroUnlinkCheck.addItemListener(listener);
-        freqsUnlinkCheck.addItemListener(listener);
-
-        setSRD06Button = new JButton(setSRD06Action);
-        setupComponent(setSRD06Button);
-        setSRD06Button.setToolTipText("<html>Sets the SRD06 model as described in<br>" +
-                "Shapiro, Rambaut & Drummond (2006) <i>MBE</i> <b>23</b>: 7-9.</html>");
-
-        setupComponent(fixedSubstitutionRateCheck);
-        fixedSubstitutionRateCheck.setToolTipText(
-                "<html>Select this option to fix the substitution rate<br>" +
-                        "rather than try to infer it. If this option is<br>" +
-                        "turned off then either the sequences should have<br>" +
-                        "dates or the tree should have sufficient calibration<br>" +
-                        "informations specified as priors.</html>");
-        fixedSubstitutionRateCheck.addItemListener(
-                new java.awt.event.ItemListener() {
-                    public void itemStateChanged(java.awt.event.ItemEvent ev) {
-                        boolean fixed = fixedSubstitutionRateCheck.isSelected();
-                        substitutionRateLabel.setEnabled(fixed);
-                        substitutionRateField.setEnabled(fixed);
-                        hasSetFixedSubstitutionRate = true;
-                        frame.modelChanged();
-                    }
-                }
-        );
-
-        setupComponent(substitutionRateField);
-        substitutionRateField.addKeyListener(new java.awt.event.KeyAdapter() {
-            public void keyTyped(java.awt.event.KeyEvent ev) {
-                frame.mcmcChanged();
-            }});
-        substitutionRateField.setToolTipText("<html>Enter the substitution rate here.</html>");
-
-        setupComponent(clockModelCombo);
-        clockModelCombo.setToolTipText("<html>Select either a strict molecular clock or<br>or a relaxed clock model.</html>");
-        clockModelCombo.addItemListener(listener);
-
-        setupPanel();
-    }
-
-    private void setupComponent(JComponent comp) {
-        comp.setOpaque(false);
-
-        //comp.setFont(UIManager.getFont("SmallSystemFont"));
-        //comp.putClientProperty("JComponent.sizeVariant", "small");
-        if (comp instanceof JButton) {
-            comp.putClientProperty("JButton.buttonType", "roundRect");
-        }
-        if (comp instanceof JComboBox) {
-            comp.putClientProperty("JComboBox.isSquare", Boolean.TRUE);
-        }
-    }
-
-    private void setupPanel() {
-
-        removeAll();
-
-        if (hasAlignment) {
-
-            switch (dataType){
-                case DataType.NUCLEOTIDES:
-                    addComponentWithLabel("Substitution Model:", nucSubstCombo, true);
-                    addComponentWithLabel("Base frequencies:", frequencyCombo);
-                    addComponentWithLabel("Site Heterogeneity Model:", heteroCombo);
-                    gammaCatLabel = addComponentWithLabel("Number of Gamma Categories:", gammaCatCombo);
-
-                    addSeparator();
-
-                    JPanel panel = new JPanel(new BorderLayout(6,6));
-                    panel.setOpaque(false);
-                    panel.add(codingCombo, BorderLayout.CENTER);
-                    panel.add(setSRD06Button, BorderLayout.EAST);
-                    addComponentWithLabel("Partition into codon positions:", panel);
-
-                    panel = new JPanel();
-                    panel.setOpaque(false);
-                    panel.setLayout(new BoxLayout(panel, BoxLayout.PAGE_AXIS));
-                    panel.setBorder(BorderFactory.createTitledBorder("Link/Unlink parameters:"));
-                    panel.add(substUnlinkCheck);
-                    panel.add(heteroUnlinkCheck);
-                    panel.add(freqsUnlinkCheck);
-
-                    addComponent(panel);
-                    break;
-
-                case DataType.AMINO_ACIDS:
-                    addComponentWithLabel("Substitution Model:", aaSubstCombo);
-                    addComponentWithLabel("Site Heterogeneity Model:", heteroCombo);
-                    gammaCatLabel = addComponentWithLabel("Number of Gamma Categories:", gammaCatCombo);
-
-                    break;
-
-                case DataType.TWO_STATES:
-                case DataType.COVARION:
-                    addComponentWithLabel("Substitution Model:", binarySubstCombo);
-                    addComponentWithLabel("Site Heterogeneity Model:", heteroCombo);
-                    gammaCatLabel = addComponentWithLabel("Number of Gamma Categories:", gammaCatCombo);
-
-                    break;
-
-                default:
-                    throw new IllegalArgumentException("Unknown data type");
-
-            }
-
-            addSeparator();
-
-            //addComponent(fixedSubstitutionRateCheck);
-            substitutionRateField.setColumns(10);
-            addComponents(fixedSubstitutionRateCheck, substitutionRateField);
-
-            addSeparator();
-        }
-
-        addComponentWithLabel("Molecular Clock Model:", clockModelCombo);
-        validate();
-        repaint();
-    }
-
-    private void setSRD06Model() {
-        nucSubstCombo.setSelectedIndex(0);
-        heteroCombo.setSelectedIndex(1);
-        codingCombo.setSelectedIndex(1);
-        substUnlinkCheck.setSelected(true);
-        heteroUnlinkCheck.setSelected(true);
-    }
-
-    public void setOptions(BeautiOptions options) {
-
-        settingOptions = true;
-
-        if (options.alignment != null) {
-            hasAlignment = true;
-
-            dataType=options.dataType;
-            switch(dataType){
-                case DataType.NUCLEOTIDES:
-                    if (options.nucSubstitutionModel == BeautiOptions.GTR) {
-                        nucSubstCombo.setSelectedIndex(1);
-                    } else {
-                        nucSubstCombo.setSelectedIndex(0);
-                    }
-
-                    frequencyCombo.setSelectedIndex(options.frequencyPolicy);
-
-                    break;
-
-                case DataType.AMINO_ACIDS:
-                    aaSubstCombo.setSelectedIndex(options.aaSubstitutionModel);
-                    break;
-
-                case DataType.TWO_STATES:
-                case DataType.COVARION:
-                    binarySubstCombo.setSelectedIndex(options.binarySubstitutionModel);
-                    break;
-
-                default:
-                    throw new IllegalArgumentException("Unknown data type");
-            }
-
-        } else {
-            hasAlignment = false;
-        }
-
-        if (options.gammaHetero && !options.invarHetero) {
-            heteroCombo.setSelectedIndex(1);
-        } else if (!options.gammaHetero && options.invarHetero) {
-            heteroCombo.setSelectedIndex(2);
-        } else if (options.gammaHetero && options.invarHetero) {
-            heteroCombo.setSelectedIndex(3);
-        } else {
-            heteroCombo.setSelectedIndex(0);
-        }
-
-        gammaCatCombo.setSelectedIndex(options.gammaCategories - 4);
-
-        if (options.codonHeteroPattern == null) {
-            codingCombo.setSelectedIndex(0);
-        } else if (options.codonHeteroPattern.equals("112")) {
-            codingCombo.setSelectedIndex(1);
-        } else {
-            codingCombo.setSelectedIndex(2);
-        }
-
-        substUnlinkCheck.setSelected(options.unlinkedSubstitutionModel);
-        heteroUnlinkCheck.setSelected(options.unlinkedHeterogeneityModel);
-        freqsUnlinkCheck.setSelected(options.unlinkedFrequencyModel);
-
-        hasSetFixedSubstitutionRate = options.hasSetFixedSubstitutionRate;
-        if (!hasSetFixedSubstitutionRate) {
-            if (options.maximumTipHeight > 0.0) {
-                options.meanSubstitutionRate = 0.001;
-                options.fixedSubstitutionRate = false;
-            } else {
-                options.meanSubstitutionRate = 1.0;
-                options.fixedSubstitutionRate = true;
-            }
-        }
-
-        fixedSubstitutionRateCheck.setSelected(options.fixedSubstitutionRate);
-        substitutionRateField.setValue(options.meanSubstitutionRate);
-        substitutionRateField.setEnabled(options.fixedSubstitutionRate);
-
-        switch (options.clockModel) {
-            case BeautiOptions.STRICT_CLOCK:
-                clockModelCombo.setSelectedIndex(0); break;
-            case BeautiOptions.RANDOM_LOCAL_CLOCK:
-                clockModelCombo.setSelectedIndex(1); break;
-            case BeautiOptions.UNCORRELATED_LOGNORMAL:
-                clockModelCombo.setSelectedIndex(2); break;
-            case BeautiOptions.UNCORRELATED_EXPONENTIAL:
-                clockModelCombo.setSelectedIndex(3); break;
-            default:
-                throw new IllegalArgumentException("Unknown option for clock model");
-        }
-        setupPanel();
-
-        settingOptions = false;
-
-        validate();
-        repaint();
-    }
-
-    public void getOptions(BeautiOptions options) {
-
-        // This prevents options be overwritten due to listeners calling
-        // this function (indirectly through modelChanged()) whilst in the
-        // middle of the setOptions() method.
-        if (settingOptions) return;
-
-        if (nucSubstCombo.getSelectedIndex() == 1) {
-            options.nucSubstitutionModel = BeautiOptions.GTR;
-        } else {
-            options.nucSubstitutionModel = BeautiOptions.HKY;
-        }
-        options.aaSubstitutionModel = aaSubstCombo.getSelectedIndex();
-
-        options.binarySubstitutionModel = binarySubstCombo.getSelectedIndex();
-
-        options.frequencyPolicy = frequencyCombo.getSelectedIndex();
-
-        options.gammaHetero = heteroCombo.getSelectedIndex() == 1 || heteroCombo.getSelectedIndex() == 3;
-
-        options.invarHetero = heteroCombo.getSelectedIndex() == 2 || heteroCombo.getSelectedIndex() == 3;
-
-        options.gammaCategories = gammaCatCombo.getSelectedIndex() + 4;
-
-        if (codingCombo.getSelectedIndex() == 0) {
-            options.codonHeteroPattern = null;
-        } else if (codingCombo.getSelectedIndex() == 1) {
-            options.codonHeteroPattern = "112";
-        } else {
-            options.codonHeteroPattern = "123";
-        }
-
-        options.unlinkedSubstitutionModel = substUnlinkCheck.isSelected();
-        options.unlinkedHeterogeneityModel = heteroUnlinkCheck.isSelected();
-        options.unlinkedFrequencyModel = freqsUnlinkCheck.isSelected();
-
-        options.hasSetFixedSubstitutionRate = hasSetFixedSubstitutionRate;
-        options.fixedSubstitutionRate = fixedSubstitutionRateCheck.isSelected();
-        options.meanSubstitutionRate = substitutionRateField.getValue();
-
-        boolean fixed = fixedSubstitutionRateCheck.isSelected();
-        if (!warningShown && !fixed && options.maximumTipHeight == 0.0) {
-            JOptionPane.showMessageDialog(frame,
-                    "You have chosen to sample substitution rates but all \n"+
-                            "the sequences have the same date. In order for this to \n"+
-                            "work, a strong prior is required on the substitution\n"+
-                            "rate or the root of the tree.",
-                    "Warning",
-                    JOptionPane.WARNING_MESSAGE);
-            warningShown = true;
-        }
-
-        switch (clockModelCombo.getSelectedIndex()) {
-            case 0:
-                options.clockModel = BeautiOptions.STRICT_CLOCK; break;
-            case 1:
-                options.clockModel = BeautiOptions.RANDOM_LOCAL_CLOCK; break;
-            case 2:
-                options.clockModel = BeautiOptions.UNCORRELATED_LOGNORMAL; break;
-            case 3:
-                options.clockModel = BeautiOptions.UNCORRELATED_EXPONENTIAL; break;
-            default:
-                throw new IllegalArgumentException("Unknown option for clock model");
-        }
-    }
-
-    public JComponent getExportableComponent() {
-
-        return this;
-    }
-
-    private Action setSRD06Action = new AbstractAction("Use SRD06 Model") {
-        public void actionPerformed(ActionEvent actionEvent) {
-            setSRD06Model();
-        }
-    };
-
-}
diff --git a/src/dr/app/oldbeauti/NexusApplicationImporter.java b/src/dr/app/oldbeauti/NexusApplicationImporter.java
deleted file mode 100644
index 790d824..0000000
--- a/src/dr/app/oldbeauti/NexusApplicationImporter.java
+++ /dev/null
@@ -1,416 +0,0 @@
-/*
- * NexusApplicationImporter.java
- *
- * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
- *
- * This file is part of BEAST.
- * See the NOTICE file distributed with this work for additional
- * information regarding copyright ownership and licensing.
- *
- * BEAST is free software; you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as
- * published by the Free Software Foundation; either version 2
- * of the License, or (at your option) any later version.
- *
- *  BEAST is distributed in the hope that it will be useful,
- *  but WITHOUT ANY WARRANTY; without even the implied warranty of
- *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
- *  GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public
- * License along with BEAST; if not, write to the
- * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
- * Boston, MA  02110-1301  USA
- */
-
-package dr.app.oldbeauti;
-
-import dr.evolution.io.NexusImporter;
-
-import java.io.IOException;
-import java.io.Reader;
-import java.io.Writer;
-
-/**
- * Class for importing PAUP, MrBayes and Rhino NEXUS file format
- *
- * @author			Andrew Rambaut
- * @author			Alexei Drummond
- * @version			$Id: NexusApplicationImporter.java,v 1.4 2005/07/11 14:07:25 rambaut Exp $
- */
-public class NexusApplicationImporter extends NexusImporter { 
-
-	public static final NexusBlock PAUP_BLOCK = new NexusBlock("PAUP");
-	public static final NexusBlock MRBAYES_BLOCK = new NexusBlock("MRBAYES");
-	public static final NexusBlock RHINO_BLOCK = new NexusBlock("RHINO");
-	public static final NexusBlock BEAST_BLOCK = new NexusBlock("BEAST");
-
-	/**
-	 * Constructor
-	 */
-	public NexusApplicationImporter(Reader reader) {
-		super(reader);
-		setCommentDelimiters('[', ']', '\0');
-	}
-	
-	public NexusApplicationImporter(Reader reader, Writer commentWriter) {
-		super(reader, commentWriter);
-		setCommentDelimiters('[', ']', '\0');
-	}
-	
-	/**
-	 * This function returns an enum class to specify what the
-	 * block given by blockName is. 
-	 */
-	public NexusBlock findBlockName(String blockName)
-	{
-		if (blockName.equalsIgnoreCase(PAUP_BLOCK.toString())) {
-			return PAUP_BLOCK;
-		} else  if (blockName.equalsIgnoreCase(MRBAYES_BLOCK.toString())) {
-			return MRBAYES_BLOCK;
-		} else  if (blockName.equalsIgnoreCase(BEAST_BLOCK.toString())) {
-			return BEAST_BLOCK;
-		} else  if (blockName.equalsIgnoreCase(RHINO_BLOCK.toString())) {
-			return RHINO_BLOCK;
-		} else  {
-			return super.findBlockName(blockName);
-		}
-	}
-
-	/**
-	 * Parses a 'PAUP' block. 
-	 */
-	public void parsePAUPBlock(BeastGenerator options) throws ImportException, IOException
-	{
-		// PAUP is largely a subset of BEAST block
-		readBEASTBlock(options);
-	}
-
-	/**
-	 * Parses a 'MRBAYES' block. 
-	 */
-	public void parseMrBayesBlock(BeastGenerator options) throws ImportException, IOException
-	{
-		// MRBAYES is largely a subset of BEAST block
-		readBEASTBlock(options);
-	}
-
-	/**
-	 * Parses a 'BEAST' block. 
-	 */
-	public void parseBEASTBlock(BeastGenerator options) throws ImportException, IOException
-	{
-		readBEASTBlock(options);
-	}
-
-	/**
-	 * Parses a 'RHINO' block. 
-	 */
-	public void parseRhinoBlock(BeastGenerator options) throws ImportException, IOException
-	{
-		readRhinoBlock(options);
-	}
-
-	private void readBEASTBlock(BeastGenerator options) throws ImportException, IOException
-	{
-		boolean done = false;
-				
-		while (!done) {
-			String command = readToken(";");
-			if (match("HSEARCH", command, 2)) {
-				done = true;
-			} else if (match("MCMC", command, 4)) {
-				if (getLastDelimiter() != ';') {
-					readMCMCCommand(options);
-				}
-				done = true;
-			} else if (match("MCMCP", command, 5)) {
-				if (getLastDelimiter() != ';') {
-					readMCMCCommand(options);
-				}
-			} else if (match("LSET", command, 2)) {
-				if (getLastDelimiter() != ';') {
-					readLSETCommand(options);
-				}
-			} else if (command.equalsIgnoreCase("ENDBLOCK") || command.equalsIgnoreCase("END")) {
-				done = true;
-			} else {
-						
-				System.err.println("The command, '" + command + "', is not used by BEAST and has been ignored");
-			}
-		}
-	}
-
-	private void readLSETCommand(BeautiOptions options) throws ImportException, IOException
-	{
-		boolean done = false;
-				
-		while (!done) {
-			String subcommand = readToken("=;");
-			if (match("NST", subcommand, 2)) {
-				int nst = readInteger( ";" );
-				if (nst == 1) {
-					options.nucSubstitutionModel = BeautiOptions.JC;
-				} else if (nst == 2) {
-					options.nucSubstitutionModel = BeautiOptions.HKY;
-				} else if (nst == 6) {
-					options.nucSubstitutionModel = BeautiOptions.GTR;
-				} else {
-					throw new BadFormatException("Bad value for NST subcommand of LSET command");
-				}
-			} else if (match("RATES", subcommand, 2)) {
-				String token = readToken( ";" );
-				
-				if (match("EQUAL", token, 1)) {
-					options.gammaHetero = false;
-					options.invarHetero = false;
-				} else if (match("GAMMA", token, 1)) {
-					options.gammaHetero = true;
-					options.invarHetero = false;
-				} else if (match("PROPINV", token, 1)) {
-					options.gammaHetero = false;
-					options.invarHetero = true;
-				} else if (match("INVGAMMA", token, 1)) {
-					options.gammaHetero = true;
-					options.invarHetero = true;
-				} else if (match("ADGAMMA", token, 1)) {
-					System.err.println("The option, 'RATES=ADGAMMA', in the LSET command is not used by BEAST and has been ignored");
-				} else if (match("SITESPEC", token, 1)) {
-					System.err.println("The option, 'RATES=SITESPEC', in the LSET command is not used by BEAST and has been ignored");
-				} else {
-					throw new BadFormatException("Unknown value, '" + token + "'");
-				}
-			} else if (match("NGAMMACAT", subcommand, 2)) {
-			
-				options.gammaCategories = readInteger( ";" );
-			} else {
-						
-				System.err.println("The option, '" + subcommand + "', in the LSET command is not used by BEAST and has been ignored");
-			}
-			
-			if (getLastDelimiter() == ';') {
-				done = true;
-			}
-		}
-	}
-
-	private void readMCMCCommand(BeautiOptions options) throws ImportException, IOException
-	{
-		boolean done = false;
-				
-		while (!done) {
-			String subcommand = readToken("=;");
-			if (match("NGEN", subcommand, 2)) {
-				options.chainLength = readInteger( ";" );
-			} else if (match("SAMPLEFREQ", subcommand, 2)) {
-				options.logEvery = readInteger( ";" );
-			} else if (match("PRINTFREQ", subcommand, 1)) {
-				options.echoEvery = readInteger( ";" );
-			} else if (match("FILENAME", subcommand, 1)) {
-				options.fileName = readToken( ";" );
-			} else if (match("BURNIN", subcommand, 1)) {
-				options.burnIn = readInteger( ";" );
-			} else if (match("STARTINGTREE", subcommand, 2)) {
-				String token = readToken(";");
-				if (match("USER", token, 1)) {
-					options.userTree = true;
-				} else if (match("RANDOM", token, 1)) {
-					options.userTree = false;
-				} else {
-					throw new BadFormatException("Unknown value, '" + token + "'");
-				}
-			} else {
-						
-				System.err.println("The option, '" + subcommand + "', in the MCMC command is not used by BEAST and has been ignored");
-			}
-			
-			if (getLastDelimiter() == ';') {
-				done = true;
-			}
-		}
-	}
-
-	private void readRhinoBlock(BeastGenerator options) throws ImportException, IOException
-	{
-		boolean done = false;
-				
-		while (!done) {
-			String command = readToken(";");
-			if (match("NUCMODEL", command, 2)) {
-				if (getLastDelimiter() != ';') {
-					readNUCMODELCommand(options);
-				}
-			} else if (match("SITEMODEL", command, 2)) {
-				if (getLastDelimiter() != ';') {
-					readSITEMODELCommand(options);
-				}
-			} else if (match("TREEMODEL", command, 2)) {
-				if (getLastDelimiter() != ';') {
-					readTREEMODELCommand(options);
-				}
-			} else if (match("CPPARTITIONMODEL", command, 2)) {
-				if (getLastDelimiter() != ';') {
-					readCPPARTITIONMODELCommand(options);
-				}
-			} else if (match("OPTIMIZE", command, 1)) {
-				done = true;
-			} else if (command.equalsIgnoreCase("ENDBLOCK") || command.equalsIgnoreCase("END")) {
-				done = true;
-			} else {
-						
-				System.err.println("The command, '" + command + "', is not used by BEAST and has been ignored");
-			}
-		}
-	}
-
-	private void readNUCMODELCommand(BeautiOptions options) throws ImportException, IOException
-	{
-		boolean done = false;
-				
-		while (!done) {
-			String subcommand = readToken("=;");
-			if (match("TYPE", subcommand, 1)) {
-				String token = readToken(";");
-				if (match("HKY", token, 1)) {
-					options.nucSubstitutionModel = BeautiOptions.HKY;
-				} else if (match("GTR", token, 1)) {
-					options.nucSubstitutionModel = BeautiOptions.GTR;
-				} else if (match("F84", token, 1)) {
-					System.err.println("The option, 'TYPE=F84', in the NUCMODEL command is not used by BEAST and has been ignored");
-				} else {
-					throw new BadFormatException("Unknown value, '" + token + "'");
-				}
-			} else {
-						
-				System.err.println("The option, '" + subcommand + "', in the NUCMODEL command is not used by BEAST and has been ignored");
-			}
-			
-			if (getLastDelimiter() == ';') {
-				done = true;
-			}
-		}
-	}
-
-	private void readSITEMODELCommand(BeautiOptions options) throws ImportException, IOException
-	{
-		boolean done = false;
-				
-		while (!done) {
-			String subcommand = readToken("=;");
-			if (match("TYPE", subcommand, 1)) {
-				String token = readToken(";");
-				if (match("HOMOGENEOUS", token, 1)) {
-					options.gammaHetero = false;
-					options.invarHetero = false;
-				} else if (match("GAMMA", token, 2)) {
-					options.gammaHetero = true;
-					options.invarHetero = false;
-				} else if (match("INVAR", token, 1)) {
-					options.gammaHetero = false;
-					options.invarHetero = true;
-				} else if (match("GI", token, 2)) {
-					options.gammaHetero = true;
-					options.invarHetero = true;
-				} else {
-					throw new BadFormatException("Unknown value, '" + token + "'");
-				}
-			} else if (match("NUMCAT", subcommand, 1)) {
-				options.gammaCategories = readInteger( ";" );
-			} else {
-						
-				System.err.println("The option, '" + subcommand + "', in the SITEMODEL command is not used by BEAST and has been ignored");
-			}
-			
-			if (getLastDelimiter() == ';') {
-				done = true;
-			}
-		}
-	}
-
-	private void readTREEMODELCommand(BeautiOptions options) throws ImportException, IOException
-	{
-		boolean done = false;
-				
-		while (!done) {
-			String subcommand = readToken("=;");
-			if (match("TYPE", subcommand, 1)) {
-				String token = readToken(";");
-				if (match("UNCONSTRAINED", token, 1)) {
-					System.err.println("The option, 'TYPE=UNCONSTRAINED', in the TREEMODEL command is not used by BEAST and has been ignored");
-				} else if (match("CONSTRAINED", token, 1)) {
-					// do nothing
-				} else if (match("NODEDATES", token, 1)) {
-					// do nothing
-				} else if (match("TIPDATES", token, 1)) {
-					// do nothing
-				} else {
-					throw new BadFormatException("Unknown value, '" + token + "'");
-				}
-			} else {
-						
-				System.err.println("The option, '" + subcommand + "', in the TREEMODEL command is not used by BEAST and has been ignored");
-			}
-			
-			if (getLastDelimiter() == ';') {
-				done = true;
-			}
-		}
-	}
-
-	private void readCPPARTITIONMODELCommand(BeautiOptions options) throws ImportException, IOException
-	{
-		boolean done = false;
-		
-		options.codonHeteroPattern = null;
-
-		while (!done) {
-			String subcommand = readToken("=;");
-			if (match("ON", subcommand, 1)) {
-				String token = readToken(";");
-				if (match("TRUE", token, 1)) {
-					options.codonHeteroPattern = "123";
-				} else if (match("FALSE", token, 1)) {
-					options.codonHeteroPattern = null;
-				} else {
-					throw new BadFormatException("Unknown value, '" + token + "'");
-				}
-			} else if (match("SUBSTMODEL", subcommand, 1)) {
-				String token = readToken(";");
-				if (match("TRUE", token, 1)) {
-					options.unlinkedSubstitutionModel = true;
-					options.unlinkedHeterogeneityModel = true;
-				} else if (match("FALSE", token, 1)) {
-					options.unlinkedSubstitutionModel = false;
-					options.unlinkedHeterogeneityModel = false;
-				} else {
-					throw new BadFormatException("Unknown value, '" + token + "'");
-				}
-			} else if (match("FREQMODEL", subcommand, 1)) {
-				String token = readToken(";");
-				if (match("TRUE", token, 1)) {
-					options.unlinkedFrequencyModel = true;
-				} else if (match("FALSE", token, 1)) {
-					options.unlinkedFrequencyModel = false;
-				} else {
-					throw new BadFormatException("Unknown value, '" + token + "'");
-				}
-			} else {
-						
-				System.err.println("The option, '" + subcommand + "', in the CPPARTITIONMODEL command is not used by BEAST and has been ignored");
-			}
-			
-			if (getLastDelimiter() == ';') {
-				done = true;
-			}
-		}
-	}
-
-	private boolean match(String reference, String target, int min) throws ImportException
-	{
-		if (target.length() < min) {
-			throw new BadFormatException("Ambiguous command or subcommand, '" + target + "'");
-		}
-		
-		return reference.startsWith(target.toUpperCase());
-	}
-}
diff --git a/src/dr/app/oldbeauti/OperatorsPanel.java b/src/dr/app/oldbeauti/OperatorsPanel.java
deleted file mode 100644
index 1987dc6..0000000
--- a/src/dr/app/oldbeauti/OperatorsPanel.java
+++ /dev/null
@@ -1,285 +0,0 @@
-/*
- * OperatorsPanel.java
- *
- * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
- *
- * This file is part of BEAST.
- * See the NOTICE file distributed with this work for additional
- * information regarding copyright ownership and licensing.
- *
- * BEAST is free software; you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as
- * published by the Free Software Foundation; either version 2
- * of the License, or (at your option) any later version.
- *
- *  BEAST is distributed in the hope that it will be useful,
- *  but WITHOUT ANY WARRANTY; without even the implied warranty of
- *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
- *  GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public
- * License along with BEAST; if not, write to the
- * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
- * Boston, MA  02110-1301  USA
- */
-
-package dr.app.oldbeauti;
-
-import dr.app.gui.table.RealNumberCellEditor;
-import jam.framework.Exportable;
-import jam.table.HeaderRenderer;
-import jam.table.TableRenderer;
-
-import javax.swing.*;
-import javax.swing.plaf.BorderUIResource;
-import javax.swing.table.AbstractTableModel;
-import java.awt.*;
-import java.util.ArrayList;
-
-/**
- * @author			Andrew Rambaut
- * @author			Alexei Drummond
- * @version			$Id: OperatorsPanel.java,v 1.12 2005/07/11 14:07:25 rambaut Exp $
- */
-public class OperatorsPanel extends JPanel implements Exportable {
-
-    /**
-     *
-     */
-    private static final long serialVersionUID = -3456667023451785854L;
-    JScrollPane scrollPane = new JScrollPane();
-    JTable operatorTable = null;
-    OperatorTableModel operatorTableModel = null;
-
-    JCheckBox autoOptimizeCheck = null;
-
-    public ArrayList operators = new ArrayList();
-
-    BeautiFrame frame = null;
-
-    public OperatorsPanel(BeautiFrame parent) {
-
-        this.frame = parent;
-
-        operatorTableModel = new OperatorTableModel();
-        operatorTable = new JTable(operatorTableModel);
-
-        operatorTable.setAutoResizeMode(javax.swing.JTable.AUTO_RESIZE_OFF);
-        operatorTable.getTableHeader().setReorderingAllowed(false);
-        operatorTable.getTableHeader().setDefaultRenderer(
-                new HeaderRenderer(SwingConstants.LEFT, new Insets(0, 4, 0, 4)));
-
-//		operatorTable.setAutoResizeMode(javax.swing.JTable.AUTO_RESIZE_LAST_COLUMN);
-
-        operatorTable.getColumnModel().getColumn(0).setPreferredWidth(50);
-
-        operatorTable.getColumnModel().getColumn(1).setCellRenderer(
-                new OperatorTableCellRenderer(SwingConstants.LEFT, new Insets(0, 4, 0, 4)));
-        operatorTable.getColumnModel().getColumn(1).setPreferredWidth(180);
-
-        operatorTable.getColumnModel().getColumn(2).setCellRenderer(
-                new OperatorTableCellRenderer(SwingConstants.LEFT, new Insets(0, 4, 0, 4)));
-        operatorTable.getColumnModel().getColumn(2).setPreferredWidth(140);
-
-        operatorTable.getColumnModel().getColumn(3).setCellRenderer(
-                new OperatorTableCellRenderer(SwingConstants.LEFT, new Insets(0, 4, 0, 4)));
-        operatorTable.getColumnModel().getColumn(3).setCellEditor(
-                new RealNumberCellEditor(0, Double.POSITIVE_INFINITY));
-        operatorTable.getColumnModel().getColumn(3).setPreferredWidth(50);
-
-        operatorTable.getColumnModel().getColumn(4).setCellRenderer(
-                new OperatorTableCellRenderer(SwingConstants.LEFT, new Insets(0, 4, 0, 4)));
-        operatorTable.getColumnModel().getColumn(4).setCellEditor(
-                new RealNumberCellEditor(0, Double.MAX_VALUE));
-        operatorTable.getColumnModel().getColumn(4).setPreferredWidth(50);
-
-        operatorTable.getColumnModel().getColumn(5).setCellRenderer(
-                new OperatorTableCellRenderer(SwingConstants.LEFT, new Insets(0, 4, 0, 4)));
-        operatorTable.getColumnModel().getColumn(5).setPreferredWidth(400);
-
-        scrollPane = new JScrollPane(operatorTable,
-                JScrollPane.VERTICAL_SCROLLBAR_ALWAYS,
-                JScrollPane.HORIZONTAL_SCROLLBAR_ALWAYS);
-
-        scrollPane.setOpaque(false);
-
-        autoOptimizeCheck = new JCheckBox("Auto Optimize - This option will attempt to tune the operators to maximum efficiency. Turn off to tune the operators manually.");
-        autoOptimizeCheck.setOpaque(false);
-
-        JToolBar toolBar1 = new JToolBar();
-        toolBar1.setFloatable(false);
-        toolBar1.setOpaque(false);
-        toolBar1.setLayout(new FlowLayout(java.awt.FlowLayout.LEFT, 0, 0));
-        toolBar1.add(autoOptimizeCheck);
-
-        setOpaque(false);
-        setLayout(new BorderLayout(0, 0));
-        setBorder(new BorderUIResource.EmptyBorderUIResource(new java.awt.Insets(12, 12, 12, 12)));
-        add(toolBar1, "North");
-        add(scrollPane, "Center");
-    }
-
-    public final void operatorsChanged() {
-        frame.operatorsChanged();
-    }
-
-    public void setOptions(BeautiOptions options) {
-        autoOptimizeCheck.setSelected(options.autoOptimize);
-
-        operators = options.selectOperators();
-
-        operatorTableModel.fireTableDataChanged();
-    }
-
-    public void getOptions(BeautiOptions options) {
-
-        options.autoOptimize = autoOptimizeCheck.isSelected();
-
-    }
-
-    public JComponent getExportableComponent() {
-        return operatorTable;
-    }
-
-    class OperatorTableModel extends AbstractTableModel {
-
-        /**
-         *
-         */
-        private static final long serialVersionUID = -575580804476182225L;
-        String[] columnNames = {"In use", "Operates on", "Type", "Tuning", "Weight", "Description"};
-
-        public OperatorTableModel() {
-        }
-
-        public int getColumnCount() {
-            return columnNames.length;
-        }
-
-        public int getRowCount() {
-            return operators.size();
-        }
-
-        public Object getValueAt(int row, int col) {
-            BeastGenerator.Operator op = (BeastGenerator.Operator) operators.get(row);
-            switch (col) {
-                case 0:
-                    return op.inUse;
-                case 1:
-                    return op.name;
-                case 2:
-                    return op.type;
-                case 3:
-                    if (op.isTunable()) {
-                        return op.tuning;
-                    } else {
-                        return "n/a";
-                    }
-                case 4:
-                    return op.weight;
-                case 5:
-                    return op.getDescription();
-            }
-            return null;
-        }
-
-        public void setValueAt(Object aValue, int row, int col) {
-            BeastGenerator.Operator op = (BeastGenerator.Operator) operators.get(row);
-            switch (col) {
-                case 0:
-                    op.inUse = (Boolean) aValue;
-                    break;
-                case 3:
-                    op.tuning = (Double) aValue;
-                    op.tuningEdited = true;
-                    break;
-                case 4:
-                    op.weight = (Double) aValue;
-                    break;
-            }
-            operatorsChanged();
-        }
-
-        public String getColumnName(int column) {
-            return columnNames[column];
-        }
-
-        public Class getColumnClass(int c) {
-            return getValueAt(0, c).getClass();
-        }
-
-        public boolean isCellEditable(int row, int col) {
-            boolean editable;
-
-            BeastGenerator.Operator op = (BeastGenerator.Operator) operators.get(row);
-
-            switch (col){
-                case 0:// Check box
-                    editable = true;
-                    break;
-                case 3:
-                    editable = op.inUse && op.isTunable();
-                    break;
-                case 4:
-                    editable = op.inUse;
-                    break;
-                default:
-                    editable = false;
-            }
-
-            return editable;
-        }
-
-        public String toString() {
-            StringBuffer buffer = new StringBuffer();
-
-            buffer.append(getColumnName(0));
-            for (int j = 1; j < getColumnCount(); j++) {
-                buffer.append("\t");
-                buffer.append(getColumnName(j));
-            }
-            buffer.append("\n");
-
-            for (int i = 0; i < getRowCount(); i++) {
-                buffer.append(getValueAt(i, 0));
-                for (int j = 1; j < getColumnCount(); j++) {
-                    buffer.append("\t");
-                    buffer.append(getValueAt(i, j));
-                }
-                buffer.append("\n");
-            }
-
-            return buffer.toString();
-        }
-    }
-
-    class OperatorTableCellRenderer extends TableRenderer{
-
-        public OperatorTableCellRenderer( int alignment, Insets insets){
-            super(alignment, insets);
-        }
-
-        public Component getTableCellRendererComponent(JTable aTable,
-                                                       Object value,
-                                                       boolean aIsSelected,
-                                                       boolean aHasFocus,
-                                                       int aRow, int aColumn) {
-
-            if (value == null) return this;
-
-            Component renderer = super.getTableCellRendererComponent(aTable,
-                    value,
-                    aIsSelected,
-                    aHasFocus,
-                    aRow, aColumn);
-
-            BeastGenerator.Operator op = (BeastGenerator.Operator) operators.get(aRow);
-            if (! op.inUse && aColumn > 0)
-                renderer.setForeground(Color.gray);
-            else
-                renderer.setForeground(Color.black);
-            return this;
-        }
-
-    }
-}
diff --git a/src/dr/app/oldbeauti/PriorDialog.java b/src/dr/app/oldbeauti/PriorDialog.java
deleted file mode 100644
index 9e67c1d..0000000
--- a/src/dr/app/oldbeauti/PriorDialog.java
+++ /dev/null
@@ -1,419 +0,0 @@
-/*
- * PriorDialog.java
- *
- * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
- *
- * This file is part of BEAST.
- * See the NOTICE file distributed with this work for additional
- * information regarding copyright ownership and licensing.
- *
- * BEAST is free software; you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as
- * published by the Free Software Foundation; either version 2
- * of the License, or (at your option) any later version.
- *
- *  BEAST is distributed in the hope that it will be useful,
- *  but WITHOUT ANY WARRANTY; without even the implied warranty of
- *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
- *  GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public
- * License along with BEAST; if not, write to the
- * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
- * Boston, MA  02110-1301  USA
- */
-
-package dr.app.oldbeauti;
-
-import dr.app.gui.components.RealNumberField;
-import dr.app.gui.chart.*;
-import dr.util.NumberFormatter;
-import dr.math.distributions.*;
-import jam.panels.OptionsPanel;
-
-import javax.swing.*;
-import javax.swing.border.EmptyBorder;
-import java.awt.*;
-import java.awt.event.*;
-
-/**
- * @author Andrew Rambaut
- * @author Alexei Drummond
- * @version $Id: PriorDialog.java,v 1.4 2006/09/05 13:29:34 rambaut Exp $
- */
-public class PriorDialog {
-
-    private JFrame frame;
-
-    public static PriorType[] priors = {
-            PriorType.UNIFORM_PRIOR,
-            PriorType.EXPONENTIAL_PRIOR,
-            PriorType.NORMAL_PRIOR,
-            PriorType.LOGNORMAL_PRIOR,
-            PriorType.GAMMA_PRIOR,
-            PriorType.JEFFREYS_PRIOR,
-    };
-
-    public static PriorType[] rootHeightPriors = {
-            PriorType.NONE,
-            PriorType.UNIFORM_PRIOR,
-            PriorType.EXPONENTIAL_PRIOR,
-            PriorType.NORMAL_PRIOR,
-            PriorType.LOGNORMAL_PRIOR,
-            PriorType.GAMMA_PRIOR,
-            PriorType.JEFFREYS_PRIOR,
-    };
-
-    private String[] argumentNames = new String[]{
-            "Lower Bound", "Upper Bound", "Exponential Mean", "Zero Offset", "Normal Mean", "Normal Stdev",
-            "LogNormal Mean", "LogNormal Stdev", "Zero Offset",
-            "Gamma Shape (alpha)", "Gamma Scale (beta)", "Zero Offset",
-    };
-
-    private JComboBox priorCombo;
-    private JComboBox rootHeightPriorCombo;
-    private int[][] argumentIndices = {{0, 1}, {2, 3}, {4, 5}, {6, 7, 8}, {9, 10, 11}, {}, {}, {4, 5, 0, 1}};
-    private RealNumberField initialField = new RealNumberField();
-    private RealNumberField[] argumentFields = new RealNumberField[argumentNames.length];
-    private OptionsPanel optionPanel;
-    private JChart chart;
-    private JLabel quantileLabels;
-    private JTextArea quantileText;
-
-    private JCheckBox truncatedCheck;
-    private boolean isTruncated;
-
-    private BeautiOptions.Parameter parameter;
-
-    public PriorDialog(JFrame frame) {
-        this.frame = frame;
-
-        priorCombo = new JComboBox(priors);
-        rootHeightPriorCombo = new JComboBox(rootHeightPriors);
-
-        truncatedCheck = new JCheckBox("Use a truncated normal distribution");
-        truncatedCheck.setOpaque(false);
-
-        initialField.setColumns(8);
-        for (int i = 0; i < argumentNames.length; i++) {
-            argumentFields[i] = new RealNumberField();
-            argumentFields[i].setColumns(8);
-        }
-
-        optionPanel = new OptionsPanel(12, 12);
-
-        chart = new JChart(new LinearAxis(Axis.AT_MINOR_TICK, Axis.AT_MINOR_TICK),
-                new LinearAxis(Axis.AT_ZERO, Axis.AT_DATA));
-
-        quantileLabels = new JLabel();
-        quantileLabels.setFont(quantileLabels.getFont().deriveFont(10.0f));
-        quantileLabels.setOpaque(false);
-        quantileLabels.setText("<html><p align=\"right\">Quantiles: 2.5%:<br>5%:<br>Median:<br>95%:<br>97.5%:</p></html>");
-        quantileText = new JTextArea(0, 5);
-        quantileText.setFont(quantileText.getFont().deriveFont(10.0f));
-        quantileText.setOpaque(false);
-        quantileText.setEditable(false);
-    }
-
-    public int showDialog(final BeautiOptions.Parameter parameter) {
-
-        PriorType priorType;
-
-        this.parameter = parameter;
-
-        priorType = parameter.priorType;
-
-        if (parameter.priorType == PriorType.TRUNC_NORMAL_PRIOR) {
-            isTruncated = true;
-            truncatedCheck.setSelected(isTruncated);
-            priorType = PriorType.NORMAL_PRIOR;
-        }
-
-        if (parameter.isNodeHeight) {
-            if (priorType != PriorType.NONE) {
-                rootHeightPriorCombo.setSelectedItem(priorType);
-            } else {
-                rootHeightPriorCombo.setSelectedIndex(0);
-            }
-        } else {
-            priorCombo.setSelectedItem(priorType);
-        }
-
-        if (!parameter.isStatistic) {
-            initialField.setRange(parameter.lower, parameter.upper);
-            initialField.setValue(parameter.initial);
-        }
-
-
-        setArguments();
-        setupComponents();
-
-        JOptionPane optionPane = new JOptionPane(optionPanel,
-                JOptionPane.QUESTION_MESSAGE,
-                JOptionPane.OK_CANCEL_OPTION,
-                null,
-                null,
-                null);
-        optionPane.setBorder(new EmptyBorder(12, 12, 12, 12));
-
-        final JDialog dialog = optionPane.createDialog(frame, "Prior for Parameter");
-        dialog.pack();
-
-        priorCombo.addItemListener(new ItemListener() {
-            public void itemStateChanged(ItemEvent e) {
-                setupComponents();
-                dialog.pack();
-                dialog.repaint();
-            }
-        });
-
-        rootHeightPriorCombo.addItemListener(new ItemListener() {
-            public void itemStateChanged(ItemEvent e) {
-                setupComponents();
-                dialog.pack();
-                dialog.repaint();
-            }
-        });
-
-        truncatedCheck.addItemListener(new ItemListener() {
-            public void itemStateChanged(ItemEvent e) {
-                isTruncated = truncatedCheck.isSelected();
-                setupComponents();
-                dialog.pack();
-                dialog.repaint();
-            }
-        });
-
-        KeyListener listener = new KeyAdapter() {
-            public void keyReleased(KeyEvent e) {
-                setupChart();
-                dialog.repaint();
-            }
-        };
-        for (int i = 0; i < argumentNames.length; i++) {
-            argumentFields[i].addKeyListener(listener);
-        }
-
-        dialog.setVisible(true);
-
-        int result = JOptionPane.CANCEL_OPTION;
-        Integer value = (Integer) optionPane.getValue();
-        if (value != null && value != -1) {
-            result = value;
-        }
-
-        if (result == JOptionPane.OK_OPTION) {
-            getArguments();
-        }
-
-        return result;
-    }
-
-    private void setArguments() {
-        argumentFields[0].setRange(parameter.lower, parameter.upper);
-        argumentFields[0].setValue(parameter.uniformLower);
-        argumentFields[1].setRange(parameter.lower, parameter.upper);
-        argumentFields[1].setValue(parameter.uniformUpper);
-
-        argumentFields[2].setRange(0.0, Double.MAX_VALUE);
-        argumentFields[2].setValue(parameter.exponentialMean);
-        argumentFields[3].setValue(parameter.exponentialOffset);
-
-        argumentFields[4].setValue(parameter.normalMean);
-        argumentFields[5].setRange(0.0, Double.MAX_VALUE);
-        argumentFields[5].setValue(parameter.normalStdev);
-
-        argumentFields[6].setValue(parameter.logNormalMean);
-        argumentFields[7].setValue(parameter.logNormalStdev);
-        argumentFields[8].setValue(parameter.logNormalOffset);
-
-        argumentFields[9].setRange(0.0, Double.MAX_VALUE);
-        argumentFields[9].setValue(parameter.gammaAlpha);
-        argumentFields[10].setRange(0.0, Double.MAX_VALUE);
-        argumentFields[10].setValue(parameter.gammaBeta);
-        argumentFields[11].setValue(parameter.gammaOffset);
-    }
-
-    private void getArguments() {
-        if (parameter.isNodeHeight) {
-            if (rootHeightPriorCombo.getSelectedIndex() == 0) {
-                parameter.priorType = PriorType.NONE;
-                parameter.initial = Double.NaN;
-                return;
-            } else {
-                parameter.priorType = (PriorType) rootHeightPriorCombo.getSelectedItem();
-            }
-        } else {
-            parameter.priorType = (PriorType) priorCombo.getSelectedItem();
-        }
-
-        if (parameter.priorType == PriorType.NORMAL_PRIOR && isTruncated)
-            parameter.priorType = PriorType.TRUNC_NORMAL_PRIOR;
-
-        if (initialField.getValue() != null) parameter.initial = initialField.getValue();
-
-        switch (parameter.priorType) {
-            case UNIFORM_PRIOR:
-                if (argumentFields[0].getValue() != null) parameter.uniformLower = argumentFields[0].getValue();
-                if (argumentFields[1].getValue() != null) parameter.uniformUpper = argumentFields[1].getValue();
-                break;
-            case EXPONENTIAL_PRIOR:
-                if (argumentFields[2].getValue() != null) parameter.exponentialMean = argumentFields[2].getValue();
-                if (argumentFields[3].getValue() != null) parameter.exponentialOffset = argumentFields[3].getValue();
-                break;
-            case NORMAL_PRIOR:
-                if (argumentFields[4].getValue() != null) parameter.normalMean = argumentFields[4].getValue();
-                if (argumentFields[5].getValue() != null) parameter.normalStdev = argumentFields[5].getValue();
-                break;
-            case LOGNORMAL_PRIOR:
-                if (argumentFields[6].getValue() != null) parameter.logNormalMean = argumentFields[6].getValue();
-                if (argumentFields[7].getValue() != null) parameter.logNormalStdev = argumentFields[7].getValue();
-                if (argumentFields[8].getValue() != null) parameter.logNormalOffset = argumentFields[8].getValue();
-                break;
-            case GAMMA_PRIOR:
-                if (argumentFields[9].getValue() != null) parameter.gammaAlpha = argumentFields[9].getValue();
-                if (argumentFields[10].getValue() != null) parameter.gammaBeta = argumentFields[10].getValue();
-                if (argumentFields[11].getValue() != null) parameter.gammaOffset = argumentFields[11].getValue();
-                break;
-            case JEFFREYS_PRIOR:
-                break;
-            case TRUNC_NORMAL_PRIOR:
-                if (argumentFields[0].getValue() != null) parameter.uniformLower = argumentFields[0].getValue();
-                if (argumentFields[1].getValue() != null) parameter.uniformUpper = argumentFields[1].getValue();
-                if (argumentFields[4].getValue() != null) parameter.normalMean = argumentFields[4].getValue();
-                if (argumentFields[5].getValue() != null) parameter.normalStdev = argumentFields[5].getValue();
-                break;
-            default:
-                throw new IllegalArgumentException("Unknown prior index");
-        }
-    }
-
-    private void setupComponents() {
-        optionPanel.removeAll();
-
-        optionPanel.addSpanningComponent(new JLabel("Select prior distribution for " + parameter.getName()));
-
-        PriorType priorType;
-        if (parameter.isNodeHeight) {
-            optionPanel.addComponents(new JLabel("Prior Distribution:"), rootHeightPriorCombo);
-            if (rootHeightPriorCombo.getSelectedIndex() == 0) {
-                return;
-            } else {
-                priorType = (PriorType) rootHeightPriorCombo.getSelectedItem();
-            }
-        } else {
-            optionPanel.addComponents(new JLabel("Prior Distribution:"), priorCombo);
-            priorType = (PriorType) priorCombo.getSelectedItem();
-        }
-
-        if (priorType == PriorType.NORMAL_PRIOR && isTruncated)
-            priorType = PriorType.TRUNC_NORMAL_PRIOR;
-
-        if (priorType != PriorType.JEFFREYS_PRIOR) {
-            optionPanel.addSeparator();
-
-            if (priorType == PriorType.NORMAL_PRIOR || priorType == PriorType.TRUNC_NORMAL_PRIOR) {
-                optionPanel.addComponent(truncatedCheck);
-            }
-
-            for (int i = 0; i < argumentIndices[priorType.ordinal() - 1].length; i++) {
-                int k = argumentIndices[priorType.ordinal() - 1][i];
-                optionPanel.addComponentWithLabel(argumentNames[k] + ":", argumentFields[k]);
-            }
-        }
-
-        if (!parameter.isStatistic) {
-            optionPanel.addSeparator();
-            optionPanel.addComponents(new JLabel("Initial Value:"), initialField);
-        }
-
-        if (priorType != PriorType.UNIFORM_PRIOR && priorType != PriorType.JEFFREYS_PRIOR) {
-            optionPanel.addSeparator();
-
-            setupChart();
-            chart.setPreferredSize(new Dimension(300, 200));
-            chart.setFontSize(8);
-            optionPanel.addSpanningComponent(chart);
-            optionPanel.addComponents(quantileLabels, quantileText);
-        }
-    }
-
-    NumberFormatter formatter = new NumberFormatter(4);
-
-    private void setupChart() {
-        chart.removeAllPlots();
-
-        PriorType priorType;
-        if (parameter.isNodeHeight) {
-            if (rootHeightPriorCombo.getSelectedIndex() == 0) {
-                return;
-            } else {
-                priorType = (PriorType) rootHeightPriorCombo.getSelectedItem();
-            }
-        } else {
-            priorType = (PriorType) priorCombo.getSelectedItem();
-        }
-
-        if (priorType == PriorType.NORMAL_PRIOR && isTruncated)
-            priorType = PriorType.TRUNC_NORMAL_PRIOR;
-
-        if (priorType == PriorType.TRUNC_NORMAL_PRIOR && !isTruncated)
-            priorType = PriorType.NORMAL_PRIOR;
-
-        Distribution distribution = null;
-        double offset = 0.0;
-        switch (priorType) {
-            case UNIFORM_PRIOR:
-                return;
-            case EXPONENTIAL_PRIOR:
-                double exponentialMean = getValue(argumentFields[2].getValue(), 1.0);
-                offset = getValue(argumentFields[3].getValue(), 0.0);
-                distribution = new ExponentialDistribution(1.0 / exponentialMean);
-                break;
-            case NORMAL_PRIOR:
-                double normalMean = getValue(argumentFields[4].getValue(), 0.0);
-                double normalStdev = getValue(argumentFields[5].getValue(), 1.0);
-                distribution = new NormalDistribution(normalMean, normalStdev);
-                break;
-            case LOGNORMAL_PRIOR:
-                double logNormalMean = getValue(argumentFields[6].getValue(), 0.0);
-                double logNormalStdev = getValue(argumentFields[7].getValue(), 1.0);
-                offset = getValue(argumentFields[8].getValue(), 0.0);
-                distribution = new LogNormalDistribution(logNormalMean, logNormalStdev);
-                break;
-            case GAMMA_PRIOR:
-                double gammaAlpha = getValue(argumentFields[9].getValue(), 1.0);
-                double gammaBeta = getValue(argumentFields[10].getValue(), 1.0);
-                offset = getValue(argumentFields[11].getValue(), 0.0);
-                distribution = new GammaDistribution(gammaAlpha, gammaBeta);
-                break;
-            case JEFFREYS_PRIOR:
-                break;
-            case TRUNC_NORMAL_PRIOR:
-                double truncNormalMean = getValue(argumentFields[4].getValue(), 0.0);
-                double truncNormalStdev = getValue(argumentFields[5].getValue(), 1.0);
-                double truncLower = getValue(argumentFields[0].getValue(), 0.0);
-                double truncUpper = getValue(argumentFields[1].getValue(), 1.0);
-                distribution = new TruncatedNormalDistribution(truncNormalMean, truncNormalStdev, truncLower, truncUpper);
-                break;
-            default:
-                throw new IllegalArgumentException("Unknown prior index");
-
-        }
-        chart.addPlot(new PDFPlot(distribution, offset));
-        if (distribution != null) {
-            quantileText.setText(formatter.format(distribution.quantile(0.025)) +
-                    "\n" + formatter.format(distribution.quantile(0.05)) +
-                    "\n" + formatter.format(distribution.quantile(0.5)) +
-                    "\n" + formatter.format(distribution.quantile(0.95)) +
-                    "\n" + formatter.format(distribution.quantile(0.975)));
-        }
-    }
-
-    private double getValue(Double field, double defaultValue) {
-        if (field != null) {
-            return field;
-        }
-        return defaultValue;
-    }
-}
diff --git a/src/dr/app/oldbeauti/PriorType.java b/src/dr/app/oldbeauti/PriorType.java
deleted file mode 100644
index cbf7bfa..0000000
--- a/src/dr/app/oldbeauti/PriorType.java
+++ /dev/null
@@ -1,140 +0,0 @@
-/*
- * PriorType.java
- *
- * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
- *
- * This file is part of BEAST.
- * See the NOTICE file distributed with this work for additional
- * information regarding copyright ownership and licensing.
- *
- * BEAST is free software; you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as
- * published by the Free Software Foundation; either version 2
- * of the License, or (at your option) any later version.
- *
- *  BEAST is distributed in the hope that it will be useful,
- *  but WITHOUT ANY WARRANTY; without even the implied warranty of
- *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
- *  GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public
- * License along with BEAST; if not, write to the
- * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
- * Boston, MA  02110-1301  USA
- */
-
-package dr.app.oldbeauti;
-
-import java.text.NumberFormat;
-
-/**
- * @author Alexei Drummond
- */
-public enum PriorType {
-
-    NONE,
-    UNIFORM_PRIOR,
-    EXPONENTIAL_PRIOR,
-    NORMAL_PRIOR,
-    LOGNORMAL_PRIOR,
-    GAMMA_PRIOR,
-    JEFFREYS_PRIOR,
-    POISSON_PRIOR,
-    TRUNC_NORMAL_PRIOR;
-
-    public String toString() {
-
-        switch (this) {
-            case NONE: return "none";
-            case UNIFORM_PRIOR: return "Uniform";
-            case EXPONENTIAL_PRIOR: return "Exponential";
-            case NORMAL_PRIOR: return "Normal";
-            case LOGNORMAL_PRIOR: return "Lognormal";
-            case GAMMA_PRIOR: return "Gamma";
-            case JEFFREYS_PRIOR: return "Jeffreys'";
-            case POISSON_PRIOR: return "Poisson";
-            case TRUNC_NORMAL_PRIOR: return "Truncated Normal";
-            default: return "";
-        }
-    }
-
-    public String getPriorString(BeastGenerator.Parameter param) {
-
-        NumberFormat formatter = NumberFormat.getNumberInstance();
-        StringBuffer buffer = new StringBuffer();
-
-        if (!param.priorEdited) {
-            buffer.append("* ");
-        }
-        switch (param.priorType) {
-            case NONE:
-                buffer.append("Using Tree Prior");
-                break;
-            case UNIFORM_PRIOR:
-                if (!param.isDiscrete && !param.isStatistic) {
-                    buffer.append("Uniform [");
-                    buffer.append(formatter.format(param.uniformLower));
-                    buffer.append(", ");
-                    buffer.append(formatter.format(param.uniformUpper));
-                    buffer.append("]");
-                } else {
-                    buffer.append("Uniform");
-                }
-                break;
-            case EXPONENTIAL_PRIOR:
-                buffer.append("Exponential [");
-                buffer.append(formatter.format(param.exponentialMean));
-                buffer.append("]");
-                break;
-            case NORMAL_PRIOR:
-                buffer.append("Normal [");
-                buffer.append(formatter.format(param.normalMean));
-                buffer.append(", ");
-                buffer.append(formatter.format(param.normalStdev));
-                buffer.append("]");
-                break;
-            case LOGNORMAL_PRIOR:
-                buffer.append("LogNormal [");
-                buffer.append(formatter.format(param.logNormalMean));
-                buffer.append(", ");
-                buffer.append(formatter.format(param.logNormalStdev));
-                buffer.append("]");
-                break;
-            case GAMMA_PRIOR:
-                buffer.append("Gamma [");
-                buffer.append(formatter.format(param.gammaAlpha));
-                buffer.append(", ");
-                buffer.append(formatter.format(param.gammaBeta));
-                buffer.append("]");
-                break;
-            case JEFFREYS_PRIOR:
-                buffer.append("Jeffreys");
-                break;
-            case POISSON_PRIOR:
-                buffer.append("Poisson [");
-                buffer.append(formatter.format(param.poissonMean));
-                buffer.append("]");
-                break;
-            case TRUNC_NORMAL_PRIOR:
-                buffer.append("Truncated Normal [");
-                buffer.append(formatter.format(param.normalMean));
-                buffer.append(", ");
-                buffer.append(formatter.format(param.normalStdev));
-                buffer.append("]");
-                buffer.append(" in [");
-                buffer.append(formatter.format(param.uniformLower));
-                buffer.append(", ");
-                buffer.append(formatter.format(param.uniformUpper));
-                buffer.append("]");
-
-                break;
-            default:
-                throw new IllegalArgumentException("Unknown prior type");
-        }
-        if (param.priorType != PriorType.NONE && !param.isStatistic) {
-            buffer.append(", initial=").append(param.initial);
-        }
-
-        return buffer.toString();
-    }
-}
diff --git a/src/dr/app/oldbeauti/PriorsPanel.java b/src/dr/app/oldbeauti/PriorsPanel.java
deleted file mode 100644
index ac62b06..0000000
--- a/src/dr/app/oldbeauti/PriorsPanel.java
+++ /dev/null
@@ -1,509 +0,0 @@
-/*
- * PriorsPanel.java
- *
- * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
- *
- * This file is part of BEAST.
- * See the NOTICE file distributed with this work for additional
- * information regarding copyright ownership and licensing.
- *
- * BEAST is free software; you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as
- * published by the Free Software Foundation; either version 2
- * of the License, or (at your option) any later version.
- *
- *  BEAST is distributed in the hope that it will be useful,
- *  but WITHOUT ANY WARRANTY; without even the implied warranty of
- *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
- *  GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public
- * License along with BEAST; if not, write to the
- * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
- * Boston, MA  02110-1301  USA
- */
-
-package dr.app.oldbeauti;
-
-import dr.app.gui.components.RealNumberField;
-import dr.app.gui.table.TableEditorStopper;
-import dr.util.NumberFormatter;
-import dr.app.gui.components.WholeNumberField;
-import jam.framework.Exportable;
-import jam.panels.OptionsPanel;
-import jam.table.HeaderRenderer;
-import jam.table.TableRenderer;
-
-import javax.swing.*;
-import javax.swing.plaf.BorderUIResource;
-import javax.swing.table.AbstractTableModel;
-import javax.swing.table.TableCellRenderer;
-import java.awt.*;
-import java.awt.event.*;
-import java.util.ArrayList;
-
-/**
- * @author Andrew Rambaut
- * @author Alexei Drummond
- * @version $Id: PriorsPanel.java,v 1.9 2006/09/05 13:29:34 rambaut Exp $
- */
-public class PriorsPanel extends JPanel implements Exportable {
-
-    /**
-     *
-     */
-    private static final long serialVersionUID = -2936049032365493416L;
-    JScrollPane scrollPane = new JScrollPane();
-    JTable priorTable = null;
-    PriorTableModel priorTableModel = null;
-
-    OptionsPanel treePriorPanel = new OptionsPanel();
-    JComboBox treePriorCombo;
-    JComboBox parameterizationCombo = new JComboBox(new String[]{
-            "Growth Rate", "Doubling Time"});
-    JComboBox bayesianSkylineCombo = new JComboBox(new String[]{
-            "Piecewise-constant", "Piecewise-linear"});
-    WholeNumberField groupCountField = new WholeNumberField(2, Integer.MAX_VALUE);
-
-    RealNumberField samplingProportionField = new RealNumberField(Double.MIN_VALUE, 1.0);
-
-    JCheckBox upgmaStartingTreeCheck = new JCheckBox("Use UPGMA to construct a starting tree");
-
-    public ArrayList parameters = new ArrayList();
-
-    BeautiFrame frame = null;
-
-    public PriorsPanel(BeautiFrame parent) {
-
-        this.frame = parent;
-
-        priorTableModel = new PriorTableModel();
-        priorTable = new JTable(priorTableModel);
-
-        priorTable.setAutoResizeMode(javax.swing.JTable.AUTO_RESIZE_OFF);
-        priorTable.getTableHeader().setReorderingAllowed(false);
-        priorTable.getTableHeader().setDefaultRenderer(
-                new HeaderRenderer(SwingConstants.LEFT, new Insets(0, 4, 0, 4)));
-
-        priorTable.setAutoResizeMode(javax.swing.JTable.AUTO_RESIZE_LAST_COLUMN);
-        priorTable.getColumnModel().getColumn(0).setCellRenderer(
-                new TableRenderer(SwingConstants.LEFT, new Insets(0, 4, 0, 4)));
-        priorTable.getColumnModel().getColumn(0).setPreferredWidth(160);
-
-        priorTable.getColumnModel().getColumn(1).setCellRenderer(
-                new ButtonRenderer(SwingConstants.LEFT, new Insets(0, 8, 0, 8)));
-        priorTable.getColumnModel().getColumn(1).setCellEditor(
-                new ButtonEditor(SwingConstants.LEFT, new Insets(0, 8, 0, 8)));
-        priorTable.getColumnModel().getColumn(1).setPreferredWidth(260);
-
-        priorTable.getColumnModel().getColumn(2).setCellRenderer(
-                new TableRenderer(SwingConstants.LEFT, new Insets(0, 4, 0, 4)));
-        priorTable.getColumnModel().getColumn(2).setPreferredWidth(400);
-
-        TableEditorStopper.ensureEditingStopWhenTableLosesFocus(priorTable);
-
-        scrollPane = new JScrollPane(priorTable,
-                JScrollPane.VERTICAL_SCROLLBAR_ALWAYS,
-                JScrollPane.HORIZONTAL_SCROLLBAR_ALWAYS);
-
-        scrollPane.setOpaque(false);
-
-        java.awt.event.ItemListener listener = new java.awt.event.ItemListener() {
-            public void itemStateChanged(java.awt.event.ItemEvent ev) {
-                if (!settingOptions) frame.priorsChanged();
-            }
-        };
-
-        // order here must match corrosponding BeautiOptions constant, i.e. BeautiOptions.CONSTANT == 0 etc
-        if (BeautiApp.developer) {
-            treePriorCombo = new JComboBox(new String[]{
-                    "Coalescent: Constant Size",
-                    "Coalescent: Exponential Growth",
-                    "Coalescent: Logistic Growth",
-                    "Coalescent: Expansion Growth",
-                    "Coalescent: Bayesian Skyline",
-                    "Coalescent: Extended Bayesian Skyline",
-                    "Speciation: Yule Process",
-                    "Speciation: Birth-Death Process"
-            });
-        } else {
-            treePriorCombo = new JComboBox(new String[]{
-                    "Coalescent: Constant Size",
-                    "Coalescent: Exponential Growth",
-                    "Coalescent: Logistic Growth",
-                    "Coalescent: Expansion Growth",
-                    "Coalescent: Bayesian Skyline",
-                    "Coalescent: Extended Bayesian Skyline",
-                    "Speciation: Yule Process",
-                    "Speciation: Birth-Death Process"
-            });
-        }
-        setupComponent(treePriorCombo);
-        treePriorCombo.addItemListener(
-                new java.awt.event.ItemListener() {
-                    public void itemStateChanged(java.awt.event.ItemEvent ev) {
-                        if (!settingOptions) frame.priorsChanged();
-                        setupPanel();
-                    }
-                }
-        );
-
-        KeyListener keyListener = new KeyAdapter() {
-            public void keyTyped(java.awt.event.KeyEvent ev) {
-                if (!settingOptions && ev.getKeyCode() == KeyEvent.VK_ENTER) {
-                    frame.priorsChanged();
-                }
-            }
-        };
-
-        groupCountField.addKeyListener(keyListener);
-        samplingProportionField.addKeyListener(keyListener);
-
-        FocusListener focusListener = new FocusAdapter() {
-            public void focusLost(FocusEvent focusEvent) {
-                frame.priorsChanged();
-            }
-        };
-        samplingProportionField.addFocusListener(focusListener);
-        groupCountField.addFocusListener(focusListener);
-
-        setupComponent(parameterizationCombo);
-        parameterizationCombo.addItemListener(listener);
-
-        setupComponent(bayesianSkylineCombo);
-        bayesianSkylineCombo.addItemListener(listener);
-
-        setupComponent(upgmaStartingTreeCheck);
-
-        setOpaque(false);
-        setLayout(new BorderLayout(0, 0));
-        setBorder(new BorderUIResource.EmptyBorderUIResource(new java.awt.Insets(12, 12, 12, 12)));
-
-        JPanel panel = new JPanel(new BorderLayout(0, 0));
-        panel.setOpaque(false);
-        panel.add(new JLabel("Priors for model parameters and statistics:"), BorderLayout.NORTH);
-        panel.add(scrollPane, BorderLayout.CENTER);
-        panel.add(new JLabel("* Marked parameters currently have a default prior distribution. " +
-                "You should check that these are appropriate."), BorderLayout.SOUTH);
-
-        treePriorPanel.setBorder(null);
-        add(treePriorPanel, BorderLayout.NORTH);
-        add(panel, BorderLayout.CENTER);
-    }
-
-    private void setupComponent(JComponent comp) {
-        comp.setOpaque(false);
-
-        if (comp instanceof JButton) {
-            comp.putClientProperty("JButton.buttonType", "roundRect");
-        }
-        if (comp instanceof JComboBox) {
-            comp.putClientProperty("JComboBox.isSquare", Boolean.TRUE);
-        }
-    }
-
-    private void setupPanel() {
-
-        treePriorPanel.removeAll();
-
-        treePriorPanel.addComponentWithLabel("Tree Prior:", treePriorCombo);
-        if (treePriorCombo.getSelectedIndex() == 1 || // exponential
-                treePriorCombo.getSelectedIndex() == 2 || // logistic
-                treePriorCombo.getSelectedIndex() == 3) { // expansion
-            treePriorPanel.addComponentWithLabel("Parameterization for growth:", parameterizationCombo);
-        } else if (treePriorCombo.getSelectedIndex() == 4) { // bayesian skyline
-            groupCountField.setColumns(6);
-            treePriorPanel.addComponentWithLabel("Number of groups:", groupCountField);
-            treePriorPanel.addComponentWithLabel("Skyline Model:", bayesianSkylineCombo);
-        } else if (treePriorCombo.getSelectedIndex() == 6) { // birth-death
-            samplingProportionField.setColumns(8);
-            treePriorPanel.addComponentWithLabel("Proportion of taxa sampled:", samplingProportionField);
-        }
-
-        treePriorPanel.addComponent(upgmaStartingTreeCheck);
-
-        validate();
-        repaint();
-    }
-
-    private boolean settingOptions = false;
-
-    public void setOptions(BeautiOptions options) {
-        settingOptions = true;
-        parameters = options.selectParameters();
-        priorTableModel.fireTableDataChanged();
-
-        treePriorCombo.setSelectedIndex(options.nodeHeightPrior);
-
-        groupCountField.setValue(options.skylineGroupCount);
-        samplingProportionField.setValue(options.birthDeathSamplingProportion);
-
-        parameterizationCombo.setSelectedIndex(options.parameterization);
-        bayesianSkylineCombo.setSelectedIndex(options.skylineModel);
-
-        upgmaStartingTreeCheck.setSelected(options.upgmaStartingTree);
-
-        setupPanel();
-
-        settingOptions = false;
-
-        validate();
-        repaint();
-    }
-
-    private PriorDialog priorDialog = null;
-    private DiscretePriorDialog discretePriorDialog = null;
-
-    private void priorButtonPressed(int row) {
-        BeautiOptions.Parameter param = (BeautiOptions.Parameter) parameters.get(row);
-
-        if (param.isDiscrete) {
-            if (discretePriorDialog == null) {
-                discretePriorDialog = new DiscretePriorDialog(frame);
-            }
-
-            if (discretePriorDialog.showDialog(param) == JOptionPane.CANCEL_OPTION) {
-                return;
-            }
-        } else {
-            if (priorDialog == null) {
-                priorDialog = new PriorDialog(frame);
-            }
-
-            if (priorDialog.showDialog(param) == JOptionPane.CANCEL_OPTION) {
-                return;
-            }
-        }
-        param.priorEdited = true;
-
-        priorTableModel.fireTableDataChanged();
-    }
-
-    public void getOptions(BeautiOptions options) {
-        if (settingOptions) return;
-
-        if (treePriorCombo.getSelectedIndex() == BeautiOptions.CONSTANT) {
-            options.nodeHeightPrior = BeautiOptions.CONSTANT;
-        } else if (treePriorCombo.getSelectedIndex() == BeautiOptions.EXPONENTIAL) {
-            options.nodeHeightPrior = BeautiOptions.EXPONENTIAL;
-        } else if (treePriorCombo.getSelectedIndex() == BeautiOptions.LOGISTIC) {
-            options.nodeHeightPrior = BeautiOptions.LOGISTIC;
-        } else if (treePriorCombo.getSelectedIndex() == BeautiOptions.EXPANSION) {
-            options.nodeHeightPrior = BeautiOptions.EXPANSION;
-        } else if (treePriorCombo.getSelectedIndex() == BeautiOptions.SKYLINE) {
-            options.nodeHeightPrior = BeautiOptions.SKYLINE;
-            Integer groupCount = groupCountField.getValue();
-            if (groupCount != null) {
-                options.skylineGroupCount = groupCount;
-            } else {
-                options.skylineGroupCount = 5;
-            }
-        } else if (treePriorCombo.getSelectedIndex() == BeautiOptions.EXTENDED_SKYLINE) {
-            options.nodeHeightPrior = BeautiOptions.EXTENDED_SKYLINE;
-        } else if (treePriorCombo.getSelectedIndex() == BeautiOptions.YULE) {
-            options.nodeHeightPrior = BeautiOptions.YULE;
-        } else if (treePriorCombo.getSelectedIndex() == BeautiOptions.BIRTH_DEATH) {
-            options.nodeHeightPrior = BeautiOptions.BIRTH_DEATH;
-            Double samplingProportion = samplingProportionField.getValue();
-            if (samplingProportion != null) {
-                options.birthDeathSamplingProportion = samplingProportion;
-            } else {
-                options.birthDeathSamplingProportion = 1.0;
-            }
-        } else {
-            throw new RuntimeException("Unexpected value from treePriorCombo");
-        }
-
-        options.parameterization = parameterizationCombo.getSelectedIndex();
-        options.skylineModel = bayesianSkylineCombo.getSelectedIndex();
-
-        options.upgmaStartingTree = upgmaStartingTreeCheck.isSelected();
-    }
-
-    public JComponent getExportableComponent() {
-        return priorTable;
-    }
-
-    NumberFormatter formatter = new NumberFormatter(4);
-
-    class PriorTableModel extends AbstractTableModel {
-
-        /**
-         *
-         */
-        private static final long serialVersionUID = -8864178122484971872L;
-        String[] columnNames = {"Parameter", "Prior", "Description"};
-
-        public PriorTableModel() {
-        }
-
-        public int getColumnCount() {
-            return columnNames.length;
-        }
-
-        public int getRowCount() {
-            return parameters.size();
-        }
-
-        public Object getValueAt(int row, int col) {
-            BeastGenerator.Parameter param = (BeastGenerator.Parameter) parameters.get(row);
-            switch (col) {
-                case 0:
-                    return param.getName();
-                case 1:
-                    return param.priorType.getPriorString(param);
-                case 2:
-                    return param.getDescription();
-            }
-            return null;
-        }
-
-        public String getColumnName(int column) {
-            return columnNames[column];
-        }
-
-        public Class getColumnClass(int c) {
-            return getValueAt(0, c).getClass();
-        }
-
-        public boolean isCellEditable(int row, int col) {
-            return col == 1;
-        }
-
-        public String toString() {
-            StringBuffer buffer = new StringBuffer();
-
-            buffer.append(getColumnName(0));
-            for (int j = 1; j < getColumnCount(); j++) {
-                buffer.append("\t");
-                buffer.append(getColumnName(j));
-            }
-            buffer.append("\n");
-
-            for (int i = 0; i < getRowCount(); i++) {
-                buffer.append(getValueAt(i, 0));
-                for (int j = 1; j < getColumnCount(); j++) {
-                    buffer.append("\t");
-                    buffer.append(getValueAt(i, j));
-                }
-                buffer.append("\n");
-            }
-
-            return buffer.toString();
-        }
-    }
-
-    class DoubleRenderer extends TableRenderer {
-
-        /**
-         *
-         */
-        private static final long serialVersionUID = -2614341608257369805L;
-
-        public DoubleRenderer(int alignment, Insets insets) {
-
-            super(true, alignment, insets);
-        }
-
-        public Component getTableCellRendererComponent(JTable table, Object value, boolean isSelected,
-                                                       boolean hasFocus, int row, int column) {
-
-            String s;
-            if (((Double) value).isNaN()) {
-                s = "random";
-            } else {
-                s = formatter.format((Double) value);
-            }
-            return super.getTableCellRendererComponent(table, s, isSelected, hasFocus, row, column);
-
-        }
-    }
-
-    public class ButtonRenderer extends JButton implements TableCellRenderer {
-
-        /**
-         *
-         */
-        private static final long serialVersionUID = -2416184092883649169L;
-
-        public ButtonRenderer(int alignment, Insets insets) {
-            setOpaque(true);
-            setHorizontalAlignment(alignment);
-            setMargin(insets);
-        }
-
-        public Component getTableCellRendererComponent(JTable table, Object value,
-                                                       boolean isSelected, boolean hasFocus, int row, int column) {
-            setEnabled(table.isEnabled());
-            setFont(table.getFont());
-            if (isSelected) {
-                setForeground(table.getSelectionForeground());
-                setBackground(table.getSelectionBackground());
-            } else {
-                setForeground(table.getForeground());
-                setBackground(UIManager.getColor("Button.background"));
-            }
-            setText((value == null) ? "" : value.toString());
-            return this;
-        }
-    }
-
-    public class ButtonEditor extends DefaultCellEditor {
-        /**
-         *
-         */
-        private static final long serialVersionUID = 6372738480075411674L;
-        protected JButton button;
-        private String label;
-        private boolean isPushed;
-        private int row;
-
-        public ButtonEditor(int alignment, Insets insets) {
-            super(new JCheckBox());
-            button = new JButton();
-            button.setOpaque(true);
-            button.setHorizontalAlignment(alignment);
-            button.setMargin(insets);
-            button.addActionListener(new ActionListener() {
-                public void actionPerformed(ActionEvent e) {
-                    fireEditingStopped();
-                }
-            });
-        }
-
-        public Component getTableCellEditorComponent(JTable table, Object value,
-                                                     boolean isSelected, int row, int column) {
-            button.setEnabled(table.isEnabled());
-            button.setFont(table.getFont());
-            if (isSelected) {
-                button.setForeground(table.getSelectionForeground());
-                button.setBackground(table.getSelectionBackground());
-            } else {
-                button.setForeground(table.getForeground());
-                button.setBackground(table.getBackground());
-            }
-            label = (value == null) ? "" : value.toString();
-            button.setText(label);
-            isPushed = true;
-            this.row = row;
-            return button;
-        }
-
-        public Object getCellEditorValue() {
-            if (isPushed) {
-                priorButtonPressed(row);
-            }
-            isPushed = false;
-            return label;
-        }
-
-        public boolean stopCellEditing() {
-            isPushed = false;
-            return super.stopCellEditing();
-        }
-
-        protected void fireEditingStopped() {
-            super.fireEditingStopped();
-        }
-    }
-}
diff --git a/src/dr/app/oldbeauti/TaxaPanel.java b/src/dr/app/oldbeauti/TaxaPanel.java
deleted file mode 100644
index 97fe0ef..0000000
--- a/src/dr/app/oldbeauti/TaxaPanel.java
+++ /dev/null
@@ -1,816 +0,0 @@
-/*
- * TaxaPanel.java
- *
- * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
- *
- * This file is part of BEAST.
- * See the NOTICE file distributed with this work for additional
- * information regarding copyright ownership and licensing.
- *
- * BEAST is free software; you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as
- * published by the Free Software Foundation; either version 2
- * of the License, or (at your option) any later version.
- *
- *  BEAST is distributed in the hope that it will be useful,
- *  but WITHOUT ANY WARRANTY; without even the implied warranty of
- *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
- *  GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public
- * License along with BEAST; if not, write to the
- * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
- * Boston, MA  02110-1301  USA
- */
-
-package dr.app.oldbeauti;
-
-import dr.evolution.util.Taxa;
-import dr.evolution.util.Taxon;
-import dr.evolution.util.TaxonList;
-import jam.framework.Exportable;
-import jam.panels.ActionPanel;
-import jam.table.TableRenderer;
-import jam.util.IconUtils;
-
-import javax.swing.*;
-import javax.swing.event.ListSelectionEvent;
-import javax.swing.event.ListSelectionListener;
-import javax.swing.plaf.BorderUIResource;
-import javax.swing.table.AbstractTableModel;
-import javax.swing.table.TableColumn;
-import javax.swing.table.TableColumnModel;
-import java.awt.*;
-import java.awt.event.*;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-
-/**
- * @author			Andrew Rambaut
- * @author			Alexei Drummond
- * @version			$Id: TaxaPanel.java,v 1.1 2006/09/05 13:29:34 rambaut Exp $
- */
-public class TaxaPanel extends JPanel implements Exportable {
-
-
-    /**
-     *
-     */
-    private static final long serialVersionUID = -3138832889782090814L;
-
-    private final String TAXON_SET_DEFAULT = "taxon set...";
-
-    BeautiFrame frame = null;
-
-    BeautiOptions options = null;
-
-    private TaxonList taxa = null;
-    private JTable taxonSetsTable = null;
-    private TaxonSetsTableModel taxonSetsTableModel = null;
-
-    private JPanel taxonSetEditingPanel = null;
-
-    private Taxa currentTaxonSet = null;
-
-    private List<Taxon> includedTaxa = new ArrayList<Taxon>();
-    private List<Taxon> excludedTaxa = new ArrayList<Taxon>();
-
-    private JTable excludedTaxaTable = null;
-    private TaxaTableModel excludedTaxaTableModel = null;
-    private JComboBox excludedTaxonSetsComboBox = null;
-    private boolean excludedSelectionChanging = false;
-
-    private JTable includedTaxaTable = null;
-    private TaxaTableModel includedTaxaTableModel = null;
-    private JComboBox includedTaxonSetsComboBox = null;
-    private boolean includedSelectionChanging = false;
-
-    private static int taxonSetCount = 0;
-
-    public TaxaPanel(BeautiFrame parent) {
-
-        this.frame = parent;
-
-        Icon includeIcon = null, excludeIcon = null;
-        try {
-            includeIcon = new ImageIcon(IconUtils.getImage(this.getClass(), "images/include.png"));
-            excludeIcon = new ImageIcon(IconUtils.getImage(this.getClass(), "images/exclude.png"));
-        } catch (Exception e) {
-            // do nothing
-        }
-
-        // Taxon Sets
-        taxonSetsTableModel = new TaxonSetsTableModel();
-        taxonSetsTable = new JTable(taxonSetsTableModel);
-        final TableColumnModel model = taxonSetsTable.getColumnModel();
-        final TableColumn tableColumn0 = model.getColumn(0);
-        tableColumn0.setCellRenderer(new TableRenderer(SwingConstants.LEFT, new Insets(0, 4, 0, 4)));
-        tableColumn0.setMinWidth(20);
-
-        //final TableColumn tableColumn1 = model.getColumn(1);
-
-        taxonSetsTable.getSelectionModel().addListSelectionListener(new ListSelectionListener() {
-            public void valueChanged(ListSelectionEvent evt) { taxonSetsTableSelectionChanged(); }
-        });
-
-        JScrollPane scrollPane1 = new JScrollPane(taxonSetsTable,
-                JScrollPane.VERTICAL_SCROLLBAR_ALWAYS,
-                JScrollPane.HORIZONTAL_SCROLLBAR_NEVER);
-
-        ActionPanel actionPanel1 = new ActionPanel(false);
-        actionPanel1.setAddAction(addTaxonSetAction);
-        actionPanel1.setRemoveAction(removeTaxonSetAction);
-
-        addTaxonSetAction.setEnabled(false);
-        removeTaxonSetAction.setEnabled(false);
-
-        JPanel controlPanel1 = new JPanel(new FlowLayout(FlowLayout.LEFT));
-        controlPanel1.add(actionPanel1);
-
-        // Excluded Taxon List
-        excludedTaxaTableModel = new TaxaTableModel(false);
-        excludedTaxaTable = new JTable(excludedTaxaTableModel);
-
-        excludedTaxaTable.getColumnModel().getColumn(0).setCellRenderer(
-                new TableRenderer(SwingConstants.LEFT, new Insets(0, 4, 0, 4)));
-        excludedTaxaTable.getColumnModel().getColumn(0).setMinWidth(20);
-
-        excludedTaxaTable.getSelectionModel().addListSelectionListener(new ListSelectionListener() {
-            public void valueChanged(ListSelectionEvent evt) { excludedTaxaTableSelectionChanged(); }
-        });
-
-        JScrollPane scrollPane2 = new JScrollPane(excludedTaxaTable,
-                JScrollPane.VERTICAL_SCROLLBAR_ALWAYS,
-                JScrollPane.HORIZONTAL_SCROLLBAR_NEVER);
-
-        Box panel1 = new Box(BoxLayout.X_AXIS);
-        panel1.add(new JLabel("Select: "));
-        panel1.setOpaque(false);
-        excludedTaxonSetsComboBox = new JComboBox(new String[] { TAXON_SET_DEFAULT });
-        excludedTaxonSetsComboBox.setOpaque(false);
-        panel1.add(excludedTaxonSetsComboBox);
-
-        JPanel buttonPanel = createAddRemoveButtonPanel(includeTaxonAction, includeIcon, "Include selected taxa in the taxon set",
-                excludeTaxonAction, excludeIcon, "Exclude selected taxa from the taxon set",
-                javax.swing.BoxLayout.Y_AXIS);
-
-        // Included Taxon List
-        includedTaxaTableModel = new TaxaTableModel(true);
-        includedTaxaTable = new JTable(includedTaxaTableModel);
-
-        includedTaxaTable.getColumnModel().getColumn(0).setCellRenderer(
-                new TableRenderer(SwingConstants.LEFT, new Insets(0, 4, 0, 4)));
-        includedTaxaTable.getColumnModel().getColumn(0).setMinWidth(20);
-
-        includedTaxaTable.getSelectionModel().addListSelectionListener(new ListSelectionListener() {
-            public void valueChanged(ListSelectionEvent evt) { includedTaxaTableSelectionChanged(); }
-        });
-        includedTaxaTable.doLayout();
-
-        JScrollPane scrollPane3 = new JScrollPane(includedTaxaTable,
-                JScrollPane.VERTICAL_SCROLLBAR_ALWAYS,
-                JScrollPane.HORIZONTAL_SCROLLBAR_NEVER);
-
-        Box panel2 = new Box(BoxLayout.X_AXIS);
-        panel2.add(new JLabel("Select: "));
-        panel2.setOpaque(false);
-        includedTaxonSetsComboBox = new JComboBox(new String[] { TAXON_SET_DEFAULT });
-        includedTaxonSetsComboBox.setOpaque(false);
-        panel2.add(includedTaxonSetsComboBox);
-
-        taxonSetEditingPanel = new JPanel();
-        taxonSetEditingPanel.setBorder(BorderFactory.createTitledBorder("Taxon Set: none selected"));
-        taxonSetEditingPanel.setOpaque(false);
-        taxonSetEditingPanel.setLayout(new GridBagLayout());
-        GridBagConstraints c = new GridBagConstraints();
-
-        c.gridx = 0;
-        c.gridy = 0;
-        c.weightx = 0.5;
-        c.weighty = 1;
-        c.fill = GridBagConstraints.BOTH;
-        c.anchor = GridBagConstraints.CENTER;
-        c.insets = new Insets(12,12,4,0);
-        taxonSetEditingPanel.add(scrollPane2, c);
-
-        c.gridx = 0;
-        c.gridy = 1;
-        c.weightx = 0.5;
-        c.weighty = 0;
-        c.fill = GridBagConstraints.HORIZONTAL;
-        c.anchor = GridBagConstraints.CENTER;
-        c.insets = new Insets(0,12,12,0);
-        taxonSetEditingPanel.add(panel1, c);
-
-        c.gridx = 1;
-        c.gridy = 0;
-        c.weightx = 0;
-        c.weighty = 1;
-        c.gridheight = 2;
-        c.fill = GridBagConstraints.NONE;
-        c.anchor = GridBagConstraints.CENTER;
-        c.insets = new Insets(12,2,12,4);
-        taxonSetEditingPanel.add(buttonPanel, c);
-
-        c.gridx = 2;
-        c.gridy = 0;
-        c.weightx = 0.5;
-        c.weighty = 1;
-        c.gridheight = 1;
-        c.fill = GridBagConstraints.BOTH;
-        c.anchor = GridBagConstraints.CENTER;
-        c.insets = new Insets(12,0,4,12);
-        taxonSetEditingPanel.add(scrollPane3, c);
-
-        c.gridx = 2;
-        c.gridy = 1;
-        c.weightx = 0.5;
-        c.weighty = 0;
-        c.fill = GridBagConstraints.HORIZONTAL;
-        c.anchor = GridBagConstraints.CENTER;
-        c.insets = new Insets(0,0,12,12);
-        taxonSetEditingPanel.add(panel2, c);
-
-        JPanel panel3 = new JPanel();
-        panel3.setOpaque(false);
-        panel3.setLayout(new GridBagLayout());
-        c = new GridBagConstraints();
-
-        c.gridx = 0;
-        c.gridy = 0;
-        c.weightx = 0.4;
-        c.weighty = 1;
-        c.fill = GridBagConstraints.BOTH;
-        c.anchor = GridBagConstraints.CENTER;
-        c.insets = new Insets(0,0,2,12);
-        panel3.add(scrollPane1, c);
-
-        c.gridx = 0;
-        c.gridy = 1;
-        c.weightx = 0;
-        c.weighty = 0;
-        c.fill = GridBagConstraints.NONE;
-        c.anchor = GridBagConstraints.WEST;
-        c.insets = new Insets(2,0,0,12);
-        panel3.add(actionPanel1, c);
-
-        c.gridx = 1;
-        c.gridy = 0;
-        c.weightx = 0.6;
-        c.weighty = 1;
-        c.fill = GridBagConstraints.BOTH;
-        c.anchor = GridBagConstraints.CENTER;
-        c.insets = new Insets(0,0,0,0);
-        panel3.add(taxonSetEditingPanel, c);
-
-        setOpaque(false);
-        setBorder(new BorderUIResource.EmptyBorderUIResource(new Insets(12, 12, 12, 12)));
-        setLayout(new BorderLayout(0,0));
-        add(panel3, BorderLayout.CENTER);
-
-//		taxonSetsTable.addMouseListener(new MouseAdapter() {
-//			public void mouseClicked(MouseEvent e) {
-//				if (e.getClickCount() == 2) {
-//					JTable target = (JTable)e.getSource();
-//					int row = target.getSelectedRow();
-//					taxonSetsTableDoubleClicked(row);
-//				}
-//			}
-//		});
-
-        includedTaxaTable.addMouseListener(new MouseAdapter() {
-            public void mouseClicked(MouseEvent e) {
-                if (e.getClickCount() == 2) {
-                    includeSelectedTaxa();
-                }
-            }
-        });
-        excludedTaxaTable.addMouseListener(new MouseAdapter() {
-            public void mouseClicked(MouseEvent e) {
-                if (e.getClickCount() == 2) {
-                    excludeSelectedTaxa();
-                }
-            }
-        });
-
-        includedTaxaTable.addFocusListener(new FocusAdapter() {
-            public void focusGained(FocusEvent focusEvent) {
-                excludedTaxaTable.clearSelection();
-            }
-        });
-        excludedTaxaTable.addFocusListener(new FocusAdapter() {
-            public void focusGained(FocusEvent focusEvent) {
-                includedTaxaTable.clearSelection();
-            }
-        });
-
-        includedTaxaTable.getSelectionModel().addListSelectionListener(new ListSelectionListener() {
-            public void valueChanged(ListSelectionEvent e) {
-                if (!includedSelectionChanging && includedTaxonSetsComboBox.getSelectedIndex() != 0) {
-                    includedTaxonSetsComboBox.setSelectedIndex(0);
-                }
-            }
-        });
-        includedTaxonSetsComboBox.addItemListener(new ItemListener() {
-            public void itemStateChanged(ItemEvent e) {
-                includedSelectionChanging = true;
-                includedTaxaTable.clearSelection();
-                if (includedTaxonSetsComboBox.getSelectedIndex() > 0) {
-                    Taxa taxa = (Taxa)includedTaxonSetsComboBox.getSelectedItem();
-                    for (int i =0; i < taxa.getTaxonCount(); i++) {
-                        Taxon taxon = taxa.getTaxon(i);
-                        int index = includedTaxa.indexOf(taxon);
-                        includedTaxaTable.getSelectionModel().addSelectionInterval(index, index);
-
-                    }
-                }
-                includedSelectionChanging = false;
-            }
-        });
-
-        excludedTaxaTable.getSelectionModel().addListSelectionListener(new ListSelectionListener() {
-            public void valueChanged(ListSelectionEvent e) {
-                if (!excludedSelectionChanging && excludedTaxonSetsComboBox.getSelectedIndex() != 0) {
-                    excludedTaxonSetsComboBox.setSelectedIndex(0);
-                }
-            }
-        });
-        excludedTaxonSetsComboBox.addItemListener(new ItemListener() {
-            public void itemStateChanged(ItemEvent e) {
-                excludedSelectionChanging = true;
-                excludedTaxaTable.clearSelection();
-                if (excludedTaxonSetsComboBox.getSelectedIndex() > 0) {
-                    Taxa taxa = (Taxa)excludedTaxonSetsComboBox.getSelectedItem();
-                    for (int i =0; i < taxa.getTaxonCount(); i++) {
-                        Taxon taxon = taxa.getTaxon(i);
-                        int index = excludedTaxa.indexOf(taxon);
-                        excludedTaxaTable.getSelectionModel().addSelectionInterval(index, index);
-
-                    }
-                }
-                excludedSelectionChanging = false;
-            }
-        });
-
-        taxonSetsTable.doLayout();
-        includedTaxaTable.doLayout();
-        excludedTaxaTable.doLayout();
-    }
-
-    private void taxonSetChanged() {
-        currentTaxonSet.removeAllTaxa();
-        for (Taxon anIncludedTaxa : includedTaxa) {
-            currentTaxonSet.addTaxon(anIncludedTaxa);
-        }
-
-        setupTaxonSetsComboBoxes();
-
-        if (options.taxonSetsMono.get(currentTaxonSet) != null &&
-                options.taxonSetsMono.get(currentTaxonSet) &&
-                !checkCompatibility(currentTaxonSet)) {
-            options.taxonSetsMono.put(currentTaxonSet, Boolean.FALSE);
-        }
-
-        frame.taxonSetsChanged();
-    }
-
-    public void setOptions(BeautiOptions options) {
-
-        this.options = options;
-
-        taxa = options.alignment;
-        if (taxa == null) {
-            addTaxonSetAction.setEnabled(false);
-            removeTaxonSetAction.setEnabled(false);
-        } else {
-            addTaxonSetAction.setEnabled(true);
-        }
-
-        taxonSetsTableSelectionChanged();
-        taxonSetsTableModel.fireTableDataChanged();
-    }
-
-    public void getOptions(BeautiOptions options) {
-//		options.datesUnits = unitsCombo.getSelectedIndex();
-//		options.datesDirection = directionCombo.getSelectedIndex();
-//		options.translation = translationCombo.getSelectedIndex();
-    }
-
-    public JComponent getExportableComponent() {
-        return taxonSetsTable;
-    }
-
-    private void taxonSetsTableSelectionChanged() {
-        int[] rows = taxonSetsTable.getSelectedRows();
-        if (rows.length == 0) {
-            removeTaxonSetAction.setEnabled(false);
-        } else if (rows.length == 1) {
-            currentTaxonSet = options.taxonSets.get(rows[0]);
-            setCurrentTaxonSet(currentTaxonSet);
-            removeTaxonSetAction.setEnabled(true);
-        } else {
-            setCurrentTaxonSet(null);
-            removeTaxonSetAction.setEnabled(true);
-        }
-    }
-
-//	private void taxonSetsTableDoubleClicked(int row) {
-//		currentTaxonSet = (Taxa)taxonSets.get(row);
-//
-//		Collections.sort(taxonSets);
-//		taxonSetsTableModel.fireTableDataChanged();
-//
-//		setCurrentTaxonSet(currentTaxonSet);
-//
-//		int sel = taxonSets.indexOf(currentTaxonSet);
-//		taxonSetsTable.setRowSelectionInterval(sel, sel);
-//	}
-
-    Action addTaxonSetAction = new AbstractAction("+") {
-
-        /**
-         *
-         */
-        private static final long serialVersionUID = 20273987098143413L;
-
-        public void actionPerformed(ActionEvent ae) {
-            taxonSetCount ++;
-            currentTaxonSet = new Taxa("untitled" + taxonSetCount);
-
-            options.taxonSets.add(currentTaxonSet);
-            Collections.sort(options.taxonSets);
-
-            options.taxonSetsMono.put(currentTaxonSet, Boolean.FALSE);
-
-            taxonSetsTableModel.fireTableDataChanged();
-
-            int sel = options.taxonSets.indexOf(currentTaxonSet);
-            taxonSetsTable.setRowSelectionInterval(sel, sel);
-
-            taxonSetChanged();
-        }
-    };
-
-    Action removeTaxonSetAction = new AbstractAction("-") {
-
-        /**
-         *
-         */
-        private static final long serialVersionUID = 6077578872870122265L;
-
-        public void actionPerformed(ActionEvent ae) {
-            int row = taxonSetsTable.getSelectedRow();
-            if (row != -1) {
-                Taxa taxa = options.taxonSets.remove(row);
-                options.taxonSetsMono.remove(taxa);
-            }
-            taxonSetChanged();
-
-            taxonSetsTableModel.fireTableDataChanged();
-
-            if (row >= options.taxonSets.size()) {
-                row = options.taxonSets.size() - 1;
-            }
-            if (row >= 0) {
-                taxonSetsTable.setRowSelectionInterval(row, row);
-            } else {
-                setCurrentTaxonSet(null);
-            }
-        }
-    };
-
-    private void setCurrentTaxonSet(Taxa taxonSet) {
-
-        this.currentTaxonSet = taxonSet;
-
-        includedTaxa.clear();
-        excludedTaxa.clear();
-
-        if (currentTaxonSet != null) {
-            for (int i = 0; i < taxonSet.getTaxonCount(); i++) {
-                includedTaxa.add(taxonSet.getTaxon(i));
-            }
-            Collections.sort(includedTaxa);
-
-            for (int i = 0; i < taxa.getTaxonCount(); i++) {
-                excludedTaxa.add(taxa.getTaxon(i));
-            }
-            excludedTaxa.removeAll(includedTaxa);
-            Collections.sort(excludedTaxa);
-        }
-
-        setTaxonSetTitle();
-
-        setupTaxonSetsComboBoxes();
-
-        includedTaxaTableModel.fireTableDataChanged();
-        excludedTaxaTableModel.fireTableDataChanged();
-    }
-
-    private void setTaxonSetTitle() {
-
-        if (currentTaxonSet == null) {
-            taxonSetEditingPanel.setBorder(BorderFactory.createTitledBorder(""));
-            taxonSetEditingPanel.setEnabled(false);
-        } else {
-            taxonSetEditingPanel.setEnabled(true);
-            taxonSetEditingPanel.setBorder(BorderFactory.createTitledBorder("Taxon Set: " + currentTaxonSet.getId()));
-        }
-    }
-
-
-    private void setupTaxonSetsComboBoxes() {
-        setupTaxonSetsComboBox(excludedTaxonSetsComboBox, excludedTaxa);
-        excludedTaxonSetsComboBox.setSelectedIndex(0);
-        setupTaxonSetsComboBox(includedTaxonSetsComboBox, includedTaxa);
-        includedTaxonSetsComboBox.setSelectedIndex(0);
-    }
-
-    private void setupTaxonSetsComboBox(JComboBox comboBox, List availableTaxa) {
-        comboBox.removeAllItems();
-
-        comboBox.addItem(TAXON_SET_DEFAULT);
-        for (Taxa taxa : options.taxonSets) {
-            if (taxa != currentTaxonSet) {
-                if (isCompatible(taxa, availableTaxa)) {
-                    comboBox.addItem(taxa);
-                }
-            }
-        }
-    }
-
-    /**
-     * Returns true if taxa are all found in availableTaxa
-     * @param taxa
-     * @param availableTaxa
-     * @return true if the taxa are all found in availableTaxa
-     */
-    private boolean isCompatible(Taxa taxa, List availableTaxa) {
-
-        for (int i = 0; i < taxa.getTaxonCount(); i++) {
-            Taxon taxon = taxa.getTaxon(i);
-            if (!availableTaxa.contains(taxon)) {
-                return false;
-            }
-        }
-        return true;
-    }
-
-    private boolean checkCompatibility(Taxa taxa) {
-        for (Taxa taxa2 : options.taxonSets) {
-            if (taxa2 != taxa && options.taxonSetsMono.get(taxa2)) {
-                if (taxa.containsAny(taxa2) && !taxa.containsAll(taxa2) && !taxa2.containsAll(taxa)) {
-                    JOptionPane.showMessageDialog(frame,
-                            "You cannot enforce monophyly on this taxon set \n" +
-                                    "because it is not compatible with another taxon \n" +
-                                    "set, " + taxa2.getId() + ", for which monophyly is\n" +
-                                    "enforced.",
-                            "Warning",
-                            JOptionPane.WARNING_MESSAGE);
-                    return false;
-                }
-            }
-        }
-        return true;
-    }
-
-    class TaxonSetsTableModel extends AbstractTableModel {
-
-        /**
-         *
-         */
-        private static final long serialVersionUID = 3318461381525023153L;
-
-        public TaxonSetsTableModel() {
-        }
-
-        public int getColumnCount() {
-            return 2;
-        }
-
-        public int getRowCount() {
-            if (options == null) return 0;
-            return options.taxonSets.size();
-        }
-
-        public Object getValueAt(int rowIndex, int columnIndex) {
-            Taxa taxonSet = options.taxonSets.get(rowIndex);
-            switch(columnIndex) {
-                case 0: return taxonSet.getId();
-                case 1: return options.taxonSetsMono.get(taxonSet);
-            }
-            return null;
-        }
-
-        public void setValueAt(Object aValue, int rowIndex, int columnIndex) {
-            Taxa taxonSet = options.taxonSets.get(rowIndex);
-            switch(columnIndex) {
-                case 0: {
-                    taxonSet.setId(aValue.toString());
-                    setTaxonSetTitle();
-                    break;
-                }
-                case 1: {
-                    if ((Boolean)aValue) {
-                        Taxa taxa = options.taxonSets.get(rowIndex);
-                        if (checkCompatibility(taxa)) {
-                            options.taxonSetsMono.put(taxonSet, (Boolean)aValue);
-                        }
-                    } else {
-                        options.taxonSetsMono.put(taxonSet, (Boolean)aValue);
-                    }
-                    break;
-                }
-            }
-        }
-
-        public boolean isCellEditable(int row, int col) {
-            return true;
-        }
-
-        public String getColumnName(int column) {
-            switch(column) {
-                case 0: return "Taxon Sets";
-                case 1: return "Monophyletic?";
-            }
-            return null;
-        }
-
-        public Class getColumnClass(int c) {
-            return getValueAt(0, c).getClass();
-        }
-    }
-
-    private JPanel createAddRemoveButtonPanel(Action addAction, Icon addIcon, String addToolTip,
-                                              Action removeAction, Icon removeIcon, String removeToolTip, int axis) {
-
-        JPanel buttonPanel = new JPanel();
-        buttonPanel.setLayout(new BoxLayout(buttonPanel, axis));
-        buttonPanel.setOpaque(false);
-        JButton addButton = new JButton(addAction);
-        if (addIcon != null) {
-            addButton.setIcon(addIcon);
-            addButton.setText(null);
-        }
-        addButton.setToolTipText(addToolTip);
-        addButton.putClientProperty("JButton.buttonType", "toolbar");
-        addButton.setOpaque(false);
-        addAction.setEnabled(false);
-
-        JButton removeButton = new JButton(removeAction);
-        if (removeIcon != null) {
-            removeButton.setIcon(removeIcon);
-            removeButton.setText(null);
-        }
-        removeButton.setToolTipText(removeToolTip);
-        removeButton.putClientProperty("JButton.buttonType", "toolbar");
-        removeButton.setOpaque(false);
-        removeAction.setEnabled(false);
-
-        buttonPanel.add(addButton);
-        buttonPanel.add(new JToolBar.Separator(new Dimension(6,6)));
-        buttonPanel.add(removeButton);
-
-        return buttonPanel;
-    }
-
-    private void excludedTaxaTableSelectionChanged() {
-        if (excludedTaxaTable.getSelectedRowCount() == 0) {
-            includeTaxonAction.setEnabled(false);
-        } else {
-            includeTaxonAction.setEnabled(true);
-        }
-    }
-
-    private void includedTaxaTableSelectionChanged() {
-        if (includedTaxaTable.getSelectedRowCount() == 0) {
-            excludeTaxonAction.setEnabled(false);
-        } else {
-            excludeTaxonAction.setEnabled(true);
-        }
-    }
-
-    private void includeSelectedTaxa() {
-        int[] rows = excludedTaxaTable.getSelectedRows();
-
-        List<Taxon> transfer = new ArrayList<Taxon>();
-
-        for (int r : rows) {
-            transfer.add(excludedTaxa.get(r));
-        }
-
-        includedTaxa.addAll(transfer);
-        Collections.sort(includedTaxa);
-
-        excludedTaxa.removeAll(includedTaxa);
-
-        includedTaxaTableModel.fireTableDataChanged();
-        excludedTaxaTableModel.fireTableDataChanged();
-
-        includedTaxaTable.getSelectionModel().clearSelection();
-        for (Taxon taxon : transfer) {
-            int row = includedTaxa.indexOf(taxon);
-            includedTaxaTable.getSelectionModel().addSelectionInterval(row, row);
-        }
-
-        taxonSetChanged();
-    }
-
-    private void excludeSelectedTaxa() {
-        int[] rows = includedTaxaTable.getSelectedRows();
-
-        List<Taxon> transfer = new ArrayList<Taxon>();
-
-        for (int r : rows) {
-            transfer.add(includedTaxa.get(r));
-        }
-
-        excludedTaxa.addAll(transfer);
-        Collections.sort(excludedTaxa);
-
-        includedTaxa.removeAll(excludedTaxa);
-
-        includedTaxaTableModel.fireTableDataChanged();
-        excludedTaxaTableModel.fireTableDataChanged();
-
-        excludedTaxaTable.getSelectionModel().clearSelection();
-        for (Taxon taxon : transfer) {
-            int row = excludedTaxa.indexOf(taxon);
-            excludedTaxaTable.getSelectionModel().addSelectionInterval(row, row);
-        }
-
-        taxonSetChanged();
-    }
-
-    Action includeTaxonAction = new AbstractAction("->") {
-        /**
-         *
-         */
-        private static final long serialVersionUID = 7510299673661594128L;
-
-        public void actionPerformed(ActionEvent ae) {
-            includeSelectedTaxa();
-        }
-    };
-
-    Action excludeTaxonAction = new AbstractAction("<-") {
-
-        /**
-         *
-         */
-        private static final long serialVersionUID = 449692708602410206L;
-
-        public void actionPerformed(ActionEvent ae) {
-            excludeSelectedTaxa();
-        }
-    };
-
-    class TaxaTableModel extends AbstractTableModel {
-
-        /**
-         *
-         */
-        private static final long serialVersionUID = -8027482229525938010L;
-        boolean included;
-
-        public TaxaTableModel(boolean included) {
-            this.included = included;
-        }
-
-        public int getColumnCount() {
-            return 1;
-        }
-
-        public int getRowCount() {
-            if (currentTaxonSet == null) return 0;
-
-            if (included) {
-                return includedTaxa.size();
-            } else {
-                return excludedTaxa.size();
-            }
-        }
-
-        public Object getValueAt(int row, int col) {
-
-            if (included) {
-                return includedTaxa.get(row).getId();
-            } else {
-                return excludedTaxa.get(row).getId();
-            }
-        }
-
-        public boolean isCellEditable(int row, int col) {
-            return false;
-        }
-
-        public String getColumnName(int column) {
-            if (included) return "Included Taxa";
-            else return "Excluded Taxa";
-        }
-
-        public Class getColumnClass(int c) {return getValueAt(0, c).getClass();}
-    }
-
-}
diff --git a/src/dr/app/oldbeauti/XMLWriter.java b/src/dr/app/oldbeauti/XMLWriter.java
deleted file mode 100644
index 9437102..0000000
--- a/src/dr/app/oldbeauti/XMLWriter.java
+++ /dev/null
@@ -1,138 +0,0 @@
-/*
- * XMLWriter.java
- *
- * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
- *
- * This file is part of BEAST.
- * See the NOTICE file distributed with this work for additional
- * information regarding copyright ownership and licensing.
- *
- * BEAST is free software; you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as
- * published by the Free Software Foundation; either version 2
- * of the License, or (at your option) any later version.
- *
- *  BEAST is distributed in the hope that it will be useful,
- *  but WITHOUT ANY WARRANTY; without even the implied warranty of
- *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
- *  GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public
- * License along with BEAST; if not, write to the
- * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
- * Boston, MA  02110-1301  USA
- */
-
-package dr.app.oldbeauti;
-
-import dr.util.Attribute;
-
-import java.io.Writer;
-
-/**
- * @author			Alexei Drummond
- * @version			$Id: XMLWriter.java,v 1.3 2005/06/27 21:18:40 rambaut Exp $
- */
-public class XMLWriter extends java.io.PrintWriter {
-
-	int level = 0;
-
-	public XMLWriter(Writer writer) {
-		super(writer);
-	}
-
-	public void increaseLevel() { level += 1; }
-	public void decreaseLevel() { level -= 1; }
-
-	public void writeComment(String comment) {
-		writeComment(comment, 80);
-	}
-
-	public void writeComment(String comment, int length) {
-		StringBuffer buffer = new StringBuffer("<!-- ");
-		buffer.append(comment);
-		for (int i = buffer.length(); i < (length - 3); i++) {
-			buffer.append(' ');
-		}
-		buffer.append("-->");
-		writeText(buffer.toString());
-	}
-
-	public void writeOpenTag(String tagname) {
-		writeText("<" + tagname + ">");
-		increaseLevel();
-	}
-
-	public void writeOpenTag(String tagname, Attribute attribute) {
-		writeTag(tagname, new Attribute[] {attribute}, false);
-	}
-
-	public void writeOpenTag(String tagname, Attribute[] attributes) {
-		writeTag(tagname, attributes, false);
-	}
-
-	public void writeTag(String tagname, Attribute attribute, boolean close) {
-		writeTag(tagname, new Attribute[] { attribute }, close);
-	}
-
-	public void writeTag(String tagname, Attribute[] attributes, boolean close) {
-		StringBuffer buffer = new StringBuffer("<");
-		buffer.append(tagname);
-        for (Attribute attribute : attributes) {
-            buffer.append(' ');
-            buffer.append(attribute.getAttributeName());
-            buffer.append("=\"");
-            buffer.append(attribute.getAttributeValue());
-            buffer.append("\"");
-        }
-		if (close) {
-			buffer.append("/");
-		}
-		buffer.append(">");
-		writeText(buffer.toString());
-		if (!close) {
-			increaseLevel();
-		}
-	}
-
-    public void writeTag(String tagname, Attribute[] attributes, String content, boolean close) {
-        StringBuffer buffer = new StringBuffer("<");
-        buffer.append(tagname);
-        for (Attribute attribute : attributes) {
-            buffer.append(' ');
-            buffer.append(attribute.getAttributeName());
-            buffer.append("=\"");
-            buffer.append(attribute.getAttributeValue());
-            buffer.append("\"");
-        }
-        if (content != null) {
-            buffer.append(">");
-            buffer.append(content);
-            if (close) {
-                buffer.append("</");
-                buffer.append(tagname);
-                //buffer.append("/");
-            }
-        } else if (close) {
-            buffer.append("/");
-        }
-        buffer.append(">");
-        writeText(buffer.toString());
-        if (!close) {
-            increaseLevel();
-        }
-    }
-
-	public void writeCloseTag(String tagname) {
-		decreaseLevel();
-		writeText("</" + tagname + ">");
-	}
-
-	public void writeText(String string) {
-		for (int i =0; i < level; i++) {
-			write('\t');
-		}
-		println(string);
-	}
-}
-
diff --git a/src/dr/app/pathogen/TemporalRooting.java b/src/dr/app/pathogen/TemporalRooting.java
index 39e641e..4d57a86 100644
--- a/src/dr/app/pathogen/TemporalRooting.java
+++ b/src/dr/app/pathogen/TemporalRooting.java
@@ -436,7 +436,9 @@ public class TemporalRooting {
         double sum_y = 0.0;
         double sum_ty = 0.0;
         double sum_tc = 0.0;
-
+        double Nd = N;
+        double nd = n;  // need to set these naughty guys to doubles
+        
         for (int i = 0; i < N; i++) {
             sum_tt += t[i] * t[i];
             sum_t += t[i];
@@ -444,14 +446,13 @@ public class TemporalRooting {
             sum_ty += t[i] * y[i];
             sum_tc += t[i] * c[i];
         }
-        double y_bar = sum_y / N;
-        double t_bar = sum_t / N;
+        double y_bar = sum_y / Nd;
+        double t_bar = sum_t / Nd;
 
-        double C = sum_tt - (sum_t * sum_t / N);
+        double C = sum_tt - (sum_t * sum_t / Nd);
         double sumAB = 0.0;
         double sumAA = 0.0;
-        double Nd = N;
-        double nd = n;  // need to set these naughty ones to doubles
+      
         for (int i = 0; i < N; i++) {
             double Ai = 2*c[i] - 
             		    ((2*nd-Nd)/Nd) +
@@ -462,7 +463,6 @@ public class TemporalRooting {
             sumAB += Ai * Bi;
             sumAA += Ai * Ai;
         }
-
         double x = -sumAB / (sumLength * sumAA);
         x = Math.min(Math.max(x, 0.0), 1.0);
 
@@ -593,3 +593,4 @@ public class TemporalRooting {
     }
 
 }
+
diff --git a/src/dr/app/tempest/ParentPlot.java b/src/dr/app/tempest/ParentPlot.java
new file mode 100644
index 0000000..a0b342c
--- /dev/null
+++ b/src/dr/app/tempest/ParentPlot.java
@@ -0,0 +1,129 @@
+/*
+ * ParentPlot.java
+ *
+ * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
+ *
+ * This file is part of BEAST.
+ * See the NOTICE file distributed with this work for additional
+ * information regarding copyright ownership and licensing.
+ *
+ * BEAST is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+ *
+ *  BEAST is distributed in the hope that it will be useful,
+ *  but WITHOUT ANY WARRANTY; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ *  GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with BEAST; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA  02110-1301  USA
+ */
+
+package dr.app.tempest;
+
+import dr.app.gui.chart.Plot;
+import dr.stats.Variate;
+
+import java.awt.*;
+import java.awt.geom.GeneralPath;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Set;
+
+/**
+ * Description:	A line plot.
+ *
+ * @author Andrew Rambaut
+ * @version $Id$
+ */
+
+public class ParentPlot extends Plot.AbstractPlot {
+
+
+    /**
+     * Constructor
+     */
+    public ParentPlot(Variate xData, Variate yData, List<Double> xParentData, List<Double> yParentData) {
+        super(xParentData, yParentData);
+
+        this.xTipData = xData;
+        this.yTipData = yData;
+
+        this.xParentData = new Variate.D(xParentData);
+        this.yParentData = new Variate.D(yParentData);
+    }
+
+    /**
+     * Paint data series
+     */
+    protected void paintData(Graphics2D g2, Variate.N xData, Variate.N yData) {
+
+        g2.setPaint(linePaint);
+        g2.setStroke(lineStroke);
+
+        if (getSelectedPoints() != null && getSelectedPoints().size() > 0) {
+            for (int i : getSelectedPoints()) {
+
+                double x = ((Number) xTipData.get(i)).doubleValue();
+                double y = ((Number) yTipData.get(i)).doubleValue();
+
+                double x1 = transformX(x);
+                double y1 = transformY(y);
+
+                double x2 = transformX(((Number) xData.get(0)).doubleValue());
+                double y2 = transformY(((Number) yData.get(0)).doubleValue());
+
+                GeneralPath path = new GeneralPath();
+                path.moveTo((float) x1, (float) y1);
+//            path.lineTo((float) x2, (float) y1);
+                path.lineTo((float) x2, (float) y2);
+
+                g2.draw(path);
+            }
+        } else {
+        for (int i = 0; i < xData.getCount(); i++) {
+
+            double x1 = transformX(((Number) xTipData.get(i)).doubleValue());
+            double y1 = transformY(((Number) yTipData.get(i)).doubleValue());
+
+            double x2 = transformX(((Number) xData.get(i)).doubleValue());
+            double y2 = transformY(((Number) yData.get(i)).doubleValue());
+
+            GeneralPath path = new GeneralPath();
+            path.moveTo((float) x1, (float) y1);
+//            path.lineTo((float) x2, (float) y1);
+            path.lineTo((float) x2, (float) y2);
+
+            g2.draw(path);
+        }
+        }
+
+
+	}
+
+    private final Variate xTipData;
+    private final Variate yTipData;
+
+    private final Variate.N xParentData;
+    private final Variate.N yParentData;
+
+    public void setSelectedPoints(Set<Integer> selectedPoints, double mrcaTime, double mrcaDistance) {
+        List<Double> x = new ArrayList<Double>();
+        x.add(mrcaTime);
+        List<Double> y = new ArrayList<Double>();
+        y.add(mrcaDistance);
+        setData(x, y);
+        setSelectedPoints(selectedPoints);
+    }
+
+    public void clearSelection() {
+        setData(xParentData, yParentData);
+        super.clearSelection();
+    }
+
+}
+
diff --git a/src/dr/app/tempest/RootToTip.java b/src/dr/app/tempest/RootToTip.java
new file mode 100644
index 0000000..e12b693
--- /dev/null
+++ b/src/dr/app/tempest/RootToTip.java
@@ -0,0 +1,328 @@
+/*
+ * RootToTip.java
+ *
+ * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
+ *
+ * This file is part of BEAST.
+ * See the NOTICE file distributed with this work for additional
+ * information regarding copyright ownership and licensing.
+ *
+ * BEAST is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+ *
+ *  BEAST is distributed in the hope that it will be useful,
+ *  but WITHOUT ANY WARRANTY; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ *  GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with BEAST; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA  02110-1301  USA
+ */
+
+package dr.app.tempest;
+
+import dr.app.beauti.options.DateGuesser;
+import dr.app.util.Arguments;
+import dr.app.tools.NexusExporter;
+import dr.evolution.io.Importer;
+import dr.evolution.io.NexusImporter;
+import dr.evolution.io.TreeImporter;
+import dr.evolution.tree.Tree;
+import dr.evolution.util.TaxonList;
+import dr.stats.Regression;
+import dr.stats.Variate;
+import dr.util.Version;
+
+import java.io.FileReader;
+import java.io.IOException;
+import java.io.PrintStream;
+import java.io.PrintWriter;
+import java.util.ArrayList;
+import java.util.List;
+
+/*
+ * Essentially a command line version of TempEst. Written to
+ * perform the analysis on sets of trees.
+ * 
+ * @author Andrew Rambaut
+ */
+
+public class RootToTip {
+
+    private final static Version version = new Version() {
+        @Override
+        public String getVersion() {
+            return "1.5";
+        }
+
+        @Override
+        public String getVersionString() {
+            return "v1.5";
+        }
+
+        @Override
+        public String getBuildString() {
+            return "";
+        }
+
+        @Override
+        public String getDateString() {
+            return "2003-2015";
+        }
+
+        @Override
+        public String[] getCredits() {
+            return new String[0];
+        }
+
+        @Override
+        public String getHTMLCredits() {
+            return "";
+        }
+    };
+
+    public RootToTip(int burnin, String dateOrder, final boolean keepRoot, String outgroup,
+                     boolean writeTree, String inputFileName, String outputFileName) throws IOException {
+
+        System.out.println("Reading tree(s)...");
+
+        boolean firstTree = true;
+        FileReader fileReader = new FileReader(inputFileName);
+        TreeImporter importer = new NexusImporter(fileReader);
+
+        List<Regression> regressions = new ArrayList<Regression>();
+        List<Tree> trees = new ArrayList<Tree>();
+
+        DateGuesser dg = new DateGuesser();
+        dg.fromLast = false;
+        if (dateOrder.equals("FIRST")) {
+            dg.order = 0;
+        } else if (dateOrder.equals("LAST")) {
+            dg.order = 0;
+            dg.fromLast = true;
+        } else {
+            dg.order = Integer.parseInt(dateOrder) - 1;
+            if (dg.order < 0 || dg.order > 100) {
+                System.err.println("Error Parsing order of date field: " + dateOrder);
+            }
+        }
+
+        TaxonList taxa = null;
+        TemporalRooting temporalRooting = null;
+
+        try {
+            while (importer.hasTree()) {
+                Tree tree = importer.importNextTree();
+
+                if (firstTree) {
+                    taxa = tree;
+
+                    dg.guessDates(taxa);
+
+                    temporalRooting = new TemporalRooting(taxa);
+
+                    firstTree = false;
+                }
+
+                if (totalTrees >= burnin) {
+                    Tree rootedTree = tree;
+
+                    if (!keepRoot) {
+                        rootedTree = temporalRooting.findRoot(tree, TemporalRooting.RootingFunction.CORRELATION);
+                    }
+
+                    regressions.add(temporalRooting.getRootToTipRegression(rootedTree));
+
+                    if (writeTree) {
+                        trees.add(rootedTree);
+                    }
+                    totalTreesUsed += 1;
+                }
+                totalTrees += 1;
+
+            }
+        } catch (Importer.ImportException e) {
+            System.err.println("Error Parsing Input Tree: " + e.getMessage());
+            return;
+        }
+        fileReader.close();
+
+        PrintWriter printWriter;
+
+        if (!writeTree && outputFileName != null) {
+            printWriter = new PrintWriter(outputFileName);
+        } else {
+            printWriter = new PrintWriter(System.out);
+        }
+
+        if (regressions.size() == 1) {
+            Regression r = regressions.get(0);
+
+            Variate dates = r.getXData();
+            Variate distances = r.getYData();
+
+            printWriter.println("date\tdistance");
+            for (int i = 0; i < dates.getCount(); i++) {
+                printWriter.println(dates.get(i) + "\t" + distances.get(i));
+            }
+            printWriter.println();
+            printWriter.println("Regression slope = " + r.getGradient());
+            printWriter.println("X-Intercept = " + r.getXIntercept());
+            printWriter.println("Y-Intercept = " + r.getYIntercept());
+            printWriter.println("Residual mean squared = " + r.getResidualMeanSquared());
+            printWriter.println("R^2 = " + r.getRSquared());
+            printWriter.println("Correlation coefficient = " + r.getCorrelationCoefficient());
+
+        } else {
+            printWriter.println("tree\tslope\tx-intercept\ty-intercept\tcorrelation");
+            int i = 1;
+            for (Regression r : regressions) {
+                printWriter.print("\t" + i);
+                printWriter.print("\t" + r.getGradient());
+                printWriter.print("\t" + r.getXIntercept());
+                printWriter.println("\t" + r.getYIntercept());
+                printWriter.println("\t" + r.getCorrelationCoefficient());
+            }
+
+        }
+
+        printWriter.close();
+
+        if (writeTree) {
+            PrintStream printStream;
+
+            if (outputFileName != null) {
+                printStream = new PrintStream(outputFileName);
+            } else {
+                printStream = new PrintStream(System.out);
+            }
+
+            NexusExporter exporter = new NexusExporter(printStream);
+            Tree[] treeArray = new Tree[trees.size()];
+            trees.toArray(treeArray);
+
+            exporter.exportTrees(treeArray);
+
+            printStream.close();
+        }
+    }
+
+    int totalTrees = 0;
+    int totalTreesUsed = 0;
+
+    public static void printTitle() {
+        System.out.println();
+        centreLine("RootToTip " + version.getVersionString() + ", " + version.getDateString(), 60);
+        centreLine("Root to tip distance vs. time of sampling", 60);
+        centreLine("by", 60);
+        centreLine("Andrew Rambaut", 60);
+        System.out.println();
+        System.out.println();
+    }
+
+    public static void centreLine(String line, int pageWidth) {
+        int n = pageWidth - line.length();
+        int n1 = n / 2;
+        for (int i = 0; i < n1; i++) {
+            System.out.print(" ");
+        }
+        System.out.println(line);
+    }
+
+
+    public static void printUsage(Arguments arguments) {
+
+        arguments.printUsage("roottotip", "<input-file-name> [<output-file-name>]");
+        System.out.println();
+        System.out.println("  Example: roottotip -burnin 100 test.trees rootToTip.txt");
+        System.out.println();
+    }
+
+    //Main method
+    public static void main(String[] args) throws IOException {
+
+        String inputFileName = null;
+        String outputFileName = null;
+
+        printTitle();
+
+        Arguments arguments = new Arguments(
+                new Arguments.Option[]{
+                        new Arguments.IntegerOption("burnin", "the number of trees to be ignored as 'burn-in' [default = 0]"),
+                        new Arguments.StringOption("dateorder", "date_order", "order of date field in taxon name: first, last, 1, 2 etc. [default = last]"),
+//                        new Arguments.StringOption("outgroup", "{taxon list}", "one or more taxa that will be used to root the tree(s) [default = find root]"),
+                        new Arguments.Option("keeproot", "keep the existing root of the input trees [default = estimate root]"),
+                        new Arguments.Option("writetree", "Write the optimally rooted tree to the output file"),
+                        new Arguments.Option("help", "option to print this message"),
+                });
+
+        try {
+            arguments.parseArguments(args);
+        } catch (Arguments.ArgumentException ae) {
+            System.out.println(ae);
+            printUsage(arguments);
+            System.exit(1);
+        }
+
+        if (arguments.hasOption("help")) {
+            printUsage(arguments);
+            System.exit(0);
+        }
+
+        int burnin = 0;
+        if (arguments.hasOption("burnin")) {
+            burnin = arguments.getIntegerOption("burnin");
+        }
+
+        String dateOrder = "LAST";
+        if (arguments.hasOption("dateorder")) {
+            dateOrder = arguments.getStringOption("dateorder").toUpperCase();
+        }
+
+        String outgroup = null;
+        if (arguments.hasOption("outgroup")) {
+            outgroup = arguments.getStringOption("dateorder").toUpperCase();
+        }
+
+        boolean keepRoot = arguments.hasOption("keeproot");
+
+        boolean writeTree = arguments.hasOption("writetree");
+
+        String[] args2 = arguments.getLeftoverArguments();
+
+        if (args2.length > 2) {
+            System.err.println("Unknown option: " + args2[2]);
+            System.err.println();
+            printUsage(arguments);
+            System.exit(1);
+        }
+
+        if (args2.length == 0) {
+            System.err.println("Missing input file name");
+            printUsage(arguments);
+            System.exit(1);
+        }
+
+
+        inputFileName = args2[0];
+        if (args2.length == 2) {
+            outputFileName = args2[1];
+        }
+
+        new RootToTip(burnin,
+                dateOrder,
+                keepRoot,
+                outgroup,
+                writeTree,
+                inputFileName,
+                outputFileName
+        );
+
+        System.exit(0);
+    }
+
+}
\ No newline at end of file
diff --git a/src/dr/app/tempest/SamplesPanel.java b/src/dr/app/tempest/SamplesPanel.java
new file mode 100644
index 0000000..2671eec
--- /dev/null
+++ b/src/dr/app/tempest/SamplesPanel.java
@@ -0,0 +1,461 @@
+/*
+ * SamplesPanel.java
+ *
+ * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
+ *
+ * This file is part of BEAST.
+ * See the NOTICE file distributed with this work for additional
+ * information regarding copyright ownership and licensing.
+ *
+ * BEAST is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+ *
+ *  BEAST is distributed in the hope that it will be useful,
+ *  but WITHOUT ANY WARRANTY; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ *  GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with BEAST; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA  02110-1301  USA
+ */
+
+package dr.app.tempest;
+
+import dr.app.beauti.options.DateGuesser;
+import dr.app.beauti.tipdatepanel.GuessDatesDialog;
+import dr.app.beauti.util.PanelUtils;
+import dr.evolution.util.Date;
+import dr.evolution.util.TimeScale;
+import dr.evolution.util.Units;
+import dr.evolution.util.TaxonList;
+import dr.app.gui.table.*;
+import jam.framework.Exportable;
+import jam.table.HeaderRenderer;
+import dr.app.gui.table.TableEditorStopper;
+import jam.table.TableRenderer;
+
+import javax.swing.*;
+import javax.swing.event.ListSelectionEvent;
+import javax.swing.event.ListSelectionListener;
+import javax.swing.plaf.BorderUIResource;
+import javax.swing.table.AbstractTableModel;
+import java.awt.*;
+import java.awt.event.ActionEvent;
+import java.awt.event.ItemEvent;
+import java.awt.event.ItemListener;
+
+/**
+ * @author Andrew Rambaut
+ * @version $Id: DataPanel.java,v 1.17 2006/09/05 13:29:34 rambaut Exp $
+ */
+public class SamplesPanel extends JPanel implements Exportable {
+
+    /**
+     *
+     */
+    private static final long serialVersionUID = 5283922195494563924L;
+    JScrollPane scrollPane = new JScrollPane();
+    JTable dataTable = null;
+    DataTableModel dataTableModel = null;
+
+    ClearDatesAction clearDatesAction = new ClearDatesAction();
+    GuessDatesAction guessDatesAction = new GuessDatesAction();
+
+    JComboBox unitsCombo = new JComboBox(new String[]{"Years", "Months", "Days"});
+    JComboBox directionCombo = new JComboBox(new String[]{"Since some time in the past", "Before the present"});
+
+    TempestFrame frame = null;
+
+    TaxonList taxonList = null;
+
+    int datesUnits;
+    int datesDirection;
+    double maximumTipHeight = 0.0;
+
+    DateGuesser guesser = new DateGuesser();
+
+    double[] heights = null;
+
+    GuessDatesDialog guessDatesDialog = null;
+
+    public SamplesPanel(TempestFrame parent, TaxonList taxonList) {
+
+        this.frame = parent;
+
+        dataTableModel = new DataTableModel();
+        TableSorter sorter = new TableSorter(dataTableModel);
+        dataTable = new JTable(sorter);
+
+        sorter.setTableHeader(dataTable.getTableHeader());
+
+        dataTable.getTableHeader().setReorderingAllowed(false);
+        dataTable.getTableHeader().setDefaultRenderer(
+                new HeaderRenderer(SwingConstants.LEFT, new Insets(0, 4, 0, 4)));
+
+        dataTable.getColumnModel().getColumn(0).setCellRenderer(
+                new TableRenderer(SwingConstants.LEFT, new Insets(0, 4, 0, 4)));
+        dataTable.getColumnModel().getColumn(0).setPreferredWidth(80);
+
+        dataTable.getColumnModel().getColumn(1).setCellRenderer(
+                new TableRenderer(SwingConstants.LEFT, new Insets(0, 4, 0, 4)));
+        dataTable.getColumnModel().getColumn(1).setPreferredWidth(80);
+        dataTable.getColumnModel().getColumn(1).setCellEditor(
+                new DateCellEditor());
+
+        dataTable.getColumnModel().getColumn(2).setCellRenderer(
+                new TableRenderer(SwingConstants.LEFT, new Insets(0, 4, 0, 4)));
+        dataTable.getColumnModel().getColumn(2).setPreferredWidth(80);
+        dataTable.getColumnModel().getColumn(2).setCellEditor(
+                new DateCellEditor());
+
+        dataTable.getColumnModel().getColumn(3).setCellRenderer(
+                new TableRenderer(SwingConstants.LEFT, new Insets(0, 4, 0, 4)));
+        dataTable.getColumnModel().getColumn(3).setPreferredWidth(80);
+
+        TableEditorStopper.ensureEditingStopWhenTableLosesFocus(dataTable);
+
+        dataTable.getSelectionModel().addListSelectionListener(new ListSelectionListener() {
+            public void valueChanged(ListSelectionEvent evt) {
+                selectionChanged();
+            }
+        });
+
+        scrollPane = new JScrollPane(dataTable,
+                JScrollPane.VERTICAL_SCROLLBAR_ALWAYS,
+                JScrollPane.HORIZONTAL_SCROLLBAR_ALWAYS);
+        scrollPane.setOpaque(false);
+
+        PanelUtils.setupComponent(unitsCombo);
+        PanelUtils.setupComponent(directionCombo);
+
+        JToolBar toolBar1 = new JToolBar();
+        toolBar1.setFloatable(false);
+        toolBar1.setOpaque(false);
+
+        toolBar1.setLayout(new FlowLayout(FlowLayout.LEFT, 0, 0));
+        JButton button = new JButton(clearDatesAction);
+        PanelUtils.setupComponent(button);
+        toolBar1.add(button);
+        button = new JButton(guessDatesAction);
+        PanelUtils.setupComponent(button);
+        toolBar1.add(button);
+        toolBar1.add(new JToolBar.Separator(new Dimension(12, 12)));
+        final JLabel unitsLabel = new JLabel("Dates specified as ");
+        toolBar1.add(unitsLabel);
+        toolBar1.add(unitsCombo);
+        toolBar1.add(directionCombo);
+
+        setOpaque(false);
+        setBorder(new BorderUIResource.EmptyBorderUIResource(new Insets(12, 12, 12, 12)));
+        setLayout(new BorderLayout(0, 0));
+
+        add(toolBar1, "North");
+        add(scrollPane, "Center");
+
+        ItemListener listener = new ItemListener() {
+            public void itemStateChanged(ItemEvent ev) {
+                timeScaleChanged();
+            }
+        };
+        unitsCombo.addItemListener(listener);
+        directionCombo.addItemListener(listener);
+
+        setTaxonList(taxonList);
+    }
+
+    public final void timeScaleChanged() {
+        Units.Type units = Units.Type.YEARS;
+        switch (unitsCombo.getSelectedIndex()) {
+            case 0:
+                units = Units.Type.YEARS;
+                break;
+            case 1:
+                units = Units.Type.MONTHS;
+                break;
+            case 2:
+                units = Units.Type.DAYS;
+                break;
+        }
+
+        boolean backwards = directionCombo.getSelectedIndex() == 1;
+
+        for (int i = 0; i < taxonList.getTaxonCount(); i++) {
+            Date date = taxonList.getTaxon(i).getDate();
+            double d = date.getTimeValue();
+
+            Date newDate = createDate(d, units, backwards, 0.0);
+
+            newDate.setPrecision(date.getPrecision());
+
+            taxonList.getTaxon(i).setDate(newDate);
+        }
+
+        calculateHeights();
+
+        dataTableModel.fireTableDataChanged();
+        frame.timeScaleChanged();
+    }
+
+    private Date createDate(double timeValue, Units.Type units, boolean backwards, double origin) {
+        if (backwards) {
+            return Date.createTimeAgoFromOrigin(timeValue, units, origin);
+        } else {
+            return Date.createTimeSinceOrigin(timeValue, units, origin);
+        }
+    }
+
+    private void setTaxonList(TaxonList taxonList) {
+        this.taxonList = taxonList;
+
+        setupTable();
+
+        unitsCombo.setSelectedIndex(datesUnits);
+        directionCombo.setSelectedIndex(datesDirection);
+
+        calculateHeights();
+
+        dataTableModel.fireTableDataChanged();
+    }
+
+    private void setupTable() {
+        dataTableModel.fireTableDataChanged();
+    }
+
+    public void getOptions() {
+        datesUnits = unitsCombo.getSelectedIndex();
+        datesDirection = directionCombo.getSelectedIndex();
+    }
+
+    public JComponent getExportableComponent() {
+        return dataTable;
+    }
+
+    public void selectionChanged() {
+        // nothing to do
+    }
+
+    public void clearDates() {
+        for (int i = 0; i < taxonList.getTaxonCount(); i++) {
+            java.util.Date origin = new java.util.Date(0);
+
+            double d = 0.0;
+
+            Date date = Date.createTimeSinceOrigin(d, Units.Type.YEARS, origin);
+            taxonList.getTaxon(i).setAttribute("date", date);
+        }
+
+        // adjust the dates to the current timescale...
+        timeScaleChanged();
+
+        dataTableModel.fireTableDataChanged();
+    }
+
+    public void guessDates() {
+
+        if (guessDatesDialog == null) {
+            guessDatesDialog = new GuessDatesDialog(frame);
+        }
+
+        int result = guessDatesDialog.showDialog();
+
+        if (result == -1 || result == JOptionPane.CANCEL_OPTION) {
+            return;
+        }
+
+        guesser.guessDates = true;
+        guessDatesDialog.setupGuesser(guesser);
+
+        String warningMessage = null;
+
+        guesser.guessDates(taxonList);
+
+        if (warningMessage != null) {
+            JOptionPane.showMessageDialog(this, "Warning: some dates may not be set correctly - \n" + warningMessage,
+                    "Error guessing dates",
+                    JOptionPane.WARNING_MESSAGE);
+        }
+
+        // adjust the dates to the current timescale...
+        timeScaleChanged();
+
+        dataTableModel.fireTableDataChanged();
+    }
+
+    public class ClearDatesAction extends AbstractAction {
+        /**
+         *
+         */
+        private static final long serialVersionUID = -7281309694753868635L;
+
+        public ClearDatesAction() {
+            super("Clear Dates");
+            setToolTipText("Use this tool to remove sampling dates from each taxon");
+        }
+
+        public void actionPerformed(ActionEvent ae) {
+            clearDates();
+        }
+    }
+
+    public class GuessDatesAction extends AbstractAction {
+        /**
+         *
+         */
+        private static final long serialVersionUID = 8514706149822252033L;
+
+        public GuessDatesAction() {
+            super("Guess Dates");
+            setToolTipText("Use this tool to guess the sampling dates from the taxon labels");
+        }
+
+        public void actionPerformed(ActionEvent ae) {
+            guessDates();
+        }
+    }
+
+    private void calculateHeights() {
+
+        maximumTipHeight = 0.0;
+        if (taxonList == null || taxonList.getTaxonCount() == 0) return;
+
+        heights = null;
+
+        Date mostRecent = null;
+        for (int i = 0; i < taxonList.getTaxonCount(); i++) {
+            Date date = taxonList.getTaxon(i).getDate();
+            if ((date != null) && (mostRecent == null || date.after(mostRecent))) {
+                mostRecent = date;
+            }
+        }
+
+        if (mostRecent != null) {
+            heights = new double[taxonList.getTaxonCount()];
+
+            TimeScale timeScale = new TimeScale(mostRecent.getUnits(), true, mostRecent.getAbsoluteTimeValue());
+            double time0 = timeScale.convertTime(mostRecent.getTimeValue(), mostRecent);
+
+            for (int i = 0; i < taxonList.getTaxonCount(); i++) {
+                Date date = taxonList.getTaxon(i).getDate();
+                if (date != null) {
+                    heights[i] = timeScale.convertTime(date.getTimeValue(), date) - time0;
+                    if (heights[i] > maximumTipHeight) maximumTipHeight = heights[i];
+                }
+            }
+        }
+    }
+
+    class DataTableModel extends AbstractTableModel {
+
+        /**
+         *
+         */
+        private static final long serialVersionUID = -6707994233020715574L;
+        String[] columnNames = {"Name", "Date", "Precision", "Height"};
+
+        public DataTableModel() {
+        }
+
+        public int getColumnCount() {
+            return columnNames.length;
+        }
+
+        public int getRowCount() {
+            if (taxonList == null) return 0;
+
+            return taxonList.getTaxonCount();
+        }
+
+        public Object getValueAt(int row, int col) {
+            Date date = taxonList.getTaxon(row).getDate();
+            switch (col) {
+                case 0:
+                    return taxonList.getTaxonId(row);
+                case 1:
+                    if (date != null) {
+                        return date.getTimeValue();
+                    } else {
+                        return "-";
+                    }
+                case 2:
+                    if (date != null) {
+                        return date.getPrecision();
+                    } else {
+                        return "-";
+                    }
+                case 3:
+                    if (heights != null) {
+                        return heights[row];
+                    } else {
+                        return "0.0";
+                    }
+            }
+            return null;
+        }
+
+        public void setValueAt(Object aValue, int row, int col) {
+            if (col == 0) {
+                taxonList.getTaxon(row).setId(aValue.toString());
+            } else if (col == 1) {
+                Date date = taxonList.getTaxon(row).getDate();
+                if (date != null) {
+                    double d = (Double) aValue;
+                    Date newDate = createDate(d, date.getUnits(), date.isBackwards(), date.getOrigin());
+                    taxonList.getTaxon(row).setDate(newDate);
+                }
+            } else if (col == 2) {
+                Date date = taxonList.getTaxon(row).getDate();
+                if (date != null) {
+                    double d = (Double) aValue;
+                    if (d >= 0.0) {
+                        date.setPrecision(d);
+                    }
+                }
+            }
+
+            timeScaleChanged();
+        }
+
+        public boolean isCellEditable(int row, int col) {
+            if (col == 0) return true;
+            if (col == 1 || col == 2) {
+                Date date = taxonList.getTaxon(row).getDate();
+                return (date != null);
+            }
+            return false;
+        }
+
+        public String getColumnName(int column) {
+            return columnNames[column];
+        }
+
+        public Class getColumnClass(int c) {
+            return getValueAt(0, c).getClass();
+        }
+
+        public String toString() {
+            StringBuffer buffer = new StringBuffer();
+
+            buffer.append(getColumnName(0));
+            for (int j = 1; j < getColumnCount(); j++) {
+                buffer.append("\t");
+                buffer.append(getColumnName(j));
+            }
+            buffer.append("\n");
+
+            for (int i = 0; i < getRowCount(); i++) {
+                buffer.append(getValueAt(i, 0));
+                for (int j = 1; j < getColumnCount(); j++) {
+                    buffer.append("\t");
+                    buffer.append(getValueAt(i, j));
+                }
+                buffer.append("\n");
+            }
+
+            return buffer.toString();
+        }
+    }
+}
diff --git a/src/dr/app/tempest/TempEstApp.java b/src/dr/app/tempest/TempEstApp.java
new file mode 100644
index 0000000..50fa98b
--- /dev/null
+++ b/src/dr/app/tempest/TempEstApp.java
@@ -0,0 +1,141 @@
+/*
+ * TempestApp.java
+ *
+ * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
+ *
+ * This file is part of BEAST.
+ * See the NOTICE file distributed with this work for additional
+ * information regarding copyright ownership and licensing.
+ *
+ * BEAST is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+ *
+ *  BEAST is distributed in the hope that it will be useful,
+ *  but WITHOUT ANY WARRANTY; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ *  GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with BEAST; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA  02110-1301  USA
+ */
+
+package dr.app.tempest;
+
+import dr.app.util.OSType;
+import dr.util.Version;
+import jam.framework.*;
+
+import javax.swing.*;
+import java.awt.*;
+
+/**
+ * @author Andrew Rambaut
+ * @version $Id$
+ */
+public class TempEstApp extends MultiDocApplication {
+    private final static Version version = new Version() {
+        private static final String VERSION = "1.5";
+
+        public String getVersion() {
+            return VERSION;
+        }
+
+        public String getVersionString() {
+            return "v" + VERSION;
+        }
+
+        public String getDateString() {
+            return "2003-2015";
+        }
+
+        public String getBuildString() {
+            return "Build r3656";
+        }
+
+        public String[] getCredits() {
+            return new String[0];
+        }
+
+        public String getHTMLCredits() {
+            return "<p>by<br>" +
+                    "Andrew Rambaut</p>" +
+                    "<p>Institute of Evolutionary Biology, University of Edinburgh<br>" +
+                    "<a href=\"mailto:a.rambaut at ed.ac.uk\">a.rambaut at ed.ac.uk</a></p>" +
+                    "<p>Part of the BEAST package:<br>" +
+                    "<a href=\"http://beast.bio.ed.ac.uk/\">http://beast.bio.ed.ac.uk/</a></p>";
+        }
+
+    };
+
+    public TempEstApp(String nameString, String aboutString, Icon icon,
+                      String websiteURLString, String helpURLString) {
+        super(new TempestMenuBarFactory(), nameString, aboutString, icon, websiteURLString, helpURLString);
+    }
+
+    // Main entry point
+    static public void main(String[] args) {
+
+
+        if (OSType.isMac()) {
+            System.setProperty("apple.laf.useScreenMenuBar","true");
+            System.setProperty("apple.awt.showGrowBox","true");
+            System.setProperty("apple.awt.graphics.UseQuartz","true");
+            UIManager.put("SystemFont", new Font("Lucida Grande", Font.PLAIN, 13));
+            UIManager.put("SmallSystemFont", new Font("Lucida Grande", Font.PLAIN, 11));
+        }
+
+        try {
+
+            try {
+                SwingUtilities.invokeAndWait(new Runnable() {
+                    public void run() {
+                        try {
+                            UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName());
+                        } catch (Exception e) {
+                            e.printStackTrace();
+                        }
+                    }
+                });
+            } catch (Exception e) {
+                e.printStackTrace();
+            }
+
+            java.net.URL url = TempEstApp.class.getResource("images/tempest.png");
+            Icon icon = null;
+
+            if (url != null) {
+                icon = new ImageIcon(url);
+            }
+
+            final String nameString = "TempEst";
+            final String versionString = version.getVersionString();
+            String aboutString = "<html><center><p>Temporal Signal Estimator Tool<br>" +
+                    "Version " + versionString + ", " + version.getDateString() + "</p>" +
+                    version.getHTMLCredits() +
+                    "</center></html>";
+
+            String websiteURLString = "http://tree.bio.ed.ac.uk/";
+            String helpURLString = "http://tree.bio.ed.ac.uk/software/tempest";
+
+            TempEstApp app = new TempEstApp(nameString, aboutString, icon,
+                    websiteURLString, helpURLString);
+            app.setDocumentFrameFactory(new DocumentFrameFactory() {
+                public DocumentFrame createDocumentFrame(Application app, MenuBarFactory menuBarFactory) {
+                    return new TempestFrame(nameString);
+                }
+            });
+            app.initialize();
+            app.doOpen();
+        } catch (Exception e) {
+            JOptionPane.showMessageDialog(new JFrame(), "Fatal exception: " + e,
+                    "Please report this to the authors",
+                    JOptionPane.ERROR_MESSAGE);
+            e.printStackTrace();
+        }
+    }
+
+}
\ No newline at end of file
diff --git a/src/dr/app/oldbeauti/BeautiDefaultFileMenuFactory.java b/src/dr/app/tempest/TempestDefaultFileMenuFactory.java
similarity index 59%
rename from src/dr/app/oldbeauti/BeautiDefaultFileMenuFactory.java
rename to src/dr/app/tempest/TempestDefaultFileMenuFactory.java
index 26eddf9..2863608 100644
--- a/src/dr/app/oldbeauti/BeautiDefaultFileMenuFactory.java
+++ b/src/dr/app/tempest/TempestDefaultFileMenuFactory.java
@@ -1,5 +1,5 @@
 /*
- * BeautiDefaultFileMenuFactory.java
+ * TempestDefaultFileMenuFactory.java
  *
  * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
  *
@@ -23,7 +23,7 @@
  * Boston, MA  02110-1301  USA
  */
 
-package dr.app.oldbeauti;
+package dr.app.tempest;
 
 import jam.framework.AbstractFrame;
 import jam.framework.Application;
@@ -38,10 +38,10 @@ import java.awt.event.KeyEvent;
  *         Date: Dec 26, 2004
  *         Time: 11:01:06 AM
  */
-public class BeautiDefaultFileMenuFactory implements MenuFactory {
+public class TempestDefaultFileMenuFactory implements MenuFactory {
 
 
-    public BeautiDefaultFileMenuFactory() {
+    public TempestDefaultFileMenuFactory() {
     }
 
     public String getMenuName() {
@@ -59,25 +59,46 @@ public class BeautiDefaultFileMenuFactory implements MenuFactory {
         item.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_N, MenuBarFactory.MENU_MASK));
         menu.add(item);
 
-        item = new JMenuItem(frame.getImportAction());
-        item.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_I, MenuBarFactory.MENU_MASK));
+        item = new JMenuItem(application.getOpenAction());
+        item.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_O, MenuBarFactory.MENU_MASK));
         menu.add(item);
 
-        menu.addSeparator();
-
-        item = new JMenuItem(frame.getOpenAction());
-        item.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_O, MenuBarFactory.MENU_MASK));
+        item = new JMenuItem(frame.getSaveAction());
+        item.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_S, MenuBarFactory.MENU_MASK));
         menu.add(item);
 
         item = new JMenuItem(frame.getSaveAsAction());
-        item.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_S, MenuBarFactory.MENU_MASK));
         menu.add(item);
 
         menu.addSeparator();
 
-        item = new JMenuItem(frame.getExportAction());
-        item.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_E, MenuBarFactory.MENU_MASK));
-        menu.add(item);
+        // On Windows and Linux platforms, each window has its own menu so items which are not needed
+        // are simply missing. In contrast, on Mac, the menu is for the application so items should
+        // be enabled/disabled as frames come to the front.
+        if (frame instanceof TempestFrame) {
+//            Action action = frame.getImportAction();
+//            if (action != null) {
+//                item = new JMenuItem(action);
+//                item.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_I, MenuBarFactory.MENU_MASK));
+//                menu.add(item);
+//
+//                menu.addSeparator();
+//            }
+
+            item = new JMenuItem(((TempestFrame)frame).getExportTreeAction());
+            item.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_E, MenuBarFactory.MENU_MASK));
+            menu.add(item);
+
+//            item = new JMenuItem(((TemporalSamplerFrame)frame).getExportGraphicAction());
+//            item.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_E, MenuBarFactory.MENU_MASK + KeyEvent.ALT_MASK));
+//            menu.add(item);
+
+            item = new JMenuItem(((TempestFrame)frame).getExportDataAction());
+            menu.add(item);
+
+        } else {
+            // do nothing
+        }
 
         menu.addSeparator();
 
@@ -90,6 +111,13 @@ public class BeautiDefaultFileMenuFactory implements MenuFactory {
 
         menu.addSeparator();
 
+        if (application.getRecentFileMenu() != null) {
+            JMenu subMenu = application.getRecentFileMenu();
+            menu.add(subMenu);
+
+            menu.addSeparator();
+        }
+
         item = new JMenuItem(application.getExitAction());
         menu.add(item);
     }
@@ -97,4 +125,4 @@ public class BeautiDefaultFileMenuFactory implements MenuFactory {
     public int getPreferredAlignment() {
         return LEFT;
     }
-}
+}
\ No newline at end of file
diff --git a/src/dr/app/tempest/TempestFrame.java b/src/dr/app/tempest/TempestFrame.java
new file mode 100644
index 0000000..1de67f2
--- /dev/null
+++ b/src/dr/app/tempest/TempestFrame.java
@@ -0,0 +1,371 @@
+/*
+ * TempestFrame.java
+ *
+ * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
+ *
+ * This file is part of BEAST.
+ * See the NOTICE file distributed with this work for additional
+ * information regarding copyright ownership and licensing.
+ *
+ * BEAST is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+ *
+ *  BEAST is distributed in the hope that it will be useful,
+ *  but WITHOUT ANY WARRANTY; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ *  GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with BEAST; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA  02110-1301  USA
+ */
+
+package dr.app.tempest;
+
+import dr.evolution.io.*;
+import dr.evolution.tree.NodeRef;
+import dr.evolution.tree.Tree;
+import dr.evolution.tree.FlexibleTree;
+import dr.evolution.util.TaxonList;
+import dr.app.tools.NexusExporter;
+import dr.stats.Regression;
+import dr.util.NumberFormatter;
+import jam.framework.DocumentFrame;
+import jam.framework.Exportable;
+
+import javax.swing.*;
+import javax.swing.plaf.BorderUIResource;
+import java.awt.*;
+import java.awt.datatransfer.Clipboard;
+import java.awt.datatransfer.StringSelection;
+import java.awt.event.ActionEvent;
+import java.io.*;
+import java.util.*;
+
+/**
+ * @author Andrew Rambaut
+ */
+public class TempestFrame extends DocumentFrame {
+
+    private static final long serialVersionUID = 2114148696789612509L;
+
+    private JTabbedPane tabbedPane = new JTabbedPane();
+    private JLabel statusLabel = new JLabel("No data loaded");
+
+    private TempestPanel tempestPanel;
+
+    TaxonList taxa = null;
+    java.util.List<Tree> trees = new ArrayList<Tree>();
+
+    public TempestFrame(String title) {
+        super();
+
+        setTitle(title);
+
+        // Prevent the application to close in requestClose()
+        // after a user cancel or a failure in beast file generation
+        setDefaultCloseOperation(DO_NOTHING_ON_CLOSE);
+
+        getOpenAction().setEnabled(true);
+        getSaveAction().setEnabled(false);
+        getSaveAsAction().setEnabled(false);
+
+        getFindAction().setEnabled(false);
+
+        getCutAction().setEnabled(false);
+        getPasteAction().setEnabled(false);
+        getDeleteAction().setEnabled(false);
+        getSelectAllAction().setEnabled(false);
+
+        getCopyAction().setEnabled(false);
+
+        getZoomWindowAction().setEnabled(false);
+    }
+
+    public void initializeComponents() {
+
+        tempestPanel = new TempestPanel(this, taxa, trees.get(0));
+
+        JPanel panel = new JPanel(new BorderLayout(0, 0));
+        panel.add(tempestPanel, BorderLayout.CENTER);
+
+        statusLabel.setBorder(new BorderUIResource.EmptyBorderUIResource(new Insets(0, 12, 6, 12)));
+        panel.add(statusLabel, BorderLayout.SOUTH);
+
+        getContentPane().setLayout(new BorderLayout(0, 0));
+        getContentPane().add(panel, BorderLayout.CENTER);
+
+        setSize(new Dimension(1024, 768));
+
+        setStatusMessage();
+    }
+
+    public void timeScaleChanged() {
+        tempestPanel.timeScaleChanged();
+        setStatusMessage();
+    }
+
+    protected boolean readFromFile(File file) throws IOException {
+        Reader reader = new FileReader(file);
+
+        BufferedReader bufferedReader = new BufferedReader(reader);
+        String line = bufferedReader.readLine();
+        while (line != null && line.length() == 0) {
+            line = bufferedReader.readLine();
+        }
+
+        boolean isNexus = (line != null && line.toUpperCase().contains("#NEXUS"));
+
+        reader = new FileReader(file);
+
+        Tree tree = null;
+        try {
+            if (isNexus) {
+                NexusImporter importer = new NexusImporter(reader);
+                tree = importer.importTree(taxa);
+            } else {
+                NewickImporter importer = new NewickImporter(reader);
+                tree = importer.importTree(taxa);
+            }
+
+        } catch (Importer.ImportException ime) {
+            JOptionPane.showMessageDialog(this, "Error parsing imported file: " + ime,
+                    "Error reading file",
+                    JOptionPane.ERROR_MESSAGE);
+            ime.printStackTrace();
+            return false;
+        } catch (IOException ioex) {
+            JOptionPane.showMessageDialog(this, "File I/O Error: " + ioex,
+                    "File I/O Error",
+                    JOptionPane.ERROR_MESSAGE);
+            ioex.printStackTrace();
+            return false;
+        } catch (Exception ex) {
+            JOptionPane.showMessageDialog(this, "Fatal exception: " + ex,
+                    "Error reading file",
+                    JOptionPane.ERROR_MESSAGE);
+            ex.printStackTrace();
+            return false;
+        }
+
+
+        if (tree == null) {
+            JOptionPane.showMessageDialog(this, "The file is not in a suitable format or contains no trees.",
+                    "Error reading file",
+                    JOptionPane.ERROR_MESSAGE);
+            return false;
+        }
+
+        FlexibleTree binaryTree = new FlexibleTree(tree, true);
+        binaryTree.resolveTree();
+        trees.add(binaryTree);
+        if (taxa == null) {
+            taxa = binaryTree;
+        }
+
+        getExportTreeAction().setEnabled(true);
+        getExportDataAction().setEnabled(true);
+
+        return true;
+    }
+
+    protected boolean writeToFile(File file) throws IOException {
+        return false;  //To change body of implemented methods use File | Settings | File Templates.
+    }
+
+    protected void doExportTree() {
+        FileDialog dialog = new FileDialog(this,
+                "Export Tree File...",
+                FileDialog.SAVE);
+
+        dialog.setVisible(true);
+        if (dialog.getFile() != null) {
+            File file = new File(dialog.getDirectory(), dialog.getFile());
+
+            PrintStream ps = null;
+            try {
+                ps = new PrintStream(file);
+                writeTreeFile(ps, false);
+                ps.close();
+            } catch (IOException ioe) {
+                JOptionPane.showMessageDialog(this, "Error writing tree file: " + ioe.getMessage(),
+                        "Export Error",
+                        JOptionPane.ERROR_MESSAGE);
+            }
+
+        }
+    }
+
+    private void doExportTimeTree() {
+        FileDialog dialog = new FileDialog(this,
+                "Export Time Tree File...",
+                FileDialog.SAVE);
+
+        dialog.setVisible(true);
+        if (dialog.getFile() != null) {
+            File file = new File(dialog.getDirectory(), dialog.getFile());
+
+            PrintStream ps = null;
+            try {
+                ps = new PrintStream(file);
+                writeTimeTreeFile(ps);
+                ps.close();
+            } catch (IOException ioe) {
+                JOptionPane.showMessageDialog(this, "Error writing tree file: " + ioe.getMessage(),
+                        "Export Error",
+                        JOptionPane.ERROR_MESSAGE);
+            }
+
+        }
+    }
+
+    protected void writeTimeTreeFile(PrintStream ps) throws IOException {
+
+        FlexibleTree tree = new FlexibleTree(tempestPanel.getTreeAsViewed());
+
+        Regression r = tempestPanel.getTemporalRooting().getRootToTipRegression(tempestPanel.getTreeAsViewed());
+
+        for (int i = 0; i < tree.getInternalNodeCount(); i++) {
+            NodeRef node = tree.getInternalNode(i);
+            double height = tree.getNodeHeight(node);
+            tree.setNodeHeight(node, height/r.getGradient());
+        }
+
+        TreeUtils.setHeightsFromDates(tree);
+
+        NexusExporter nexusExporter = new NexusExporter(new PrintStream(ps));
+        nexusExporter.exportTree(tree);
+    }
+
+
+    protected void writeTreeFile(PrintStream ps, boolean newickFormat) throws IOException {
+
+        Tree tree = tempestPanel.getTreeAsViewed();
+
+//        if (newickFormat) {
+//            NewickExporter newickExporter = new NewickExporter(ps);
+//            newickExporter.exportTree(tree);
+//        } else {
+        NexusExporter nexusExporter = new NexusExporter(new PrintStream(ps));
+        nexusExporter.exportTree(tree);
+//        }
+    }
+
+//    protected void doExportGraphic() {
+//        ExportDialog export = new ExportDialog();
+//        export.showExportDialog( this, "Export view as ...", treeViewer.getContentPane(), "export" );
+//    }
+
+    protected void doExportData() {
+        FileDialog dialog = new FileDialog(this,
+                "Export Data File...",
+                FileDialog.SAVE);
+
+        dialog.setVisible(true);
+        if (dialog.getFile() != null) {
+            File file = new File(dialog.getDirectory(), dialog.getFile());
+
+            Writer writer = null;
+            try {
+                writer = new PrintWriter(file);
+                tempestPanel.writeDataFile(writer);
+                writer.close();
+            } catch (IOException ioe) {
+                JOptionPane.showMessageDialog(this, "Error writing data file: " + ioe.getMessage(),
+                        "Export Error",
+                        JOptionPane.ERROR_MESSAGE);
+            }
+
+        }
+    }
+
+    private void setStatusMessage() {
+        Tree tree = tempestPanel.getTree();
+        if (tree != null) {
+            String message = "";
+            message += "Tree loaded, " + tree.getTaxonCount() + " taxa";
+
+            TemporalRooting tr = tempestPanel.getTemporalRooting();
+            if (tr.isContemporaneous()) {
+                message += ", contemporaneous tips";
+            } else {
+                NumberFormatter nf = new NumberFormatter(3);
+                message += ", dated tips with range " + nf.format(tr.getDateRange());
+            }
+            statusLabel.setText(message);
+        }
+    }
+
+    public JComponent getExportableComponent() {
+
+        JComponent exportable = null;
+        Component comp = tabbedPane.getSelectedComponent();
+
+        if (comp instanceof Exportable) {
+            exportable = ((Exportable) comp).getExportableComponent();
+        } else if (comp instanceof JComponent) {
+            exportable = (JComponent) comp;
+        }
+
+        return exportable;
+    }
+
+    @Override
+    public void doCopy() {
+        StringWriter writer = new StringWriter();
+        PrintWriter pwriter = new PrintWriter(writer);
+
+        for (String tip : tempestPanel.getSelectedTips()) {
+            pwriter.println(tip);
+        }
+
+        Clipboard clipboard = Toolkit.getDefaultToolkit().getSystemClipboard();
+        StringSelection selection = new StringSelection(writer.toString());
+        clipboard.setContents(selection, selection);
+    }
+
+    public Action getExportTreeAction() {
+        return exportTreeAction;
+    }
+
+//    public Action getExportGraphicAction() {
+//        return exportGraphicAction;
+//    }
+
+    public Action getExportDataAction() {
+        return exportDataAction;
+    }
+
+    protected AbstractAction exportTreeAction = new AbstractAction("Export Tree...") {
+        public void actionPerformed(ActionEvent ae) {
+            doExportTree();
+        }
+    };
+
+//    protected AbstractAction exportGraphicAction = new AbstractAction("Export Graphic...") {
+//        public void actionPerformed(ActionEvent ae) {
+//            doExportGraphic();
+//        }
+//    };
+
+    protected AbstractAction exportDataAction = new AbstractAction("Export Data...") {
+        public void actionPerformed(ActionEvent ae) {
+            doExportData();
+        }
+    };
+
+    public Action getExportTimeTreeAction() {
+        return exportTimeTreeAction;
+    }
+
+    protected AbstractAction exportTimeTreeAction = new AbstractAction("Export Time Tree...") {
+        public void actionPerformed(ActionEvent ae) {
+            doExportTimeTree();
+        }
+    };
+
+
+}
\ No newline at end of file
diff --git a/src/dr/app/oldbeauti/BeautiMacFileMenuFactory.java b/src/dr/app/tempest/TempestMacFileMenuFactory.java
similarity index 53%
rename from src/dr/app/oldbeauti/BeautiMacFileMenuFactory.java
rename to src/dr/app/tempest/TempestMacFileMenuFactory.java
index 8c210ff..1b39958 100755
--- a/src/dr/app/oldbeauti/BeautiMacFileMenuFactory.java
+++ b/src/dr/app/tempest/TempestMacFileMenuFactory.java
@@ -1,5 +1,5 @@
 /*
- * BeautiMacFileMenuFactory.java
+ * TempestMacFileMenuFactory.java
  *
  * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
  *
@@ -23,7 +23,7 @@
  * Boston, MA  02110-1301  USA
  */
 
-package dr.app.oldbeauti;
+package dr.app.tempest;
 
 import jam.framework.MenuFactory;
 import jam.framework.AbstractFrame;
@@ -39,9 +39,9 @@ import java.awt.event.ActionEvent;
  *         Date: Dec 26, 2004
  *         Time: 11:02:45 AM
  */
-public class BeautiMacFileMenuFactory implements MenuFactory {
+public class TempestMacFileMenuFactory implements MenuFactory {
 
-    public BeautiMacFileMenuFactory() {
+    public TempestMacFileMenuFactory() {
     }
 
     public String getMenuName() {
@@ -57,75 +57,105 @@ public class BeautiMacFileMenuFactory implements MenuFactory {
         item.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_N, MenuBarFactory.MENU_MASK));
         menu.add(item);
 
-        if (frame instanceof BeautiFrame) {
-            item = new JMenuItem(frame.getImportAction());
-            item.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_I, MenuBarFactory.MENU_MASK));
-            menu.add(item);
+        item = new JMenuItem(application.getOpenAction());
+        item.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_O, MenuBarFactory.MENU_MASK));
+        menu.add(item);
 
-            menu.addSeparator();
+        if (frame != null) {
+            item = new JMenuItem(frame.getCloseWindowAction());
+            item.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_W, MenuBarFactory.MENU_MASK));
+            menu.add(item);
 
-            item = new JMenuItem(((BeautiFrame)frame).getOpenAction());
-            item.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_O, MenuBarFactory.MENU_MASK));
+            item = new JMenuItem(frame.getSaveAction());
+            item.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_S, MenuBarFactory.MENU_MASK));
             menu.add(item);
 
             item = new JMenuItem(frame.getSaveAsAction());
+            item.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_S, MenuBarFactory.MENU_MASK + ActionEvent.SHIFT_MASK));
+            menu.add(item);
+        } else {
+            // No frame available so create a disabled menu for the default menu bar
+            item = new JMenuItem("Close");
+            item.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_W, MenuBarFactory.MENU_MASK));
+            item.setEnabled(false);
+            menu.add(item);
+
+            item = new JMenuItem("Save");
             item.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_S, MenuBarFactory.MENU_MASK));
+            item.setEnabled(false);
+            menu.add(item);
+
+            item = new JMenuItem("Save As...");
+            item.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_S, MenuBarFactory.MENU_MASK + ActionEvent.SHIFT_MASK));
+            item.setEnabled(false);
             menu.add(item);
+        }
 
-            menu.addSeparator();
+        menu.addSeparator();
+
+        if (frame instanceof TempestFrame) {
 
-            item = new JMenuItem(frame.getExportAction());
+            item = new JMenuItem(((TempestFrame)frame).getExportTreeAction());
             item.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_E, MenuBarFactory.MENU_MASK));
             menu.add(item);
-        } else {
-            // If the frame is not a BeautiFrame then create a dummy set of disabled menu options.
-            // At present the only situation where this may happen is in Mac OS X when no windows
-            // are open and the menubar is created by the hidden frame.
 
-            item = new JMenuItem("Import NEXUS...");
-            item.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_I, MenuBarFactory.MENU_MASK));
-            item.setEnabled(false);
-            menu.add(item);
+//            item = new JMenuItem(((TemporalSamplerFrame)frame).getExportGraphicAction());
+//            item.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_E, MenuBarFactory.MENU_MASK + KeyEvent.ALT_MASK));
+//            menu.add(item);
 
-            menu.addSeparator();
+            item = new JMenuItem(((TempestFrame)frame).getExportDataAction());
+            menu.add(item);
 
-            item = new JMenuItem("Apply Template...");
-            item.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_O, MenuBarFactory.MENU_MASK));
-            item.setEnabled(false);
+            item = new JMenuItem(((TempestFrame)frame).getExportTimeTreeAction());
             menu.add(item);
 
-            item = new JMenuItem("Save Template As...");
-            item.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_S, MenuBarFactory.MENU_MASK));
+        } else {
+            // If the frame is not a BeautiFrame then create a dummy set of disabled menu options.
+            // At present the only situation where this may happen is in Mac OS X when no windows
+            // are open and the menubar is created by the hidden frame.
+
+            item = new JMenuItem("Export Tree...");
+            item.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_E, MenuBarFactory.MENU_MASK));
             item.setEnabled(false);
             menu.add(item);
 
-            menu.addSeparator();
+//            item = new JMenuItem("Export Graphic...");
+//            item.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_E, MenuBarFactory.MENU_MASK + KeyEvent.ALT_MASK));
+//            item.setEnabled(false);
+//            menu.add(item);
 
-            item = new JMenuItem("Generate BEAST File...");
-            item.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_E, MenuBarFactory.MENU_MASK));
+            item = new JMenuItem("Export Data...");
             item.setEnabled(false);
             menu.add(item);
         }
 
         menu.addSeparator();
 
-        item = new JMenuItem(frame.getCloseWindowAction());
-        item.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_W, MenuBarFactory.MENU_MASK));
-        menu.add(item);
+        if (frame != null) {
+            item = new JMenuItem(frame.getPrintAction());
+            item.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_P, MenuBarFactory.MENU_MASK));
+            menu.add(item);
 
-        menu.addSeparator();
+            item = new JMenuItem(application.getPageSetupAction());
+            item.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_P, MenuBarFactory.MENU_MASK + ActionEvent.SHIFT_MASK));
+            menu.add(item);
 
-        item = new JMenuItem(frame.getPrintAction());
-        item.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_P, MenuBarFactory.MENU_MASK));
-        menu.add(item);
+        } else {
+            // No frame available so create a disabled menu for the default menu bar
+            item = new JMenuItem("Print...");
+            item.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_P, MenuBarFactory.MENU_MASK));
+            item.setEnabled(false);
+            menu.add(item);
 
-        item = new JMenuItem(application.getPageSetupAction());
-        item.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_P, MenuBarFactory.MENU_MASK + ActionEvent.SHIFT_MASK));
-        menu.add(item);
+            item = new JMenuItem("Page Setup...");
+            item.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_P, MenuBarFactory.MENU_MASK + ActionEvent.SHIFT_MASK));
+            item.setEnabled(false);
+            menu.add(item);
+        }
 
     }
 
     public int getPreferredAlignment() {
         return LEFT;
     }
-}
+}
\ No newline at end of file
diff --git a/src/dr/app/oldbeauti/BeautiMenuBarFactory.java b/src/dr/app/tempest/TempestMenuBarFactory.java
similarity index 80%
rename from src/dr/app/oldbeauti/BeautiMenuBarFactory.java
rename to src/dr/app/tempest/TempestMenuBarFactory.java
index a8badb9..0b0cd36 100755
--- a/src/dr/app/oldbeauti/BeautiMenuBarFactory.java
+++ b/src/dr/app/tempest/TempestMenuBarFactory.java
@@ -1,5 +1,5 @@
 /*
- * BeautiMenuBarFactory.java
+ * TempestMenuBarFactory.java
  *
  * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
  *
@@ -23,8 +23,9 @@
  * Boston, MA  02110-1301  USA
  */
 
-package dr.app.oldbeauti;
+package dr.app.tempest;
 
+import jam.mac.MacEditMenuFactory;
 import jam.mac.MacHelpMenuFactory;
 import jam.mac.MacWindowMenuFactory;
 import jam.framework.DefaultMenuBarFactory;
@@ -34,16 +35,16 @@ import jam.framework.DefaultHelpMenuFactory;
 import dr.app.util.OSType;
 
 
-public class BeautiMenuBarFactory extends DefaultMenuBarFactory {
+public class TempestMenuBarFactory extends DefaultMenuBarFactory {
 
-	public BeautiMenuBarFactory() {
+	public TempestMenuBarFactory() {
 		if (OSType.isMac()) {
-			registerMenuFactory(new BeautiMacFileMenuFactory());
-			registerMenuFactory(new DefaultEditMenuFactory());
+			registerMenuFactory(new TempestMacFileMenuFactory());
+			registerMenuFactory(new MacEditMenuFactory());
 			registerMenuFactory(new MacWindowMenuFactory());
 			registerMenuFactory(new MacHelpMenuFactory());
 		} else {
-			registerMenuFactory(new BeautiDefaultFileMenuFactory());
+			registerMenuFactory(new TempestDefaultFileMenuFactory());
 			registerMenuFactory(new DefaultEditMenuFactory());
 			registerMenuFactory(new DefaultHelpMenuFactory());
 		}
diff --git a/src/dr/app/tempest/TempestPanel.java b/src/dr/app/tempest/TempestPanel.java
new file mode 100644
index 0000000..bccd29e
--- /dev/null
+++ b/src/dr/app/tempest/TempestPanel.java
@@ -0,0 +1,907 @@
+/*
+ * TempestPanel.java
+ *
+ * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
+ *
+ * This file is part of BEAST.
+ * See the NOTICE file distributed with this work for additional
+ * information regarding copyright ownership and licensing.
+ *
+ * BEAST is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+ *
+ *  BEAST is distributed in the hope that it will be useful,
+ *  but WITHOUT ANY WARRANTY; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ *  GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with BEAST; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA  02110-1301  USA
+ */
+
+package dr.app.tempest;
+
+import dr.app.gui.chart.*;
+import dr.app.gui.util.LongTask;
+import dr.evolution.tree.NodeRef;
+import dr.evolution.tree.Tree;
+import dr.evolution.util.TaxonList;
+import dr.math.MathUtils;
+import dr.stats.DiscreteStatistics;
+import dr.stats.Regression;
+import dr.stats.Variate;
+import dr.util.NumberFormatter;
+import figtree.panel.FigTreePanel;
+import figtree.treeviewer.TreePaneSelector;
+import figtree.treeviewer.TreeSelectionListener;
+import figtree.treeviewer.TreeViewer;
+import jam.framework.Exportable;
+import jam.panels.SearchPanel;
+import jam.panels.SearchPanelListener;
+import jam.table.TableRenderer;
+import jam.toolbar.Toolbar;
+import jam.toolbar.ToolbarAction;
+import jam.toolbar.ToolbarButton;
+import jam.util.IconUtils;
+import jebl.evolution.graphs.Node;
+import jebl.evolution.taxa.Taxon;
+import jebl.evolution.trees.RootedTree;
+
+import javax.swing.*;
+import javax.swing.plaf.BorderUIResource;
+import javax.swing.table.AbstractTableModel;
+import java.awt.*;
+import java.awt.event.ActionEvent;
+import java.awt.event.ActionListener;
+import java.io.PrintWriter;
+import java.io.Writer;
+import java.text.DecimalFormat;
+import java.util.*;
+import java.util.List;
+
+/**
+ * @author Andrew Rambaut
+ * @version $Id: PriorsPanel.java,v 1.9 2006/09/05 13:29:34 rambaut Exp $
+ */
+public class TempestPanel extends JPanel implements Exportable {
+
+    public static final String COLOUR = "Colour...";
+    public static final String CLEAR_COLOURING = "Clear Colouring...";
+
+    StatisticsModel statisticsModel;
+    JTable statisticsTable = null;
+
+    private Tree tree = null;
+    private Tree currentTree = null;
+    private Tree bestFittingRootTree = null;
+
+    private final TempestFrame frame;
+    private final JTabbedPane tabbedPane = new JTabbedPane();
+    private final JTextArea textArea = new JTextArea();
+    private final JCheckBox showMRCACheck = new JCheckBox("Show ancestor traces");
+
+    //    JTreeDisplay treePanel;
+    private final SamplesPanel samplesPanel;
+    private final FigTreePanel treePanel;
+
+    private SearchPanel filterPanel;
+    private JPopupMenu filterPopup;
+
+    JChartPanel rootToTipPanel;
+    JChart rootToTipChart;
+    ScatterPlot rootToTipPlot;
+
+    private static final boolean SHOW_NODE_DENSITY = true;
+    JChartPanel nodeDensityPanel;
+    JChart nodeDensityChart;
+    ScatterPlot nodeDensityPlot;
+
+    JChartPanel residualPanel;
+    JChart residualChart;
+    ScatterPlot residualPlot;
+
+    ErrorBarPlot errorBarPlot;
+    ParentPlot mrcaPlot;
+
+    Map<Node, Integer> pointMap = new HashMap<Node, Integer>();
+
+    Set<Integer> selectedPoints = new HashSet<Integer>();
+
+    private boolean bestFittingRoot;
+    private TemporalRooting.RootingFunction rootingFunction;
+    private TemporalRooting temporalRooting = null;
+
+    public TempestPanel(TempestFrame parent, TaxonList taxa, Tree tree) {
+        frame = parent;
+
+        samplesPanel = new SamplesPanel(parent, taxa);
+
+        tabbedPane.addTab("Sample Dates", samplesPanel);
+
+        statisticsModel = new StatisticsModel();
+        statisticsTable = new JTable(statisticsModel);
+
+        statisticsTable.getColumnModel().getColumn(0).setCellRenderer(
+                new TableRenderer(SwingConstants.RIGHT, new Insets(0, 4, 0, 4)));
+        statisticsTable.getColumnModel().getColumn(1).setCellRenderer(
+                new TableRenderer(SwingConstants.LEFT, new Insets(0, 4, 0, 4)));
+
+        JScrollPane scrollPane = new JScrollPane(statisticsTable,
+                JScrollPane.VERTICAL_SCROLLBAR_ALWAYS,
+                JScrollPane.HORIZONTAL_SCROLLBAR_ALWAYS);
+
+        Box controlPanel1 = new Box(BoxLayout.PAGE_AXIS);
+        controlPanel1.setOpaque(false);
+
+        JPanel panel3 = new JPanel(new BorderLayout(0, 0));
+        panel3.setOpaque(false);
+        rootingCheck = new JCheckBox("Best-fitting root");
+        panel3.add(rootingCheck, BorderLayout.CENTER);
+
+        controlPanel1.add(panel3);
+
+        final JComboBox rootingFunctionCombo = new JComboBox(TemporalRooting.RootingFunction.values());
+
+        JPanel panel4 = new JPanel(new BorderLayout(0,0));
+        panel4.setOpaque(false);
+        panel4.add(new JLabel("Function: "), BorderLayout.WEST);
+        panel4.add(rootingFunctionCombo, BorderLayout.CENTER);
+        controlPanel1.add(panel4);
+
+        JPanel panel1 = new JPanel(new BorderLayout(0, 0));
+
+        panel1.setOpaque(false);
+        panel1.add(scrollPane, BorderLayout.CENTER);
+        panel1.add(controlPanel1, BorderLayout.NORTH);
+
+        // Set up tree panel
+
+        Toolbar toolBar = new Toolbar();
+        toolBar.setOpaque(false);
+        toolBar.setBorder(BorderFactory.createMatteBorder(0, 0, 1, 0, Color.darkGray));
+
+        toolBar.setRollover(true);
+        toolBar.setFloatable(false);
+
+        Icon colourToolIcon = IconUtils.getIcon(this.getClass(), "images/coloursTool.png");
+
+        final ToolbarAction colourToolbarAction = new ToolbarAction("Colour", COLOUR, colourToolIcon) {
+            public void actionPerformed(ActionEvent e){
+                colourSelected();
+            }
+        };
+        ToolbarButton colourToolButton = new ToolbarButton(colourToolbarAction, true);
+        colourToolButton.setFocusable(false);
+        toolBar.addComponent(colourToolButton);
+
+        toolBar.addFlexibleSpace();
+
+        filterPopup = new JPopupMenu();
+
+        final ButtonGroup bg = new ButtonGroup();
+        boolean first = true;
+        for (TreeViewer.TextSearchType searchType : TreeViewer.TextSearchType.values()) {
+            final JCheckBoxMenuItem menuItem = new JCheckBoxMenuItem(searchType.toString());
+            if (first) {
+                menuItem.setSelected(true);
+                first = false;
+            }
+            filterPopup.add(menuItem);
+            bg.add(menuItem);
+        }
+        filterPanel = new SearchPanel("Filter", filterPopup, true);
+        filterPanel.setOpaque(false);
+//        filterPanel.getSearchText().requestFocus();
+        filterPanel.addSearchPanelListener(new SearchPanelListener() {
+
+            /**
+             * Called when the user requests a search by pressing return having
+             * typed a search string into the text field. If the continuousUpdate
+             * flag is true then this method is called when the user types into
+             * the text field.
+             *
+             * @param searchString the user's search string
+             */
+            public void searchStarted(String searchString) {
+                Enumeration e = bg.getElements();
+                String value = null;
+                while (e.hasMoreElements()) {
+                    AbstractButton button = (AbstractButton)e.nextElement();
+                    if (button.isSelected()) {
+                        value = button.getText();
+                    }
+                }
+
+                for (TreeViewer.TextSearchType searchType : TreeViewer.TextSearchType.values()) {
+                    if (searchType.toString().equals(value)) {
+                        treePanel.getTreeViewer().selectTaxa("!name", searchType, searchString, false);
+                    }
+                }
+            }
+
+            /**
+             * Called when the user presses the cancel search button or presses
+             * escape while the search is in focus.
+             */
+            public void searchStopped() {
+//                treeViewer.clearSelectedTaxa();
+            }
+        });
+
+        JPanel panel5 = new JPanel(new FlowLayout());
+        panel5.setOpaque(false);
+        panel5.add(filterPanel);
+        toolBar.addComponent(panel5);
+
+        treePanel = new FigTreePanel(FigTreePanel.Style.SIMPLE);
+
+        JPanel panel2 = new JPanel(new BorderLayout(0, 0));
+        panel2.setOpaque(false);
+        panel2.add(treePanel, BorderLayout.CENTER);
+        panel2.add(toolBar, BorderLayout.NORTH);
+
+        tabbedPane.add("Tree", panel2);
+
+        treePanel.getTreeViewer().setSelectionMode(TreePaneSelector.SelectionMode.TAXA);
+        treePanel.getTreeViewer().addTreeSelectionListener(new TreeSelectionListener() {
+            public void selectionChanged() {
+                treeSelectionChanged();
+            }
+        });
+
+        rootToTipChart = new JChart(new LinearAxis(), new LinearAxis(Axis.AT_ZERO, Axis.AT_MINOR_TICK));
+
+        ChartSelector selector1 = new ChartSelector(rootToTipChart);
+
+        rootToTipPanel = new JChartPanel(rootToTipChart, "", "time", "divergence");
+        JPanel panel = new JPanel(new BorderLayout());
+        panel.add(rootToTipPanel, BorderLayout.CENTER);
+        panel.add(showMRCACheck, BorderLayout.SOUTH);
+        panel.setOpaque(false);
+
+        tabbedPane.add("Root-to-tip", panel);
+
+        residualChart = new JChart(new LinearAxis(), new LinearAxis(Axis.AT_ZERO, Axis.AT_MINOR_TICK));
+
+        ChartSelector selector2 = new ChartSelector(residualChart);
+
+        residualPanel = new JChartPanel(residualChart, "", "time", "residual");
+        residualPanel.setOpaque(false);
+
+        tabbedPane.add("Residuals", residualPanel);
+
+//        textArea.setEditable(false);
+
+        JPanel panel6 = new JPanel(new BorderLayout(0, 0));
+        panel6.setOpaque(false);
+        panel6.add(tabbedPane, BorderLayout.CENTER);
+//        panel6.add(textArea, BorderLayout.SOUTH);
+
+        if (SHOW_NODE_DENSITY) {
+            nodeDensityChart = new JChart(new LinearAxis(), new LinearAxis(Axis.AT_ZERO, Axis.AT_MINOR_TICK));
+            nodeDensityPanel = new JChartPanel(nodeDensityChart, "", "time", "node density");
+            JPanel panel7 = new JPanel(new BorderLayout());
+            panel7.add(nodeDensityPanel, BorderLayout.CENTER);
+            panel7.setOpaque(false);
+
+            ChartSelector selector3 = new ChartSelector(nodeDensityChart);
+
+            tabbedPane.add("Node density", panel7);
+        }
+
+
+        JSplitPane splitPane = new JSplitPane(JSplitPane.HORIZONTAL_SPLIT, panel1, tabbedPane);
+        splitPane.setDividerLocation(220);
+        splitPane.setContinuousLayout(true);
+        splitPane.setBorder(BorderFactory.createEmptyBorder());
+        splitPane.setOpaque(false);
+
+        setOpaque(false);
+        setLayout(new BorderLayout(0, 0));
+        setBorder(new BorderUIResource.EmptyBorderUIResource(new Insets(12, 12, 12, 12)));
+
+        add(splitPane, BorderLayout.CENTER);
+
+        rootingCheck.addActionListener(new ActionListener() {
+            public void actionPerformed(ActionEvent e) {
+                setBestFittingRoot(rootingCheck.isSelected(), (TemporalRooting.RootingFunction) rootingFunctionCombo.getSelectedItem());
+            }
+        });
+
+        rootingFunctionCombo.addActionListener(new ActionListener() {
+            public void actionPerformed(ActionEvent e) {
+                setBestFittingRoot(rootingCheck.isSelected(), (TemporalRooting.RootingFunction) rootingFunctionCombo.getSelectedItem());
+            }
+        });
+
+        showMRCACheck.addActionListener(new ActionListener() {
+            public void actionPerformed(ActionEvent e) {
+                setupPanel();
+            }
+        });
+
+
+        setTree(tree);
+    }
+
+    public List<String> getSelectedTips() {
+        List<String> tips = new ArrayList<String>();
+        jebl.evolution.trees.Tree tree = treePanel.getTreeViewer().getTrees().get(0);
+
+        for (Node node : treePanel.getTreeViewer().getSelectedTips()) {
+            tips.add(tree.getTaxon(node).getName());
+        }
+        return tips;
+    }
+
+    private static Color lastColor = Color.GRAY;
+
+    private void colourSelected() {
+        Color color = JColorChooser.showDialog(this, "Select Colour", lastColor);
+        if (color != null) {
+            treePanel.getTreeViewer().annotateSelectedTips("!color", color);
+            lastColor = color;
+        }
+        setupPanel();
+    }
+
+    private void treeSelectionChanged() {
+        Set<Node> selectedTips = treePanel.getTreeViewer().getSelectedTips();
+        frame.getCopyAction().setEnabled(selectedTips != null && selectedTips.size() > 0);
+        selectedPoints = new HashSet<Integer>();
+        for (Node node : selectedTips) {
+            selectedPoints.add(pointMap.get(node));
+        }
+        if (rootToTipPlot != null) {
+            rootToTipPlot.setSelectedPoints(selectedPoints);
+        }
+        if (residualPlot != null) {
+            residualPlot.setSelectedPoints(selectedPoints);
+        }
+        if (SHOW_NODE_DENSITY && nodeDensityPlot != null) {
+            nodeDensityPlot.setSelectedPoints(selectedPoints);
+        }
+
+        selectMRCA();
+    }
+
+    private void plotSelectionChanged(final Set<Integer> selectedPoints) {
+        this.selectedPoints = selectedPoints;
+        Set<String> selectedTaxa = new HashSet<String>();
+        for (Integer i : selectedPoints) {
+            selectedTaxa.add(tree.getTaxon(i).toString());
+        }
+
+        treePanel.getTreeViewer().selectTaxa(selectedTaxa);
+
+        selectMRCA();
+    }
+
+    private void selectMRCA() {
+        if (mrcaPlot == null) return;
+
+        if (selectedPoints != null && selectedPoints.size() > 0) {
+
+            Set<String> selectedTaxa = new HashSet<String>();
+            for (Integer i : selectedPoints) {
+                selectedTaxa.add(tree.getTaxon(i).toString());
+            }
+
+            Regression r = temporalRooting.getRootToTipRegression(currentTree);
+            NodeRef mrca = Tree.Utils.getCommonAncestorNode(currentTree, selectedTaxa);
+            double mrcaDistance1 = temporalRooting.getRootToTipDistance(currentTree, mrca);
+            double mrcaTime1 = r.getX(mrcaDistance1);
+            if (tree.isExternal(mrca)) {
+                mrca = tree.getParent(mrca);
+            }
+            double mrcaDistance = temporalRooting.getRootToTipDistance(currentTree, mrca);
+            double mrcaTime = r.getX(mrcaDistance);
+
+            mrcaPlot.setSelectedPoints(selectedPoints, mrcaTime, mrcaDistance);
+        } else {
+            mrcaPlot.clearSelection();
+        }
+        repaint();
+    }
+
+    public void timeScaleChanged() {
+        bestFittingRootTree = null;
+        if (rootingCheck.isSelected()) {
+            rootingCheck.setSelected(false);
+        } else {
+            setupPanel();
+        }
+    }
+
+    public JComponent getExportableComponent() {
+        return (JComponent) tabbedPane.getSelectedComponent();
+    }
+
+    public void setTree(Tree tree) {
+        this.tree = tree;
+        setupPanel();
+    }
+
+    public void setBestFittingRoot(boolean bestFittingRoot, final TemporalRooting.RootingFunction rootingFunction) {
+        this.bestFittingRoot = bestFittingRoot;
+        if (this.rootingFunction != rootingFunction) {
+            bestFittingRootTree = null;
+            this.rootingFunction = rootingFunction;
+        }
+        if (this.bestFittingRoot && bestFittingRootTree == null) {
+            findRoot();
+        }
+
+        setupPanel();
+    }
+
+    public Tree getTree() {
+        return tree;
+    }
+
+    public Tree getTreeAsViewed() {
+        return currentTree;
+    }
+
+    public void writeDataFile(Writer writer) {
+        PrintWriter pw = new PrintWriter(writer);
+        String labels[] = temporalRooting.getTipLabels(currentTree);
+        double yValues[] = temporalRooting.getRootToTipDistances(currentTree);
+
+        if (temporalRooting.isContemporaneous()) {
+            double meanY = DiscreteStatistics.mean(yValues);
+            pw.println("tip\tdistance\tdeviation");
+            for (int i = 0; i < yValues.length; i++) {
+                pw.println(labels[i] + "\t" + "\t" + yValues[i] + "\t" + (yValues[i] - meanY));
+            }
+        } else {
+            double xValues[] = temporalRooting.getTipDates(currentTree);
+            Regression r = temporalRooting.getRootToTipRegression(currentTree);
+            double[] residuals = temporalRooting.getRootToTipResiduals(currentTree, r);
+            pw.println("tip\tdate\tdistance\tresidual");
+            for (int i = 0; i < xValues.length; i++) {
+                pw.println(labels[i] + "\t" + xValues[i] + "\t" + yValues[i] + "\t" + residuals[i]);
+            }
+        }
+    }
+
+    public void setupPanel() {
+        StringBuilder sb = new StringBuilder();
+        NumberFormatter nf = new NumberFormatter(6);
+
+        if (tree != null) {
+            temporalRooting = new TemporalRooting(tree);
+            currentTree = this.tree;
+
+            if (bestFittingRoot && bestFittingRootTree != null) {
+                currentTree = bestFittingRootTree;
+                sb.append("Best-fitting root");
+            } else {
+                sb.append("User root");
+            }
+
+            if (temporalRooting.isContemporaneous()) {
+                if (tabbedPane.getSelectedIndex() == 2) {
+                    tabbedPane.setSelectedIndex(1);
+                }
+                tabbedPane.setEnabledAt(2, false);
+            } else {
+                tabbedPane.setEnabledAt(2, true);
+            }
+
+            RootedTree jtree = Tree.Utils.asJeblTree(currentTree);
+
+            List<Color> colours = new ArrayList<Color>();
+            for (Node tip : jtree.getExternalNodes()) {
+                Taxon taxon = jtree.getTaxon(tip);
+                colours.add((Color)taxon.getAttribute("!color"));
+            }
+
+            if (temporalRooting.isContemporaneous()) {
+                double[] dv = temporalRooting.getRootToTipDistances(currentTree);
+
+                List<Double> values = new ArrayList<Double>();
+                for (double d : dv) {
+                    values.add(d);
+                }
+
+                rootToTipChart.removeAllPlots();
+                NumericalDensityPlot dp = new NumericalDensityPlot(values, 20, null);
+                dp.setLineColor(new Color(9, 70, 15));
+
+                double yOffset = (Double) dp.getYData().getMax() / 2;
+                List<Double> dummyValues = new ArrayList<Double>();
+                for (int i = 0; i < values.size(); i++) {
+                    // add a random y offset to give some visual spread
+                    double y = MathUtils.nextGaussian() * ((Double) dp.getYData().getMax() * 0.05);
+                    dummyValues.add(yOffset + y);
+                }
+
+                rootToTipPlot = new ScatterPlot(values, dummyValues);
+                rootToTipPlot.setColours(colours);
+                rootToTipPlot.setMarkStyle(Plot.CIRCLE_MARK, 8, new BasicStroke(0.0F), new Color(44, 44, 44), new Color(129, 149, 149));
+                rootToTipPlot.setHilightedMarkStyle(new BasicStroke(0.5F), new Color(44, 44, 44), UIManager.getColor("List.selectionBackground"));
+                rootToTipPlot.addListener(new Plot.Adaptor() {
+                    @Override
+                    public void markClicked(int index, double x, double y, boolean isShiftDown) {
+                        rootToTipPlot.selectPoint(index, isShiftDown);
+                    }
+
+                    public void selectionChanged(final Set<Integer> selectedPoints) {
+                        plotSelectionChanged(selectedPoints);
+                    }
+                });
+
+                rootToTipChart.addPlot(rootToTipPlot);
+                rootToTipChart.addPlot(dp);
+                rootToTipPanel.setXAxisTitle("root-to-tip divergence");
+                rootToTipPanel.setYAxisTitle("proportion");
+
+                residualChart.removeAllPlots();
+
+                sb.append(", contemporaneous tips");
+                sb.append(", mean root-tip distance: " + nf.format(DiscreteStatistics.mean(dv)));
+                sb.append(", coefficient of variation: " + nf.format(DiscreteStatistics.stdev(dv) / DiscreteStatistics.mean(dv)));
+                sb.append(", stdev: " + nf.format(DiscreteStatistics.stdev(dv)));
+                sb.append(", variance: " + nf.format(DiscreteStatistics.variance(dv)));
+
+                showMRCACheck.setVisible(false);
+            } else {
+                Regression r = temporalRooting.getRootToTipRegression(currentTree);
+
+                double[] residuals = temporalRooting.getRootToTipResiduals(currentTree, r);
+                pointMap.clear();
+                for (int i = 0; i < currentTree.getExternalNodeCount(); i++) {
+                    NodeRef tip = currentTree.getExternalNode(i);
+                    Node node = jtree.getNode(Taxon.getTaxon(currentTree.getNodeTaxon(tip).getId()));
+                    node.setAttribute("residual", residuals[i]);
+
+                    pointMap.put(node, i);
+                }
+
+                rootToTipChart.removeAllPlots();
+
+                if (showMRCACheck.isSelected()) {
+                    double[] dv = temporalRooting.getParentRootToTipDistances(currentTree);
+
+                    List<Double> parentDistances = new ArrayList<Double>();
+                    for (int i = 0; i < dv.length; i++) {
+                        parentDistances.add(i, dv[i]);
+                    }
+
+                    List<Double> parentTimes = new ArrayList<Double>();
+                    for (int i = 0; i < parentDistances.size(); i++) {
+                        parentTimes.add(i, r.getX(parentDistances.get(i)));
+                    }
+                    mrcaPlot = new ParentPlot(r.getXData(), r.getYData(), parentTimes, parentDistances);
+                    mrcaPlot.setLineColor(new Color(105, 202, 105));
+                    mrcaPlot.setLineStroke(new BasicStroke(0.5F));
+
+                    rootToTipChart.addPlot(mrcaPlot);
+                }
+
+                if (true) {
+                    double[] datePrecisions = temporalRooting.getTipDatePrecisions(currentTree);
+
+                    Variate.D ed = new Variate.D();
+
+                    for (int i = 0; i < datePrecisions.length; i++) {
+                        ed.add(datePrecisions[i]);
+                    }
+
+                    errorBarPlot = new ErrorBarPlot(ErrorBarPlot.Orientation.HORIZONTAL, r.getXData(), r.getYData(), ed);
+                    errorBarPlot.setLineColor(new Color(44, 44, 44));
+                    errorBarPlot.setLineStroke(new BasicStroke(1.0F));
+
+                    rootToTipChart.addPlot(errorBarPlot);
+                }
+
+                rootToTipPlot = new ScatterPlot(r.getXData(), r.getYData());
+                rootToTipPlot.addListener(new Plot.Adaptor() {
+                    public void selectionChanged(final Set<Integer> selectedPoints) {
+                        plotSelectionChanged(selectedPoints);
+                    }
+                });
+
+                rootToTipPlot.setColours(colours);
+
+                rootToTipPlot.setMarkStyle(Plot.CIRCLE_MARK, 8, new BasicStroke(0.0F), new Color(44, 44, 44), new Color(129, 149, 149));
+                rootToTipPlot.setHilightedMarkStyle(new BasicStroke(0.5F), new Color(44, 44, 44), UIManager.getColor("List.selectionBackground"));
+
+                rootToTipChart.addPlot(rootToTipPlot);
+
+                rootToTipChart.addPlot(new RegressionPlot(r));
+
+                rootToTipChart.getXAxis().addRange(r.getXIntercept(), (Double) r.getXData().getMax());
+                rootToTipPanel.setXAxisTitle("time");
+                rootToTipPanel.setYAxisTitle("root-to-tip divergence");
+
+                residualChart.removeAllPlots();
+                Variate.D values = (Variate.D) r.getYResidualData();
+                NumericalDensityPlot dp = new NumericalDensityPlot(values, 20);
+                dp.setLineColor(new Color(103, 128, 144));
+
+                double yOffset = (Double) dp.getYData().getMax() / 2;
+                Double[] dummyValues = new Double[values.getCount()];
+                for (int i = 0; i < dummyValues.length; i++) {
+                    // add a random y offset to give some visual spread
+                    double y = MathUtils.nextGaussian() * ((Double) dp.getYData().getMax() * 0.05);
+                    dummyValues[i] = yOffset + y;
+                }
+                Variate.D yOffsetValues = new Variate.D(dummyValues);
+                residualPlot = new ScatterPlot(values, yOffsetValues);
+                residualPlot.addListener(new Plot.Adaptor() {
+                    @Override
+                    public void markClicked(int index, double x, double y, boolean isShiftDown) {
+                        rootToTipPlot.selectPoint(index, isShiftDown);
+                    }
+
+                    @Override
+                    public void selectionChanged(final Set<Integer> selectedPoints) {
+                        plotSelectionChanged(selectedPoints);
+                    }
+                });
+                residualPlot.setColours(colours);
+                residualPlot.setMarkStyle(Plot.CIRCLE_MARK, 8, new BasicStroke(0.0F), new Color(44, 44, 44), new Color(129, 149, 149));
+                residualPlot.setHilightedMarkStyle(new BasicStroke(0.5F), new Color(44, 44, 44), UIManager.getColor("List.selectionBackground"));
+
+                residualChart.addPlot(residualPlot);
+                residualChart.addPlot(dp);
+                residualPanel.setXAxisTitle("residual");
+                residualPanel.setYAxisTitle("proportion");
+
+//                residualChart.removeAllPlots();
+//                residualPlot = new ScatterPlot(r.getXData(), r.getYResidualData());
+//                residualPlot.addListener(new Plot.Adaptor() {
+//                    public void selectionChanged(final Set<Integer> selectedPoints) {
+//                        plotSelectionChanged(selectedPoints);
+//                    }
+//                });
+//                residualChart.addPlot(residualPlot);
+//                residualPanel.setXAxisTitle("residual");
+//                residualPanel.setYAxisTitle("proportion");
+
+                if (SHOW_NODE_DENSITY) {
+                    Regression r2 = temporalRooting.getNodeDensityRegression(currentTree);
+                    nodeDensityChart.removeAllPlots();
+                    nodeDensityPlot = new ScatterPlot(r2.getXData(), r2.getYData());
+                    nodeDensityPlot.addListener(new Plot.Adaptor() {
+                        public void selectionChanged(final Set<Integer> selectedPoints) {
+                            plotSelectionChanged(selectedPoints);
+                        }
+                    });
+                    nodeDensityPlot.setColours(colours);
+                    nodeDensityPlot.setMarkStyle(Plot.CIRCLE_MARK, 8, new BasicStroke(0.0F), new Color(44, 44, 44), new Color(129, 149, 149));
+                    nodeDensityPlot.setHilightedMarkStyle(new BasicStroke(0.5F), new Color(44, 44, 44), UIManager.getColor("List.selectionBackground"));
+
+                    nodeDensityChart.addPlot(nodeDensityPlot);
+
+                    nodeDensityChart.addPlot(new RegressionPlot(r2));
+
+                    nodeDensityChart.getXAxis().addRange(r2.getXIntercept(), (Double) r2.getXData().getMax());
+                    nodeDensityPanel.setXAxisTitle("time");
+                    nodeDensityPanel.setYAxisTitle("node density");
+                }
+
+                sb.append(", dated tips");
+                sb.append(", date range: " + nf.format(temporalRooting.getDateRange()));
+                sb.append(", slope (rate): " + nf.format(r.getGradient()));
+                sb.append(", x-intercept (TMRCA): " + nf.format(r.getXIntercept()));
+                sb.append(", corr. coeff: " + nf.format(r.getCorrelationCoefficient()));
+                sb.append(", R^2: " + nf.format(r.getRSquared()));
+
+                showMRCACheck.setVisible(true);
+            }
+
+            treePanel.setTree(jtree);
+            treePanel.setColourBy("residual");
+
+        } else {
+            treePanel.setTree(null);
+            rootToTipChart.removeAllPlots();
+            sb.append("No trees loaded");
+        }
+
+        textArea.setText(sb.toString());
+
+        statisticsModel.fireTableStructureChanged();
+        repaint();
+    }
+
+    private javax.swing.Timer timer = null;
+
+
+    private void findRoot() {
+
+//        bestFittingRootTree = temporalRooting.findRoot(tree);
+        final FindRootTask analyseTask = new FindRootTask();
+
+        final ProgressMonitor progressMonitor = new ProgressMonitor(frame,
+                "Finding best-fit root",
+                "", 0, tree.getNodeCount());
+        progressMonitor.setMillisToPopup(0);
+        progressMonitor.setMillisToDecideToPopup(0);
+
+        timer = new javax.swing.Timer(10, new ActionListener() {
+            public void actionPerformed(ActionEvent evt) {
+                progressMonitor.setProgress(analyseTask.getCurrent());
+                if (progressMonitor.isCanceled() || analyseTask.done()) {
+                    progressMonitor.close();
+                    analyseTask.stop();
+                    timer.stop();
+                }
+            }
+        });
+
+        analyseTask.go();
+        timer.start();
+
+    }
+
+    class FindRootTask extends LongTask {
+
+        public FindRootTask() {
+        }
+
+        public int getCurrent() {
+            return temporalRooting.getCurrentRootBranch();
+        }
+
+        public int getLengthOfTask() {
+            return temporalRooting.getTotalRootBranches();
+        }
+
+        public String getDescription() {
+            return "Calculating demographic reconstruction...";
+        }
+
+        public String getMessage() {
+            return null;
+        }
+
+        public Object doWork() {
+            bestFittingRootTree = temporalRooting.findRoot(tree, rootingFunction);
+            EventQueue.invokeLater(
+                    new Runnable() {
+                        public void run() {
+                            setupPanel();
+                        }
+                    });
+
+            return null;
+        }
+
+    }
+
+
+    public TemporalRooting getTemporalRooting() {
+        return temporalRooting;
+    }
+
+    class StatisticsModel extends AbstractTableModel {
+
+        String[] rowNamesDatedTips = {"Date range", "Slope (rate)", "X-Intercept (TMRCA)", "Correlation Coefficient", "R squared", "Residual Mean Squared"};
+        String[] rowNamesContemporaneousTips = {"Mean root-tip", "Coefficient of variation", "Stdev", "Variance"};
+
+        private DecimalFormat formatter = new DecimalFormat("0.####E0");
+        private DecimalFormat formatter2 = new DecimalFormat("####0.####");
+
+        public StatisticsModel() {
+        }
+
+        public int getColumnCount() {
+            return 2;
+        }
+
+        public int getRowCount() {
+            if (temporalRooting == null) {
+                return 0;
+            } else if (temporalRooting.isContemporaneous()) {
+                return rowNamesContemporaneousTips.length;
+            } else {
+                return rowNamesDatedTips.length;
+            }
+        }
+
+        public Object getValueAt(int row, int col) {
+
+            double value = 0;
+            if (temporalRooting.isContemporaneous()) {
+                if (col == 0) {
+                    return rowNamesContemporaneousTips[row];
+                }
+                double values[] = temporalRooting.getRootToTipDistances(currentTree);
+
+                switch (row) {
+                    case 0:
+                        value = DiscreteStatistics.mean(values);
+                        break;
+                    case 1:
+                        value = DiscreteStatistics.stdev(values) / DiscreteStatistics.mean(values);
+                        break;
+                    case 2:
+                        value = DiscreteStatistics.stdev(values);
+                        break;
+                    case 3:
+                        value = DiscreteStatistics.variance(values);
+                        break;
+                }
+            } else {
+                Regression r = temporalRooting.getRootToTipRegression(currentTree);
+                if (col == 0) {
+                    return rowNamesDatedTips[row];
+                }
+                switch (row) {
+                    case 0:
+                        value = temporalRooting.getDateRange();
+                        break;
+                    case 1:
+                        value = r.getGradient();
+                        break;
+                    case 2:
+                        value = r.getXIntercept();
+                        break;
+                    case 3:
+                        value = r.getCorrelationCoefficient();
+                        break;
+                    case 4:
+                        value = r.getRSquared();
+                        break;
+                    case 5:
+                        value = r.getResidualMeanSquared();
+                        break;
+                }
+            }
+
+            if (value > 0 && (Math.abs(value) < 0.1 || Math.abs(value) >= 100000.0)) {
+                return formatter.format(value);
+            } else return formatter2.format(value);
+        }
+
+        public String getColumnName(int column) {
+            if (column > 0) {
+                return "";
+            }
+            if (temporalRooting == null) {
+                return "No tree loaded";
+            } else if (temporalRooting.isContemporaneous()) {
+                return "Contemporaneous Tips";
+            } else {
+                return "Dated Tips";
+            }
+        }
+
+        public Class getColumnClass(int c) {
+            return getValueAt(0, c).getClass();
+        }
+
+        public String toString() {
+            StringBuffer buffer = new StringBuffer();
+
+            buffer.append(getColumnName(0));
+            for (int j = 1; j < getColumnCount(); j++) {
+                buffer.append("\t");
+                buffer.append(getColumnName(j));
+            }
+            buffer.append("\n");
+
+            for (int i = 0; i < getRowCount(); i++) {
+                buffer.append(getValueAt(i, 0));
+                for (int j = 1; j < getColumnCount(); j++) {
+                    buffer.append("\t");
+                    buffer.append(getValueAt(i, j));
+                }
+                buffer.append("\n");
+            }
+
+            return buffer.toString();
+        }
+    }
+
+    private JCheckBox rootingCheck;
+
+}
diff --git a/src/dr/app/pathogen/TemporalRooting.java b/src/dr/app/tempest/TemporalRooting.java
similarity index 98%
copy from src/dr/app/pathogen/TemporalRooting.java
copy to src/dr/app/tempest/TemporalRooting.java
index 39e641e..c0292ff 100644
--- a/src/dr/app/pathogen/TemporalRooting.java
+++ b/src/dr/app/tempest/TemporalRooting.java
@@ -23,7 +23,7 @@
  * Boston, MA  02110-1301  USA
  */
 
-package dr.app.pathogen;
+package dr.app.tempest;
 
 import dr.evolution.tree.*;
 import dr.evolution.util.*;
@@ -436,7 +436,9 @@ public class TemporalRooting {
         double sum_y = 0.0;
         double sum_ty = 0.0;
         double sum_tc = 0.0;
-
+        double Nd = N;
+        double nd = n;  // need to set these naughty guys to doubles
+        
         for (int i = 0; i < N; i++) {
             sum_tt += t[i] * t[i];
             sum_t += t[i];
@@ -444,14 +446,13 @@ public class TemporalRooting {
             sum_ty += t[i] * y[i];
             sum_tc += t[i] * c[i];
         }
-        double y_bar = sum_y / N;
-        double t_bar = sum_t / N;
+        double y_bar = sum_y / Nd;
+        double t_bar = sum_t / Nd;
 
-        double C = sum_tt - (sum_t * sum_t / N);
+        double C = sum_tt - (sum_t * sum_t / Nd);
         double sumAB = 0.0;
         double sumAA = 0.0;
-        double Nd = N;
-        double nd = n;  // need to set these naughty ones to doubles
+      
         for (int i = 0; i < N; i++) {
             double Ai = 2*c[i] - 
             		    ((2*nd-Nd)/Nd) +
@@ -462,7 +463,6 @@ public class TemporalRooting {
             sumAB += Ai * Bi;
             sumAA += Ai * Ai;
         }
-
         double x = -sumAB / (sumLength * sumAA);
         x = Math.min(Math.max(x, 0.0), 1.0);
 
@@ -567,7 +567,7 @@ public class TemporalRooting {
 
     private void setHeightsFromDates(FlexibleTree tree) {
 
-        dr.evolution.util.Date mostRecent = null;
+        Date mostRecent = null;
         for (int i = 0; i < taxa.getTaxonCount(); i++) {
             Date date = taxa.getTaxon(i).getDate();
             if ((date != null) && (mostRecent == null || date.after(mostRecent))) {
@@ -593,3 +593,4 @@ public class TemporalRooting {
     }
 
 }
+
diff --git a/src/dr/app/tempest/TemporalStress.java b/src/dr/app/tempest/TemporalStress.java
new file mode 100644
index 0000000..b29729e
--- /dev/null
+++ b/src/dr/app/tempest/TemporalStress.java
@@ -0,0 +1,149 @@
+/*
+ * TemporalStress.java
+ *
+ * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
+ *
+ * This file is part of BEAST.
+ * See the NOTICE file distributed with this work for additional
+ * information regarding copyright ownership and licensing.
+ *
+ * BEAST is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+ *
+ *  BEAST is distributed in the hope that it will be useful,
+ *  but WITHOUT ANY WARRANTY; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ *  GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with BEAST; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA  02110-1301  USA
+ */
+
+package dr.app.tempest;
+
+import java.util.*;
+
+import dr.stats.DiscreteStatistics;
+import dr.math.UnivariateFunction;
+import dr.math.UnivariateMinimum;
+import dr.evolution.tree.*;
+import dr.evolution.util.Taxon;
+
+/**
+ * @author Andrew Rambaut
+ * @version $Id$
+ */
+public class TemporalStress {
+
+    public static Set<Taxon> annotateStress(MutableTree tree, NodeRef node) {
+        Set<Taxon> taxa = new HashSet<Taxon>();
+
+        if (!tree.isExternal(node)) {
+            for (int i = 0; i < tree.getChildCount(node); i++) {
+                NodeRef child = tree.getChild(node, i);
+                taxa.addAll(annotateStress(tree, child));
+            }
+
+            if (taxa.size() > 2) {
+                Tree subtree = null;
+
+                double stress = findGlobalRoot(subtree);
+                tree.setNodeAttribute(node, "stress", stress);
+
+            }
+        } else {
+            taxa.add(tree.getNodeTaxon(node));
+        }
+
+        return taxa;
+    }
+
+
+    private static double findGlobalRoot(Tree source) {
+
+        FlexibleTree bestTree = new FlexibleTree(source);
+        double minF = findLocalRoot(bestTree);
+
+        for (int i = 0; i < source.getNodeCount(); i++) {
+            FlexibleTree tmpTree = new FlexibleTree(source);
+            NodeRef node = tmpTree.getNode(i);
+            if (!tmpTree.isRoot(node)) {
+                double length = tmpTree.getBranchLength(node);
+                tmpTree.changeRoot(node, length * 0.5, length * 0.5);
+
+                double f = findLocalRoot(tmpTree);
+                if (f < minF) {
+                    minF = f;
+                    bestTree = tmpTree;
+                }
+            }
+        }
+        return minF;
+    }
+
+    private static double findLocalRoot(final FlexibleTree tree) {
+
+        NodeRef node1 = tree.getChild(tree.getRoot(), 0);
+        NodeRef node2 = tree.getChild(tree.getRoot(), 1);
+
+        final double length1 = tree.getBranchLength(node1);
+        final double length2 = tree.getBranchLength(node2);
+
+        final double sumLength = length1 + length2;
+
+        final Set<NodeRef> tipSet1 = Tree.Utils.getExternalNodes(tree, node1);
+        final Set<NodeRef> tipSet2 = Tree.Utils.getExternalNodes(tree, node2);
+
+        final double[] y = new double[tree.getExternalNodeCount()];
+
+        UnivariateFunction f = new UnivariateFunction() {
+            public double evaluate(double argument) {
+                double l1 = argument * sumLength;
+
+                for (NodeRef tip : tipSet1) {
+                    y[tip.getNumber()] = getRootToTipDistance(tree, tip) - length1 + l1;
+                }
+
+                double l2 = (1.0 - argument) * sumLength;
+
+                for (NodeRef tip : tipSet2) {
+                    y[tip.getNumber()] = getRootToTipDistance(tree, tip) - length2 + l2;
+                }
+
+                return DiscreteStatistics.variance(y);
+            }
+
+            public double getLowerBound() { return 0; }
+            public double getUpperBound() { return 1.0; }
+        };
+
+        UnivariateMinimum minimum = new UnivariateMinimum();
+
+        double x = minimum.findMinimum(f);
+
+        double fminx = minimum.fminx;
+
+        double l1 = x * sumLength;
+        double l2 = (1.0 - x) * sumLength;
+
+        tree.setBranchLength(node1, l1);
+        tree.setBranchLength(node2, l2);
+
+        return fminx;
+    }
+
+    private static double getRootToTipDistance(Tree tree, NodeRef node) {
+        double distance = 0;
+        while (node != null) {
+            distance += tree.getBranchLength(node);
+            node = tree.getParent(node);
+        }
+        return distance;
+    }
+
+
+}
diff --git a/src/dr/app/oldbeauti/TreeUtils.java b/src/dr/app/tempest/TreeUtils.java
similarity index 95%
rename from src/dr/app/oldbeauti/TreeUtils.java
rename to src/dr/app/tempest/TreeUtils.java
index 6feb23c..4b66bbf 100644
--- a/src/dr/app/oldbeauti/TreeUtils.java
+++ b/src/dr/app/tempest/TreeUtils.java
@@ -23,7 +23,7 @@
  * Boston, MA  02110-1301  USA
  */
 
-package dr.app.oldbeauti;
+package dr.app.tempest;
 
 import dr.evolution.tree.MutableTree;
 import dr.evolution.tree.NodeRef;
@@ -142,13 +142,13 @@ public class TreeUtils {
      */
     public static void setHeightsFromDates(MutableTree tree) {
 
-        dr.evolution.util.Date mostRecent = null;
+        Date mostRecent = null;
 
         for (int i = 0; i < tree.getExternalNodeCount(); i++) {
 
             Taxon taxon = tree.getNodeTaxon(tree.getExternalNode(i));
 
-            dr.evolution.util.Date date = (dr.evolution.util.Date) taxon.getAttribute("date");
+            Date date = (Date) taxon.getAttribute("date");
 
             if (date != null) {
 
@@ -163,7 +163,7 @@ public class TreeUtils {
         for (int i = 0; i < tree.getExternalNodeCount(); i++) {
             NodeRef node = tree.getExternalNode(i);
             Taxon taxon = tree.getNodeTaxon(node);
-            dr.evolution.util.Date date = (dr.evolution.util.Date) taxon.getAttribute("date");
+            Date date = (Date) taxon.getAttribute("date");
 
             if (date != null) {
                 double height = timeScale.convertTime(date.getTimeValue(), date);
diff --git a/src/dr/app/oldbeauti/images/beauti.png b/src/dr/app/tempest/images/beauti.png
similarity index 100%
rename from src/dr/app/oldbeauti/images/beauti.png
rename to src/dr/app/tempest/images/beauti.png
diff --git a/src/dr/app/tempest/images/coloursTool.png b/src/dr/app/tempest/images/coloursTool.png
new file mode 100755
index 0000000..a71ebe8
Binary files /dev/null and b/src/dr/app/tempest/images/coloursTool.png differ
diff --git a/src/dr/app/oldbeauti/images/exclude.png b/src/dr/app/tempest/images/exclude.png
similarity index 100%
rename from src/dr/app/oldbeauti/images/exclude.png
rename to src/dr/app/tempest/images/exclude.png
diff --git a/src/dr/app/oldbeauti/images/gear.png b/src/dr/app/tempest/images/gear.png
similarity index 100%
rename from src/dr/app/oldbeauti/images/gear.png
rename to src/dr/app/tempest/images/gear.png
diff --git a/src/dr/app/oldbeauti/images/include.png b/src/dr/app/tempest/images/include.png
similarity index 100%
rename from src/dr/app/oldbeauti/images/include.png
rename to src/dr/app/tempest/images/include.png
diff --git a/src/dr/app/tempest/images/tempest.png b/src/dr/app/tempest/images/tempest.png
new file mode 100644
index 0000000..90d8e42
Binary files /dev/null and b/src/dr/app/tempest/images/tempest.png differ
diff --git a/src/dr/app/tools/TreeAnnotator.java b/src/dr/app/tools/TreeAnnotator.java
index dde8a4b..06534d4 100644
--- a/src/dr/app/tools/TreeAnnotator.java
+++ b/src/dr/app/tools/TreeAnnotator.java
@@ -26,6 +26,7 @@
 package dr.app.tools;
 
 import dr.app.beast.BeastVersion;
+import dr.app.phylogeography.tools.DiscreteTreeToKML;
 import dr.app.util.Arguments;
 import dr.evolution.io.Importer;
 import dr.evolution.io.NewickImporter;
@@ -125,7 +126,7 @@ public class TreeAnnotator {
                          final int burninStates,
                          HeightsSummary heightsOption,
                          double posteriorLimit,
-                         double hpd2D,
+                         double[] hpd2D,
                          Target targetOption,
                          String targetTreeFileName,
                          String inputFileName,
@@ -846,8 +847,20 @@ public class TreeAnnotator {
                                         if (variationInSecond && !variationInFirst)
                                             annotateHPDAttribute(tree, node, name + "2" + "_95%_HPD", 0.95, valuesArray[1]);
 
-                                        if (variationInFirst && variationInSecond)
-                                            annotate2DHPDAttribute(tree, node, name, "_" + (int) (100 * hpd2D) + "%HPD", hpd2D, valuesArray);
+                                        if (variationInFirst && variationInSecond){
+
+                                            for (int l = 0; l < hpd2D.length; l++) {
+
+                                                if (hpd2D[l] > 1) {
+                                                    System.err.println("no HPD for proportion > 1 (" + hpd2D[l] + ")");
+                                                } else if (hpd2D[l] < 0){
+                                                    System.err.println("no HPD for proportion < 0 (" + hpd2D[l] + ")");
+                                                }  else {
+                                                    annotate2DHPDAttribute(tree, node, name, "_" + (int) (100 * hpd2D[l]) + "%HPD", hpd2D[l], valuesArray);
+                                                }
+
+                                           }
+                                        }
                                     }
                                 }
                             }
@@ -1206,7 +1219,8 @@ public class TreeAnnotator {
     int totalTrees = 0;
     int totalTreesUsed = 0;
     double posteriorLimit = 0.0;
-    double hpd2D = 0.80;
+//PL:    double hpd2D = 0.80;
+    double[] hpd2D = {0.80};
 
     private final List<TreeAnnotationPlugin> plugins = new ArrayList<TreeAnnotationPlugin>();
 
@@ -1221,12 +1235,12 @@ public class TreeAnnotator {
             processBivariateAttributes = true;
             System.err.println("JRI loaded. Will process bivariate attributes");
         } catch (UnsatisfiedLinkError e) {
-            System.err.print("JRI not available. ");
+//            System.err.print("JRI not available. ");
             if (!USE_R) {
                 processBivariateAttributes = true;
-                System.err.println("Using Java bivariate attributes");
+//                System.err.println("Using Java bivariate attributes");
             } else {
-                System.err.println("Will not process bivariate attributes");
+//                System.err.println("Will not process bivariate attributes");
             }
         }
     }
@@ -1269,6 +1283,28 @@ public class TreeAnnotator {
         progressStream.println();
     }
 
+    public static double[] parseVariableLengthDoubleArray(String inString) throws Arguments.ArgumentException {
+
+        List<Double> returnList = new ArrayList<Double>();
+        StringTokenizer st = new StringTokenizer(inString,",");
+        while(st.hasMoreTokens()) {
+            try {
+                returnList.add(Double.parseDouble(st.nextToken()));
+            } catch (NumberFormatException e) {
+                throw new Arguments.ArgumentException();
+            }
+
+        }
+
+        if (returnList.size()>0) {
+            double[] doubleArray = new double[returnList.size()];
+            for(int i=0; i<doubleArray.length; i++)
+                doubleArray[i] = returnList.get(i);
+            return doubleArray;
+        }
+        return null;
+    }
+
     //Main method
     public static void main(String[] args) throws IOException {
 
@@ -1322,7 +1358,7 @@ public class TreeAnnotator {
             int burninStates = dialog.getBurninStates();
             int burninTrees = dialog.getBurninTrees();
             double posteriorLimit = dialog.getPosteriorLimit();
-            double hpd2D = 0.80;
+            double[] hpd2D = {0.80};
             Target targetOption = dialog.getTargetOption();
             HeightsSummary heightsOption = dialog.getHeightsOption();
 
@@ -1383,7 +1419,7 @@ public class TreeAnnotator {
                         new Arguments.StringOption("target", "target_file_name", "specifies a user target tree to be annotated"),
                         new Arguments.Option("help", "option to print this message"),
                         new Arguments.Option("forceDiscrete", "forces integer traits to be treated as discrete traits."),
-                        new Arguments.RealOption("hpd2D", "the HPD interval to be used for the bivariate traits")
+                        new Arguments.StringOption("hpd2D", "the HPD interval to be used for the bivariate traits", "specifies a (vector of comma seperated) HPD proportion(s)")
                 });
 
         try {
@@ -1432,9 +1468,13 @@ public class TreeAnnotator {
             posteriorLimit = arguments.getRealOption("limit");
         }
 
-        double hpd2D = 0.80;
+        double[] hpd2D = {80};
         if (arguments.hasOption("hpd2D")) {
-            hpd2D = arguments.getRealOption("hpd2D");
+            try {
+                hpd2D = parseVariableLengthDoubleArray(arguments.getStringOption("hpd2D"));
+            } catch (Arguments.ArgumentException e) {
+                System.err.println("Error reading " + arguments.getStringOption("hpd2D"));
+            }
         }
 
         Target target = Target.MAX_CLADE_CREDIBILITY;
diff --git a/src/dr/app/tracer/application/TracerMacFileMenuFactory.java b/src/dr/app/tracer/application/TracerMacFileMenuFactory.java
index 45f1cc8..2c4e048 100755
--- a/src/dr/app/tracer/application/TracerMacFileMenuFactory.java
+++ b/src/dr/app/tracer/application/TracerMacFileMenuFactory.java
@@ -109,19 +109,40 @@ public class TracerMacFileMenuFactory implements MenuFactory {
 
         menu.addSeparator();
 
-        item = new JMenuItem(frame.getCloseWindowAction());
-        item.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_W, MenuBarFactory.MENU_MASK));
-        menu.add(item);
+        if (frame != null) {
+            item = new JMenuItem(frame.getCloseWindowAction());
+            item.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_W, MenuBarFactory.MENU_MASK));
+            menu.add(item);
 
-        menu.addSeparator();
+            menu.addSeparator();
 
-        item = new JMenuItem(frame.getPrintAction());
-        item.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_P, MenuBarFactory.MENU_MASK));
-        menu.add(item);
+            item = new JMenuItem(frame.getPrintAction());
+            item.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_P, MenuBarFactory.MENU_MASK));
+            menu.add(item);
 
-        item = new JMenuItem(application.getPageSetupAction());
-        item.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_P, MenuBarFactory.MENU_MASK + ActionEvent.SHIFT_MASK));
-        menu.add(item);
+            item = new JMenuItem(application.getPageSetupAction());
+            item.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_P, MenuBarFactory.MENU_MASK + ActionEvent.SHIFT_MASK));
+            menu.add(item);
+
+        } else {
+            // No frame available so create a disabled menu for the default menu bar
+            item = new JMenuItem("Close");
+            item.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_W, MenuBarFactory.MENU_MASK));
+            item.setEnabled(false);
+            menu.add(item);
+
+            menu.addSeparator();
+
+            item = new JMenuItem("Print...");
+            item.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_P, MenuBarFactory.MENU_MASK));
+            item.setEnabled(false);
+            menu.add(item);
+
+            item = new JMenuItem("Page Setup...");
+            item.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_P, MenuBarFactory.MENU_MASK + ActionEvent.SHIFT_MASK));
+            item.setEnabled(false);
+            menu.add(item);
+        }
 
     }
 
diff --git a/src/dr/app/tracer/application/TracerMenuBarFactory.java b/src/dr/app/tracer/application/TracerMenuBarFactory.java
index f9e7f84..79de150 100644
--- a/src/dr/app/tracer/application/TracerMenuBarFactory.java
+++ b/src/dr/app/tracer/application/TracerMenuBarFactory.java
@@ -35,7 +35,7 @@ public class TracerMenuBarFactory extends DefaultMenuBarFactory {
     public TracerMenuBarFactory() {
         if (OSType.isMac()) {
             registerMenuFactory(new TracerMacFileMenuFactory());
-            registerMenuFactory(new DefaultEditMenuFactory());
+            registerMenuFactory(new MacEditMenuFactory());
 	        registerMenuFactory(new AnalysisMenuFactory());
             registerMenuFactory(new MacWindowMenuFactory());
             registerMenuFactory(new MacHelpMenuFactory());
diff --git a/src/dr/evolution/alignment/Alignment.java b/src/dr/evolution/alignment/Alignment.java
index 8583efc..1778d4f 100644
--- a/src/dr/evolution/alignment/Alignment.java
+++ b/src/dr/evolution/alignment/Alignment.java
@@ -133,7 +133,13 @@ public interface Alignment extends SequenceList, SiteList
 		public double[] getStateFrequencies() {
 			return PatternList.Utils.empiricalStateFrequencies(this);
 		}
-	    // **************************************************************
+
+		@Override
+		public boolean areUnique() {
+			return false;
+		}
+
+		// **************************************************************
 	    // Identifiable IMPLEMENTATION
 	    // **************************************************************
 
diff --git a/src/dr/evolution/alignment/PairedSitePatterns.java b/src/dr/evolution/alignment/PairedSitePatterns.java
index 571d79d..c91232f 100644
--- a/src/dr/evolution/alignment/PairedSitePatterns.java
+++ b/src/dr/evolution/alignment/PairedSitePatterns.java
@@ -187,6 +187,11 @@ public class PairedSitePatterns implements SiteList {
 		return Utils.empiricalStateFrequencies(this);
 	}
 
+	@Override
+	public boolean areUnique() {
+		return false;
+	}
+
 	// **************************************************************
 	// TaxonList IMPLEMENTATION
 	// **************************************************************
diff --git a/src/dr/evolution/alignment/PatternList.java b/src/dr/evolution/alignment/PatternList.java
index 32686e2..c5fdad4 100644
--- a/src/dr/evolution/alignment/PatternList.java
+++ b/src/dr/evolution/alignment/PatternList.java
@@ -93,6 +93,12 @@ public interface PatternList extends TaxonList, Identifiable {
     double[] getStateFrequencies();
 
     /**
+     * Are the patterns only the unique ones (i.e., compressed)?
+     * @return are unique?
+     */
+    boolean areUnique();
+
+    /**
      * Helper routines for pattern lists.
      */
     public static class Utils {
diff --git a/src/dr/evolution/alignment/Patterns.java b/src/dr/evolution/alignment/Patterns.java
index 68d1d03..46f557a 100644
--- a/src/dr/evolution/alignment/Patterns.java
+++ b/src/dr/evolution/alignment/Patterns.java
@@ -468,6 +468,11 @@ public class Patterns implements PatternList {
         return PatternList.Utils.empiricalStateFrequencies(this);
     }
 
+    @Override
+    public boolean areUnique() {
+        return true;
+    }
+
     // **************************************************************
     // TaxonList IMPLEMENTATION
     // **************************************************************
diff --git a/src/dr/evolution/alignment/ResamplePatterns.java b/src/dr/evolution/alignment/ResamplePatterns.java
index af5cd8d..30ec2c8 100644
--- a/src/dr/evolution/alignment/ResamplePatterns.java
+++ b/src/dr/evolution/alignment/ResamplePatterns.java
@@ -140,7 +140,12 @@ public abstract class ResamplePatterns implements PatternList, dr.util.XHTMLable
 		return PatternList.Utils.empiricalStateFrequencies(this);
 	}
 
-   // **************************************************************
+	@Override
+	public boolean areUnique() {
+		return patterns.areUnique();
+	}
+
+	// **************************************************************
     // TaxonList IMPLEMENTATION
     // **************************************************************
 
diff --git a/src/dr/evolution/alignment/SimpleAlignment.java b/src/dr/evolution/alignment/SimpleAlignment.java
index ddfba9e..0c95124 100644
--- a/src/dr/evolution/alignment/SimpleAlignment.java
+++ b/src/dr/evolution/alignment/SimpleAlignment.java
@@ -455,6 +455,11 @@ public class SimpleAlignment extends Sequences implements Alignment, dr.util.XHT
         return PatternList.Utils.empiricalStateFrequencies(this);
     }
 
+    @Override
+    public boolean areUnique() {
+        return false;
+    }
+
     public void setReportCountStatistics(boolean report) {
         countStatistics = report;
     }
diff --git a/src/dr/evolution/alignment/SimpleSiteList.java b/src/dr/evolution/alignment/SimpleSiteList.java
index ee6ac63..3f9c381 100644
--- a/src/dr/evolution/alignment/SimpleSiteList.java
+++ b/src/dr/evolution/alignment/SimpleSiteList.java
@@ -49,7 +49,7 @@ public class SimpleSiteList implements SiteList {
     private int siteCount = 0;
     private int[][] sitePatterns = new int[0][];
 
-    public SimpleSiteList(DataType dataType) {
+	public SimpleSiteList(DataType dataType) {
         this.taxonList = null;
         this.dataType = dataType;
     }
@@ -184,6 +184,11 @@ public class SimpleSiteList implements SiteList {
 		return Utils.empiricalStateFrequencies(this);
 	}
 
+	@Override
+	public boolean areUnique() {
+		return false;
+	}
+
 	// **************************************************************
 	// TaxonList IMPLEMENTATION
 	// **************************************************************
diff --git a/src/dr/evolution/alignment/SitePatterns.java b/src/dr/evolution/alignment/SitePatterns.java
index f2b4e93..dcdbcfa 100644
--- a/src/dr/evolution/alignment/SitePatterns.java
+++ b/src/dr/evolution/alignment/SitePatterns.java
@@ -571,6 +571,11 @@ public class SitePatterns implements SiteList, dr.util.XHTMLable {
         return PatternList.Utils.empiricalStateFrequencies(this);
     }
 
+    @Override
+    public boolean areUnique() {
+        return unique;
+    }
+
     // **************************************************************
     // TaxonList IMPLEMENTATION
     // **************************************************************
diff --git a/src/dr/evolution/coalescent/CataclysmicDemographic.java b/src/dr/evolution/coalescent/CataclysmicDemographic.java
index b923bb0..8615e63 100644
--- a/src/dr/evolution/coalescent/CataclysmicDemographic.java
+++ b/src/dr/evolution/coalescent/CataclysmicDemographic.java
@@ -52,13 +52,13 @@ public class CataclysmicDemographic extends ExponentialGrowth {
 	/**
 	 * returns the positive-valued decline rate
 	 */
-	public final double getDeclineRate() { return d; }
+	public final double getDeclineRate() { return -d; }
 	
 	/**
 	 * sets the decline rate.
 	 */
 	public void setDeclineRate(double d) { 
-		if (d <= 0) throw new IllegalArgumentException();
+//		if (d <= 0) throw new IllegalArgumentException();
 		this.d = d; 
 	}
 	
diff --git a/src/dr/evolution/coalescent/MultiEpochExponential.java b/src/dr/evolution/coalescent/MultiEpochExponential.java
new file mode 100644
index 0000000..92c3fe4
--- /dev/null
+++ b/src/dr/evolution/coalescent/MultiEpochExponential.java
@@ -0,0 +1,195 @@
+/*
+ * MultiEpochExponential.java
+ *
+ * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
+ *
+ * This file is part of BEAST.
+ * See the NOTICE file distributed with this work for additional
+ * information regarding copyright ownership and licensing.
+ *
+ * BEAST is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+ *
+ *  BEAST is distributed in the hope that it will be useful,
+ *  but WITHOUT ANY WARRANTY; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ *  GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with BEAST; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA  02110-1301  USA
+ */
+
+package dr.evolution.coalescent;
+
+/**
+ * This class models a multi-phase exponential growth
+ *
+ * @author Marc A. Suchard
+ */
+
+public class MultiEpochExponential extends ConstantPopulation {
+
+    /**
+     * Construct demographic model with default settings
+     */
+    public MultiEpochExponential(Type units, int numEpoch) {
+
+        super(units);
+        transitionTime = new double[numEpoch - 1];
+        rate = new double[numEpoch];
+    }
+
+    public void setTransitionTime(int index, double transitionTime) {
+        this.transitionTime[index] = transitionTime;
+    }
+
+    public void setGrowthRate(int index, double rate) {
+        this.rate[index] = rate;
+    }
+
+    public double getDemographic(double t) {
+
+        double logDemographic = 0.0;
+        double lastTransitionTime = 0.0;
+        int currentEpoch = 0;
+
+        // Account for all epochs before t
+        while (currentEpoch < transitionTime.length && t > transitionTime[currentEpoch]) {
+            logDemographic += -rate[currentEpoch] * (transitionTime[currentEpoch] - lastTransitionTime);
+            lastTransitionTime = transitionTime[currentEpoch];
+            ++currentEpoch;
+        }
+
+        // Account for epoch that holds t
+        logDemographic += -rate[currentEpoch] * (t - lastTransitionTime);
+
+        return getN0() * Math.exp(logDemographic);
+    }
+
+    private double integrateConstant(double start, double finish, double logDemographic) {
+        return (finish - start) / Math.exp(logDemographic);
+    }
+
+    private double integrateExponential(double start, double finish, double logDemographic, double rate) {
+        return (Math.exp(finish * rate) - Math.exp(start * rate)) / Math.exp(logDemographic) / rate;
+    }
+
+    public double getAnalyticIntegral(double start, double finish) {
+
+        if (start == finish) {
+            return 0.0;
+        }
+
+        double integral = 0.0;
+        double logDemographic = 0.0;
+        double lastTransitionTime = 0.0;
+        int currentEpoch = 0;
+
+        // Account for all epochs before start
+        while (currentEpoch < transitionTime.length && start > transitionTime[currentEpoch]) {
+            logDemographic += -rate[currentEpoch] * (transitionTime[currentEpoch] - lastTransitionTime);
+            lastTransitionTime = transitionTime[currentEpoch];
+            ++currentEpoch;
+        }
+
+        // Account for all epochs before finish
+        while (currentEpoch < transitionTime.length && finish > transitionTime[currentEpoch]) {
+
+            // Add to integral
+            double incr = 0.0;
+            if (rate[currentEpoch] == 0.0) {
+                integral += incr = integrateConstant(start, transitionTime[currentEpoch], logDemographic);
+            } else {
+                integral += incr = integrateExponential(
+                        start - lastTransitionTime,
+                        transitionTime[currentEpoch] - lastTransitionTime,
+                        logDemographic, rate[currentEpoch]);
+            }
+//            System.err.println("begin incr = " + incr + " for " + start + " -> " + transitionTime[currentEpoch] + " or " +
+//                    (start - lastTransitionTime) + " -> " + (transitionTime[currentEpoch] - lastTransitionTime) + " @ " + rate[currentEpoch] + " & " + Math.exp(logDemographic));
+
+            // Update demographic function
+            logDemographic += -rate[currentEpoch] * (transitionTime[currentEpoch] - lastTransitionTime);
+            lastTransitionTime = transitionTime[currentEpoch];
+            start = lastTransitionTime;
+            ++currentEpoch;
+        }
+
+        // End of integral
+        double incr = 0.0;
+        if (rate[currentEpoch] == 0.0) {
+            integral += incr = integrateConstant(start, finish, logDemographic);
+        } else {
+            integral += incr = integrateExponential(
+                    start - lastTransitionTime,
+                    finish - lastTransitionTime,
+                    logDemographic, rate[currentEpoch]);
+        }
+//        System.err.println("final incr = " + incr + " for " + start + " -> " + finish + " or " +
+//                (start - lastTransitionTime) + " -> " + (finish - lastTransitionTime) + " @ " + rate[currentEpoch] + " & " + Math.exp(logDemographic));
+
+        return integral / getN0();
+    }
+
+    /**
+     * Calculates the integral 1/N(x) dx between start and finish.
+     */
+    public double getIntegral(double start, double finish) {
+        double analytic = getAnalyticIntegral(start, finish);
+
+        if (DEBUG) {
+            double numeric = getNumericalIntegral(start, finish);
+
+            if (Math.abs(analytic - numeric) > 1E-10) {
+                System.err.println(analytic);
+                System.err.println(numeric);
+                throw new RuntimeException("Error in analytic calculation");
+            }
+        }
+
+        return analytic;
+    }
+
+    public double getIntensity(double t) { throw new RuntimeException("Not implemented!"); }
+
+    public double getInverseIntensity(double x) {
+        throw new RuntimeException("Not implemented!");
+    }
+
+    public int getNumArguments() {
+        throw new RuntimeException("Not implemented!");
+    }
+
+    public String getArgumentName(int n) {
+        throw new RuntimeException("Not implemented!");
+    }
+
+    public double getArgument(int n) {
+        throw new RuntimeException("Not implemented!");
+    }
+
+    public void setArgument(int n, double value) {
+        throw new RuntimeException("Not implemented!");
+    }
+
+    public double getLowerBound(int n) {
+        throw new RuntimeException("Not implemented!");
+    }
+
+    public double getUpperBound(int n) {
+        throw new RuntimeException("Not implemented!");
+    }
+
+    //
+    // private stuff
+    //
+
+    final private double[] transitionTime;
+    final private double[] rate;
+
+    static final private boolean DEBUG = false;
+}
diff --git a/src/dr/evolution/datatype/Microsatellite.java b/src/dr/evolution/datatype/Microsatellite.java
index ba2a405..60fb7d7 100644
--- a/src/dr/evolution/datatype/Microsatellite.java
+++ b/src/dr/evolution/datatype/Microsatellite.java
@@ -124,7 +124,7 @@ public class Microsatellite extends DataType {
             }else{
                 return getState(Integer.parseInt(srtRawLength));
             }
-        }catch(java.lang.NumberFormatException exp){
+        } catch(java.lang.NumberFormatException exp) {
             throw new java.lang.NumberFormatException(srtRawLength+" can not be converted. State needs to be an integer or unknown (?).");
         }
 
@@ -136,7 +136,10 @@ public class Microsatellite extends DataType {
      * @return int      the state of microsatellite allele corresponding to the length
      */
     public int getState(int rawLength){
-        if(rawLength > UNKNOWN_STATE_LENGTH){
+        if(rawLength != UNKNOWN_STATE_LENGTH){
+            if (rawLength < min) {
+                throw new java.lang.IllegalArgumentException("Microsatellite length value is less, (" + rawLength + ") than the specified minimum (" + min + ").");
+            }
             return (int)Math.ceil(((double)rawLength - min)/unitLength);
         }else{
             return stateCount;
diff --git a/src/dr/evolution/io/NexusImporter.java b/src/dr/evolution/io/NexusImporter.java
index a6a9a97..ecce8c5 100644
--- a/src/dr/evolution/io/NexusImporter.java
+++ b/src/dr/evolution/io/NexusImporter.java
@@ -175,7 +175,7 @@ public class NexusImporter extends Importer implements SequenceImporter, TreeImp
      * Parses a 'TREES' block.
      */
     public Tree[] parseTreesBlock(TaxonList taxonList) throws ImportException, IOException {
-        return readTreesBlock(taxonList);
+        return readTreesBlock(taxonList, false);
     }
 
     /**
@@ -266,6 +266,10 @@ public class NexusImporter extends Importer implements SequenceImporter, TreeImp
      * import a single tree.
      */
     public Tree importTree(TaxonList taxonList) throws IOException, ImportException {
+        return importTree(taxonList, false);
+    }
+
+    public Tree importTree(TaxonList taxonList, boolean useTaxonListNumbering) throws IOException, ImportException {
         isReadingTreesBlock = false;
         TaxonList[] aTaxonList = new TaxonList[1];
         aTaxonList[0] = taxonList;
@@ -273,20 +277,24 @@ public class NexusImporter extends Importer implements SequenceImporter, TreeImp
             throw new MissingBlockException("TREES block is missing");
         }
         translationList = readTranslationList(aTaxonList[0], lastToken);
-        return readNextTree(translationList, lastToken);
+        return readNextTree(translationList, lastToken, useTaxonListNumbering ? taxonList : null);
     }
 
     /**
      * import an array of all trees.
      */
     public Tree[] importTrees(TaxonList taxonList) throws IOException, ImportException {
+        return importTrees(taxonList, false);
+    }
+
+    public Tree[] importTrees(TaxonList taxonList, boolean useTaxonListNumbering) throws IOException, ImportException {
         isReadingTreesBlock = false;
         TaxonList[] aTaxonList = new TaxonList[1];
         aTaxonList[0] = taxonList;
         if (!startReadingTrees(aTaxonList)) {
             throw new MissingBlockException("TREES block is missing");
         }
-        return readTreesBlock(aTaxonList[0]);
+        return readTreesBlock(aTaxonList[0], useTaxonListNumbering);
     }
 
     /**
@@ -303,7 +311,7 @@ public class NexusImporter extends Importer implements SequenceImporter, TreeImp
         }
 
         if (nextTree == null) {
-            nextTree = readNextTree(translationList, lastToken);
+            nextTree = readNextTree(translationList, lastToken, null);
         }
 
         return (nextTree != null);
@@ -755,7 +763,7 @@ public class NexusImporter extends Importer implements SequenceImporter, TreeImp
     /**
      * Reads a 'TREES' block.
      */
-    private Tree[] readTreesBlock(TaxonList taxonList) throws ImportException, IOException {
+    private Tree[] readTreesBlock(TaxonList taxonList, boolean useTaxonListNumbering) throws ImportException, IOException {
         ArrayList<Tree> trees = new ArrayList<Tree>();
 
         String[] lastToken = new String[1];
@@ -764,7 +772,7 @@ public class NexusImporter extends Importer implements SequenceImporter, TreeImp
         boolean done = false;
         do {
 
-            Tree tree = readNextTree(translationList, lastToken);
+            Tree tree = readNextTree(translationList, lastToken, useTaxonListNumbering ? taxonList : null);
 
             if (tree != null) {
                 trees.add(tree);
@@ -818,6 +826,10 @@ public class NexusImporter extends Importer implements SequenceImporter, TreeImp
                 } else {
                     taxon = new Taxon(token3);
                 }
+
+                if (translationList.containsKey(token2)) {
+                    throw new BadFormatException("Translation list uses the key, " + token2 + ", more than once.");
+                }
                 translationList.put(token2, taxon);
 
             } while (getLastDelimiter() != ';');
@@ -835,7 +847,7 @@ public class NexusImporter extends Importer implements SequenceImporter, TreeImp
         return translationList;
     }
 
-    private Tree readNextTree(HashMap<String, Taxon> translationList, String[] lastToken) throws ImportException, IOException {
+    private Tree readNextTree(HashMap<String, Taxon> translationList, String[] lastToken, TaxonList taxonList) throws ImportException, IOException {
         try {
             Tree tree = null;
             String token = lastToken[0];
@@ -877,10 +889,14 @@ public class NexusImporter extends Importer implements SequenceImporter, TreeImp
                             Taxon taxon = translationList.get(label);
                             int number;
 
-                            try {
-                                number = Integer.parseInt(label) - 1;
-                            } catch (NumberFormatException nfe) {
-                                number = count;
+                            if (taxonList != null) { // Map back to original numbering from TaxonList
+                                number =  taxonList.getTaxonIndex(taxon);
+                            } else { // Old functionality
+                                try {
+                                    number = Integer.parseInt(label) - 1;
+                                } catch (NumberFormatException nfe) {
+                                    number = count;
+                                }
                             }
 
                             taxonNumberMap.put(taxon, number);
diff --git a/src/dr/evomodel/antigenic/AntigenicLikelihood.java b/src/dr/evomodel/antigenic/AntigenicLikelihood.java
index 351567f..e252f41 100644
--- a/src/dr/evomodel/antigenic/AntigenicLikelihood.java
+++ b/src/dr/evomodel/antigenic/AntigenicLikelihood.java
@@ -25,7 +25,6 @@
 
 package dr.evomodel.antigenic;
 
-import dr.evolution.util.*;
 import dr.inference.model.*;
 import dr.math.MathUtils;
 import dr.math.LogTricks;
diff --git a/src/dr/evomodel/antigenic/NPAntigenicLikelihood.java b/src/dr/evomodel/antigenic/NPAntigenicLikelihood.java
index 610a618..d7f2960 100644
--- a/src/dr/evomodel/antigenic/NPAntigenicLikelihood.java
+++ b/src/dr/evomodel/antigenic/NPAntigenicLikelihood.java
@@ -40,12 +40,8 @@ import dr.inference.model.CompoundParameter;
 import dr.inference.model.Model;
 import dr.inference.model.Parameter;
 import dr.inference.model.Variable;
-import dr.inference.operators.MCMCOperator;
 import dr.math.GammaFunction;
-import dr.math.distributions.MultivariateNormalDistribution;
-import dr.math.matrixAlgebra.SymmetricMatrix;
 import dr.xml.AbstractXMLObjectParser;
-import dr.xml.AttributeRule;
 import dr.xml.ElementRule;
 import dr.xml.StringAttributeRule;
 import dr.xml.XMLObject;
diff --git a/src/dr/evomodel/antigenic/phyloClustering/MCMCOperators/Mu1ScaleActiveScaledMu1IntactOperator.java b/src/dr/evomodel/antigenic/phyloClustering/MCMCOperators/Mu1ScaleActiveScaledMu1IntactOperator.java
new file mode 100644
index 0000000..b0f8152
--- /dev/null
+++ b/src/dr/evomodel/antigenic/phyloClustering/MCMCOperators/Mu1ScaleActiveScaledMu1IntactOperator.java
@@ -0,0 +1,277 @@
+package dr.evomodel.antigenic.phyloClustering.MCMCOperators;
+
+import java.io.FileReader;
+import java.io.IOException;
+import java.util.LinkedList;
+
+import dr.evolution.tree.NodeRef;
+import dr.evomodel.antigenic.phyloClustering.Tree_Clustering_Shared_Routines;
+import dr.evomodel.tree.TreeModel;
+import dr.inference.model.MatrixParameter;
+import dr.inference.model.Parameter;
+import dr.inference.operators.AbstractCoercableOperator;
+import dr.inference.operators.CoercionMode;
+import dr.inference.operators.MCMCOperator;
+import dr.inference.operators.OperatorFailedException;
+import dr.inference.operators.OperatorUtils;
+import dr.inference.operators.SimpleMCMCOperator;
+import dr.inference.operators.MCMCOperator.Utils;
+import dr.math.MathUtils;
+import dr.util.DataTable;
+import dr.xml.AbstractXMLObjectParser;
+import dr.xml.AttributeRule;
+import dr.xml.ElementRule;
+import dr.xml.XMLObject;
+import dr.xml.XMLObjectParser;
+import dr.xml.XMLParseException;
+import dr.xml.XMLSyntaxRule;
+
+public class Mu1ScaleActiveScaledMu1IntactOperator extends AbstractCoercableOperator   {
+
+	
+    private MatrixParameter mu = null;
+    private Parameter mu1Scale = null;   
+    private MatrixParameter virusLocations = null;
+    private MatrixParameter virusLocationsTreeNode = null;
+
+    private Parameter indicators;
+    private TreeModel treeModel;
+    
+    private int numdata;   
+    private int numNodes;
+	private int []correspondingTreeIndexForVirus = null; //relates treeModels's indexing system to cluster label's indexing system of viruses. Gets assigned
+	private double scaleFactor;
+	
+	public Mu1ScaleActiveScaledMu1IntactOperator(double weight, MatrixParameter virusLocations, MatrixParameter mu, Parameter indicators, Parameter mu1Scale, TreeModel treeModel_in, double scale, MatrixParameter virusLocationsTreeNode_in){
+  
+        super(CoercionMode.COERCION_ON);
+
+		setWeight(weight);
+        this.virusLocations = virusLocations;
+        this.mu = mu;
+        this.indicators = indicators;
+        this.mu1Scale = mu1Scale;
+		this.treeModel= treeModel_in;
+		this.scaleFactor = scale;
+		this.virusLocationsTreeNode = virusLocationsTreeNode_in;
+
+		
+		numNodes = treeModel.getNodeCount();
+		numdata = virusLocations.getColumnDimension();
+
+		correspondingTreeIndexForVirus = Tree_Clustering_Shared_Routines.setMembershipTreeToVirusIndexes(numdata, virusLocations, numNodes, treeModel);
+    	Tree_Clustering_Shared_Routines.updateUndriftedVirusLocations(numNodes, numdata, treeModel, virusLocationsTreeNode, indicators, mu, virusLocations, correspondingTreeIndexForVirus);
+
+	}
+	
+	
+
+	public double doOperation() throws OperatorFailedException {
+
+        final double scale = (scaleFactor + (MathUtils.nextDouble() * ((1.0 / scaleFactor) - scaleFactor)));
+		
+		//change mu1Scale
+        //double WALK_SIZE = 0.5;
+		//double change = Math.random()*WALK_SIZE- WALK_SIZE/2 ;
+		double original_mu1Scale_Val = mu1Scale.getParameterValue(0);
+		//double new_mu1Scale_Val = change + original_mu1Scale_Val;
+		 double new_mu1Scale_Val = scale * original_mu1Scale_Val;
+
+		mu1Scale.setParameterValue(0, new_mu1Scale_Val);
+		
+		
+		//make sure all the active mu's first dimension stays intact 
+		for(int i=0; i < numNodes; i++){
+			if( (int) indicators.getParameterValue(i) == 1){
+				double oldValue = mu.getParameter(i).getParameterValue(0);
+				double newValue =  oldValue * original_mu1Scale_Val/new_mu1Scale_Val;
+				mu.getParameter(i).setParameterValue(0, newValue);
+			}
+		}
+		
+		//the virus location needs to be updated because the mu's are updated 	  				
+    	Tree_Clustering_Shared_Routines.updateUndriftedVirusLocations(numNodes, numdata, treeModel, virusLocationsTreeNode, indicators, mu, virusLocations, correspondingTreeIndexForVirus);
+		
+        double logq = -Math.log(scale);
+
+		//return 0;
+        return logq;
+	}
+	
+	
+	
+
+	
+	//copied from the original ScaleOperator
+    public double getCoercableParameter() {
+        return Math.log(1.0 / scaleFactor - 1.0);
+    }
+
+	//copied from the original ScaleOperator
+    public void setCoercableParameter(double value) {
+        scaleFactor = 1.0 / (Math.exp(value) + 1.0);
+    }
+
+	//copied from the original ScaleOperator
+    public double getRawParameter() {
+        return scaleFactor;
+    }
+
+	
+	
+	//copied from the original ScaleOperator
+    public double getTargetAcceptanceProbability() {
+        return 0.234;
+    }
+	//copied from the original ScaleOperator
+    public final String getPerformanceSuggestion() {
+
+        double prob = MCMCOperator.Utils.getAcceptanceProbability(this);
+        double targetProb = getTargetAcceptanceProbability();
+        dr.util.NumberFormatter formatter = new dr.util.NumberFormatter(5);
+        double sf = OperatorUtils.optimizeScaleFactor(scaleFactor, prob, targetProb);
+        if (prob < getMinimumGoodAcceptanceLevel()) {
+            return "Try setting scaleFactor to about " + formatter.format(sf);
+        } else if (prob > getMaximumGoodAcceptanceLevel()) {
+            return "Try setting scaleFactor to about " + formatter.format(sf);
+        } else return "";
+    }
+	
+	
+	/*
+    public String getPerformanceSuggestion() {
+        if (Utils.getAcceptanceProbability(this) < getMinimumAcceptanceLevel()) {
+            return "";
+        } else if (Utils.getAcceptanceProbability(this) > getMaximumAcceptanceLevel()) {
+            return "";
+        } else {
+            return "";
+        }
+    }
+
+    public final void optimize(double targetProb) {
+
+        throw new RuntimeException("This operator cannot be optimized!");
+    }
+
+    public boolean isOptimizing() {
+        return false;
+    }
+
+    public void setOptimizing(boolean opt) {
+        throw new RuntimeException("This operator cannot be optimized!");
+    }
+
+    public double getMinimumAcceptanceLevel() {
+        return 0.1;
+    }
+
+    public double getMaximumAcceptanceLevel() {
+        return 0.4;
+    }
+
+    public double getMinimumGoodAcceptanceLevel() {
+        return 0.20;
+    }
+
+    public double getMaximumGoodAcceptanceLevel() {
+        return 0.30;
+    }
+*/
+    
+    public final static String MU1SCALEACTIVEACTIVEMU1INTACTOPERATOR = "Mu1ScaleActiveScaledMu1IntactOperator";
+
+    public final String getOperatorName() {
+        return MU1SCALEACTIVEACTIVEMU1INTACTOPERATOR;
+    }
+
+    
+
+
+    
+    
+    
+    public static XMLObjectParser PARSER = new AbstractXMLObjectParser() {
+    	
+
+        public final static String VIRUSLOCATIONS = "virusLocations";
+        public final static String VIRUSLOCATIONSTREENODE = "virusLocationsTreeNodes";
+    	public final static String  MU = "mu";
+    	public final static String  MU1SCALE = "mu1Scale";       
+    	public final static String INDICATORS = "indicators";
+    	public final static String SCALE = "scaleFactor";
+
+        public String getParserName() {
+            return MU1SCALEACTIVEACTIVEMU1INTACTOPERATOR;
+        }
+
+        /* (non-Javadoc)
+         * @see dr.xml.AbstractXMLObjectParser#parseXMLObject(dr.xml.XMLObject)
+         */
+        public Object parseXMLObject(XMLObject xo) throws XMLParseException {
+
+
+            double weight = xo.getDoubleAttribute(MCMCOperator.WEIGHT);
+            double scale = xo.getDoubleAttribute(SCALE);
+            
+            XMLObject cxo = xo.getChild(VIRUSLOCATIONS);
+                MatrixParameter virusLocations = (MatrixParameter) cxo.getChild(MatrixParameter.class);
+
+                cxo = xo.getChild(VIRUSLOCATIONSTREENODE);
+                MatrixParameter virusLocationsTreeNode = (MatrixParameter) cxo.getChild(MatrixParameter.class);
+        
+                cxo = xo.getChild(MU);
+                MatrixParameter mu = (MatrixParameter) cxo.getChild(MatrixParameter.class);
+
+                cxo = xo.getChild(INDICATORS);
+                Parameter indicators = (Parameter) cxo.getChild(Parameter.class);
+
+                cxo = xo.getChild(MU1SCALE);
+                Parameter mu1Scale = (Parameter) cxo.getChild(Parameter.class);
+
+                TreeModel treeModel = (TreeModel) xo.getChild(TreeModel.class);
+
+            return new Mu1ScaleActiveScaledMu1IntactOperator(weight, virusLocations, mu, indicators, mu1Scale, treeModel, scale, virusLocationsTreeNode);
+            
+
+        }
+
+        //************************************************************************
+        // AbstractXMLObjectParser implementation
+        //************************************************************************
+
+        public String getParserDescription() {
+            return "changes mu1Scale and make sure the first dimension of the active drifted mus stay the same";
+        }
+
+        public Class getReturnType() {
+            return Mu1ScaleActiveScaledMu1IntactOperator.class;
+        }
+
+
+        public XMLSyntaxRule[] getSyntaxRules() {
+            return rules;
+        }
+
+        private final XMLSyntaxRule[] rules = {
+                AttributeRule.newDoubleRule(MCMCOperator.WEIGHT),
+                AttributeRule.newDoubleRule(SCALE),
+                new ElementRule(VIRUSLOCATIONS, Parameter.class),
+                new ElementRule(VIRUSLOCATIONSTREENODE, MatrixParameter.class),
+                new ElementRule(MU, Parameter.class),
+               new ElementRule(INDICATORS, Parameter.class),
+               new ElementRule(MU1SCALE, Parameter.class),
+               new ElementRule(TreeModel.class),
+
+        };
+    
+    };
+
+
+
+    public int getStepCount() {
+        return 1;
+    }
+    
+
+}
diff --git a/src/dr/evomodel/antigenic/phyloClustering/MCMCOperators/Mu2ScaleActiveScaledMu2IntactOperator.java b/src/dr/evomodel/antigenic/phyloClustering/MCMCOperators/Mu2ScaleActiveScaledMu2IntactOperator.java
new file mode 100644
index 0000000..85cf817
--- /dev/null
+++ b/src/dr/evomodel/antigenic/phyloClustering/MCMCOperators/Mu2ScaleActiveScaledMu2IntactOperator.java
@@ -0,0 +1,278 @@
+package dr.evomodel.antigenic.phyloClustering.MCMCOperators;
+
+import java.io.FileReader;
+import java.io.IOException;
+import java.util.LinkedList;
+
+import dr.evolution.tree.NodeRef;
+import dr.evomodel.antigenic.phyloClustering.Tree_Clustering_Shared_Routines;
+import dr.evomodel.tree.TreeModel;
+import dr.inference.model.MatrixParameter;
+import dr.inference.model.Parameter;
+import dr.inference.operators.AbstractCoercableOperator;
+import dr.inference.operators.CoercionMode;
+import dr.inference.operators.MCMCOperator;
+import dr.inference.operators.OperatorFailedException;
+import dr.inference.operators.OperatorUtils;
+import dr.inference.operators.SimpleMCMCOperator;
+import dr.inference.operators.MCMCOperator.Utils;
+import dr.math.MathUtils;
+import dr.util.DataTable;
+import dr.xml.AbstractXMLObjectParser;
+import dr.xml.AttributeRule;
+import dr.xml.ElementRule;
+import dr.xml.XMLObject;
+import dr.xml.XMLObjectParser;
+import dr.xml.XMLParseException;
+import dr.xml.XMLSyntaxRule;
+
+public class Mu2ScaleActiveScaledMu2IntactOperator extends AbstractCoercableOperator  {
+
+	
+    private MatrixParameter mu = null;
+    private Parameter mu2Scale = null;   
+    private MatrixParameter virusLocations = null;
+    private MatrixParameter virusLocationsTreeNode = null;
+
+    private Parameter indicators;
+    private TreeModel treeModel;
+    
+    private int numdata;   
+    private int numNodes;
+	private int []correspondingTreeIndexForVirus = null; //relates treeModels's indexing system to cluster label's indexing system of viruses. Gets assigned
+	private double scaleFactor;
+	
+	public Mu2ScaleActiveScaledMu2IntactOperator(double weight, MatrixParameter virusLocations, MatrixParameter mu, Parameter indicators, Parameter mu2Scale, TreeModel treeModel_in, double scale, MatrixParameter virusLocationsTreeNode_in){
+   
+        super(CoercionMode.COERCION_ON);
+
+		setWeight(weight);
+        this.virusLocations = virusLocations;
+        this.mu = mu;
+        this.indicators = indicators;
+        this.mu2Scale = mu2Scale;
+		this.treeModel= treeModel_in;
+		this.scaleFactor = scale;
+		this.virusLocationsTreeNode = virusLocationsTreeNode_in;
+
+		
+		numNodes = treeModel.getNodeCount();
+		numdata = virusLocations.getColumnDimension();
+
+		correspondingTreeIndexForVirus = Tree_Clustering_Shared_Routines.setMembershipTreeToVirusIndexes(numdata, virusLocations, numNodes, treeModel);
+    	Tree_Clustering_Shared_Routines.updateUndriftedVirusLocations(numNodes, numdata, treeModel, virusLocationsTreeNode, indicators, mu, virusLocations, correspondingTreeIndexForVirus);
+
+	}
+	
+	
+
+	public double doOperation() throws OperatorFailedException {
+
+        final double scale = (scaleFactor + (MathUtils.nextDouble() * ((1.0 / scaleFactor) - scaleFactor)));
+		
+		//change mu1Scale
+        //double WALK_SIZE = 0.5;
+		//double change = Math.random()*WALK_SIZE- WALK_SIZE/2 ;
+		double original_mu2Scale_Val = mu2Scale.getParameterValue(0);
+		//double new_mu1Scale_Val = change + original_mu1Scale_Val;
+		 double new_mu2Scale_Val = scale * original_mu2Scale_Val;
+
+		mu2Scale.setParameterValue(0, new_mu2Scale_Val);
+		
+		
+		//make sure all the active mu's first dimension stays intact 
+		for(int i=0; i < numNodes; i++){
+			if( (int) indicators.getParameterValue(i) == 1){
+				double oldValue = mu.getParameter(i).getParameterValue(1);
+				double newValue =  oldValue * original_mu2Scale_Val/new_mu2Scale_Val;
+				mu.getParameter(i).setParameterValue(1, newValue);
+			}
+		}
+		
+		//the virus location needs to be updated because the mu's are updated 	  				
+    	Tree_Clustering_Shared_Routines.updateUndriftedVirusLocations(numNodes, numdata, treeModel, virusLocationsTreeNode, indicators, mu, virusLocations, correspondingTreeIndexForVirus);
+		
+        double logq = -Math.log(scale);
+
+		//return 0;
+        return logq;
+	}
+	
+	
+	
+	
+	
+	
+	//copied from the original ScaleOperator
+    public double getCoercableParameter() {
+        return Math.log(1.0 / scaleFactor - 1.0);
+    }
+
+	//copied from the original ScaleOperator
+    public void setCoercableParameter(double value) {
+        scaleFactor = 1.0 / (Math.exp(value) + 1.0);
+    }
+
+	//copied from the original ScaleOperator
+    public double getRawParameter() {
+        return scaleFactor;
+    }
+
+	
+	
+	//copied from the original ScaleOperator
+    public double getTargetAcceptanceProbability() {
+        return 0.234;
+    }
+	//copied from the original ScaleOperator
+    public final String getPerformanceSuggestion() {
+
+        double prob = MCMCOperator.Utils.getAcceptanceProbability(this);
+        double targetProb = getTargetAcceptanceProbability();
+        dr.util.NumberFormatter formatter = new dr.util.NumberFormatter(5);
+        double sf = OperatorUtils.optimizeScaleFactor(scaleFactor, prob, targetProb);
+        if (prob < getMinimumGoodAcceptanceLevel()) {
+            return "Try setting scaleFactor to about " + formatter.format(sf);
+        } else if (prob > getMaximumGoodAcceptanceLevel()) {
+            return "Try setting scaleFactor to about " + formatter.format(sf);
+        } else return "";
+    }
+	
+	
+	/*
+    public String getPerformanceSuggestion() {
+        if (Utils.getAcceptanceProbability(this) < getMinimumAcceptanceLevel()) {
+            return "";
+        } else if (Utils.getAcceptanceProbability(this) > getMaximumAcceptanceLevel()) {
+            return "";
+        } else {
+            return "";
+        }
+    }
+
+    public final void optimize(double targetProb) {
+
+        throw new RuntimeException("This operator cannot be optimized!");
+    }
+
+    public boolean isOptimizing() {
+        return false;
+    }
+
+    public void setOptimizing(boolean opt) {
+        throw new RuntimeException("This operator cannot be optimized!");
+    }
+
+    public double getMinimumAcceptanceLevel() {
+        return 0.1;
+    }
+
+    public double getMaximumAcceptanceLevel() {
+        return 0.4;
+    }
+
+    public double getMinimumGoodAcceptanceLevel() {
+        return 0.20;
+    }
+
+    public double getMaximumGoodAcceptanceLevel() {
+        return 0.30;
+    }
+*/
+    
+    public final static String MU2SCALEACTIVEACTIVEMU2INTACTOPERATOR = "Mu2ScaleActiveScaledMu2IntactOperator";
+
+    public final String getOperatorName() {
+        return MU2SCALEACTIVEACTIVEMU2INTACTOPERATOR;
+    }
+
+    
+
+
+    
+    
+    
+    public static XMLObjectParser PARSER = new AbstractXMLObjectParser() {
+    	
+
+        public final static String VIRUSLOCATIONS = "virusLocations";
+        public final static String VIRUSLOCATIONSTREENODE = "virusLocationsTreeNodes";
+    	public final static String  MU = "mu";
+    	public final static String  MU2SCALE = "mu2Scale";       
+    	public final static String INDICATORS = "indicators";
+    	public final static String SCALE = "scaleFactor";
+
+        public String getParserName() {
+            return MU2SCALEACTIVEACTIVEMU2INTACTOPERATOR;
+        }
+
+        /* (non-Javadoc)
+         * @see dr.xml.AbstractXMLObjectParser#parseXMLObject(dr.xml.XMLObject)
+         */
+        public Object parseXMLObject(XMLObject xo) throws XMLParseException {
+
+
+            double weight = xo.getDoubleAttribute(MCMCOperator.WEIGHT);
+            double scale = xo.getDoubleAttribute(SCALE);
+            
+            XMLObject cxo = xo.getChild(VIRUSLOCATIONS);
+                MatrixParameter virusLocations = (MatrixParameter) cxo.getChild(MatrixParameter.class);
+
+                cxo = xo.getChild(VIRUSLOCATIONSTREENODE);
+                MatrixParameter virusLocationsTreeNode = (MatrixParameter) cxo.getChild(MatrixParameter.class);
+        
+                cxo = xo.getChild(MU);
+                MatrixParameter mu = (MatrixParameter) cxo.getChild(MatrixParameter.class);
+
+                cxo = xo.getChild(INDICATORS);
+                Parameter indicators = (Parameter) cxo.getChild(Parameter.class);
+
+                cxo = xo.getChild(MU2SCALE);
+                Parameter mu2Scale = (Parameter) cxo.getChild(Parameter.class);
+
+                TreeModel treeModel = (TreeModel) xo.getChild(TreeModel.class);
+
+            return new Mu2ScaleActiveScaledMu2IntactOperator(weight, virusLocations, mu, indicators, mu2Scale, treeModel, scale, virusLocationsTreeNode);
+            
+
+        }
+
+        //************************************************************************
+        // AbstractXMLObjectParser implementation
+        //************************************************************************
+
+        public String getParserDescription() {
+            return "changes mu1Scale and make sure the first dimension of the active drifted mus stay the same";
+        }
+
+        public Class getReturnType() {
+            return Mu2ScaleActiveScaledMu2IntactOperator.class;
+        }
+
+
+        public XMLSyntaxRule[] getSyntaxRules() {
+            return rules;
+        }
+
+        private final XMLSyntaxRule[] rules = {
+                AttributeRule.newDoubleRule(MCMCOperator.WEIGHT),
+                AttributeRule.newDoubleRule(SCALE),
+                new ElementRule(VIRUSLOCATIONS, Parameter.class),
+                new ElementRule(VIRUSLOCATIONSTREENODE, MatrixParameter.class),
+                new ElementRule(MU, Parameter.class),
+               new ElementRule(INDICATORS, Parameter.class),
+               new ElementRule(MU2SCALE, Parameter.class),
+               new ElementRule(TreeModel.class),
+
+        };
+    
+    };
+
+
+
+    public int getStepCount() {
+        return 1;
+    }
+    
+
+}
diff --git a/src/dr/evomodel/antigenic/phyloClustering/MCMCOperators/ProbGenericSiteGibbsOperator.java b/src/dr/evomodel/antigenic/phyloClustering/MCMCOperators/ProbGenericSiteGibbsOperator.java
new file mode 100644
index 0000000..c8049a5
--- /dev/null
+++ b/src/dr/evomodel/antigenic/phyloClustering/MCMCOperators/ProbGenericSiteGibbsOperator.java
@@ -0,0 +1,193 @@
+package dr.evomodel.antigenic.phyloClustering.MCMCOperators;
+
+import cern.jet.random.Beta;
+import dr.evomodel.antigenic.phyloClustering.TreeClusteringVirusesPrior;
+import dr.evomodel.antigenic.phyloClustering.misc.obsolete.AGLikelihoodTreeCluster;
+import dr.evomodel.antigenic.phyloClustering.misc.obsolete.TreeClusterGibbsOperator;
+import dr.evomodel.tree.TreeModel;
+import dr.inference.model.MatrixParameter;
+import dr.inference.model.Parameter;
+import dr.inference.operators.GibbsOperator;
+import dr.inference.operators.MCMCOperator;
+import dr.inference.operators.OperatorFailedException;
+import dr.inference.operators.SimpleMCMCOperator;
+import dr.inference.operators.MCMCOperator.Utils;
+import dr.xml.AbstractXMLObjectParser;
+import dr.xml.AttributeRule;
+import dr.xml.ElementRule;
+import dr.xml.XMLObject;
+import dr.xml.XMLObjectParser;
+import dr.xml.XMLParseException;
+import dr.xml.XMLSyntaxRule;
+
+
+/**
+ * A Gibbs operator for allocation of items to clusters under a distance dependent Chinese restaurant process.
+ *
+ * @author Charles Cheung
+ * @author Trevor Bedford
+ */
+public class ProbGenericSiteGibbsOperator  extends SimpleMCMCOperator implements GibbsOperator {
+	
+    public final static String CLASSNAME_OPERATOR = "ProbGenericSiteGibbsOperator";
+
+    private TreeClusteringVirusesPrior clusterPrior;
+    private Parameter probSites;
+    private int numSites;
+    
+    private double probSite_alpha = 1;
+    private double probSite_beta = 1;
+    
+    
+    public ProbGenericSiteGibbsOperator(double weight, TreeClusteringVirusesPrior clusterPrior_in, Parameter probSites_in, 
+    		double probSite_alpha_in,
+				double probSite_beta_in) {  	
+    	clusterPrior = clusterPrior_in;
+    	probSites = probSites_in;
+    	numSites = clusterPrior.getNumSites();
+        setWeight(weight);
+		this.probSite_alpha = probSite_alpha_in;
+		this.probSite_beta = probSite_beta_in;
+		
+		//System.out.println("shape = "+ probSite_alpha_in);
+		//System.out.println("shapeB = " + probSite_beta_in);
+
+    }
+    
+
+	public double doOperation() throws OperatorFailedException { 
+		
+//		clusterPrior.sampleCausativeStates();
+       int[] causalCount = clusterPrior.getCausalCount();
+       int[] nonCausalCount = clusterPrior.getNonCausalCount();
+		       
+       //int numSites = 330;
+       int numSites = clusterPrior.getNumSites();
+       
+       double numCausals = 0;
+       double numNonCausals = 0;
+       for(int i=0; i < numSites; i++){
+    	   numCausals = numCausals + causalCount[i];
+    	   numNonCausals = numNonCausals + nonCausalCount[i];
+       }
+       //System.out.println("perform a Gibbs move on the number of sites");
+       //System.out.println("# causals = " + numCausals);
+       //System.out.println("# noncausals = " + numNonCausals);
+       
+ 	   //SHOULD GET IT FROM THE PRIOR SPECIFICATION COZ THEY SHOULD MATCH
+ 	   double value = Beta.staticNextDouble(numCausals+ probSite_alpha, numNonCausals+probSite_beta); //posterior
+ 	   
+ 	   probSites.setParameterValue(0, value);
+ 	   
+ 	  // System.out.println("hehe: " + whichSite + "," + probSites.getParameterValue(whichSite));
+ 	   
+       
+		return 0;
+	}
+	
+	public void accept(double deviation) {
+    	super.accept(deviation);	
+	}
+
+	public void reject() {
+    	super.reject();	
+	}
+
+     
+     //MCMCOperator INTERFACE
+     public final String getOperatorName() {
+         return CLASSNAME_OPERATOR;
+     }
+
+
+
+     public String getPerformanceSuggestion() {
+         if (Utils.getAcceptanceProbability(this) < getMinimumAcceptanceLevel()) {
+             return "";
+         } else if (Utils.getAcceptanceProbability(this) > getMaximumAcceptanceLevel()) {
+             return "";
+         } else {
+             return "";
+         }
+     }
+
+ 
+    
+ 
+
+     public static XMLObjectParser PARSER = new AbstractXMLObjectParser() {
+
+
+    	public final static String PROBSITES = "probSites";
+        public final static String PROBSITE_ALPHA = "shape";
+        public final static String PROBSITE_BETA = "shapeB";
+
+        public String getParserName() {
+             return CLASSNAME_OPERATOR;
+        }
+
+         /* (non-Javadoc)
+          * @see dr.xml.AbstractXMLObjectParser#parseXMLObject(dr.xml.XMLObject)
+          */
+        public Object parseXMLObject(XMLObject xo) throws XMLParseException {
+         	
+         	//System.out.println("Parser run. Exit now");
+         	//System.exit(0);
+
+             double weight = xo.getDoubleAttribute(MCMCOperator.WEIGHT);
+             
+             XMLObject cxo = xo.getChild(PROBSITES);
+             Parameter probSites = (Parameter) cxo.getChild(Parameter.class);
+
+             TreeClusteringVirusesPrior clusterPrior = (TreeClusteringVirusesPrior) xo.getChild(TreeClusteringVirusesPrior.class);
+
+      		double probSite_alpha = 1;
+        	if (xo.hasAttribute(PROBSITE_ALPHA)) {
+        		probSite_alpha = xo.getDoubleAttribute(PROBSITE_ALPHA);
+        	}
+    		double probSite_beta = 1;
+        	if (xo.hasAttribute(PROBSITE_BETA)) {
+        		probSite_beta = xo.getDoubleAttribute(PROBSITE_BETA);
+        	}
+                          
+             
+             return new ProbGenericSiteGibbsOperator(weight, clusterPrior, probSites, probSite_alpha, probSite_beta);
+
+         }
+
+         //************************************************************************
+         // AbstractXMLObjectParser implementation
+         //************************************************************************
+
+         public String getParserDescription() {
+             return "An operator that updates the probability of sites given a beta distribution.";
+         }
+
+         public Class getReturnType() {
+             return ProbSitesGibbsOperator.class;
+         }
+
+
+         public XMLSyntaxRule[] getSyntaxRules() {
+             return rules;
+         }
+
+         private final XMLSyntaxRule[] rules = {
+                 AttributeRule.newDoubleRule(MCMCOperator.WEIGHT),
+ 	            new ElementRule(TreeClusteringVirusesPrior.class),
+                new ElementRule(PROBSITES, Parameter.class),
+         		AttributeRule.newDoubleRule(PROBSITE_ALPHA, true, "the alpha parameter in the Beta prior"),
+         		AttributeRule.newDoubleRule(PROBSITE_BETA, true, "the beta parameter in the Beta prior"),
+
+         };
+     
+     };
+
+
+ 
+     public int getStepCount() {
+         return 1;
+     }
+
+
+}
diff --git a/src/dr/evomodel/antigenic/phyloClustering/MCMCOperators/ProbSitesGibbsOperator.java b/src/dr/evomodel/antigenic/phyloClustering/MCMCOperators/ProbSitesGibbsOperator.java
new file mode 100644
index 0000000..3cf6bae
--- /dev/null
+++ b/src/dr/evomodel/antigenic/phyloClustering/MCMCOperators/ProbSitesGibbsOperator.java
@@ -0,0 +1,184 @@
+package dr.evomodel.antigenic.phyloClustering.MCMCOperators;
+
+import cern.jet.random.Beta;
+import dr.evomodel.antigenic.phyloClustering.TreeClusteringVirusesPrior;
+import dr.evomodel.antigenic.phyloClustering.misc.obsolete.AGLikelihoodTreeCluster;
+import dr.evomodel.antigenic.phyloClustering.misc.obsolete.TreeClusterGibbsOperator;
+import dr.evomodel.tree.TreeModel;
+import dr.inference.model.MatrixParameter;
+import dr.inference.model.Parameter;
+import dr.inference.operators.GibbsOperator;
+import dr.inference.operators.MCMCOperator;
+import dr.inference.operators.OperatorFailedException;
+import dr.inference.operators.SimpleMCMCOperator;
+import dr.inference.operators.MCMCOperator.Utils;
+import dr.xml.AbstractXMLObjectParser;
+import dr.xml.AttributeRule;
+import dr.xml.ElementRule;
+import dr.xml.XMLObject;
+import dr.xml.XMLObjectParser;
+import dr.xml.XMLParseException;
+import dr.xml.XMLSyntaxRule;
+
+
+/**
+ * A Gibbs operator for allocation of items to clusters under a distance dependent Chinese restaurant process.
+ *
+ * @author Charles Cheung
+ * @author Trevor Bedford
+ */
+public class ProbSitesGibbsOperator  extends SimpleMCMCOperator implements GibbsOperator {
+	
+    public final static String CLASSNAME_OPERATOR = "ProbSitesGibbsOperator";
+
+    private TreeClusteringVirusesPrior clusterPrior;
+    private Parameter probSites;
+    private int numSites;
+    
+    private double probSite_alpha = 1;
+    private double probSite_beta = 1;
+    
+    
+    public ProbSitesGibbsOperator(double weight, TreeClusteringVirusesPrior clusterPrior_in, Parameter probSites_in, 		 				
+    		double probSite_alpha_in,
+				double probSite_beta_in) {  	
+    	clusterPrior = clusterPrior_in;
+    	probSites = probSites_in;
+    	numSites = clusterPrior.getNumSites();
+        setWeight(weight); 
+		this.probSite_alpha = probSite_alpha_in;
+		this.probSite_beta = probSite_beta_in;
+    }
+    
+
+	public double doOperation() throws OperatorFailedException { 
+		
+//		clusterPrior.sampleCausativeStates();
+       int[] causalCount = clusterPrior.getCausalCount();
+       int[] nonCausalCount = clusterPrior.getNonCausalCount();
+		       
+       //int numSites = 330;
+       int numSites = probSites.getDimension();
+       
+ 	   int whichSite = (int) (Math.floor(Math.random()*numSites)); //choose from possibilities
+
+ 	   //SHOULD GET IT FROM THE PRIOR SPECIFICATION COZ THEY SHOULD MATCH
+ 	   double value = Beta.staticNextDouble(causalCount[whichSite]+probSite_alpha, nonCausalCount[whichSite]+probSite_beta); //posterior
+ 	   
+ 	   probSites.setParameterValue(whichSite, value);
+ 	   
+ 	  // System.out.println("hehe: " + whichSite + "," + probSites.getParameterValue(whichSite));
+ 	   
+       
+		return 0;
+	}
+	
+	public void accept(double deviation) {
+    	super.accept(deviation);	
+	}
+
+	public void reject() {
+    	super.reject();	
+	}
+
+     
+     //MCMCOperator INTERFACE
+     public final String getOperatorName() {
+         return CLASSNAME_OPERATOR;
+     }
+
+
+
+     public String getPerformanceSuggestion() {
+         if (Utils.getAcceptanceProbability(this) < getMinimumAcceptanceLevel()) {
+             return "";
+         } else if (Utils.getAcceptanceProbability(this) > getMaximumAcceptanceLevel()) {
+             return "";
+         } else {
+             return "";
+         }
+     }
+
+ 
+    
+ 
+
+     public static XMLObjectParser PARSER = new AbstractXMLObjectParser() {
+
+
+    	public final static String PROBSITES = "probSites";
+        public final static String PROBSITE_ALPHA = "shape";
+        public final static String PROBSITE_BETA = "shapeB";
+
+
+        public String getParserName() {
+             return CLASSNAME_OPERATOR;
+        }
+
+         /* (non-Javadoc)
+          * @see dr.xml.AbstractXMLObjectParser#parseXMLObject(dr.xml.XMLObject)
+          */
+        public Object parseXMLObject(XMLObject xo) throws XMLParseException {
+         	
+         	//System.out.println("Parser run. Exit now");
+         	//System.exit(0);
+
+             double weight = xo.getDoubleAttribute(MCMCOperator.WEIGHT);
+             
+             XMLObject cxo = xo.getChild(PROBSITES);
+             Parameter probSites = (Parameter) cxo.getChild(Parameter.class);
+             
+
+             TreeClusteringVirusesPrior clusterPrior = (TreeClusteringVirusesPrior) xo.getChild(TreeClusteringVirusesPrior.class);
+
+     		double probSite_alpha = 1;
+        	if (xo.hasAttribute(PROBSITE_ALPHA)) {
+        		probSite_alpha = xo.getDoubleAttribute(PROBSITE_ALPHA);
+        	}
+    		double probSite_beta = 1;
+        	if (xo.hasAttribute(PROBSITE_BETA)) {
+        		probSite_beta = xo.getDoubleAttribute(PROBSITE_BETA);
+        	}
+             
+             
+             return new ProbSitesGibbsOperator(weight, clusterPrior, probSites,probSite_alpha, probSite_beta);
+
+         }
+
+         //************************************************************************
+         // AbstractXMLObjectParser implementation
+         //************************************************************************
+
+         public String getParserDescription() {
+             return "An operator that updates the probability of sites given a beta distribution.";
+         }
+
+         public Class getReturnType() {
+             return ProbSitesGibbsOperator.class;
+         }
+
+
+         public XMLSyntaxRule[] getSyntaxRules() {
+             return rules;
+         }
+
+         private final XMLSyntaxRule[] rules = {
+                 AttributeRule.newDoubleRule(MCMCOperator.WEIGHT),
+         		AttributeRule.newDoubleRule(PROBSITE_ALPHA, true, "the alpha parameter in the Beta prior"),
+         		AttributeRule.newDoubleRule(PROBSITE_BETA, true, "the beta parameter in the Beta prior"),
+
+
+ 	            new ElementRule(TreeClusteringVirusesPrior.class),
+                new ElementRule(PROBSITES, Parameter.class),
+         };
+     
+     };
+
+
+ 
+     public int getStepCount() {
+         return 1;
+     }
+
+
+}
diff --git a/src/dr/evomodel/antigenic/phyloClustering/MCMCOperators/RandomWalkOnActiveMu.java b/src/dr/evomodel/antigenic/phyloClustering/MCMCOperators/RandomWalkOnActiveMu.java
new file mode 100644
index 0000000..21df41a
--- /dev/null
+++ b/src/dr/evomodel/antigenic/phyloClustering/MCMCOperators/RandomWalkOnActiveMu.java
@@ -0,0 +1,247 @@
+
+package dr.evomodel.antigenic.phyloClustering.MCMCOperators;
+
+import java.io.FileReader;
+import java.io.IOException;
+import java.util.LinkedList;
+
+import dr.evolution.tree.NodeRef;
+import dr.evomodel.antigenic.phyloClustering.Tree_Clustering_Shared_Routines;
+import dr.evomodel.tree.TreeModel;
+import dr.inference.model.MatrixParameter;
+import dr.inference.model.Parameter;
+import dr.inference.operators.AbstractCoercableOperator;
+import dr.inference.operators.CoercionMode;
+import dr.inference.operators.MCMCOperator;
+import dr.inference.operators.OperatorFailedException;
+import dr.inference.operators.OperatorUtils;
+import dr.inference.operators.SimpleMCMCOperator;
+import dr.inference.operators.MCMCOperator.Utils;
+import dr.math.MathUtils;
+import dr.util.DataTable;
+import dr.xml.AbstractXMLObjectParser;
+import dr.xml.AttributeRule;
+import dr.xml.ElementRule;
+import dr.xml.XMLObject;
+import dr.xml.XMLObjectParser;
+import dr.xml.XMLParseException;
+import dr.xml.XMLSyntaxRule;
+
+public class RandomWalkOnActiveMu extends AbstractCoercableOperator {
+
+	
+    private MatrixParameter mu = null;
+    private MatrixParameter virusLocations = null;
+    private MatrixParameter virusLocationsTreeNode = null;
+    private Parameter indicators;
+    private TreeModel treeModel;
+    
+    private int numdata;   
+    private int numNodes;
+	private int []correspondingTreeIndexForVirus = null; //relates treeModels's indexing system to cluster label's indexing system of viruses. Gets assigned
+
+    private double windowSize = 0.5;
+
+	
+	public RandomWalkOnActiveMu(double weight, MatrixParameter virusLocations, MatrixParameter mu, Parameter indicators,  TreeModel treeModel_in, double windowSize, MatrixParameter virusLocationsTreeNode_in){
+    
+        super(CoercionMode.COERCION_ON);
+		
+		setWeight(weight);
+        this.windowSize = windowSize;
+
+        this.virusLocations = virusLocations;
+        this.mu = mu;
+        this.indicators = indicators;
+		this.treeModel= treeModel_in;
+		this.virusLocationsTreeNode = virusLocationsTreeNode_in;
+		
+		numNodes = treeModel.getNodeCount();
+		numdata = virusLocations.getColumnDimension();
+
+		correspondingTreeIndexForVirus = Tree_Clustering_Shared_Routines.setMembershipTreeToVirusIndexes(numdata, virusLocations, numNodes, treeModel);
+    	Tree_Clustering_Shared_Routines.updateUndriftedVirusLocations(numNodes, numdata, treeModel, virusLocationsTreeNode, indicators, mu, virusLocations, correspondingTreeIndexForVirus);
+	}
+	
+	
+
+	public double doOperation() throws OperatorFailedException {
+	
+
+		//first, randomly select an "on" node to overwrite
+		int originalNode = Tree_Clustering_Shared_Routines.findAnOnNodeIncludingRootRandomly(numNodes, indicators);			//find an on-node	
+		//unbounded walk
+		int dimSelect = (int) Math.floor( Math.random()* 2 );   		  	    			
+        double change = (2.0 * MathUtils.nextDouble() - 1.0) * windowSize;
+		double originalValue = mu.getParameter(originalNode).getParameterValue(dimSelect);
+		mu.getParameter(originalNode ).setParameterValue(dimSelect, originalValue + change);
+	
+		//a. by removing the selected node, each child of this node should be updated to keep the absolute location of 
+		//the child cluster fixed as before
+		LinkedList<Integer> childrenOriginalNode = Tree_Clustering_Shared_Routines.findActiveBreakpointsChildren(originalNode, numNodes, treeModel, indicators);	
+		for(int i=0; i < childrenOriginalNode.size(); i++){
+			int muIndexNum = childrenOriginalNode.get(i).intValue() ;
+			Parameter curMu = mu.getParameter( muIndexNum );
+			double curMu_original = curMu.getParameterValue( dimSelect);
+			mu.getParameter(muIndexNum).setParameterValue(dimSelect, curMu_original - change);
+		}
+		
+		
+		
+		
+		
+		//the virus location needs to be updated because the mu's are updated 	  				
+    	Tree_Clustering_Shared_Routines.updateUndriftedVirusLocations(numNodes, numdata, treeModel, virusLocationsTreeNode, indicators, mu, virusLocations, correspondingTreeIndexForVirus);
+		
+
+        return 0.0;
+	}
+	
+	
+	
+	
+	
+	 //MCMCOperator INTERFACE
+    public double getCoercableParameter() {
+        return Math.log(windowSize);
+    }
+
+    public void setCoercableParameter(double value) {
+        windowSize = Math.exp(value);
+    }
+
+    public double getRawParameter() {
+        return windowSize;
+    }
+
+    public double getTargetAcceptanceProbability() {
+        return 0.234;
+    }
+
+    public double getMinimumAcceptanceLevel() {
+        return 0.1;
+    }
+
+    public double getMaximumAcceptanceLevel() {
+        return 0.4;
+    }
+
+    public double getMinimumGoodAcceptanceLevel() {
+        return 0.20;
+    }
+
+    public double getMaximumGoodAcceptanceLevel() {
+        return 0.30;
+    }
+
+    public final String getPerformanceSuggestion() {
+
+        double prob = MCMCOperator.Utils.getAcceptanceProbability(this);
+        double targetProb = getTargetAcceptanceProbability();
+
+        double ws = OperatorUtils.optimizeWindowSize(windowSize, prob, targetProb);
+
+        if (prob < getMinimumGoodAcceptanceLevel()) {
+            return "Try decreasing windowSize to about " + ws;
+        } else if (prob > getMaximumGoodAcceptanceLevel()) {
+            return "Try increasing windowSize to about " + ws;
+        } else return "";
+    }
+
+    
+    
+    
+    public final static String RANDOMWALKACTIVEMU = "randomWalkOnActiveMu";
+
+    public final String getOperatorName() {
+        return RANDOMWALKACTIVEMU;
+    }
+
+    
+
+
+    
+    
+    
+    public static XMLObjectParser PARSER = new AbstractXMLObjectParser() {
+    	
+
+        public final static String VIRUSLOCATIONS = "virusLocations";
+        public final static String VIRUSLOCATIONSTREENODE = "virusLocationsTreeNodes";
+    	public final static String  MU = "mu";
+    	public final static String INDICATORS = "indicators";
+    	public final static String WINDOWSIZE = "windowSize";
+
+
+        public String getParserName() {
+            return RANDOMWALKACTIVEMU;
+        }
+
+        /* (non-Javadoc)
+         * @see dr.xml.AbstractXMLObjectParser#parseXMLObject(dr.xml.XMLObject)
+         */
+        public Object parseXMLObject(XMLObject xo) throws XMLParseException {
+
+
+            double weight = xo.getDoubleAttribute(MCMCOperator.WEIGHT);
+            double windowSize = xo.getDoubleAttribute(WINDOWSIZE);
+
+            
+            XMLObject cxo = xo.getChild(VIRUSLOCATIONS);
+                MatrixParameter virusLocations = (MatrixParameter) cxo.getChild(MatrixParameter.class);
+               
+                cxo = xo.getChild(VIRUSLOCATIONSTREENODE);
+                MatrixParameter virusLocationsTreeNode = (MatrixParameter) cxo.getChild(MatrixParameter.class);
+               
+                
+                cxo = xo.getChild(MU);
+                MatrixParameter mu = (MatrixParameter) cxo.getChild(MatrixParameter.class);
+
+                cxo = xo.getChild(INDICATORS);
+                Parameter indicators = (Parameter) cxo.getChild(Parameter.class);
+
+                TreeModel treeModel = (TreeModel) xo.getChild(TreeModel.class);
+
+            return new RandomWalkOnActiveMu(weight, virusLocations, mu, indicators, treeModel, windowSize, virusLocationsTreeNode);
+            
+
+        }
+
+        //************************************************************************
+        // AbstractXMLObjectParser implementation
+        //************************************************************************
+
+        public String getParserDescription() {
+            return "changes serum drift and make sure the first dimension of the active drifted mus stay the same";
+        }
+
+        public Class getReturnType() {
+            return RandomWalkOnActiveMu.class;
+        }
+
+
+        public XMLSyntaxRule[] getSyntaxRules() {
+            return rules;
+        }
+
+        private final XMLSyntaxRule[] rules = {
+                AttributeRule.newDoubleRule(MCMCOperator.WEIGHT),
+                AttributeRule.newDoubleRule(WINDOWSIZE),
+                new ElementRule(VIRUSLOCATIONS, Parameter.class),
+                new ElementRule(VIRUSLOCATIONSTREENODE, MatrixParameter.class),
+                new ElementRule(MU, Parameter.class),
+               new ElementRule(INDICATORS, Parameter.class),
+               new ElementRule(TreeModel.class),
+
+        };
+    
+    };
+
+
+
+    public int getStepCount() {
+        return 1;
+    }
+    
+
+}
diff --git a/src/dr/evomodel/antigenic/phyloClustering/MCMCOperators/TreeClusterAlgorithmOperator.java b/src/dr/evomodel/antigenic/phyloClustering/MCMCOperators/TreeClusterAlgorithmOperator.java
new file mode 100644
index 0000000..a69e394
--- /dev/null
+++ b/src/dr/evomodel/antigenic/phyloClustering/MCMCOperators/TreeClusterAlgorithmOperator.java
@@ -0,0 +1,3801 @@
+package dr.evomodel.antigenic.phyloClustering.MCMCOperators;
+
+
+import java.io.BufferedReader;
+import java.io.FileNotFoundException;
+import java.io.FileReader;
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.Map;
+
+import dr.evolution.tree.NodeRef;
+import dr.evomodel.antigenic.AntigenicLikelihood;
+import dr.evomodel.antigenic.phyloClustering.Tree_Clustering_Shared_Routines;
+import dr.evomodel.tree.TreeModel;
+import dr.inference.model.MatrixParameter;
+import dr.inference.model.Parameter;
+import dr.inference.operators.MCMCOperator;
+import dr.inference.operators.SimpleMCMCOperator;
+import dr.math.MathUtils;
+import dr.math.distributions.MultivariateNormalDistribution;
+import dr.util.DataTable;
+import dr.xml.*;
+
+
+/**
+ * An operator to cluster viruses using a phylogenetic tree
+ *
+ * @author Charles Cheung
+ * @author Trevor Bedford
+ */
+public class TreeClusterAlgorithmOperator extends SimpleMCMCOperator  {
+
+
+    public final static String TREE_CLUSTERALGORITHM_OPERATOR = "TreeClusterAlgorithmOperator";
+
+	
+	//Tuning parameters for proposals..
+	private static final double WALK_SIZE = 4; // or 2 for +/- 1
+    int maxNodeLevel = 4; //multistep - how many steps	
+	
+    
+    
+    //parameters
+    private MatrixParameter mu = null;
+    private Parameter clusterLabels = null;   
+    private MatrixParameter virusLocations = null;
+    private MatrixParameter serumLocations = null;
+
+    private Parameter indicators;
+    private Parameter muPrecision;
+    private TreeModel treeModel;
+  //  private AGLikelihoodTreeCluster clusterLikelihood = null;
+    private AntigenicLikelihood clusterLikelihood = null; 
+
+
+    private Parameter clusterLabelsTreeNode;
+    private MatrixParameter virusLocationsTreeNode;
+    
+    private Parameter mu1Scale = null;
+    private Parameter mu2Scale = null;
+    private Parameter muMean = null;
+   
+    //-----------------------------------------------------------
+    //I think these parameters are obsolete and should be removed 
+	//private Parameter clusterOffsetsParameter;
+	//private Parameter virusOffsetsParameter;
+	//--------------------------------------------------
+	
+	private int numdata; //gets assigned in the constructor
+	private int numNodes; //gets assigned in the constructor
+	private int []correspondingTreeIndexForVirus = null; //relates treeModels's indexing system to cluster label's indexing system of viruses. Gets assigned
+
+	//private int[] newClusterLabelArray; //for keeping the cluster labeling consistent
+	//private int[] oldClusterLabelArray; //for keeping the cluster labeling consistent
+	
+	
+    private int operatorSelect = -1; //keep track of which proposal gets called
+     
+    //For profiling acceptance rate
+    private double []acceptNum;
+    private double []rejectNum;
+    
+    private double []acceptDistance;
+    private double []rejectDistance;
+    
+	private int moveCounter = 0; //counts how many moves have been proposed
+	private int BURN_IN = 100000;
+	private int frequencyPrintAcceptance = 1000000;
+	
+	//private int frequencyPrintActive = 10000; //for debugging, if printActiveNodes() is called.
+	private int updateHotNodeFrequencey = 100000; //for the Propose_HotMultistepOnNodeFlipMu operator
+
+	private double muDistance = -1; //for the Proposal_flipIBalanceRestrictive operator
+
+	String[] operatorName =  {"Proposal_changeToAnotherNodeOn", 
+							  "Proposal_changeMuFromPrior", 
+							  "Proposal_flipIandChangeMu", 
+							  "Proposal_changeAnOnMuWalk", 
+							  "Proposal_multistepOnNode", 
+							  "Propose_YandMu" , 
+							  "Propose_YandI", 
+							  "Propose_YandIandmu", 
+							  "Propose_branchOffFlip", 
+							  "Propose_multistepOnNodeFlipMu", 
+							  "Propose_flipI", 
+							  "Propose_changeOnMuAndBalance", 
+							  "Proposal_changeMuFromWalk", 
+							  "Proposal_changeAnOnMuFromPrior", 
+							  "Propose_HotMultistepOnNodeFlipMu",
+							  "Proposal_flipIBalance", 
+							  "Proposal_OnMultistepIExchangeMuAndFlipAnotherI",  
+							  "Proposal_changeRootMuWalk", 
+							  "Proposal_changeRootMuWalkAndBalance",
+							  "Proposal_flipIBalanceRestrictive"};
+    
+	
+	
+	//Decided after profiling acceptance..
+	//Type:			highly crucial		Efficient	Booster to facilitate mixing
+	//Exchange I:	0					9			14
+	//Change mu:	1 (12)				11, 17
+	//Flip I:		15								16
+	//double[] operatorWeight = {1,1,0,0,0,0,0,0,0,1,0,1,0,0,1,1,1,0, 0.1};
+
+	//Decided after profiling acceptance..
+		//Type:			highly crucial		Efficient	Booster to facilitate mixing
+		//Exchange I:	0					9			
+		//Change mu:	1 (12)				11, 17
+		//Flip I:		15								16
+		double[] operatorWeight;// = {1,1,0,0,0,0,0,0,0,1,0,1,0,0,0,1,1,0, 0};
+	//double[] operatorWeight = {0,1};
+	
+	
+	//variables for the Propose_HotMultistepOnNodeFlipMu
+    int[] hotNodes;
+    int[] freqAcceptNode;
+    private int curNode = 0;
+    
+    
+    
+    
+    
+    //Constructor
+    public TreeClusterAlgorithmOperator(MatrixParameter virusLocations, 
+    									MatrixParameter virusLocationsTreeNode_in,
+    									MatrixParameter serumLocations, 
+    									MatrixParameter mu, 
+    									Parameter clusterLabels, 
+    									double weight, 
+    									//Parameter virusOffsetsParameter, 
+    									//Parameter clusterOffsetsParameter, 
+    									Parameter indicatorsParameter, 
+    									TreeModel treeModel_in, 
+    									AntigenicLikelihood clusterLikelihood_in,
+    									Parameter muPrecision_in, 
+    									DataTable<String[]> proposalWeightTable,
+    									Parameter clusterLabelsTreeNode_in,
+    									Parameter mu1Scale_in,
+    									Parameter mu2Scale_in,
+    									Parameter muMean_in) {
+    	
+    	operatorWeight = new double[proposalWeightTable.getRowCount()];
+        for (int i = 0; i < proposalWeightTable.getRowCount(); i++) {
+        	String[] values = proposalWeightTable.getRow(i);
+        	operatorWeight[i] = Integer.parseInt(values[0]);
+        }
+
+    	
+    	
+    	acceptNum = new double[operatorWeight.length];
+    	rejectNum = new double[operatorWeight.length];
+    	for(int i=0; i < operatorWeight.length; i++){
+    		acceptNum[i] = 0;
+    		rejectNum[i] = 0;
+    	}
+    	
+    	acceptDistance = new double[100];
+    	rejectDistance = new double[100];
+    	for(int i=0; i < 100; i++){
+    		acceptDistance[i] = 0;
+    		rejectDistance[i] = 0;
+    	}
+    	
+    	
+    	System.out.println("Loading the constructor for ClusterAlgorithmOperator");
+		this.treeModel= treeModel_in;
+    	this.mu = mu;
+    	this.clusterLabels = clusterLabels;    	
+        this.virusLocations = virusLocations;
+        this.serumLocations = serumLocations;
+       // this.virusOffsetsParameter = virusOffsetsParameter;
+        //this.clusterOffsetsParameter = clusterOffsetsParameter;
+    	this.indicators = indicatorsParameter;
+    	this.clusterLikelihood = clusterLikelihood_in;
+    	this.muPrecision = muPrecision_in;
+    	this.clusterLabelsTreeNode = clusterLabelsTreeNode_in;
+    	this.virusLocationsTreeNode = virusLocationsTreeNode_in;
+    	
+    	this.mu1Scale = mu1Scale_in;
+    	this.mu2Scale = mu2Scale_in;
+    	this.muMean = muMean_in;
+
+    	
+    	numNodes = treeModel.getNodeCount();
+    	numdata = virusLocations.getColumnDimension();
+    	//numdata = virusOffsetsParameter.getSize();
+        System.out.println("numdata="+ numdata);
+        
+        setWeight(weight);
+        
+        System.out.println("Finished loading the constructor for ClusterAlgorithmOperator");
+        
+        
+    	double sumOperatorWeight =0;
+    	int numOp = operatorWeight.length;
+    	for(int i=0; i < numOp; i++){
+    		sumOperatorWeight += operatorWeight[i];
+    	}
+    	for(int i=0; i<numOp; i++){
+    		operatorWeight[i] = operatorWeight[i]/sumOperatorWeight;
+    	}
+    	
+    	System.out.println("#\tProposal\tCall Weight");
+    	for(int i=0; i< numOp; i++){
+    		System.out.println( i +"\t" + operatorName[i] + "\t" + operatorWeight[i]);
+    	}
+        
+    	
+    	//clusterLabelsTreeNode.setDimension(numNodes);
+    	
+    	correspondingTreeIndexForVirus = Tree_Clustering_Shared_Routines.setMembershipTreeToVirusIndexes(numdata, virusLocations, numNodes, treeModel);
+    	//setMembershipTreeToVirusIndexes(); //run once to set up the 
+    	//setClusterLabelsUsingIndicators(); 		//Update the cluster labels, after the indicators may have changed.
+    	
+    	Tree_Clustering_Shared_Routines.updateUndriftedVirusLocations(numNodes, numdata, treeModel, virusLocationsTreeNode, indicators, mu, virusLocations, correspondingTreeIndexForVirus);
+    	//setVirusLocationAutoCorrelatedModel(); //set virus locations, given the breakpoints,status, and mu parameters	
+    	//setClusterLabelsTreeNodesUsingIndicators();
+    	CompositeSetClusterLabelsTreeNodesAndVirusesUsingIndicators();
+		
+		//to improve multistep sampling..
+		hotNodes = new int[numNodes];
+		freqAcceptNode = new int[numNodes];
+    	for(int i=0; i < numNodes; i++){
+    		hotNodes[i] = 1;
+    		freqAcceptNode[i] = 0;
+    	}
+    
+    	
+  	  //NodeRef root = treeModel.getRoot();
+  	  //System.out.println("Root node number = " + root.getNumber() );
+  	  //System.exit(0);
+    	
+    	
+//    	System.out.println("Hi");
+ //   	for(int i=0; i < numNodes; i++){
+  //  		System.out.println("node " + i + ": " + indicators.getParameterValue(i));
+   // 	}
+    	
+    	
+//    	CompositeSetClusterLabelsTreeNodesAndVirusesUsingIndicators();
+    	
+//    	for(int i=0; i < numNodes; i++){
+ //  		   String treeId = treeModel.getTaxonId(i);
+  //  		System.out.println("node " + i + " " +  treeId +": "+ clusterLabelsTreeNode.getParameterValue(i));
+   // 	}
+    	
+   // 	System.out.println("=============================");
+   // 	for(int i=0; i < numdata; i++){
+ //		   	Parameter v = virusLocations.getParameter(i);
+ //		   	String curName = v.getParameterName();
+  //  		System.out.println( curName +": "+ clusterLabels.getParameterValue(i));
+   // 	}
+    	
+    //	System.exit(0);
+    	
+    	
+    //	loadClusterTreeNodes();
+    //	setMembershipTreeToVirusIndexes(); //run once to set up the 
+    //	PrintsetMembershipTreeToVirusIndexes();
+    //	System.exit(0);
+    	
+    }
+    
+
+
+private void loadClusterTreeNodes() {
+
+	FileReader fileReader2;
+	try {
+		//fileReader2 = new FileReader("/Users/charles/Documents/research/antigenic/GenoPheno/Gabriela/results/initialCondition/H3N2.serumLocs.log");
+		//fileReader2 = new FileReader("/Users/charles/Documents/researchData/clustering/output/test25/run64/H3N2_mds.breakpoints.log");
+	//	fileReader2 = new FileReader("/Users/charles/Documents/researchData/clustering/output/test25/run79/H3N2_mds.indicators.log");
+		fileReader2 = new FileReader("/Users/charles/Documents/researchData/clustering/output/test26/run21/treeNodes120K.log");
+		
+	      BufferedReader bReader2 = new BufferedReader( fileReader2);
+
+	      String line = null;
+
+	      //skip to the last line
+	      String testLine;
+	      while ((testLine = bReader2.readLine()) != null){
+	    	  line = testLine;
+	      }
+
+	    //  System.out.println(line);
+	      
+	      String datavalue[] = line.split("\t");
+
+	      
+	       //   System.out.println(serumLocationsParameter.getParameterCount());
+	      for (int i = 0; i < treeModel.getNodeCount(); i++) {
+	    	  
+	    	  clusterLabelsTreeNode.setParameterValue(i, Double.parseDouble(datavalue[i+1]));
+	    	 // System.out.println(datavalue[i*2+1]);
+//	    	  System.out.println("indicator=" + indicators.getParameterValue(i));
+	   	  
+	      }
+	      bReader2.close();
+	
+	} catch (FileNotFoundException e) {
+		// TODO Auto-generated catch block
+		e.printStackTrace();
+	} catch (IOException e) {
+		// TODO Auto-generated catch block
+		e.printStackTrace();
+	}        
+
+
+
+}
+
+
+    
+    
+ 
+    /**
+     * change the parameter and return the log hastings ratio.
+     */
+    public final double doOperation() {
+    	double logHastingRatio = 0; //initiate the log Metropolis Hastings ratio of the MCMC
+    	curNode = -1; //reset curNode. curNode is used to keep track of which node gets accepted... for the "hot" multistep proposal.
+    
+    	//Here, the tree doesn't change, so I don't need to repeat this procedure over and over again. just do it once in the constructor
+    		//setMembershipTreeToVirusIndexes(); //run once in case the tree changes - to associate the tree with the virus indexes 
+    		//numNodes = treeModel.getNodeCount(); 
+  	    operatorSelect = MathUtils.randomChoicePDF(operatorWeight);
+  	    
+  	    
+  	    // * * * * * * * * * * * * * * * * * *
+  	    logHastingRatio = performProposal();  //This is the main routine for performing proposals.. it is broken down into many sub-routines to facilitate code maintenance
+  	    // * * * * * * * * * * * * * * * * * *    
+
+  	    //===  After the proposal, update cluster labels and virus locations ===
+  	    // Note: some proposals may not involve the below steps.. For computational efficiency, I might want to only update if needed..  	    
+  	   // setClusterLabelsUsingIndicators(); 		//1. Update the cluster labels, after the indicators parameters may have changed.
+		
+  	  Tree_Clustering_Shared_Routines.updateUndriftedVirusLocations(numNodes, numdata, treeModel, virusLocationsTreeNode, indicators, mu, virusLocations, correspondingTreeIndexForVirus);
+  	    //setVirusLocationAutoCorrelatedModel(); //set virus locations, given the indicators and mu parameters
+		//setClusterLabelsTreeNodesUsingIndicators();  
+		CompositeSetClusterLabelsTreeNodesAndVirusesUsingIndicators();
+		
+		moveCounter ++;
+			
+    	return(logHastingRatio);    	
+    }
+    	
+
+
+
+    private double performProposal() {
+    	
+		double logHR = 0;
+		
+		if(operatorSelect == 0){
+			Proposal_changeToAnotherNodeOn();
+   	 		}
+ 	    else if(operatorSelect == 1){
+	 		logHR = Proposal_changeMuFromPrior(); //update mu 	   
+ 	    	}
+ 	    else if(operatorSelect == 2){
+ 	    	logHR = Proposal_flipIandChangeMu();
+ 	    	}
+ 	    else if(operatorSelect == 3){
+ 	    	logHR = Proposal_changeAnOnMuWalk();
+ 	    	}
+ 	    else if(operatorSelect == 4){
+ 	    	logHR = Proposal_multistepOnNode();
+ 	    	}
+	 	else if(operatorSelect == 5){
+		    logHR = Propose_YandMu();	  //NOT IMPLEMENTED COMPLETELY.. THE MH RATIO IS WRONG
+	 		}
+	 	else if(operatorSelect == 6){
+	 		logHR = Propose_YandI();	 //NOT IMPLEMENTED COMPLETELY.. THE MH RATIO IS WRONG
+		    }
+	 	else if(operatorSelect == 7){
+		    logHR = Propose_YandIandmu();	 //NOT IMPLEMENTED COMPLETELY.. THE MH RATIO IS WRONG
+		    }
+	 	else if(operatorSelect == 8){
+		    logHR = Propose_branchOffFlip();	
+		    }
+	 	else if(operatorSelect == 9){
+		    logHR = Propose_multistepOnNodeFlipMu();	
+		    }
+	 	else if(operatorSelect == 10){
+	 		logHR = Proposal_flipI();	
+		    }
+	 	else if(operatorSelect == 11){
+		    logHR = Propose_changeMuAndBalance();	
+		    }
+	 	else if(operatorSelect == 12){
+	 		logHR = Proposal_changeMuWalk();
+		    	
+		    }
+	 	else if(operatorSelect == 13){
+		    logHR = Proposal_changeAnOnMuFromPrior();
+		    }
+	 	else if(operatorSelect == 14){
+	 		logHR = Proposal_HotMultistepOnNodeFlipMu();
+	 	}
+	 	else if(operatorSelect == 15){
+	 		logHR = Proposal_flipIBalance();
+	 	}
+	 	else if(operatorSelect == 16){
+	 		logHR = Proposal_OnMultistepIExchangeMuAndFlipAnotherI(3);
+	 	}
+	 	else if(operatorSelect == 17){
+	 		//System.out.println("hi: " + operatorWeight[18]);
+	 		logHR = Proposal_changeRootMuWalk();
+	 	}
+	 	else if(operatorSelect == 18){
+	 		logHR = Proposal_changeRootMuWalkAndBalance();
+	 	}
+		
+	 	else if(operatorSelect == 19){
+	 		logHR = Proposal_flipIBalanceRestrictive();
+	 	}
+		
+	 	else if(operatorSelect == 100){
+	 		test1();
+		    }
+	 	else if(operatorSelect == 101){
+	 		test2();
+		    }
+	 	else if(operatorSelect == 102){	 		
+	 		test3();
+		    }
+	 	else{
+	 		//System.out.println("operatorSelect = " + operatorSelect);
+	 		//System.out.println("Unimplemented operator. Quit now");
+	 	}
+		
+		return(logHR);
+	}
+
+
+    
+    
+	//===============================================================================================
+	//===============================================================================================
+	
+	//  BELOW IS A LIST OF PROPOSALS
+	
+	//===============================================================================================
+	//===============================================================================================
+	
+	  
+    
+    
+    
+	private double Proposal_OnMultistepIExchangeMuAndFlipAnotherI(int maxNodeLevelHere) {
+			
+		double logHastingRatio = 0; 
+		
+		int rootNum = treeModel.getRoot().getNumber();
+		
+		//unlike the old version, self-move isn't allowed.
+		
+		int originalNode1 = findAnOnNodeRandomly();			//find an on-node
+		
+		
+	//	System.out.print("Try " + originalNode1);
+		
+		int[] numStepsFromI_selected =determineTreeNeighborhood(originalNode1, 100000);
+//System.out.print("[");		
+		//1. Select an unoccupied site within some steps away from it.	 
+		 LinkedList<Integer> possibilities1 = new LinkedList<Integer>();
+		 for(int i=0; i < numNodes; i++){
+			// System.out.println("#steps from I_selected " + numStepsFromI_selected[i]);
+			 //make sure no self select
+			 boolean isIn1 = numStepsFromI_selected[i] <= maxNodeLevelHere && numStepsFromI_selected[i] !=0 &&  i != rootNum;
+			 if(isIn1){
+				possibilities1.addLast(new Integer(i));
+	//			System.out.print(i + ", ");
+			 }
+		 }//end for		
+//System.out.println("]");		 
+		
+		 int numPossibilities1 = possibilities1.size();	
+		 
+		 if(numPossibilities1 > 0){
+			 int whichMove = (int) (Math.floor(Math.random()*numPossibilities1)); //choose from possibilities
+				 int site_add1 = possibilities1.get(whichMove).intValue();
+			
+				// System.out.println(" and select " + site_add1);
+			   //  System.out.println("selected node = " + site_add1 + " that's " + numStepsFromI_selected[site_add1] + " steps from " + originalNode1);
+				 
+			indicators.setParameterValue(originalNode1, 0); // the existing indicator is now set to be off
+			indicators.setParameterValue(site_add1, 1); //set the new selected index to the new node.
+			
+			curNode = site_add1;
+			
+		
+			//Flip mu - so the neighbor that replaces the original node now also inherits the existing node's mu
+			//Parameter originalNodeMu = mu.getParameter(originalNode1+1); //offset of 1
+			Parameter originalNodeMu = mu.getParameter(originalNode1); 
+			double[] tmp = originalNodeMu.getParameterValues();
+		
+			//Parameter newMu = mu.getParameter(site_add1+1); //offset of 1
+			Parameter newMu = mu.getParameter(site_add1); 
+			double[] tmpNew = newMu.getParameterValues();
+			
+			originalNodeMu.setParameterValue(0, tmpNew[0]);
+			originalNodeMu.setParameterValue(1, tmpNew[1]);
+		
+			newMu.setParameterValue(0, tmp[0]);
+			newMu.setParameterValue(1, tmp[1]);
+		
+			
+			
+		
+			//new node calculation
+			int[] numStepsNewNode =determineTreeNeighborhood(site_add1, 100000);
+			 //System.out.print("[");
+			//1. Select an unoccupied site within some steps away from it.
+			 LinkedList<Integer> possibilities2 = new LinkedList<Integer>();
+			 for(int i=0; i < numNodes; i++){
+				// System.out.println("#steps from I_selected " + numStepsFromI_selected[i]);
+				 //make sure no self select
+				 boolean isIn2 = numStepsNewNode[i] <= maxNodeLevelHere && numStepsNewNode[i] !=0   && i != rootNum;
+				 if(isIn2){
+					possibilities2.addLast(new Integer(i));
+				//	System.out.print(i + ", ");
+				 }
+	
+			 }//end for
+			 //System.out.println("]");
+	
+			 
+			 int numPossibilities2 = possibilities2.size();		 
+		
+			
+			
+			//now need to combine the neighborhood of the pivot and the new pivot to determine the second move.
+			LinkedList<Integer> jointPossibilities = new LinkedList<Integer>();
+			int[] nodeInNeighborhood = new int[numNodes];
+			 //System.out.print("[");
+			for(int i=0; i < numPossibilities1; i++){
+				int nodeNumber = possibilities1.get(i).intValue();
+				jointPossibilities.addLast(new Integer(nodeNumber));
+				nodeInNeighborhood[nodeNumber] = 1;
+				//System.out.print(nodeNumber + ", ");
+			}
+	
+			for(int i=0; i < numPossibilities2; i++){
+				int nodeNumber = possibilities2.get(i).intValue();
+				if(nodeInNeighborhood[nodeNumber] == 0){
+					//add, since not in first list
+					jointPossibilities.addLast(new Integer(nodeNumber));
+				//	System.out.print(nodeNumber + ", ");
+				}
+			}
+			 //System.out.println("]");
+			
+			int numJointPossibilities = jointPossibilities.size();
+				
+			//now, flip another multistep status:
+			int whichMove2 = (int) (Math.floor(Math.random()*numJointPossibilities)); //choose from possibilities
+			 int site_add2 = jointPossibilities.get(whichMove2).intValue();
+		
+			if((int)indicators.getParameterValue(site_add2) == 0 ){
+				indicators.setParameterValue(site_add2, 1);
+			}
+			else{
+				indicators.setParameterValue(site_add2, 0);
+			}
+				
+	
+			
+		
+		
+		  //System.out.println("numPossibilities1=" + numPossibilities1 + " numPossibilities2 = " + numPossibilities2);
+
+			//System.out.println("numJointPossibilities = " + numJointPossibilities);
+
+	//forward: 1/N x 1/#from pivot  x 1/#from joint
+	//backward: 1/N x 1/#from new pivot x 1/#from joint
+	//backward/forward = 1/#from new pivot   / (1/#from pivot)
+			logHastingRatio = Math.log( (1/ (double)numPossibilities2) / (1/ (double)numPossibilities1)  );
+			
+			//System.out.println("logHastingRatio = " + logHastingRatio);
+			
+		
+			//System.out.println("need to test the code before using it");
+			//System.exit(0);
+		 }//if numPossibilities1 > 0
+			return logHastingRatio;
+	}
+
+
+
+	private double Proposal_flipIBalance() {
+		
+		//System.out.println("hi it got run");
+		//System.exit(0);
+		
+		
+
+		//System.out.println("root: " + mu.getParameter(0).getParameterValue(0) + "," + mu.getParameter(0).getParameterValue(1));
+		//for(int i=0; i < numNodes; i++){
+			//if( (int) indicators.getParameterValue(i) == 1){
+				//System.out.println(i + ": " + mu.getParameter(i+1).getParameterValue(0) + "," + mu.getParameter(i+1).getParameterValue(1));
+			//}
+		//}
+		
+		
+		int node = findNodeRandomly();
+		//int node = (int) (Math.floor(Math.random()*numNodes));
+	//node = 785;
+		//System.out.println("selected node " + node);
+		//double[] originalValues = mu.getParameter(node +1).getParameterValues();	
+		double[] originalValues = mu.getParameter(node ).getParameterValues();
+		//System.out.println(originalValues[0] + " and " + originalValues[1]);
+		//a. by turning on the selected node, each child of this node should be updated to keep the absolute location of 
+		//the child cluster fixed as before
+		LinkedList<Integer> childrenOriginalNode = findActiveBreakpointsChildren(node);		
+		
+		if((int)indicators.getParameterValue(node) == 0 ){
+			indicators.setParameterValue(node, 1);
+			//System.out.println("turn it on");
+
+			for(int i=0; i < childrenOriginalNode.size(); i++){
+				int muIndexNum = childrenOriginalNode.get(i).intValue() ;
+				//int muIndexNum = childrenOriginalNode.get(i).intValue() + 1;
+				Parameter curMu = mu.getParameter( muIndexNum );
+				double curMu_original0 = curMu.getParameterValue( 0);
+				mu.getParameter(muIndexNum).setParameterValue(0, curMu_original0 - originalValues[0]);
+				double curMu_original1 = curMu.getParameterValue( 1);
+				mu.getParameter(muIndexNum).setParameterValue(1, curMu_original1 - originalValues[1]);
+				//System.out.println( " " + ( muIndexNum - 1) + " is a child");
+			}
+
+		}
+		else{
+			indicators.setParameterValue(node, 0);
+			//System.out.println("turn it off");
+			for(int i=0; i < childrenOriginalNode.size(); i++){
+				int muIndexNum = childrenOriginalNode.get(i).intValue() ;
+				//int muIndexNum = childrenOriginalNode.get(i).intValue() + 1;
+				Parameter curMu = mu.getParameter( muIndexNum );
+				double curMu_original0 = curMu.getParameterValue( 0);
+				mu.getParameter(muIndexNum).setParameterValue(0, curMu_original0 + originalValues[0]);
+				double curMu_original1 = curMu.getParameterValue( 1);
+				mu.getParameter(muIndexNum).setParameterValue(1, curMu_original1 + originalValues[1]);
+				//System.out.println( " " + ( muIndexNum - 1) + " is a child");
+			}
+			
+		}
+		
+		double coord1 = mu1Scale.getParameterValue(0)*originalValues[0];
+		double coord2 = mu2Scale.getParameterValue(0)*originalValues[1];
+	
+		muDistance = Math.sqrt( coord1*coord1 + coord2*coord2);
+		//System.out.println("root: " + mu.getParameter(0).getParameterValue(0) + "," + mu.getParameter(0).getParameterValue(1));
+		//for(int i=0; i < numNodes; i++){
+			//if( (int) indicators.getParameterValue(i) == 1){
+				//System.out.println(i + ": " + mu.getParameter(i+1).getParameterValue(0) + "," + mu.getParameter(i+1).getParameterValue(1));
+			//}
+		//}
+		
+		
+		//System.exit(0);
+		
+		return(0);
+	}
+
+	
+	private double Proposal_flipIBalanceRestrictive(){
+		int node = findRestrictedNodeRandomly(2); //neighborhood size is 2
+		if(node != -1){
+			double[] originalValues = mu.getParameter(node ).getParameterValues();
+			//System.out.println(originalValues[0] + " and " + originalValues[1]);
+			//a. by turning on the selected node, each child of this node should be updated to keep the absolute location of 
+			//the child cluster fixed as before
+			LinkedList<Integer> childrenOriginalNode = findActiveBreakpointsChildren(node);		
+			
+			if((int)indicators.getParameterValue(node) == 0 ){
+				indicators.setParameterValue(node, 1);
+				for(int i=0; i < childrenOriginalNode.size(); i++){
+					int muIndexNum = childrenOriginalNode.get(i).intValue() ;
+					//int muIndexNum = childrenOriginalNode.get(i).intValue() + 1;
+					Parameter curMu = mu.getParameter( muIndexNum );
+					double curMu_original0 = curMu.getParameterValue( 0);
+					mu.getParameter(muIndexNum).setParameterValue(0, curMu_original0 - originalValues[0]);
+					double curMu_original1 = curMu.getParameterValue( 1);
+					mu.getParameter(muIndexNum).setParameterValue(1, curMu_original1 - originalValues[1]);
+					//System.out.println( " " + ( muIndexNum - 1) + " is a child");
+				}
+	
+			}
+			else{
+				indicators.setParameterValue(node, 0);
+				//System.out.println("turn it off");
+				for(int i=0; i < childrenOriginalNode.size(); i++){
+					int muIndexNum = childrenOriginalNode.get(i).intValue() ;
+					//int muIndexNum = childrenOriginalNode.get(i).intValue() + 1;
+					Parameter curMu = mu.getParameter( muIndexNum );
+					double curMu_original0 = curMu.getParameterValue( 0);
+					mu.getParameter(muIndexNum).setParameterValue(0, curMu_original0 + originalValues[0]);
+					double curMu_original1 = curMu.getParameterValue( 1);
+					mu.getParameter(muIndexNum).setParameterValue(1, curMu_original1 + originalValues[1]);
+					//System.out.println( " " + ( muIndexNum - 1) + " is a child");
+				}
+				
+			}			
+		
+			muDistance = Math.sqrt( originalValues[0]*originalValues[0] + originalValues[1]*originalValues[1]);
+			return(0);
+		}
+		else{
+			//don't accept the move, since no valid choice
+			return(Double.NEGATIVE_INFINITY);
+		}
+	}
+	
+	private int findRestrictedNodeRandomly(double neighborhood) {
+		
+		double mu1ScaleValue = mu1Scale.getParameterValue(0);
+		double mu2ScaleValue = mu2Scale.getParameterValue(0);
+		
+
+		int rootNode = treeModel.getRoot().getNumber();
+
+		int numQualified = 0;
+		int[] qualifiedNodes = new int[numNodes];
+		for(int i=0; i < numNodes; i++){
+			if(i != rootNode){
+				Parameter curMu = mu.getParameter(i);
+				double coord1 = mu1ScaleValue * curMu.getParameterValue(0); 
+				double coord2 = mu2ScaleValue * curMu.getParameterValue(1);
+				
+		//		double coord1 = curMu.getParameterValue(0);
+			//	double coord2 = curMu.getParameterValue(1);
+				double dist = Math.sqrt( coord1*coord1 + coord2*coord2);
+				if(dist < neighborhood){
+					qualifiedNodes[numQualified] = i;
+					numQualified++;
+				}
+			}
+		}
+
+		//now draw 
+		if( numQualified >0){
+			int ranSelect = (int) (Math.random()*numQualified); 	
+			int selectedNode = qualifiedNodes[ranSelect];
+			return selectedNode;
+		}
+		return -1; // no node qualified, return -1
+			
+	}
+
+
+
+	private double Proposal_changeRootMuWalkAndBalance(){
+		
+		int rootNum = treeModel.getRoot().getNumber();
+		
+		int dimSelect = (int) Math.floor( Math.random()* 2 );   		  	    		
+		double change = Math.random()*WALK_SIZE- WALK_SIZE/2 ; 	
+		//double originalValue = mu.getParameter(0).getParameterValue(dimSelect);		
+		//mu.getParameter(0).setParameterValue(dimSelect, originalValue + change);
+		double originalValue = mu.getParameter(rootNum).getParameterValue(dimSelect);
+		mu.getParameter(rootNum).setParameterValue(dimSelect, originalValue + change);
+		
+		//a. by removing the selected node, each child of this node should be updated to keep the absolute location of 
+		//the child cluster fixed as before
+		//LinkedList<Integer> childrenOriginalNode = findActiveBreakpointsChildren(-1); //find the root's children		
+		LinkedList<Integer> childrenOriginalNode = findActiveBreakpointsChildren(rootNum); //find the root's children
+		for(int i=0; i < childrenOriginalNode.size(); i++){
+			//int muIndexNum = childrenOriginalNode.get(i).intValue() + 1;
+			int muIndexNum = childrenOriginalNode.get(i).intValue() ;
+			Parameter curMu = mu.getParameter( muIndexNum );
+			double curMu_original = curMu.getParameterValue( dimSelect);
+			mu.getParameter(muIndexNum).setParameterValue(dimSelect, curMu_original - change);
+			
+			//System.out.println( " " + ( muIndexNum - 1) + " is a child");
+			
+		}
+
+		
+		return(0);
+	
+	}
+
+
+	private double Propose_changeMuAndBalance() {
+
+		//System.out.println("root: " + mu.getParameter(0).getParameterValue(0) + "," + mu.getParameter(0).getParameterValue(1));
+		//for(int i=0; i < numNodes; i++){
+		//	if( (int) indicators.getParameterValue(i) == 1){
+		//		System.out.println(i + ": " + mu.getParameter(i+1).getParameterValue(0) + "," + mu.getParameter(i+1).getParameterValue(1));
+		//	}
+		//}
+		
+		//first, randomly select an "on" node to overwrite
+		int originalNode = findAnOnNodeIncludingRootRandomly();			//find an on-node	
+		//originalNode = 673;
+		
+		
+		//if(originalNode == 802){
+			//System.out.println(treeModel.getRoot().getNumber());
+			//System.out.println("I am walking 802!");
+		//}
+		//unbounded walk
+		int dimSelect = (int) Math.floor( Math.random()* 2 );   		  	    		
+		double change = Math.random()*WALK_SIZE- WALK_SIZE/2 ; 	
+		
+		
+		//dimSelect = 0;
+		//change = 10;	
+		//double originalValue = mu.getParameter(originalNode +1).getParameterValue(dimSelect);
+		double originalValue = mu.getParameter(originalNode).getParameterValue(dimSelect);
+		//System.out.println("originalValue = " + originalValue);
+		mu.getParameter(originalNode ).setParameterValue(dimSelect, originalValue + change);
+		//mu.getParameter(originalNode + 1).setParameterValue(dimSelect, originalValue + change);
+  		//System.out.println("original node = " + originalNode);
+	
+		//a. by removing the selected node, each child of this node should be updated to keep the absolute location of 
+		//the child cluster fixed as before
+		LinkedList<Integer> childrenOriginalNode = findActiveBreakpointsChildren(originalNode);	
+		//if(originalNode == 802){
+			//System.out.println("number of child = " + childrenOriginalNode.size());
+		//}
+		for(int i=0; i < childrenOriginalNode.size(); i++){
+			//int muIndexNum = childrenOriginalNode.get(i).intValue() + 1;
+			int muIndexNum = childrenOriginalNode.get(i).intValue() ;
+			//if(originalNode == 802){
+				//System.out.println(" " + muIndexNum + " is a child");
+			//}
+			Parameter curMu = mu.getParameter( muIndexNum );
+			double curMu_original = curMu.getParameterValue( dimSelect);
+			mu.getParameter(muIndexNum).setParameterValue(dimSelect, curMu_original - change);
+			
+			//System.out.println( " " + ( muIndexNum - 1) + " is a child");
+			
+		}
+		
+	//	System.out.println("root: " + mu.getParameter(0).getParameterValue(0) + "," + mu.getParameter(0).getParameterValue(1));
+	//	for(int i=0; i < numNodes; i++){
+	//		if( (int) indicators.getParameterValue(i) == 1){
+	//			System.out.println(i + ": " + mu.getParameter(i+1).getParameterValue(0) + "," + mu.getParameter(i+1).getParameterValue(1));
+	//		}
+	//	}
+		
+		
+		return(0);
+	}
+
+
+
+	private double Proposal_HotMultistepOnNodeFlipMu() {
+		
+		int rootNum = treeModel.getRoot().getNumber();
+
+		//unlike the old version, self-move isn't allowed.
+		
+		int originalNode1 = findAnOnNodeRandomly();			//find an on-node
+		
+		//System.out.print("Try " + originalNode1);
+		int[] numStepsFromI_selected =determineTreeNeighborhood(originalNode1, 100000);
+		
+		//1. Select an unoccupied site within some steps away from it.	 
+		 LinkedList<Integer> possibilities1 = new LinkedList<Integer>();
+		 for(int i=0; i < numNodes; i++){
+			// System.out.println("#steps from I_selected " + numStepsFromI_selected[i]);
+			 //make sure no self select
+			 boolean isIn1 = numStepsFromI_selected[i] <= maxNodeLevel && numStepsFromI_selected[i] !=0  &&  i != rootNum;
+			 if(isIn1 && hotNodes[i] ==1){
+				possibilities1.addLast(new Integer(i));
+			 }
+		 }//end for		
+		 
+		 int numPossibilities1 = possibilities1.size();		
+		 
+		 //if there is a single legal configuration to switch to
+		 if(numPossibilities1 > 0){
+		 int whichMove = (int) (Math.floor(Math.random()*numPossibilities1)); //choose from possibilities
+			 int site_add1 = possibilities1.get(whichMove).intValue();
+		//	 System.out.println(" and select " + site_add1);
+		// System.out.println("selected node = " + site_add1 + " that's " + numStepsFromI_selected[site_add1] + " steps from " + originalNode1);
+			 
+		indicators.setParameterValue(originalNode1, 0); // the existing indicator is now set to be off
+		indicators.setParameterValue(site_add1, 1); //set the new selected index to the new node.
+		
+		
+
+		//Flip mu - so the neighbor that replaces the original node now also inherits the existing node's mu
+		//Parameter originalNodeMu = mu.getParameter(originalNode1+1); //offset of 1
+		Parameter originalNodeMu = mu.getParameter(originalNode1); 
+		double[] tmp = originalNodeMu.getParameterValues();
+
+		//Parameter newMu = mu.getParameter(site_add1+1); //offset of 1
+		Parameter newMu = mu.getParameter(site_add1); 
+		double[] tmpNew = newMu.getParameterValues();
+		
+		originalNodeMu.setParameterValue(0, tmpNew[0]);
+		originalNodeMu.setParameterValue(1, tmpNew[1]);
+
+		newMu.setParameterValue(0, tmp[0]);
+		newMu.setParameterValue(1, tmp[1]);
+		
+	
+		//backward calculation
+		int[] numStepsBackward =determineTreeNeighborhood(site_add1, 100000);
+		
+		//1. Select an unoccupied site within some steps away from it.
+		 LinkedList<Integer> possibilities2 = new LinkedList<Integer>();
+		 for(int i=0; i < numNodes; i++){
+			// System.out.println("#steps from I_selected " + numStepsFromI_selected[i]);
+			 //make sure no self select
+			 boolean isIn2 = numStepsBackward[i] <= maxNodeLevel && numStepsBackward[i] !=0   &&  i != rootNum;
+			 if(isIn2 && hotNodes[i] ==1){
+				possibilities2.addLast(new Integer(i));
+			 }
+		 }//end for
+		 int numPossibilities2 = possibilities2.size();		 
+		 //	 System.out.println("numPossibilities1=" + numPossibilities1 + " numPossibilities2 = " + numPossibilities2);
+
+		 
+  		double logHastingRatio = Math.log( (1/ (double)numPossibilities2) / (1/ (double)numPossibilities1)  );
+  		//System.out.println("logHastingRatio = " + logHastingRatio);
+		return logHastingRatio;
+		 }
+		 else{
+			 return Double.NEGATIVE_INFINITY;
+		 }
+	}
+
+
+	
+	private double Propose_multistepOnNodeFlipMu() {
+		double logHastingRatio = 0;
+		
+		//unlike the old version, self-move isn't allowed.
+		int rootNum = treeModel.getRoot().getNumber();
+		
+		int originalNode1 = findAnOnNodeRandomly();			//find an on-node
+		
+		//System.out.print("Try " + originalNode1);
+		int[] numStepsFromI_selected =determineTreeNeighborhood(originalNode1, 100000);
+		
+		//1. Select an unoccupied site within some steps away from it.	 
+		 LinkedList<Integer> possibilities1 = new LinkedList<Integer>();
+		 for(int i=0; i < numNodes; i++){
+			// System.out.println("#steps from I_selected " + numStepsFromI_selected[i]);
+			 //make sure no self select
+			 boolean isIn1 = numStepsFromI_selected[i] <= maxNodeLevel && numStepsFromI_selected[i] !=0 &&  i != rootNum
+					 && (int) indicators.getParameterValue(i) == 0;
+			 if(isIn1){
+				possibilities1.addLast(new Integer(i));
+			 }
+		 }//end for		
+		 
+		 int numPossibilities1 = possibilities1.size();	
+		 if(numPossibilities1 > 0){
+			 int whichMove = (int) (Math.floor(Math.random()*numPossibilities1)); //choose from possibilities
+				 int site_add1 = possibilities1.get(whichMove).intValue();
+			//	 System.out.println(" and select " + site_add1);
+			// System.out.println("selected node = " + site_add1 + " that's " + numStepsFromI_selected[site_add1] + " steps from " + originalNode1);
+				 
+			indicators.setParameterValue(originalNode1, 0); // the existing indicator is now set to be off
+			indicators.setParameterValue(site_add1, 1); //set the new selected index to the new node.
+			
+			curNode = site_add1;
+			
+	
+			//Flip mu - so the neighbor that replaces the original node now also inherits the existing node's mu
+			//Parameter originalNodeMu = mu.getParameter(originalNode1+1); //offset of 1
+			Parameter originalNodeMu = mu.getParameter(originalNode1);
+			double[] tmp = originalNodeMu.getParameterValues();
+	
+			//Parameter newMu = mu.getParameter(site_add1+1); //offset of 1
+			Parameter newMu = mu.getParameter(site_add1); 
+			double[] tmpNew = newMu.getParameterValues();
+			
+			originalNodeMu.setParameterValue(0, tmpNew[0]);
+			originalNodeMu.setParameterValue(1, tmpNew[1]);
+	
+			newMu.setParameterValue(0, tmp[0]);
+			newMu.setParameterValue(1, tmp[1]);
+			
+		
+			//backward calculation
+			int[] numStepsBackward =determineTreeNeighborhood(site_add1, 100000);
+			
+			//1. Select an unoccupied site within some steps away from it.
+			 LinkedList<Integer> possibilities2 = new LinkedList<Integer>();
+			 for(int i=0; i < numNodes; i++){
+				// System.out.println("#steps from I_selected " + numStepsFromI_selected[i]);
+				 //make sure no self select
+				 boolean isIn2 = numStepsBackward[i] <= maxNodeLevel && numStepsBackward[i] !=0   &&  i != rootNum
+						 && (int) indicators.getParameterValue(i) == 0;
+				 if(isIn2){
+					possibilities2.addLast(new Integer(i));
+				 }
+			 }//end for
+			 int numPossibilities2 = possibilities2.size();		 
+		 //	 System.out.println("numPossibilities1=" + numPossibilities1 + " numPossibilities2 = " + numPossibilities2);
+		 	if(numPossibilities2 > 0){
+		 		logHastingRatio = Math.log( (1/ (double)numPossibilities2) / (1/ (double)numPossibilities1)  );
+		 	}
+		 }
+  		//System.out.println("logHastingRatio = " + logHastingRatio);
+		return logHastingRatio;
+	}
+
+
+
+	private double Propose_branchOffFlip() {
+		
+		
+		//first, randomly select an "on" node to overwrite
+		int originalNode = findAnOnNodeRandomly();			//find an on-node	
+			
+			
+		//second, randomly select a destination
+		int site_add = findAnOffNodeRandomly();			//sample a node that's not in the cluster.
+	
+		//existing mu
+		Parameter selectedMu = mu.getParameter(originalNode ) ;
+		//Parameter selectedMu = mu.getParameter(originalNode +1) ;
+			
+		double selectedMu0 = selectedMu.getParameterValue(0);
+		double selectedMu1 = selectedMu.getParameterValue(1);
+		
+			
+		//a. by removing the selected node, each child of this node should be updated to keep the absolute location of 
+		//the child cluster fixed as before
+		LinkedList<Integer> childrenOriginalNode = findActiveBreakpointsChildren(originalNode);	
+		
+		for(int i=0; i < childrenOriginalNode.size(); i++){
+			int muIndexNum = childrenOriginalNode.get(i).intValue() ;
+			//int muIndexNum = childrenOriginalNode.get(i).intValue() + 1;
+			Parameter curMu = mu.getParameter( muIndexNum );
+			double mu0 = curMu.getParameterValue(0) + selectedMu0;
+			double mu1 = curMu.getParameterValue(1) + selectedMu1;
+			mu.getParameter(muIndexNum).setParameterValue(0, mu0);
+			mu.getParameter(muIndexNum).setParameterValue(1, mu1);
+
+		}
+
+
+			//set indicators AND NEW MU
+			indicators.setParameterValue(site_add, 1);
+			//indicators.setParameterValue(originalNode, 0); //just flip it on. do not replace
+
+			//I think this generate a situation where if a mu walks off, then it gets the breakpoint that doesn't partition.
+			//this creates a scenario where a breakpoint is lost
+			//double change = Math.random()*WALK_SIZE - WALK_SIZE ; 
+			//double newMu0 = selectedMu0 + change;
+			//double change2 = Math.random()*WALK_SIZE- WALK_SIZE ; 
+			//double newMu1 = selectedMu1 + change2;
+			
+			
+
+			double[] oldValues = mu.getParameter(site_add).getParameterValues();
+			//double[] oldValues = mu.getParameter(site_add+1).getParameterValues();
+			
+			//System.out.println(oldValues[0]  + ", " + oldValues[1]);
+			//instead, sample from the normal distribution
+			double[] mean = new double[2];
+			mean[0] = 0;
+			mean[1] = 0;
+			double[][] precisionM = new double[2][2];
+			//double precision = 1/TreeClusterViruses.getSigmaSq();
+			double precision = muPrecision.getParameterValue(0);
+			precisionM[0][0] = precision;
+			precisionM[0][1] = 0;
+			precisionM[1][0] = 0;
+			precisionM[1][1] = precision;
+			
+			
+			double[] values = MultivariateNormalDistribution.nextMultivariateNormalPrecision(mean, precisionM);
+			//System.out.println(values[0]  + ", " + values[1]); 
+			
+			mu.getParameter(site_add).setParameterValue(0,values[0]);
+			mu.getParameter(site_add).setParameterValue(1,values[1]);
+
+			//mu.getParameter(site_add+1).setParameterValue(0,values[0]);
+			//mu.getParameter(site_add+1).setParameterValue(1,values[1]);
+
+			
+
+			selectedMu.setParameterValue(0, values[0]);
+			selectedMu.setParameterValue(1, values[1]);
+			
+			
+			
+
+			//b. by adding the new selected node, each child of this new node should be updated to keep the absolute location of 
+		//the child cluster fixed as before
+			LinkedList<Integer> childrenNewNode = findActiveBreakpointsChildren(site_add);
+			
+			for(int i=0; i < childrenNewNode.size(); i++){
+				//int muIndexNum = childrenNewNode.get(i).intValue() + 1;
+				int muIndexNum = childrenNewNode.get(i).intValue() ;
+				Parameter curMu = mu.getParameter( muIndexNum);
+				double mu0 = curMu.getParameterValue(0) - values[0];
+				double mu1 = curMu.getParameterValue(1) - values[1];
+				mu.getParameter(muIndexNum).setParameterValue(0, mu0);
+				mu.getParameter(muIndexNum).setParameterValue(1, mu1);
+			}
+			
+			
+			
+			
+			
+			double logHastingRatio = MultivariateNormalDistribution.logPdf(oldValues, mean, precision, 1) - MultivariateNormalDistribution.logPdf(values, mean, precision, 1) ; 
+
+
+			return(logHastingRatio);
+	}
+
+
+
+	private double Propose_YandIandmu() {
+
+		int rootNum = treeModel.getRoot().getNumber();
+
+		//first, find a random Y and walk
+			int serum_selected = (int) (Math.floor(Math.random()*getNumSera()));
+			
+			MatrixParameter serumLocations = getSerumLocationsParameter();
+			Parameter serum = serumLocations.getParameter(serum_selected);
+			int whichDimension = (int) (Math.floor(Math.random()*2 )) ; // assume dimension 2
+			double oldValue = serum.getParameterValue(whichDimension);
+			double change = Math.random()*WALK_SIZE- WALK_SIZE/2 ;
+			double value = oldValue+ change;
+			
+			serum.setParameterValue(whichDimension, value);//WAIT.. IF REJECT, DOES IT RESET?
+			
+
+
+			
+			//change I
+		//second, find a RANDOM "on" breakpoint and multistep it..
+			
+			//0. Keep a copy of the original state to calculate the backward move
+			int originalNode1 = findAnOnNodeRandomly();
+
+			//System.out.println("Original breakpoint is " + originalNode1 + " and the original AGlikelihood is " + clusterLikelihood.getLogLikelihood());
+				
+			int[] numStepsFromI_selected =determineTreeNeighborhood(originalNode1, 100000);
+			
+			//1. Select an unoccupied site within some steps away from it.
+			 
+			 LinkedList<Integer> possibilities1 = new LinkedList<Integer>();
+			 for(int i=0; i < numNodes; i++){
+				// System.out.println("#steps from I_selected " + numStepsFromI_selected[i]);
+				 //make sure no self select
+				 boolean isIn1 = numStepsFromI_selected[i] <= maxNodeLevel && numStepsFromI_selected[i] !=0  && i != rootNum;
+				 if(isIn1){
+					possibilities1.addLast(new Integer(i));
+				 }
+			 }//end for
+			
+			 
+			 int numPossibilities1 = possibilities1.size();
+			 
+			 int whichMove = (int) (Math.floor(Math.random()*numPossibilities1));
+				 int site_add1 = possibilities1.get(whichMove).intValue();
+
+			// System.out.println("selected node = " + site_add1 + " that's " + numStepsFromI_selected[site_add1] + " steps from " + originalNode1);
+				 
+			indicators.setParameterValue(site_add1,  1); //set the new selected index to the new node.
+			indicators.setParameterValue(originalNode1, 0); //set the new selected index to the new node.
+					
+
+			double[] oldValues = mu.getParameter(site_add1).getParameterValues();
+			//double[] oldValues = mu.getParameter(site_add1+1).getParameterValues();	
+			//System.out.println(oldValues[0]  + ", " + oldValues[1]); 
+			double[] mean = new double[2];
+			mean[0] = 0;
+			mean[1] = 0;
+			double[][] precisionM = new double[2][2];
+			//double precision = 1/TreeClusterViruses.getSigmaSq();
+			double precision = muPrecision.getParameterValue(0);
+
+			precisionM[0][0] = precision;
+			precisionM[0][1] = 0;
+			precisionM[1][0] = 0;
+			precisionM[1][1] = precision;
+			
+			
+			double[] values = MultivariateNormalDistribution.nextMultivariateNormalPrecision(mean, precisionM);
+			//System.out.println(values[0]  + ", " + values[1]); 
+			
+			mu.getParameter(site_add1).setParameterValue(0,values[0]);
+			mu.getParameter(site_add1).setParameterValue(1,values[1]);
+			//mu.getParameter(site_add1+1).setParameterValue(0,values[0]);
+			//mu.getParameter(site_add1+1).setParameterValue(1,values[1]);
+
+			
+			double logHastingRatio = MultivariateNormalDistribution.logPdf(oldValues, mean, precision, 1) - MultivariateNormalDistribution.logPdf(values, mean, precision, 1) ; 
+			return logHastingRatio;
+	}
+
+
+
+	private double Propose_YandI() {
+		//first, find a random Y and walk
+			int serum_selected = (int) (Math.floor(Math.random()*getNumSera()));
+			
+			MatrixParameter serumLocations = getSerumLocationsParameter();
+			Parameter serum = serumLocations.getParameter(serum_selected);
+			int whichDimension = (int) (Math.floor(Math.random()*2 )) ; // assume dimension 2
+			double oldValue = serum.getParameterValue(whichDimension);
+			double change = Math.random()*WALK_SIZE- WALK_SIZE/2 ;
+			double value = oldValue+ change;
+			
+			serum.setParameterValue(whichDimension, value);//WAIT.. IF REJECT, DOES IT RESET?
+			
+
+
+			int rootNum = treeModel.getRoot().getNumber();
+			
+		//second, find a RANDOM "on" breakpoint and multistep it..
+			int originalNode1 = findAnOnNodeRandomly();			//find an on-node	
+			
+			//0. Keep a copy of the original state to calculate the backward move
+
+
+			//System.out.println("Original breakpoint is " + originalNode1 + " and the original AGlikelihood is " + clusterLikelihood.getLogLikelihood());
+				
+			int[] numStepsFromI_selected =determineTreeNeighborhood(originalNode1, 100000);
+			
+			//1. Select an unoccupied site within some steps away from it.
+			 
+			 LinkedList<Integer> possibilities1 = new LinkedList<Integer>();
+			 for(int i=0; i < numNodes; i++){
+				// System.out.println("#steps from I_selected " + numStepsFromI_selected[i]);
+				 //make sure no self select
+				 boolean isIn1 = numStepsFromI_selected[i] <= maxNodeLevel && numStepsFromI_selected[i] !=0  && i != rootNum;
+				 if(isIn1){
+					possibilities1.addLast(new Integer(i));
+				 }
+			 }//end for
+			
+			 
+			 int numPossibilities1 = possibilities1.size();
+			 
+			 int whichMove = (int) (Math.floor(Math.random()*numPossibilities1));
+				 int site_add1 = possibilities1.get(whichMove).intValue();
+
+			// System.out.println("selected node = " + site_add1 + " that's " + numStepsFromI_selected[site_add1] + " steps from " + originalNode1);
+			indicators.setParameterValue(originalNode1, 0); //set the old selected index off
+			indicators.setParameterValue(site_add1, 1); //set the new selected index on
+					
+		
+			
+						
+			
+			//it may be more efficient to find a random breakpoint that's closest to it... but it would be hard to code now..
+			
+			
+		
+		//what's more important? walk E or mu?
+		
+		//and move it toegether.
+		
+		//FOR NOW, don't care about the backward move.. (and MH ratio)..
+		//just want to see even if MH ratio is 1, does it ever get accepted..
+			return 0;
+	}
+
+
+
+	private double Propose_YandMu() {
+		//first, find a random Y and walk
+			int serum_selected = (int) (Math.floor(Math.random()*getNumSera()));
+			
+			MatrixParameter serumLocations = getSerumLocationsParameter();
+			Parameter serum = serumLocations.getParameter(serum_selected);
+			int whichDimension = (int) (Math.floor(Math.random()*2 )) ; // assume dimension 2
+			double oldValue = serum.getParameterValue(whichDimension);
+			double change = Math.random()*WALK_SIZE- WALK_SIZE/2 ;
+			double value = oldValue+ change;
+			
+			serum.setParameterValue(whichDimension, value);//WAIT.. IF REJECT, DOES IT RESET?
+			
+
+
+			
+			int selectedIndex = findAnOnNodeRandomly();			//find an on-node	
+			
+			
+			double[] oldValues = mu.getParameter(selectedIndex).getParameterValues();	
+
+			//double[] oldValues = mu.getParameter(selectedIndex+1).getParameterValues();	
+			//System.out.println(oldValues[0]  + ", " + oldValues[1]); 
+			double[] mean = new double[2];
+			mean[0] = 0;
+			mean[1] = 0;
+			double[][] precisionM = new double[2][2];
+			//double precision = 1/TreeClusterViruses.getSigmaSq();
+			double precision = muPrecision.getParameterValue(0);
+
+			precisionM[0][0] = precision;
+			precisionM[0][1] = 0;
+			precisionM[1][0] = 0;
+			precisionM[1][1] = precision;
+			
+			
+			double[] values = MultivariateNormalDistribution.nextMultivariateNormalPrecision(mean, precisionM);
+			//System.out.println(values[0]  + ", " + values[1]);
+			mu.getParameter(selectedIndex).setParameterValue(0,values[0]);
+			mu.getParameter(selectedIndex).setParameterValue(1,values[1]);
+			//mu.getParameter(selectedIndex+1).setParameterValue(0,values[0]);
+			//mu.getParameter(selectedIndex+1).setParameterValue(1,values[1]);
+
+			
+			double logHastingRatio = MultivariateNormalDistribution.logPdf(oldValues, mean, precision, 1) - MultivariateNormalDistribution.logPdf(values, mean, precision, 1) ; 
+			
+			//System.out.println("logHastingRatio = " + logHastingRatio);
+			
+			
+			// but hey, this is not moving the first node.. (it's okay for now)
+			
+			
+			//System.out.println("The first node selected is " + site_add);
+			return logHastingRatio;
+	}
+
+
+
+	private double Proposal_multistepOnNode() {
+		
+		
+		int rootNodeNum = treeModel.getRoot().getNumber();
+		//unlike the old version, self-move isn't allowed.
+		
+		int originalNode1 = findAnOnNodeRandomly();			//find an on-node
+		
+		//System.out.print("Try " + originalNode1);
+		int[] numStepsFromI_selected =determineTreeNeighborhood(originalNode1, 100000);
+		
+		//1. Select an unoccupied site within some steps away from it.	 
+		 LinkedList<Integer> possibilities1 = new LinkedList<Integer>();
+		 for(int i=0; i < numNodes; i++){
+			// System.out.println("#steps from I_selected " + numStepsFromI_selected[i]);
+			 //make sure no self select
+			 boolean isIn1 = numStepsFromI_selected[i] <= maxNodeLevel && numStepsFromI_selected[i] !=0 && i != rootNodeNum;
+			 if(isIn1){
+				possibilities1.addLast(new Integer(i));
+			 }
+		 }//end for		
+		 
+		 int numPossibilities1 = possibilities1.size();		 
+		 int whichMove = (int) (Math.floor(Math.random()*numPossibilities1)); //choose from possibilities
+			 int site_add1 = possibilities1.get(whichMove).intValue();
+			 
+			curNode = site_add1;
+
+		//	 System.out.println(" and select " + site_add1);
+		// System.out.println("selected node = " + site_add1 + " that's " + numStepsFromI_selected[site_add1] + " steps from " + originalNode1);
+			 
+		indicators.setParameterValue(originalNode1, 0); // the existing indicator is now set to be off
+		indicators.setParameterValue(site_add1, 1); //set the new selected index to the new node.
+				
+	
+		//backward calculation
+		int[] numStepsBackward =determineTreeNeighborhood(site_add1, 100000);
+		
+		//1. Select an unoccupied site within some steps away from it.
+		 LinkedList<Integer> possibilities2 = new LinkedList<Integer>();
+		 for(int i=0; i < numNodes; i++){
+			// System.out.println("#steps from I_selected " + numStepsFromI_selected[i]);
+			 //make sure no self select
+			 boolean isIn2 = numStepsBackward[i] <= maxNodeLevel && numStepsBackward[i] !=0 && i != rootNodeNum;
+			 if(isIn2){
+				possibilities2.addLast(new Integer(i));
+			 }
+		 }//end for
+		 int numPossibilities2 = possibilities2.size();		 
+		 //	 System.out.println("numPossibilities1=" + numPossibilities1 + " numPossibilities2 = " + numPossibilities2);
+
+  		double logHastingRatio = Math.log( (1/ (double)numPossibilities2) / (1/ (double)numPossibilities1)  );
+  		//System.out.println("logHastingRatio = " + logHastingRatio);
+		return logHastingRatio;
+	}
+
+	private double Proposal_flipI(){
+		int node = findNodeRandomly();
+		if((int)indicators.getParameterValue(node) == 0 ){
+			indicators.setParameterValue(node, 1);
+		}
+		else{
+			indicators.setParameterValue(node, 0);
+		}
+		return(0);
+	}
+
+	private double Proposal_flipIandChangeMu() {
+		int node = findNodeRandomly();
+		if((int)indicators.getParameterValue(node) == 0 ){
+			indicators.setParameterValue(node, 1);
+		}
+		else{
+			indicators.setParameterValue(node, 0);
+		}
+		
+		double[] oldValues = mu.getParameter(node).getParameterValues();
+		//double[] oldValues = mu.getParameter(node+1).getParameterValues();	
+		//System.out.println(oldValues[0]  + ", " + oldValues[1]); 
+		double[] mean = new double[2];
+		mean[0] = 0;
+		mean[1] = 0;
+		double[][] precisionM = new double[2][2];
+		//double precision = 1/TreeClusterViruses.getSigmaSq();
+		double precision = muPrecision.getParameterValue(0);
+
+		precisionM[0][0] = precision;
+		precisionM[0][1] = 0;
+		precisionM[1][0] = 0;
+		precisionM[1][1] = precision;
+		
+		double[] values = MultivariateNormalDistribution.nextMultivariateNormalPrecision(mean, precisionM);
+		//System.out.println(values[0]  + ", " + values[1]); 
+		mu.getParameter(node).setParameterValue(0,values[0]);
+		mu.getParameter(node).setParameterValue(1,values[1]);
+		//mu.getParameter(node+1).setParameterValue(0,values[0]);
+		//mu.getParameter(node+1).setParameterValue(1,values[1]);
+		
+		double logHastingRatio = MultivariateNormalDistribution.logPdf(oldValues, mean, precision, 1) - MultivariateNormalDistribution.logPdf(values, mean, precision, 1) ; 		
+		return(logHastingRatio);	
+		
+		
+		
+		//int dimSelect = (int) Math.floor( Math.random()* 2 );   		  	    		
+		//double change = Math.random()*WALK_SIZE- WALK_SIZE/2 ; 	
+		//double originalValue = mu.getParameter(node +1).getParameterValue(dimSelect);				
+		//mu.getParameter(node + 1).setParameterValue(dimSelect, originalValue + change);
+	}
+
+
+	private double Proposal_changeRootMuWalk(){
+		int dimSelect = (int) Math.floor( Math.random()* 2 );   		  	    		
+		double change = Math.random()*WALK_SIZE- WALK_SIZE/2 ;
+		
+		int rootNum = treeModel.getRoot().getNumber();
+	
+		double originalValue = mu.getParameter(rootNum).getParameterValue(dimSelect);		
+		mu.getParameter(rootNum).setParameterValue(dimSelect, originalValue + change);
+		//double originalValue = mu.getParameter(0).getParameterValue(dimSelect);		
+		//mu.getParameter(0).setParameterValue(dimSelect, originalValue + change);
+		return(0);
+	
+	}
+	
+	//Instead of sampling from the prior, I will perform a walk to fine tune things
+	private double Proposal_changeAnOnMuWalk() {
+		int on_mu =  findAnOnNodeIncludingRootRandomly();
+
+	
+		int dimSelect = (int) Math.floor( Math.random()* 2 );   		  	    		
+		double change = Math.random()*WALK_SIZE- WALK_SIZE/2 ; 
+		double originalValue = mu.getParameter(on_mu).getParameterValue(dimSelect);		
+		mu.getParameter(on_mu).setParameterValue(dimSelect, originalValue + change);
+		//double originalValue = mu.getParameter(on_mu +1).getParameterValue(dimSelect);		
+		//mu.getParameter(on_mu + 1).setParameterValue(dimSelect, originalValue + change);
+		return(0);
+	}
+	
+	
+	private double Proposal_changeAnOnMuFromPrior(){
+		
+		int on_mu =  findAnOnNodeIncludingRootRandomly();
+
+	
+		double[] oldValues = mu.getParameter(on_mu ).getParameterValues(); //
+		
+		//double[] oldValues = mu.getParameter(on_mu + 1).getParameterValues();	// this is not +1 because the root's mu can also be changed
+		//System.out.println(oldValues[0]  + ", " + oldValues[1]); 
+		double[] mean = new double[2];
+		//mean[0] = 0;
+		mean[0] = muMean.getParameterValue(0);
+		mean[1] = 0;
+		double[][] precisionM = new double[2][2];
+		//double precision = 1/TreeClusterViruses.getSigmaSq();
+		double precision = muPrecision.getParameterValue(0);
+
+		precisionM[0][0] = precision;
+		precisionM[0][1] = 0;
+		precisionM[1][0] = 0;
+		precisionM[1][1] = precision;
+		
+		double[] values = MultivariateNormalDistribution.nextMultivariateNormalPrecision(mean, precisionM);
+		mu.getParameter(on_mu ).setParameterValue(0,values[0]);  
+		mu.getParameter(on_mu ).setParameterValue(1,values[1]);  
+		
+		//System.out.println(values[0]  + ", " + values[1]); 
+		//mu.getParameter(on_mu + 1).setParameterValue(0,values[0]);  // this is not +1 because the root's mu can also be changed
+		//mu.getParameter(on_mu + 1).setParameterValue(1,values[1]);  // this is not +1 because the root's mu can also be changed
+		
+		double logHastingRatio = MultivariateNormalDistribution.logPdf(oldValues, mean, precision, 1) - MultivariateNormalDistribution.logPdf(values, mean, precision, 1) ; 		
+		return(logHastingRatio);
+		
+	}
+
+
+	private double Proposal_changeMuWalk() {
+
+		//int nodeSelect = (int) Math.floor( Math.random()* (numNodes + 1) ); 		//pick from index... 0,  to numNodes+1
+		int nodeSelect = (int) Math.floor( Math.random()* (numNodes  ) ); 		//pick from index... 0,  to numNodes+1
+		
+		
+		//unbounded walk
+		int dimSelect = (int) Math.floor( Math.random()* 2 );   		  	    		
+		double change = Math.random()*WALK_SIZE- WALK_SIZE/2 ; 	
+		double originalValue = mu.getParameter(nodeSelect).getParameterValue(dimSelect);		
+		mu.getParameter(nodeSelect).setParameterValue(dimSelect, originalValue + change);
+		return(0);
+  				
+	}
+
+
+	private double Proposal_changeMuFromPrior() {
+
+		//int groupSelect = (int) Math.floor( Math.random()* (numNodes + 1) ); 		//pick from index... 0,  to numNodes+1
+		int groupSelect = (int) Math.floor( Math.random()* (numNodes ) ); 		//pick from index... 0,  to numNodes+1
+		
+		double[] oldValues = mu.getParameter(groupSelect).getParameterValues();	// this is not +1 because the root's mu can also be changed
+		//System.out.println(oldValues[0]  + ", " + oldValues[1]); 
+		double[] mean = new double[2];
+		//mean[0] = 0;
+		mean[0] = muMean.getParameterValue(0);
+		//System.out.println("mean[0] = " + muMean.getParameterValue(0));
+		mean[1] = 0;
+		double[][] precisionM = new double[2][2];
+		//double precision = 1/TreeClusterViruses.getSigmaSq();
+		double precision = muPrecision.getParameterValue(0);
+
+		precisionM[0][0] = precision;
+		precisionM[0][1] = 0;
+		precisionM[1][0] = 0;
+		precisionM[1][1] = precision;
+		
+		double[] values = MultivariateNormalDistribution.nextMultivariateNormalPrecision(mean, precisionM);
+		//System.out.println(values[0]  + ", " + values[1]); 
+		mu.getParameter(groupSelect).setParameterValue(0,values[0]);  // this is not +1 because the root's mu can also be changed
+		mu.getParameter(groupSelect).setParameterValue(1,values[1]);  // this is not +1 because the root's mu can also be changed
+		
+		double logHastingRatio = MultivariateNormalDistribution.logPdf(oldValues, mean, precision, 1) - MultivariateNormalDistribution.logPdf(values, mean, precision, 1) ;
+		
+		
+		
+		//Need this to not screw up the acceptance probability when the indicator is off...
+		//if I am using mu_i = 0 | I_i = 0,  the P(mu_i = 1 | I_i = 0) and so this mu_i won't be contributing in the TreeClusterViruses's as a normal prior likelihood
+		//and if I am keeping this proposal to latently move the "mu prime", then I don't want selection done on this proposal if
+		// I_i = 0.
+		//while I think it won't screw up the calculation, it will mess up mixing.
+		
+		//if((int)indicators.getParameterValue(groupSelect) == 0){
+			//logHastingRatio = 0;
+		//}
+		
+		
+		return(logHastingRatio);
+		
+  				
+	}
+
+
+
+	private double Proposal_changeToAnotherNodeOn() {
+
+			//change another node to be "on" 		
+		//System.out.println("a");
+	  		//find an on node, turn it off
+			int onNode = findAnOnNodeRandomly();
+	  		indicators.setParameterValue( onNode , 0);
+	  		
+	  		//System.out.println("b");
+	  		//find an off node, turn it on
+	  		int offNode = findAnOffNodeRandomly();
+	  		indicators.setParameterValue(offNode  ,1);
+	  		
+	  		//System.out.println("Change the indicator only - indicator " + I_selected + " to " + site_add);
+	  		//System.out.println("Excision point = " + excisionPoints.getParameterValue(I_selected));
+
+	  		//System.out.println("done");
+	  		
+	  		curNode = offNode;
+		
+		return(0);
+	}
+
+
+	
+	
+	
+
+    
+    private void test1(){
+ 		//test whether 
+ 		//Propose_changeMuAndBalance() and Proposal_changeAnOnMuWalk() are indeed different.
+ 		
+ 		//first load initial serum location, mu, and status.
+ 		System.out.println("Test whether Propose_changeMuAndBalance() and Proposal_changeAnOnMuWalk() are implemented correctly");
+ 		
+ 		
+ 		
+	   	System.out.print("  [");
+	   	for(int i=0; i < numNodes; i++){
+	   		if( (int)indicators.getParameterValue(i) == 1){
+	   			System.out.print(i + " ");
+	   		}
+	   	}
+	   	System.out.println("]");
+ 		
+ 		Propose_changeMuAndBalance();
+ 		
+ 		
+ 		//Proposal_changeAnOnMuWalk();
+ 		
+ 		
+ 		
+ 		
+ 		
+ 		
+ 		
+ 		
+ 		
+ 		
+ 		
+ 		
+ 		
+ 		
+ 		
+ 		
+ 		
+ 		
+ 		System.exit(0);
+ 		
+ 		
+ 		//old test
+ 		
+ 		
+ 		int originalNode1 = 605;
+ 		
+ 		double originalLikelihood = clusterLikelihood.getLogLikelihood();
+ 		
+ 		int[] distance = determineTreeNeighborhood(originalNode1, 5);
+ 	
+
+		   	
+ 		for(int x=0; x< 1000; x++){
+ 			
+ 			int newNode = 604;
+
+ 		indicators.setParameterValue( originalNode1, 0);
+ 		indicators.setParameterValue( newNode, 1);
+ 		
+
+ 		
+ 		 		
+ 		
+		//Flip mu - so the neighbor that replaces the original node now also inherits the existing node's mu
+ 		Parameter originalNodeMu = mu.getParameter(originalNode1); //offset of 1
+		//Parameter originalNodeMu = mu.getParameter(originalNode1+1); //offset of 1
+		double[] tmp = originalNodeMu.getParameterValues();
+
+		Parameter newMu = mu.getParameter(newNode); //offset of 1
+		//Parameter newMu = mu.getParameter(newNode+1); //offset of 1
+		double[] tmpNew = newMu.getParameterValues();
+		
+		double change0 =  Math.random()*4 - 2;
+		double change1 =  Math.random()*4 - 2;
+		
+		originalNodeMu.setParameterValue(0, tmpNew[0]);
+		originalNodeMu.setParameterValue(1, tmpNew[1]);
+
+		newMu.setParameterValue(0, tmp[0] + change0);
+		newMu.setParameterValue(1, tmp[1] + change1);	 		
+
+ 		 
+				
+		//1. Update the cluster labels, after the breakpoints and status parameters may have changed.
+	//		setClusterLabelsUsingIndicators();
+			//setClusterLabelsArray(newClusterLabelArray);	
+			//relabelClusterLabelsArray(newClusterLabelArray, oldClusterLabelArray);
+			//convertClusterLabelsArrayToParameter( newClusterLabelArray);
+			//oldClusterLabelArray = newClusterLabelArray; //the oldClusterLabelArray gets the current labels, so next time this is updated.
+		
+
+		//2. Update the virus locations (and offsets), given ...
+		Tree_Clustering_Shared_Routines.updateUndriftedVirusLocations(numNodes, numdata, treeModel, virusLocationsTreeNode, indicators, mu, virusLocations, correspondingTreeIndexForVirus);
+			//setVirusLocationAutoCorrelatedModel(); //set virus locations, given the breakpoints,status, and mu parameters
+		
+			double testLikelihood = clusterLikelihood.getLogLikelihood();
+			double diff =  testLikelihood - originalLikelihood ;
+			
+			if(diff > 0){
+				System.out.print("***");
+			}
+		System.out.println("logL= " + testLikelihood + " and diff = " + diff);
+ 		
+ 		
+	
+	   	
+	   	
+	   	
+
+		//revert back
+ 		indicators.setParameterValue( originalNode1, 1);
+ 		indicators.setParameterValue( newNode, 0);
+
+		originalNodeMu.setParameterValue(0, tmp[0]);
+		originalNodeMu.setParameterValue(1, tmp[1]);
+
+		newMu.setParameterValue(0, tmpNew[0]);
+		newMu.setParameterValue(1, tmpNew[1]);	 		
+
+
+	   	
+ 			
+ 		}
+	   	
+	   	System.exit(0);
+
+
+ 		
+    }
+    
+    private void test2(){
+
+ 	   	System.out.print("  [");
+	   	for(int i=0; i < numNodes; i++){
+	   		if( (int)indicators.getParameterValue(i) == 1){
+	   			System.out.print(i + " ");
+	   		}
+	   	}
+	   	System.out.println("]");
+		
+		
+	   	
+	  	//indicators.setParameterValue( 436, 0);
+	   	//indicators.setParameterValue( 549, 0);
+	//   	indicators.setParameterValue( 615, 0);   
+	   	//indicators.setParameterValue(648,0);
+	   	//indicators.setParameterValue(673,0);
+	   	//indicators.setParameterValue(794,0);
+	   	//indicators.setParameterValue(785,0);
+	   	//indicators.setParameterValue(690,0);
+	   	
+	   	//Note: if 615 is not turned on, 604 would be much superior to 605.
+	   	
+ 	//since I changed indicators.. should do this before calculating the original likelihood
+ 		
+
+		//1. Update the cluster labels, after the breakpoints and status parameters may have changed.
+	 //  		setClusterLabelsUsingIndicators();
+	   		//setClusterLabelsArray(newClusterLabelArray);	
+			//relabelClusterLabelsArray(newClusterLabelArray, oldClusterLabelArray);
+			//convertClusterLabelsArrayToParameter( newClusterLabelArray);
+			//oldClusterLabelArray = newClusterLabelArray; //the oldClusterLabelArray gets the current labels, so next time this is updated.
+		
+
+		//2. Update the virus locations (and offsets), given ...
+	   	Tree_Clustering_Shared_Routines.updateUndriftedVirusLocations(numNodes, numdata, treeModel, virusLocationsTreeNode, indicators, mu, virusLocations, correspondingTreeIndexForVirus);
+			//setVirusLocationAutoCorrelatedModel(); //set virus locations, given the breakpoints,status, and mu parameters
+		
+			
+			
+			double originalLikelihood = clusterLikelihood.getLogLikelihood();
+			
+			System.out.println("originalLikelihood = " + originalLikelihood);
+ 		
+ 		
+ 		
+ 		int originalNode1 = 605;
+ 		
+ 		
+ 		
+ 		int[] distance = determineTreeNeighborhood(originalNode1, 5);
+ 		
+ 		
+ 		for(int g=0; g < distance.length; g++){
+ 			if(distance[g] < 5 && distance[g] >0){
+ 		 		int newNode = g;
+ 				System.out.print(g + " distance=" + distance[g] + "\t");
+ 				
+ 				
+	 		if( (int) indicators.getParameterValue(newNode) == 1){
+	 			System.out.print("Node already on!!!\n");
+	 		}
+	 		else{
+
+ 		indicators.setParameterValue( originalNode1, 0);
+ 		indicators.setParameterValue( newNode, 1);
+ 		
+
+ 		
+ 		 		
+ 		
+		//Flip mu - so the neighbor that replaces the original node now also inherits the existing node's mu
+		//Parameter originalNodeMu = mu.getParameter(originalNode1+1); //offset of 1
+ 		Parameter originalNodeMu = mu.getParameter(originalNode1); 
+		double[] tmp = originalNodeMu.getParameterValues();
+
+		//Parameter newMu = mu.getParameter(newNode+1); //offset of 1
+		Parameter newMu = mu.getParameter(newNode); 
+		double[] tmpNew = newMu.getParameterValues();
+		
+		originalNodeMu.setParameterValue(0, tmpNew[0]);
+		originalNodeMu.setParameterValue(1, tmpNew[1]);
+
+		newMu.setParameterValue(0, tmp[0]);
+		newMu.setParameterValue(1, tmp[1]);	 		
+
+ 		 
+				
+		//1. Update the cluster labels, after the breakpoints and status parameters may have changed.
+	//		setClusterLabelsUsingIndicators();
+			//setClusterLabelsArray(newClusterLabelArray);	
+			//relabelClusterLabelsArray(newClusterLabelArray, oldClusterLabelArray);
+			//convertClusterLabelsArrayToParameter( newClusterLabelArray);
+			//oldClusterLabelArray = newClusterLabelArray; //the oldClusterLabelArray gets the current labels, so next time this is updated.
+		
+
+		//2. Update the virus locations (and offsets), given ...
+		Tree_Clustering_Shared_Routines.updateUndriftedVirusLocations(numNodes, numdata, treeModel, virusLocationsTreeNode, indicators, mu, virusLocations, correspondingTreeIndexForVirus);
+			//setVirusLocationAutoCorrelatedModel(); //set virus locations, given the breakpoints,status, and mu parameters
+		
+			double testLikelihood = clusterLikelihood.getLogLikelihood();
+			double diff =  testLikelihood - originalLikelihood ;
+			
+			if(diff > -10){
+				System.out.print("***");
+			}
+		System.out.println("logL= " + testLikelihood + " and diff = " + diff);
+ 		
+ 		
+	  // 	System.out.print("  [");
+	   //	for(int i=0; i < numNodes; i++){
+	   //		if( (int)indicators.getParameterValue(i) == 1){
+	   //			System.out.print(i + " ");
+	   //		}
+	   	//}
+	   	//System.out.println("]");
+	   	
+	   	
+	   	
+
+		//revert back
+ 		indicators.setParameterValue( originalNode1, 1);
+ 		indicators.setParameterValue( newNode, 0);
+
+		originalNodeMu.setParameterValue(0, tmp[0]);
+		originalNodeMu.setParameterValue(1, tmp[1]);
+
+		newMu.setParameterValue(0, tmpNew[0]);
+		newMu.setParameterValue(1, tmpNew[1]);	 		
+	 		}
+
+	   	
+ 			}
+ 		}
+	   	
+	   	System.exit(0);
+
+    }
+    
+    
+    private void test3(){
+ 		System.out.println("Turn a new node on");
+
+ 		//696 and 0.1 ,0 works
+	   	//manually flip a new node on
+ 		for(int newNode = 0; newNode < 803; newNode++){
+ 			
+				System.out.print(newNode +  "\t");
+
+ 		if( (int) indicators.getParameterValue(newNode) == 1){
+ 			System.out.print("Node already on!!!\n");
+ 		}
+ 		else{
+ 			
+ 			
+	 		double originalLikelihood = clusterLikelihood.getLogLikelihood();
+
+ 			indicators.setParameterValue( newNode, 1);
+			//Parameter newMu = mu.getParameter(newNode+1); //offset of 1
+ 			Parameter newMu = mu.getParameter(newNode);
+			double[] tmpNew = newMu.getParameterValues();
+			newMu.setParameterValue(0, 0);
+			newMu.setParameterValue(1, 0);
+		
+	 							
+			//1. Update the cluster labels, after the breakpoints and status parameters may have changed.
+		//		setClusterLabelsUsingIndicators();
+				//setClusterLabelsArray(newClusterLabelArray);	
+				//relabelClusterLabelsArray(newClusterLabelArray, oldClusterLabelArray);
+				//convertClusterLabelsArrayToParameter( newClusterLabelArray);
+				//oldClusterLabelArray = newClusterLabelArray; //the oldClusterLabelArray gets the current labels, so next time this is updated.
+			
+
+			//2. Update the virus locations (and offsets), given ...
+			Tree_Clustering_Shared_Routines.updateUndriftedVirusLocations(numNodes, numdata, treeModel, virusLocationsTreeNode, indicators, mu, virusLocations, correspondingTreeIndexForVirus);
+				//setVirusLocationAutoCorrelatedModel(); //set virus locations, given the breakpoints,status, and mu parameters
+			
+				double testLikelihood = clusterLikelihood.getLogLikelihood();
+				double diff =  testLikelihood - originalLikelihood ;
+				
+				if(diff > 0){
+					System.out.print("***");
+				}
+			System.out.println("logL= " + testLikelihood + " and diff = " + diff);
+	 		
+			  // 	System.out.print("  [");
+			   //	for(int i=0; i < numNodes; i++){
+			   //		if( (int)indicators.getParameterValue(i) == 1){
+			   //			System.out.print(i + " ");
+			   //		}
+			   	//}
+			   //	System.out.println("]");
+			   	
+			   	//revert back
+		 		indicators.setParameterValue( newNode, 0);
+				newMu.setParameterValue(0, tmpNew[0]);
+				newMu.setParameterValue(1, tmpNew[1]);	
+			
+ 		}
+ 		
+ 		} //for
+ 		
+ 		System.exit(0);
+    }
+
+	
+	
+	
+	
+	
+	
+	
+	
+	//===============================================================================================
+	//===============================================================================================
+	
+	//  BELOW IS A LIST OF HELPER ROUTINES
+	
+	//===============================================================================================
+	//===============================================================================================
+	
+	
+	public double getNumSera() {
+		return serumLocations.getParameterCount();
+	}
+
+	public MatrixParameter getSerumLocationsParameter() {
+		return serumLocations;
+	}
+  
+	
+	
+	private LinkedList<Integer> findActiveBreakpointsChildren(int selectedNodeNumber) {
+		
+		//a list of breakpoints...
+		
+		LinkedList<Integer> linkedList = new LinkedList<Integer>();
+		int[] nodeBreakpointNumber = new int[numNodes];
+					
+		//int[] nodeStatus = new int[numNodes];
+		//for(int i=0; i < numNodes; i ++){
+		//	nodeStatus[i] = -1;
+		//}
+		
+		//convert to easy process format.
+		//for(int i=0; i < (binSize ); i++){
+		//	if((int) indicators.getParameterValue(i) ==1){
+		//		  nodeStatus[(int)breakPoints.getParameterValue(i)] = i;
+		//	}
+		//}
+		
+		//process the tree and get the vLoc of the viruses..
+		//breadth first depth first..
+		NodeRef cNode = treeModel.getRoot();
+	    LinkedList<NodeRef> visitlist = new LinkedList<NodeRef>();
+
+	    
+	    visitlist.add(cNode);
+	    
+	    
+	    //I am not sure if it still works......
+	    
+	    int countProcessed=0;
+	    while(visitlist.size() > 0){
+	    	
+	    	
+	    	countProcessed++;
+	    	//assign value to the current node...
+	    	if(treeModel.getParent(cNode) == null){
+	    		//Parameter curMu = mu.getParameter(0);
+	    		nodeBreakpointNumber[cNode.getNumber()] =   cNode.getNumber();
+	    	}
+	    	else{
+	    		nodeBreakpointNumber[cNode.getNumber()] =   nodeBreakpointNumber[treeModel.getParent(cNode).getNumber()];
+	    		//System.out.println("node#" + cNode.getNumber() + " is " + nodeBreakpointNumber[cNode.getNumber()]); 
+
+	    		if( (int) indicators.getParameterValue(cNode.getNumber()) == 1){
+	    			//System.out.println(cNode.getNumber() + " is a break point");
+		    		//Parameter curMu = mu.getParameter(cNode.getNumber() +1); //+1 because mu0 is reserved for the root.
+	    			//Parameter curMu = mu.getParameter(cNode.getNumber() ); //+1 because mu0 is reserved for the root.
+		    		
+		    		//see if parent's status is the same as the selectedIndex
+		    		if( nodeBreakpointNumber[cNode.getNumber()] ==   selectedNodeNumber ){
+		    			//System.out.println("hihi");
+		    			linkedList.add( cNode.getNumber() );
+		    		}
+		    		//now, replace this nodeBreakpointNumber with its own node number
+		    		nodeBreakpointNumber[cNode.getNumber()] = cNode.getNumber();
+		    				    			  			    			
+	    		}
+	    	}
+	    	
+	    	
+	    	//add all the children to the queue
+  			for(int childNum=0; childNum < treeModel.getChildCount(cNode); childNum++){
+  				NodeRef node= treeModel.getChild(cNode,childNum);
+  				visitlist.add(node);
+  	        }
+  			
+	  			
+	  		visitlist.pop(); //now that we have finished visiting this node, pops it out of the queue
+
+  			if(visitlist.size() > 0){
+  				cNode = visitlist.getFirst(); //set the new first node in the queue to visit
+  			}
+  			
+			
+	    }
+	    
+	    //System.out.println("Now printing children of "  + selectedNodeNumber+":");
+		//for(int i=0; i < linkedList.size(); i++){
+		//	System.out.println( linkedList.get(i)  );
+		//}
+		
+		return linkedList;
+	}
+
+
+
+	private int checkSiteHasBeenAddedToOnIndicators(int curTest){
+		
+		int hasBeenAdded=0;
+		if((int) indicators.getParameterValue(curTest) == 1){
+			hasBeenAdded=1;
+		}
+		
+		return(  hasBeenAdded );
+	}
+
+
+	//for Gibbs move
+    private double[] calculateConditionalDistribution(int index) {
+		double []logNumeratorProb = new double[numNodes];
+
+		//calculate the distribution for calculating introducing an excision point in each node
+		for(int curTest=0; curTest < numNodes; curTest++){	
+			
+			int hasBeenAdded = checkSiteHasBeenAddedToOnIndicators(curTest); 			//check if a site has already been added
+  			if(hasBeenAdded ==0){
+
+  		    	indicators.setParameterValue(curTest,1);
+  		    	
+  		    	updateClusterLabelsAndVirusLocationsGivenBreakPointsAndStatus();	    		  					  				
+   				logNumeratorProb[curTest] = clusterLikelihood.getLogLikelihood(); 	//Calculate likelihood
+   				
+   				
+		    	indicators.setParameterValue(curTest,0); //set back to original
+			    
+  			}
+  			else{
+	  			logNumeratorProb[curTest]  = Double.NEGATIVE_INFINITY; //dummy probability
+  			}
+  			
+
+		} //finished curTest
+		
+		 double []condDistribution = calculateConditionalProbabilityGivenLogNumeratorProb(logNumeratorProb);
+		
+//		 System.out.println("-----");
+//		 for(int i=0; i < numNodes; i++){
+//			 if(condDistribution[i] > 0.0000001){
+//				 System.out.println("node " + i + " p=" + condDistribution[i]);
+//			 }
+//		 }
+//		 System.out.println("-----");
+		 
+		 
+		 updateClusterLabelsAndVirusLocationsGivenBreakPointsAndStatus();	
+		 
+		 return condDistribution;
+	}
+
+    
+    
+	private void updateClusterLabelsAndVirusLocationsGivenBreakPointsAndStatus() {
+
+		
+			updateClusterLabelsWhileKeepingLablesConsistent();	  //CURRENTLY DOES NOT WORK..				
+			
+			//setVirusLocationAndOffsets();  //this uses the clusterLabels parameter
+			Tree_Clustering_Shared_Routines.updateUndriftedVirusLocations(numNodes, numdata, treeModel, virusLocationsTreeNode, indicators, mu, virusLocations, correspondingTreeIndexForVirus);
+			//setVirusLocationAutoCorrelatedModel(); //which depends on the status and breakpoints
+					
+	}
+
+
+
+	private void updateClusterLabelsWhileKeepingLablesConsistent() {
+
+		/*
+		int old
+    	//use the tree to re-partition according to the change.
+		clusterLabelArray = setClusterLabelsByTestCutNodeByNodeOrder(testCutNode); //note that instead of using the indicators, it uses the testCutNodes directly
+		relabelClusterLabels(clusterLabelArray, oldclusterLabelArray); //will move it out
+		
+		//set cluster label parameter for testing 					
+		for(int i=0; i < numdata; i++){
+			clusterLabels.setParameterValue(i, clusterLabelArray[i]);
+		}
+		*/
+	}
+
+
+
+	private double[] calculateConditionalProbabilityGivenLogNumeratorProb(
+			double[] logNumeratorProb) {
+		int numNodes = logNumeratorProb.length;
+  		double maxLogProb = logNumeratorProb[0];
+  		for(int i=0; i < numNodes; i++ ){
+  			if(logNumeratorProb[i] > maxLogProb){
+  				maxLogProb = logNumeratorProb[i];
+  			}
+  		}  		
+  		
+  		double sumLogDenominator = 0;
+  		for(int i=0; i < numNodes; i++){
+  			if(logNumeratorProb[i] != Double.NEGATIVE_INFINITY){
+  				sumLogDenominator += Math.exp((logNumeratorProb[i]-maxLogProb));
+  			}
+  		}
+  		sumLogDenominator = Math.log(sumLogDenominator) + maxLogProb;
+  		
+  		double sumProb = 0;
+  		double []condProb = new double[numNodes]; 
+  		for(int i=0; i < numNodes; i++){
+  			condProb[i] = Math.exp( logNumeratorProb[i] - sumLogDenominator   );
+			//System.out.println("condProb of site " + i + " = " + condProb[i]);
+					sumProb +=condProb[i];
+				if(condProb[i] > 0.01){
+//					System.out.println("**site " + i + " with prob=" + condProb[i]  + "  steps from previous=" + numStepsFromOrigin[i]);
+			}
+  		}
+  		return(condProb);
+	}
+	
+	
+
+
+	//RETIRE
+	/* no longer needed, I think
+	private int findAnUnoccupiedSite() {
+    	
+    	int hasBeenAdded = 1;
+    	int site_add = -1;
+    	while(hasBeenAdded==1){
+  			site_add = (int) Math.floor( Math.random()*numNodes );
+  			hasBeenAdded=0;
+ 			if( (int) indicators.getParameterValue(site_add) == 1){
+ 				hasBeenAdded=1;
+ 				break;
+ 			}
+    	}
+	  	
+		return site_add;
+	}
+	*/
+
+
+	
+	//may be very inefficient
+	private int findNodeRandomly() {
+    	int nodeIndex= 0;
+    	int I_selected = -1;
+  		while(nodeIndex ==0){
+  			I_selected = (int) (Math.floor(Math.random()*numNodes));
+  			
+  			if(I_selected == treeModel.getRoot().getNumber()){
+  				nodeIndex = 0;
+  			}
+  			else{
+  	  			nodeIndex = 1;  				
+  			}
+  		}    	  		
+    	return I_selected;
+	}
+
+	
+	//may be very inefficient
+	private int findAnOnNodeIncludingRootRandomly() {
+    	int isOn= 0;
+    	int I_selected = -1;
+  		while(isOn ==0){
+  			I_selected = (int) (Math.floor(Math.random()*numNodes));
+  			isOn = (int) indicators.getParameterValue(I_selected);  			
+  		}    	  		
+  		
+    	return I_selected;
+	}
+
+
+	//may be very inefficient
+	private int findAnOnNodeRandomly() {
+    	int isOn= 0;
+    	int I_selected = -1;
+  		while(isOn ==0){
+  			I_selected = (int) (Math.floor(Math.random()*numNodes));
+  			isOn = (int) indicators.getParameterValue(I_selected);
+  			
+  			if(I_selected == treeModel.getRoot().getNumber()){
+  				isOn = 0;
+  			}
+  			
+  		}    	  		
+  		
+    	return I_selected;
+	}
+
+	
+	private int findAnOffNodeRandomly() {
+    	int isOn= 1;
+    	int I_selected = -1;
+  		while(isOn ==1){
+  			I_selected = (int) (Math.floor(Math.random()*numNodes));
+  			isOn = (int) indicators.getParameterValue(I_selected);
+  		}    	  		
+  		
+    	return I_selected;
+	}
+
+	
+	
+	
+
+	/*
+	private void updateK() {
+
+    	//K is changed accordingly..
+		int K_count = 0; //K_int gets updated
+		for(int i=0; i < numNodes; i++){
+			K_count += (int) indicators.getParameterValue(i);
+		}
+		//System.out.println("K now becomes " + K_count);
+		K.setParameterValue(0, K_count); //update   
+ 							
+	}
+*/
+
+
+/*
+	private void convertClusterLabelsArrayToParameter(int[] clusterLabel){
+    	for(int i=0; i < clusterLabel.length; i++){
+    		clusterLabels.setParameterValue(i, clusterLabel[i]);
+    	}
+    }
+  */
+	
+	private void relabelClusterLabelsArray(int[] clusterLabel, int[] oldclusterLabel) {
+
+    	int maxOldLabel = 0;
+    	for(int i=0; i < oldclusterLabel.length; i++){
+    		if(maxOldLabel < oldclusterLabel[i]){
+    			maxOldLabel = oldclusterLabel[i];
+    		}
+    	}
+    	
+    	
+        Map<Integer, Integer> m = new HashMap<Integer, Integer>();
+        int[] isOldUsed = new int[ clusterLabel.length  ]; //an overkill - basically just need the max label in the old cluster
+        
+        for(int i=0; i < clusterLabel.length; i++){
+        	
+        	
+    		if(m.get(new Integer(clusterLabel[i])) == null ){
+    			if(isOldUsed[oldclusterLabel[i]] == 0){
+    				m.put(new Integer(clusterLabel[i]), new Integer(oldclusterLabel[i]));
+    				isOldUsed[oldclusterLabel[i]] = 1;
+    				
+    				if( clusterLabel[i] != oldclusterLabel[i]){
+    					System.out.println("conversion occurred");
+    				}
+    			}
+    			else{
+    				maxOldLabel++;
+    				m.put(new Integer(clusterLabel[i]), new Integer(maxOldLabel));
+    			}
+    			
+    		}
+
+    		clusterLabel[i] = m.get(new Integer( clusterLabel[i])).intValue();
+    		
+    	}
+	}
+
+    
+    
+    
+/*
+	private void setMembershipTreeToVirusIndexes(){
+
+  	   //I suspect this is an expensive operation, so I don't want to do it many times,
+  	   //which is also unnecessary  - MAY have to update whenever a different tree is used.
+         correspondingTreeIndexForVirus = new int[numdata]; 
+         for(int i=0; i < numdata; i++){
+  		   Parameter v = virusLocations.getParameter(i);
+  		   String curName = v.getParameterName();
+  		  // System.out.println(curName);
+  		   int isFound = 0;
+      	   for(int j=0; j < numNodes; j++){
+      		   String treeId = treeModel.getTaxonId(j);
+      		   if(curName.equals(treeId) ){
+      		//	   System.out.println("  isFound at j=" + j);
+      			   correspondingTreeIndexForVirus[i] = j;
+      			   isFound=1;
+      			   break;
+      		   }	   
+      	   }
+      	   if(isFound ==0){
+      		   System.out.println("not found. Exit now.");
+      		   System.exit(0);
+      	   }     	   
+         }
+    }
+*/
+
+	
+
+	private void PrintsetMembershipTreeToVirusIndexes(){
+
+  	   //I suspect this is an expensive operation, so I don't want to do it many times,
+  	   //which is also unnecessary  - MAY have to update whenever a different tree is used.
+         correspondingTreeIndexForVirus = new int[numdata]; 
+         for(int i=0; i < numdata; i++){
+  		   Parameter v = virusLocations.getParameter(i);
+  		   String curName = v.getParameterName();
+  		   System.out.print(curName);
+  		   int isFound = 0;
+      	   for(int j=0; j < numNodes; j++){
+      		   String treeId = treeModel.getTaxonId(j);
+      		   if(curName.equals(treeId) ){
+      			   System.out.print("  isFound at j=" + j);
+      			   correspondingTreeIndexForVirus[i] = j;
+      			   System.out.println(" has clusterLabel = " + clusterLabelsTreeNode.getParameterValue(j));
+      			   isFound=1;
+      			   break;
+      		   }	   
+      	   }
+      	   if(isFound ==0){
+      		   System.out.println("not found. Exit now.");
+      		   System.exit(0);
+      	   }     	   
+         }
+    }
+
+
+	
+	
+	
+	
+	
+	
+/*
+	//Obsolete
+	private void setVirusLocationAndOffsets() {
+		
+		//change the mu in the toBin and fromBIn
+		//borrow from getLogLikelihood:
+
+		double[] meanYear = new double[binSize];
+		double[] groupCount = new double[binSize];
+		for(int i=0; i < numdata; i++){
+			int label = (int) clusterLabels.getParameterValue(i);
+			double year  = 0;
+	        if (virusOffsetsParameter != null) {
+	            //	System.out.print("virus Offeset Parameter present"+ ": ");
+	            //	System.out.print( virusOffsetsParameter.getParameterValue(i) + " ");
+	            //	System.out.print(" drift= " + drift + " ");
+	                year = virusOffsetsParameter.getParameterValue(i);   //just want year[i]
+	                		//make sure that it is equivalent to double offset  = year[virusIndex] - firstYear;
+	            }
+	            else{
+	            	System.out.println("virus Offeset Parameter NOT present. We expect one though. Something is wrong.");
+	            }
+			meanYear[ label] = meanYear[ label] + year;
+			
+			groupCount[ label  ] = groupCount[ label ]  +1; 
+		}
+					
+		for(int i=0; i < binSize; i++){
+			if(groupCount[i] > 0){
+				meanYear[i] = meanYear[i]/groupCount[i];
+			}
+			//System.out.println(meanYear[i]);
+		}
+
+
+		mu0_offset = new double[binSize];
+		//double[] mu1 = new double[maxLabel];
+				
+		
+		//System.out.println("maxLabel=" + maxLabel);
+		//now, change the mu..
+		for(int i=0; i < binSize; i++){
+			//System.out.println(meanYear[i]*beta);
+			mu0_offset[i] =  meanYear[i];
+			//System.out.println("group " + i + "\t" + mu0_offset[i]);
+		}	
+	//		System.out.println("=====================");
+		
+		
+		//Set  the vLoc to be the corresponding mu values , and clusterOffsetsParameter to be the corresponding offsets
+    	//virus in the same cluster has the same position
+    	for(int i=0; i < numdata; i++){
+        	int label = (int) clusterLabels.getParameterValue(i);
+    		Parameter vLoc = virusLocations.getParameter(i);
+    		//setting the virus locs to be equal to the corresponding mu
+    			double muValue = mu.getParameter(label).getParameterValue(0);    			
+    			vLoc.setParameterValue(0, muValue);
+    			double	muValue2 = mu.getParameter(label).getParameterValue(1);
+   				vLoc.setParameterValue(1, muValue2);
+	   			//System.out.println("vloc="+ muValue + "," + muValue2);
+    	}
+    	
+    	for(int i=0; i < numdata; i++){
+        	int label = (int) clusterLabels.getParameterValue(i);
+   			//if we want to apply the mean year virus cluster offset to the cluster
+   			if(clusterOffsetsParameter != null){
+   			//setting the clusterOffsets to be equal to the mean year of the virus cluster
+   				// by doing this, the virus changes cluster AND updates the offset simultaneously
+   				clusterOffsetsParameter.setParameterValue( i , mu0_offset[label]);
+   			}
+ 				//		System.out.println("mu0_offset[label]=" + mu0_offset[label]);
+ 		//		System.out.println("clusterOffsets " +  i +" now becomes =" + clusterOffsetsParameter.getParameterValue(i) );   			
+    	}
+
+    	
+
+    	
+//    	System.out.println("===The on nodes===");
+//    	for(int i=0; i < binSize; i++){	    
+//    		if((int) excisionPoints.getParameterValue(i) == 1){
+//    			System.out.println("Cluster node " + i + " = " + (int) indicators.getParameterValue(i) + "\tstatus=" + (int) excisionPoints.getParameterValue(i));
+//    		}
+//    	}
+    	
+		
+	}
+*/
+
+    
+/*
+	private void setVirusLocationAutoCorrelatedModel() {
+			double[][] nodeloc = new double[numNodes][2];
+
+			//process the tree and get the vLoc of the viruses..
+			//breadth first depth first..
+			NodeRef cNode = treeModel.getRoot();
+		    LinkedList<NodeRef> visitlist = new LinkedList<NodeRef>();
+		    
+		    visitlist.add(cNode);
+		    
+		    int countProcessed=0;
+		    while(visitlist.size() > 0){
+		    	countProcessed++;
+		    	//assign value to the current node...
+		    	if(treeModel.getParent(cNode) == null){  //this means it is a root node
+		    		Parameter curMu = mu.getParameter( cNode.getNumber() );
+		    		//Parameter curMu = mu.getParameter(0);
+		    		nodeloc[cNode.getNumber()][0]  = curMu.getParameterValue(0);
+		    		nodeloc[cNode.getNumber() ][1] = curMu.getParameterValue(1);
+		    		
+		    		
+		    		Parameter curVirusLoc = virusLocationsTreeNode.getParameter(cNode.getNumber());
+		    		curVirusLoc.setParameterValue(0, curMu.getParameterValue(0) );
+		    		curVirusLoc.setParameterValue(1, curMu.getParameterValue(1) );
+		    	}
+		    	else{
+		    		nodeloc[cNode.getNumber()][0] =   nodeloc[treeModel.getParent(cNode).getNumber()][0];
+		    		nodeloc[cNode.getNumber()][1] =   nodeloc[treeModel.getParent(cNode).getNumber()][1];
+		    		
+		    		if( (int) indicators.getParameterValue(cNode.getNumber()) == 1){
+		    			Parameter curMu = mu.getParameter(cNode.getNumber() ); // no +1 because I don't need another mu- the root's mu takes care of the first cluster's mu 
+			    		//Parameter curMu = mu.getParameter(cNode.getNumber() +1); //+1 because mu0 is reserved for the root.
+		    			nodeloc[cNode.getNumber()][0] += curMu.getParameterValue(0);
+		    			nodeloc[cNode.getNumber()][1] += curMu.getParameterValue(1);	  			    			
+		    		}
+		    		
+		    		Parameter curVirusLoc = virusLocationsTreeNode.getParameter(cNode.getNumber());
+		    		curVirusLoc.setParameterValue(0, nodeloc[cNode.getNumber()][0] );
+		    		curVirusLoc.setParameterValue(1,nodeloc[cNode.getNumber()][1] );
+		    	}
+		    	
+		    	//add all the children to the queue
+	  			for(int childNum=0; childNum < treeModel.getChildCount(cNode); childNum++){
+	  				NodeRef node= treeModel.getChild(cNode,childNum);
+	  				visitlist.add(node);
+	  	        }
+	  			
+		  			
+		  		visitlist.pop(); //now that we have finished visiting this node, pops it out of the queue
+	
+	  			if(visitlist.size() > 0){
+	  				cNode = visitlist.getFirst(); //set the new first node in the queue to visit
+	  			}
+	  			
+  			
+		}
+		    
+		    //write the virus locations
+		    for(int i=0; i < numdata; i++){
+		    	Parameter vLocParameter = virusLocations.getParameter(i);
+		    	vLocParameter.setParameterValue(0, nodeloc[correspondingTreeIndexForVirus[i]][0]);
+		    	vLocParameter.setParameterValue(1, nodeloc[correspondingTreeIndexForVirus[i]][1]);
+		    }
+			
+		    
+		    //for(int i=0; i < numdata; i++){
+				//Parameter vLocP= virusLocations.getParameter(i);
+		    	//System.out.println("virus " + vLocP.getId() + "\t" + vLocP.getParameterValue(0) + "," + vLocP.getParameterValue(1)  );	  			    	
+		    //}
+		    	
+	}
+	*/
+
+
+
+	private int[] determineTreeNeighborhood(int curElementNumber, int maxDepth ) {
+			int numNodes = treeModel.getNodeCount();
+ 			
+			//Determining the number of steps from the original site
+			int []numStepsFromOrigin = new int[numNodes];
+			for(int i=0; i < numNodes; i++){
+				numStepsFromOrigin[i] = 100000;
+			}
+ 		
+  		//System.out.println("Excision point = " + excisionPoints.getParameterValue(I_selected));
+
+  		
+  			//int curElementNumber =(int) indicators.getParameterValue(I_selected);
+  			int rootElementNumber = curElementNumber;
+  			//System.out.println("curElementNumber=" + curElementNumber);
+  			NodeRef curElement = treeModel.getNode(curElementNumber); 
+  			
+  		    LinkedList<NodeRef> visitlist = new LinkedList<NodeRef>();
+  		    LinkedList<NodeRef> fromlist = new LinkedList<NodeRef>();
+  		    LinkedList<Integer> nodeLevel = new LinkedList<Integer>();
+  		    
+  		    //LinkedList<Integer> possibilities = new LinkedList<Integer>();
+  		    
+  		    NodeRef dummyNode = null;
+  		    visitlist.add(curElement);
+  		    fromlist.add(dummyNode);
+  		    nodeLevel.add(new Integer(0));
+  		    
+  		    //int numVisited = 0;
+  		    
+  		  //System.out.println("root node " + curElement.getNumber());
+		    while(visitlist.size() > 0){
+		    	//numVisited++;
+		    	
+  			if(treeModel.getParent(curElement) != null){
+  				//add parent
+		  			NodeRef node= treeModel.getParent(curElement);	  		  			
+  				if(fromlist.getFirst() != node){
+  					if( nodeLevel.getFirst() < maxDepth){
+  						visitlist.add(node);
+  		  				fromlist.add(curElement);
+  		  				nodeLevel.add(new Integer(nodeLevel.getFirst()+1));
+  		 // 				System.out.println("node " +  node.getNumber() + " added, parent of " + curElement.getNumber());
+  					}
+  				}
+  			}
+
+			
+  			for(int childNum=0; childNum < treeModel.getChildCount(curElement); childNum++){
+  				NodeRef node= treeModel.getChild(curElement,childNum);
+  				if(fromlist.getFirst() != node){
+  					if( nodeLevel.getFirst() < maxDepth){
+  						visitlist.add(node);
+  						fromlist.add(curElement);
+  						nodeLevel.add(new Integer(nodeLevel.getFirst()+1));
+  			//			System.out.println("node " +  node.getNumber() + " added, child of " + curElement.getNumber());
+  					}
+  				}
+  	        }
+  			
+  			
+				numStepsFromOrigin[curElement.getNumber()] = nodeLevel.getFirst();
+
+  			
+	  			visitlist.pop();
+	  			fromlist.pop();
+	  			nodeLevel.pop();
+
+  			if(visitlist.size() > 0){
+  				curElement = visitlist.getFirst();
+  			}
+  			
+  			
+		}
+		    
+		 			    
+		    
+		    return(numStepsFromOrigin);
+		
+	}
+
+
+
+	
+	private void setClusterLabelsUsingIndicators(){
+
+        int []membership = determine_membership_v2(treeModel);
+        
+        for(int i=0; i < numdata; i++){    
+        	clusterLabels.setParameterValue(i,membership[correspondingTreeIndexForVirus[i]] );
+        }
+	}
+	
+	
+	
+	private void setClusterLabelsTreeNodesUsingIndicators(){
+		
+        int []membership = determine_membership_v2(treeModel);
+        for(int i=0; i < numNodes; i++){
+        	clusterLabelsTreeNode.setParameterValue(i, membership[i]);
+        }
+	}
+	
+	
+	//composite:
+
+	private void CompositeSetClusterLabelsTreeNodesAndVirusesUsingIndicators(){
+		//setMembershipTreeToVirusIndexes(); //note: I have to add this in to fix the inconsistency between
+		//the clusterLabelsTreeNode and clusterLabels.. 
+		//do I really need to do this everytime?
+		//I always thought if I use the same tree, it won't change?
+		
+        int []membership = determine_membership_v2(treeModel);
+        for(int i=0; i < numNodes; i++){
+        	clusterLabelsTreeNode.setParameterValue(i, membership[i]);
+        }
+        for(int i=0; i < numdata; i++){    
+        	clusterLabels.setParameterValue(i,membership[correspondingTreeIndexForVirus[i]] );
+        }
+	}
+
+	
+
+    //traverse down the tree, top down, do calculation
+     int[] determine_membership_v2(TreeModel treeModel){
+	    	
+	    NodeRef root = treeModel.getRoot();
+	
+	    int numClusters = 1;
+	    LinkedList<NodeRef> list = new LinkedList<NodeRef>();
+	    list.addFirst(root);
+	
+	    int[] membership = new int[treeModel.getNodeCount()];
+	    for(int i=0; i < treeModel.getNodeCount(); i++){
+	    	membership[i] = -1;
+	    }
+	    membership[root.getNumber()] = 0; //root always given the first cluster
+	          
+	    while(!list.isEmpty()){
+	    	//do things with the current object
+	    	NodeRef curElement = list.pop();
+	    	//String content = "node #" + curElement.getNumber() +", taxon=" + treeModel.getNodeTaxon(curElement) + " and parent is = " ;
+	    	String content = "node #" + curElement.getNumber() +", taxon= " ;
+	    	if(treeModel.getNodeTaxon(curElement)== null){
+	    		content += "internal node\t";
+	    	}
+	    	else{
+	    		content += treeModel.getNodeTaxon(curElement).getId() + "\t";
+	    		//content += treeModel.getTaxonIndex(treeModel.getNodeTaxon(curElement)) + "\t";
+	    	}
+	    	
+	       	if(treeModel.getParent(curElement)== null){
+	    		//content += "no parent";
+	    	}
+	    	else{
+	    		//content += "parent node#=" + treeModel.getParent(curElement).getNumber();
+	    	}
+	    	
+	    	//cluster assignment:
+	    	if(!treeModel.isRoot(curElement)){
+	    		if( (int) indicators.getParameterValue(curElement.getNumber() ) == 1) {
+	    			numClusters++ ;
+	    			membership[ curElement.getNumber() ] = numClusters - 1; 
+	      	 	}
+	    		else{
+	    			//inherit from parent's cluster assignment
+	    			membership[curElement.getNumber()] = membership[treeModel.getParent(curElement).getNumber()]; 
+	    		}        	
+	    	}//is not Root
+	    	content += " cluster = " + membership[curElement.getNumber()] ; 
+	    	
+	    //	System.out.println(content);
+	
+	    	
+	        for(int childNum=0; childNum < treeModel.getChildCount(curElement); childNum++){
+	        	list.addFirst(treeModel.getChild(curElement,childNum));
+	        }
+	    }
+	
+	     return(membership);
+    }
+
+	
+	
+	
+	
+	private void setClusterLabelsArray(int[] clusterLabelArray) {
+		int K_int = 0;
+        for(int i=0; i < numNodes; i++){
+      	   if( (int) indicators.getParameterValue( i ) ==1 ){
+      		  K_int++; 
+      	   }
+        }
+       // System.out.println("K_int=" + K_int);
+        int numNodes = treeModel.getNodeCount();
+        int[] cutNodes = new int[K_int];
+ 	   int cutNum = 0;
+ 	   String content = "";
+        for(int i=0; i < numNodes; i++){
+     	   if( (int) indicators.getParameterValue( i ) == 1 ){
+     		   cutNodes[cutNum] = i;
+     		   content +=  i +  ",";
+     		   cutNum++;
+     	   }
+     	  
+        }
+        //System.out.println( content);        
+
+
+        int []membership = determine_membership(treeModel, cutNodes, K_int);
+        
+        for(int i=0; i < numdata; i++){    
+        	clusterLabelArray[i] = membership[correspondingTreeIndexForVirus[i]];
+        }
+    	
+	}
+	
+	/*
+	private void setClusterLabelsParameter() {
+		int K_int = 0;
+        for(int i=0; i < numNodes; i++){
+      	   if( (int) indicators.getParameterValue( i ) ==1 ){
+      		  K_int++; 
+      	   }
+        }
+        int numNodes = treeModel.getNodeCount();
+        int[] cutNodes = new int[K_int];
+ 	   int cutNum = 0;
+ 	   String content = "";
+        for(int i=0; i < numNodes; i++){
+     	   if( (int) indicators.getParameterValue( i ) ==1 ){
+     		   cutNodes[cutNum] = i;
+     		   content +=  i + ",";
+     		   cutNum++;
+     	   }
+     	  
+        }
+        
+
+
+        int []membership = determine_membership(treeModel, cutNodes, K_int);
+        
+        for(int i=0; i < numdata; i++){     	   
+     	   clusterLabels.setParameterValue( i, membership[membershipToClusterLabelIndexes[i]]);      	   //The assumption that the first nodes being external node corresponding to the cluster labels IS FALSE, so I have to search for the matching indexes
+     	   //Parameter vloc = virusLocations.getParameter(i);
+     	   //System.out.println(vloc.getParameterName() + " i="+ i + " membership=" + (int) clusterLabels.getParameterValue(i));
+        }
+        
+
+    	
+	}
+*/
+    
+	    
+	
+	
+    
+    private static boolean isCutNode(int number, int cutNodes[], int numCut) {
+    	if(numCut > 0){
+    		for(int i=0; i < numCut; i++){
+    			if(number == cutNodes[i]){
+    				return true;
+    			}
+    		}
+    	}
+    	return false;
+    }
+    
+    
+
+    //traverse down the tree, top down, do calculation
+     int[] determine_membership(TreeModel treeModel, int[] cutNodes, int numCuts){
+	    	
+	    NodeRef root = treeModel.getRoot();
+	
+	    int numClusters = 1;
+	    LinkedList<NodeRef> list = new LinkedList<NodeRef>();
+	    list.addFirst(root);
+	
+	    int[] membership = new int[treeModel.getNodeCount()];
+	    for(int i=0; i < treeModel.getNodeCount(); i++){
+	    	membership[i] = -1;
+	    }
+	    membership[root.getNumber()] = 0; //root always given the first cluster
+	          
+	    while(!list.isEmpty()){
+	    	//do things with the current object
+	    	NodeRef curElement = list.pop();
+	    	//String content = "node #" + curElement.getNumber() +", taxon=" + treeModel.getNodeTaxon(curElement) + " and parent is = " ;
+	    	String content = "node #" + curElement.getNumber() +", taxon= " ;
+	    	if(treeModel.getNodeTaxon(curElement)== null){
+	    		content += "internal node\t";
+	    	}
+	    	else{
+	    		content += treeModel.getNodeTaxon(curElement).getId() + "\t";
+	    		//content += treeModel.getTaxonIndex(treeModel.getNodeTaxon(curElement)) + "\t";
+	    	}
+	    	
+	       	if(treeModel.getParent(curElement)== null){
+	    		//content += "no parent";
+	    	}
+	    	else{
+	    		//content += "parent node#=" + treeModel.getParent(curElement).getNumber();
+	    	}
+	    	
+	    	//cluster assignment:
+	    	if(!treeModel.isRoot(curElement)){
+	    	 if(isCutNode(curElement.getNumber(), cutNodes, numCuts)){
+	    	//if(isCutNode(curElement.getNumber())){
+	    		numClusters++ ;
+	    		membership[ curElement.getNumber() ] = numClusters - 1; 
+	      	}
+	    	else{
+	    		//inherit from parent's cluster assignment
+	    		membership[curElement.getNumber()] = membership[treeModel.getParent(curElement).getNumber()]; 
+	    	 }
+	    	        	
+	    	}//is not Root
+	    	content += " cluster = " + membership[curElement.getNumber()] ; 
+	    	
+	    //	System.out.println(content);
+	
+	    	
+	        for(int childNum=0; childNum < treeModel.getChildCount(curElement); childNum++){
+	        	list.addFirst(treeModel.getChild(curElement,childNum));
+	        }
+	    }
+	
+	     return(membership);
+    }
+
+    
+    
+    
+    
+   
+
+	 //traverse down the tree, top down, do calculation
+	  static int[] determine_membershipByNodeOrder(TreeModel treeModel, int[] cutNodes, int numCuts){
+
+
+	      Map<Integer, Integer> m = new HashMap<Integer, Integer>();
+	      for(int i=0; i < numCuts; i++){
+	    	  m.put(new Integer(cutNodes[i]), new Integer(i+1));
+	    	  
+	    	//  System.out.println(cutNodes[i] + "\t" + (i+1) );
+	      }
+	      
+	  	
+	  NodeRef root = treeModel.getRoot();
+
+	  int numClusters = 1;
+	  LinkedList<NodeRef> list = new LinkedList<NodeRef>();
+	  list.addFirst(root);
+
+	  int[] membership = new int[treeModel.getNodeCount()];
+	  for(int i=0; i < treeModel.getNodeCount(); i++){
+	  	membership[i] = -1;
+	  }
+	  membership[root.getNumber()] = 0; //root always given the first cluster
+	        
+	  while(!list.isEmpty()){
+	  	//do things with the current object
+	  	NodeRef curElement = list.pop();
+	  	//String content = "node #" + curElement.getNumber() +", taxon=" + treeModel.getNodeTaxon(curElement) + " and parent is = " ;
+	  	String content = "node #" + curElement.getNumber() +", taxon= " ;
+	  	if(treeModel.getNodeTaxon(curElement)== null){
+	  		content += "internal node\t";
+	  	}
+	  	else{
+	  		content += treeModel.getNodeTaxon(curElement).getId() + "\t";
+	  		//content += treeModel.getTaxonIndex(treeModel.getNodeTaxon(curElement)) + "\t";
+	  	}
+	  	
+	     	if(treeModel.getParent(curElement)== null){
+	  		//content += "no parent";
+	  	}
+	  	else{
+	  		//content += "parent node#=" + treeModel.getParent(curElement).getNumber();
+	  	}
+	  	
+	  	//cluster assignment:
+	  	if(!treeModel.isRoot(curElement)){
+	  	 if(isCutNode(curElement.getNumber(), cutNodes, numCuts)){
+	  	//if(isCutNode(curElement.getNumber())){
+	  		//numClusters++ ;
+	  		//membership[ curElement.getNumber() ] = numClusters - 1;
+	  		// System.out.println("get: curElement" + curElement.getNumber() + "\t" + m.get(new Integer( curElement.getNumber())));
+	  		membership[ curElement.getNumber()] = m.get(new Integer( curElement.getNumber()));
+	  		
+	    }
+	  	else{
+	  		//inherit from parent's cluster assignment
+	  		membership[curElement.getNumber()] = membership[treeModel.getParent(curElement).getNumber()]; 
+	  	 }
+	  	        	
+	  	}//is not Root
+	  	content += " cluster = " + membership[curElement.getNumber()] ; 
+	  	
+	  //	System.out.println(content);
+
+	  	
+	      for(int childNum=0; childNum < treeModel.getChildCount(curElement); childNum++){
+	      	list.addFirst(treeModel.getChild(curElement,childNum));
+	      }
+	  }
+
+	   return(membership);
+	  }
+	   
+
+	  
+	  public void hotNodeProcedure(){
+
+
+	    	//update... Actually, I don't think it should be here
+	    	if( moveCounter % updateHotNodeFrequencey == 0 ){
+	    		System.out.print("Update hot nodes: ");
+	    		
+	    		for(int i=0; i < numNodes; i++){
+	    			if(freqAcceptNode[i] > 0){
+	    				hotNodes[i] = 1;
+	    				System.out.print(i + " ");
+	    			}
+	    			else{
+	    				hotNodes[i] = 0;
+	    			}
+	    		}
+	    		System.out.println("");
+	    		//reset count
+	    		for(int i=0; i < numNodes; i++){
+	    			freqAcceptNode[i] = 0;
+	    		}
+	    	}
+
+	  }
+	  
+	  public void printAcceptance(){
+		  	if(moveCounter > BURN_IN  &&  moveCounter % frequencyPrintAcceptance == 0 ){
+	        	System.out.println("======================================================");
+	        	System.out.println("#\tProposal\tAcceptance Rate");
+	        	for(int i=0; i < operatorWeight.length; i++){
+	        		if(operatorWeight[i] > 0 ){
+	        			System.out.println(i +"\t" + operatorName[i] + "\t" + (double) acceptNum[i] / (double) (acceptNum[i] + (double) rejectNum[i])  + "\taccept=" + acceptNum[i] + " reject=" + rejectNum[i]);
+	        		}
+	        	}
+	        	System.out.println("======================================================");
+
+	        	
+	    	  	//reset acceptance
+	        	for(int i=0; i < operatorWeight.length; i++){
+	        		if(operatorWeight[i] > 0 ){
+	        			acceptNum[i]= 0;
+	        			rejectNum[i] = 0;
+	        		}
+	        	}
+		  
+	        	
+	        	/*
+	        	System.out.println("Acceptance of flipIBalance by distance:");
+	        	for(int i=0; i < 20; i++){
+        			System.out.println( ((double)i ) +"\t" + (double) acceptDistance[i] / (double) (acceptDistance[i] + (double) rejectDistance[i])  + "\taccept=" + acceptDistance[i] + " reject=" + rejectDistance[i]);
+	        	}
+	        	
+	        	
+	        	for(int i=0; i < 100; i++){
+	        		acceptDistance[i] = 0;
+	        		rejectDistance[i] = 0;
+	        	}
+	        	*/
+	    	}
+		  	
+	
+	  
+	  }
+	  
+	  /*
+	  public void printActiveNodes(){
+
+	    	if( moveCounter % frequencyPrintActive == 0 ){
+		   	System.out.print("  [");
+		   	for(int i=0; i < numNodes; i++){
+		   		if( (int)indicators.getParameterValue(i) == 1){
+		   			System.out.print(i + " ");
+		   		}
+		   	}
+		   	System.out.println("]");
+	    	}
+	  }
+	  */
+	public void accept(double deviation) {
+    	super.accept(deviation);
+
+    	
+    	//if(curNode != -1){
+    	//	freqAcceptNode[curNode]++;
+    	//}
+    	//hotNodeProcedure();
+    	
+    	
+    	if(moveCounter > BURN_IN){
+    		acceptNum[operatorSelect]++;
+    	}
+    	printAcceptance();
+    	
+    	//printActiveNodes();
+  
+   	   	
+
+        			
+    			/*
+    			 *     //obsolete - to see which multistep gets acccepted
+    		if(operatorSelect ==2){
+    			acceptNumStep[ howmanyStepsMultiStep]++;
+    			
+    			if( howmanyStepsMultiStep > 0){
+    	    		System.out.println("accept operator " + operatorSelect+" with overall % accept = " + acceptNum[operatorSelect]/(acceptNum[operatorSelect] + rejectNum[operatorSelect]));
+
+    				System.out.println("   > with step = " + howmanyStepsMultiStep  + " (now only print this if step >0)");
+    	  			System.out.print(" dist=[" );
+        			for(int i=0; i <= maxNodeLevel; i++){
+        				System.out.print(acceptNumStep[i]/(acceptNumStep[i] + rejectNumStep[i]) + ",\t ");
+        			}
+        			System.out.println("]");
+    			}
+  
+    		}
+    		*/
+    	
+    		/*
+          	if(operatorSelect == 15){
+          		acceptDistance[ (int)  (muDistance)]++;
+          	}
+          	*/
+    }
+    
+    public void reject(){
+    	super.reject();
+    	
+    	//hotNodeProcedure();
+    	
+    	if(moveCounter > BURN_IN){
+    		rejectNum[operatorSelect]++;
+    	}
+    	printAcceptance();
+    	
+    	//printActiveNodes();
+    	
+    //obsolete - to see which multistep gets acccepted	
+	//	if(operatorSelect ==2){
+	//		rejectNumStep[ howmanyStepsMultiStep]++;
+	//	}
+    	/*
+      	if(operatorSelect == 15){
+      		rejectDistance[ (int) (muDistance)]++;
+      	}
+      	*/
+
+    }
+    
+	
+
+
+              
+            //MCMCOperator INTERFACE
+            public final String getOperatorName() {
+                return TREE_CLUSTERALGORITHM_OPERATOR;
+            }
+
+            public final void optimize(double targetProb) {
+
+                throw new RuntimeException("This operator cannot be optimized!");
+            }
+
+            public boolean isOptimizing() {
+                return false;
+            }
+
+            public void setOptimizing(boolean opt) {
+                throw new RuntimeException("This operator cannot be optimized!");
+            }
+
+            public double getMinimumAcceptanceLevel() {
+                return 0.1;
+            }
+
+            public double getMaximumAcceptanceLevel() {
+                return 0.4;
+            }
+
+            public double getMinimumGoodAcceptanceLevel() {
+                return 0.20;
+            }
+
+            public double getMaximumGoodAcceptanceLevel() {
+                return 0.30;
+            }
+
+            public String getPerformanceSuggestion() {
+                if (Utils.getAcceptanceProbability(this) < getMinimumAcceptanceLevel()) {
+                    return "";
+                } else if (Utils.getAcceptanceProbability(this) > getMaximumAcceptanceLevel()) {
+                    return "";
+                } else {
+                    return "";
+                }
+            }
+
+        
+           
+        
+
+            public static XMLObjectParser PARSER = new AbstractXMLObjectParser() {
+            	
+
+                public final static String VIRUSLOCATIONS = "virusLocations";
+                public final static String SERUMLOCATIONS = "serumLocations";
+
+            	public final static String  MU = "mu";
+            	public final static String CLUSTERLABELS = "clusterLabels";
+            	//public final static String K = "k";
+            	public final static String OFFSETS = "offsets";
+      //     	public final static String LOCATION_DRIFT = "locationDrift"; //no longer need
+            	
+                public final static String CLUSTER_OFFSETS = "clusterOffsetsParameter";
+                
+            	public final static String INDICATORS = "indicators";
+
+                public final static String EXCISION_POINTS = "excisionPoints";
+
+                public final static String MUPRECISION = "muPrecision";
+
+                
+                public final static String FILE_NAME = "fileName";
+
+                public final static String CLUSTERLABELSTREENODE = "clusterLabelsTreeNodes";
+                public final static String VIRUSLOCATIONSTREENODE = "virusLocationsTreeNodes";
+                
+                
+                public final static String MU1SCALE = "mu1Scale";
+                public final static String MU2SCALE = "mu2Scale";
+                public final static String MUMEAN = "muMean";
+                
+                public String getParserName() {
+                    return TREE_CLUSTERALGORITHM_OPERATOR;
+                }
+
+                /* (non-Javadoc)
+                 * @see dr.xml.AbstractXMLObjectParser#parseXMLObject(dr.xml.XMLObject)
+                 */
+                public Object parseXMLObject(XMLObject xo) throws XMLParseException {
+
+                	
+                    String fileName = xo.getStringAttribute(FILE_NAME);
+                    DataTable<String[]> proposalWeightTable;
+                    try {
+                    	proposalWeightTable = DataTable.Text.parse(new FileReader(fileName), false, false);
+                    	
+                    	                    	
+                    } catch (IOException e) {
+                        throw new XMLParseException("Unable to read proposal weight from file: " + e.getMessage());
+                    }
+                    System.out.println("Loaded proposal weight table file: " + fileName);
+
+                    
+                	//System.out.println("Parser run. Exit now");
+                	//System.exit(0);
+
+                    double weight = xo.getDoubleAttribute(MCMCOperator.WEIGHT);
+
+                    
+                    XMLObject cxo = xo.getChild(VIRUSLOCATIONS);
+                        MatrixParameter virusLocations = (MatrixParameter) cxo.getChild(MatrixParameter.class);
+                        
+                        
+                        cxo = xo.getChild(VIRUSLOCATIONSTREENODE);
+                        MatrixParameter virusLocationsTreeNode = (MatrixParameter) cxo.getChild(MatrixParameter.class);
+
+                        cxo = xo.getChild(SERUMLOCATIONS);
+                        MatrixParameter serumLocations = (MatrixParameter) cxo.getChild(MatrixParameter.class);
+
+                        
+                        cxo = xo.getChild(MU);
+                        MatrixParameter mu = (MatrixParameter) cxo.getChild(MatrixParameter.class);
+
+                        cxo = xo.getChild(CLUSTERLABELSTREENODE);
+                        Parameter clusterLabelsTreeNode = (Parameter) cxo.getChild(Parameter.class);
+                        
+                        cxo = xo.getChild(CLUSTERLABELS);
+                        Parameter clusterLabels = (Parameter) cxo.getChild(Parameter.class);
+
+                        //cxo = xo.getChild(K); //to be deleted
+                        //Parameter k = (Parameter) cxo.getChild(Parameter.class); //to be deleted
+                        
+                        
+          //              cxo = xo.getChild(OFFSETS);
+           //             Parameter offsets = (Parameter) cxo.getChild(Parameter.class);
+                        Parameter offsets = null;
+                        
+//                        cxo = xo.getChild(LOCATION_DRIFT);
+//                        Parameter locationDrift = (Parameter) cxo.getChild(Parameter.class);
+                        
+                        Parameter clusterOffsetsParameter = null;
+                //        if (xo.hasChildNamed(CLUSTER_OFFSETS)) {
+                 //       	clusterOffsetsParameter = (Parameter) xo.getElementFirstChild(CLUSTER_OFFSETS);
+                  //      }
+
+                        cxo = xo.getChild(INDICATORS);
+                        Parameter indicators = (Parameter) cxo.getChild(Parameter.class);
+                        
+                        cxo = xo.getChild(MUPRECISION);
+                        Parameter muPrecision = (Parameter) cxo.getChild(Parameter.class);
+                      
+                      
+                        
+                        cxo = xo.getChild(MU1SCALE);
+                        Parameter mu1Scale = (Parameter) cxo.getChild(Parameter.class);
+                        
+                        cxo = xo.getChild(MU2SCALE);
+                        Parameter mu2Scale = (Parameter) cxo.getChild(Parameter.class);
+                        
+                    TreeModel treeModel = (TreeModel) xo.getChild(TreeModel.class);
+                    	
+                    	//AGLikelihoodTreeCluster agLikelihood = (AGLikelihoodTreeCluster) xo.getChild(AGLikelihoodTreeCluster.class);
+                    AntigenicLikelihood agLikelihood = (AntigenicLikelihood) xo.getChild(AntigenicLikelihood.class);
+
+                    	cxo = xo.getChild(MUMEAN);
+                    	Parameter muMean = (Parameter) cxo.getChild(Parameter.class);
+                        
+                    //return new ClusterAlgorithmOperator(virusLocations, mu, clusterLabels, k, weight, offsets, locationDrift, clusterOffsetsParameter);
+                  //      return new TreeClusterAlgorithmOperator(virusLocations, serumLocations, mu, clusterLabels, k, weight, offsets,  clusterOffsetsParameter, indicators, treeModel, agLikelihood);
+                    return new TreeClusterAlgorithmOperator(virusLocations, virusLocationsTreeNode, serumLocations, mu,  clusterLabels, weight,  indicators, treeModel, agLikelihood, muPrecision, proposalWeightTable, clusterLabelsTreeNode, mu1Scale, mu2Scale, muMean);
+                    
+
+                }
+
+                //************************************************************************
+                // AbstractXMLObjectParser implementation
+                //************************************************************************
+
+                public String getParserDescription() {
+                    return "tree cluster algorithm's main operator.";
+                }
+
+                public Class getReturnType() {
+                    return TreeClusterAlgorithmOperator.class;
+                }
+
+
+                public XMLSyntaxRule[] getSyntaxRules() {
+                    return rules;
+                }
+
+                private final XMLSyntaxRule[] rules = {
+                        AttributeRule.newDoubleRule(MCMCOperator.WEIGHT),
+                        
+                        new ElementRule(VIRUSLOCATIONS, Parameter.class),
+                        new ElementRule(SERUMLOCATIONS, Parameter.class),
+                        new ElementRule(MU, Parameter.class),
+                        new ElementRule(CLUSTERLABELS, Parameter.class),
+                        //new ElementRule(K, Parameter.class), //to be deleted
+                  //      new ElementRule(OFFSETS, Parameter.class),
+                  //      new ElementRule(LOCATION_DRIFT, Parameter.class), //no longer needed
+   //                    
+                  //     new ElementRule(CLUSTER_OFFSETS, Parameter.class, "Parameter of cluster offsets of all virus"),  // no longer REQUIRED
+                       new ElementRule(INDICATORS, Parameter.class),
+                       new ElementRule(EXCISION_POINTS, Parameter.class),
+                       new ElementRule(TreeModel.class),
+                       new ElementRule(MUPRECISION, Parameter.class),
+                       new ElementRule(CLUSTERLABELSTREENODE, Parameter.class),
+                       new ElementRule(MU1SCALE, Parameter.class),
+                       new ElementRule(MU2SCALE, Parameter.class),
+                       new ElementRule(MUMEAN, Parameter.class),
+                       new ElementRule(VIRUSLOCATIONSTREENODE, MatrixParameter.class),
+                       AttributeRule.newStringRule(FILE_NAME, false, "The name of the file containing the assay table"),
+        
+            };
+            
+            };
+
+
+        
+            public int getStepCount() {
+                return 1;
+            }
+
+        }
+
+
+
+        
+
+/*
+ 
+	private void setClusterLabelsMaxIndex() {
+
+        int numNodes = treeModel.getNodeCount();
+
+		int K_int =0;
+		for(int i=0; i < numNodes; i++){
+			if( (int) indicators.getParameterValue(i) == 1){
+				K_int++;
+			}
+		}
+		
+        int[] cutNodes = new int[K_int];
+ 	   int cutNum = 0;
+ 	   String content = "";
+        for(int i=0; i < numNodes; i++){
+     	   if( (int) indicators.getParameterValue( i ) ==1 ){
+     		   cutNodes[cutNum] = i;
+     		   content +=  i +  ",";
+     		   cutNum++;
+     	   }
+     	  
+        }
+     //   System.out.println(content + " K_int=" + K_int);
+        
+        int []membership = determine_membership(treeModel, cutNodes, K_int);
+        
+        double uniqueCode = 0;
+        for(int i=0; i < numNodes; i++){
+        	uniqueCode += membership[i]*i;
+        }
+      //  System.out.println(" sum = " + uniqueCode);
+        
+     //   System.out.println("number of nodes = " + treeModel.getNodeCount());
+      //  for(int i=0; i < treeModel.getNodeCount(); i++){
+     //	   System.out.println(membership[i]);
+      //  }
+        
+        
+        //System.out.println("Done");
+        
+      //  for(int i=0; i < numdata; i++){
+ 	//	   Parameter v = virusLocations.getParameter(i);
+ 	//	   String curName = v.getParameterName();
+ 	//	   System.out.println("i=" + i + " = " + curName);       
+ 	//	}       
+        
+      //  for(int j=0; j < numdata; j++){
+     //	   System.out.println("j=" + j + " = " + treeModel.getTaxonId(j));
+      //  }
+        
+        
+ 	//   Parameter vv = virusLocations.getParameter(0);
+ 	 //  String curNamev = vv.getParameterName();
+ 	   
+ 	 //  System.out.println(curNamev + " and " +treeModel.getTaxonId(392) );
+ 	   //System.out.println(  curNamev.equals(treeModel.getTaxonId(392) )  );
+ 	   
+        
+        //System.exit(0);
+        
+ 	  // System.out.println("numNodes=" + numNodes);
+ 	  // System.exit(0);
+        //create dictionary:
+ 	   
+ 	   //I suspect this is an expensive operation, so I don't want to do it many times,
+ 	   //which is also unnecessary  - MAY have to update whenever a different tree is used.
+        int []membershipToClusterLabelIndexes = new int[numdata]; 
+        for(int i=0; i < numdata; i++){
+ 		   Parameter v = virusLocations.getParameter(i);
+ 		   String curName = v.getParameterName();
+ 		  // System.out.println(curName);
+ 		   int isFound = 0;
+     	   for(int j=0; j < numNodes; j++){
+     		   String treeId = treeModel.getTaxonId(j);
+     		   if(curName.equals(treeId) ){
+     		//	   System.out.println("  isFound at j=" + j);
+     			   membershipToClusterLabelIndexes[i] = j;
+     			   isFound=1;
+     			   break;
+     		   }
+     		   
+     	   }
+     	   if(isFound ==0){
+     		   System.out.println("not found. Exit now.");
+     		   System.exit(0);
+     	   }     	   
+        }
+        
+
+        
+        
+//        int anotherCount = 0;
+ //       for(int i=0; i < 20; i++){
+  //      	if( ((int) excisionPoints.getParameterValue(i)) ==1)
+   //     	anotherCount ++;
+  //      }
+//        System.out.println();
+   //     int maxLabel=0;
+    //    for(int i=0; i < numdata; i++){
+     //   	if(maxLabel < membership[membershipToClusterLabelIndexes[i]]){
+      //  		maxLabel = membership[membershipToClusterLabelIndexes[i]];
+       // 	}
+       // }
+        
+       // System.out.println("anotherCount=" + anotherCount + " K_int = " + K_int + " max label=" + maxLabel);
+        
+        
+       // System.exit(0);
+        
+      //  for(int i=0; i < numdata; i++){
+     //	   System.out.println(membershipToClusterLabelIndexes[i]);
+      //  }
+       // System.exit(0);
+        
+        for(int i=0; i < numdata; i++){
+     	   //The assumption that the first nodes being external node corresponding to the cluster labels IS FALSE
+     	   //so I have to search for the matching indexes
+     	   Parameter vloc = virusLocations.getParameter(i);
+  
+     	   
+//must uncomment out because this sets the new partitioning ... now i am doing code testing.     	   
+     	   clusterLabels.setParameterValue( i, membership[membershipToClusterLabelIndexes[i]]);
+     	   //System.out.println(vloc.getParameterName() + " i="+ i + " membership=" + (int) clusterLabels.getParameterValue(i));
+     	   
+     	 //  Parameter v = virusLocations.getParameter(i);
+     	  // System.out.println(v.getParameterName());
+        }
+        
+
+    	
+	}
+
+
+ 
+ */
+
+/*
+  
+
+	private void resetStatusAndBreakpointsGivenCutNodes(int[] testCutNode, int[] onPoints) {
+	
+		for(int i=0; i < numNodes; i++){
+			indicators.setParameterValue(i, 0);
+		}
+			
+	 	int numOn = testCutNode.length;
+	 	int countOn=0;
+	 	for(int i=0; i < onPoints.length; i++){
+			if(countOn < numOn){
+				indicators.setParameterValue(onPoints[countOn], 1);
+				countOn++;
+	 		}
+	 		
+	 	}
+	}
+
+	
+
+	private int[] setClusterLabelsByTestCutNodeByNodeOrder(int[] testCutNode) {
+        int []membership = determine_membershipByNodeOrder(treeModel, testCutNode, testCutNode.length);  // the time consuming step here.
+        
+  	   //The assumption that the first nodes being external node corresponding to the cluster labels IS FALSE
+  	   //so I have to search for the matching indexes
+       // for(int i=0; i < numdata; i++){
+     	//   clusterLabels.setParameterValue( i, membership[membershipToClusterLabelIndexes[i]]);
+       //}
+
+        //to speed up the code
+		int[] clusterLabel = new int[numdata];
+
+        for(int i=0; i < numdata; i++){
+        	clusterLabel[i] =  membership[membershipToClusterLabelIndexes[i]];
+        }
+        return(clusterLabel);
+	}
+    
+ */
+        
+        
+        //
+        
+        //THE OLD MULTI-STEP
+        /*
+		else if(operatorSelect == 2){
+//			else if(chooseOperator > 0.3){
+				
+
+				
+				//new move... move the node up and down, with the hope that it promotes mixing.
+			 		
+		  		int isOn = 0;
+		  		int I_selected = -1;
+		  		while(isOn == 0){
+		  			I_selected = (int) (Math.floor(Math.random()*binSize));
+		  			isOn = (int) status.getParameterValue(I_selected);  			
+		  		}    	  		
+		  		
+		 		
+		 		
+		 		
+					//Determining the number of steps from the original site
+					int []numStepsFromOrigin = new int[numNodes];
+					for(int i=0; i < numNodes; i++){
+						numStepsFromOrigin[i] = 100000;
+					}
+		 		
+		  		//System.out.println("Excision point = " + excisionPoints.getParameterValue(I_selected));
+
+		  		
+		  			int curElementNumber =(int) breakPoints.getParameterValue(I_selected);
+		  			
+		  			//curElementNumber = 700;//testing purpose
+		  			//maxNodeLevel= 10000000;
+		  			
+		  			
+		  			int rootElementNumber = curElementNumber;
+		  			//System.out.println("curElementNumber=" + curElementNumber);
+		  		
+		  			
+		  			NodeRef curElement = treeModel.getNode(curElementNumber); 
+		  			
+		  		    LinkedList<NodeRef> visitlist = new LinkedList<NodeRef>();
+		  		    LinkedList<NodeRef> fromlist = new LinkedList<NodeRef>();
+		  		    LinkedList<Integer> nodeLevel = new LinkedList<Integer>();
+		  		    
+		  		    LinkedList<Integer> possibilities = new LinkedList<Integer>();
+		  		    
+		  		    NodeRef dummyNode = null;
+		  		    visitlist.add(curElement);
+		  		    fromlist.add(dummyNode);
+		  		    nodeLevel.add(new Integer(0));
+		  		    
+		  		    //int xcount=0;
+		  		  //System.out.println("root node " + curElement.getNumber());
+	 		    while(visitlist.size() > 0){
+	 		    	//xcount++;
+				
+		  			if(treeModel.getParent(curElement) != null){
+		  				//add parent
+	  		  			NodeRef node= treeModel.getParent(curElement);	  		  			
+		  				if(fromlist.getFirst() != node){
+		  					if( nodeLevel.getFirst() < maxNodeLevel){
+		  						visitlist.add(node);
+		  		  				fromlist.add(curElement);
+		  		  				nodeLevel.add(new Integer(nodeLevel.getFirst()+1));
+		  		  				//System.out.println("node " +  node.getNumber() + " added, parent of " + curElement.getNumber());
+		  					}
+		  				}
+		  			}
+
+	  			
+		  			for(int childNum=0; childNum < treeModel.getChildCount(curElement); childNum++){
+		  				NodeRef node= treeModel.getChild(curElement,childNum);
+		  				if(fromlist.getFirst() != node){
+		  					if( nodeLevel.getFirst() < maxNodeLevel){
+		  						visitlist.add(node);
+		  						fromlist.add(curElement);
+		  						nodeLevel.add(new Integer(nodeLevel.getFirst()+1));
+		  						//System.out.println("node " +  node.getNumber() + " added, child of " + curElement.getNumber());
+		  					}
+		  				}
+		  	        }
+		  			
+
+
+		  			//System.out.println("visited " + curElement.getNumber());
+		  			//test if I can add curElement.getNumber()
+		  				int site_test = curElement.getNumber();
+			  			int hasBeenAdded=0;
+			  			for(int i=0; i < binSize; i++){
+			  				if( breakPoints.getParameterValue(i) == site_test){
+			  					hasBeenAdded=1;
+			  					break;
+			  				}
+			  			}
+			  			if(hasBeenAdded==0 || curElement.getNumber() == rootElementNumber ){
+			  				//System.out.println("to possibilities: add " + site_test);
+				  			numStepsFromOrigin[site_test] = nodeLevel.getFirst();
+			  				possibilities.addLast(new Integer( site_test));
+			  			}
+			  			else{
+			  				//System.out.println("element " + curElement.getNumber() + " is already an excision point");
+			  			}
+			  			
+			  			visitlist.pop();
+			  			fromlist.pop();
+			  			nodeLevel.pop();
+
+		  			if(visitlist.size() > 0){
+		  				curElement = visitlist.getFirst();
+		  			}
+		  			
+		  			
+				}
+		  					    
+	 		    
+	 		    //System.out.println("# visit = " + xcount);
+	 		    //System.exit(0);
+	 		    
+	 		   // for(int i=0; i < possibilities.size(); i++){
+	 		   // 	System.out.println(possibilities.get(i));
+	 		    //}
+	 		    //System.out.println(possibilities.size());
+	//System.exit(0);
+		  			int numPossibleMoves = possibilities.size();  // this number may be different from the max number because some moves may not be legal.
+		 			//System.out.print("  num possible moves = " + numPossibleMoves + "\t[");
+		  			//for(int i=0; i < numPossibleMoves; i++){
+		  			//	System.out.print( possibilities.get(i) + ",");
+		  			//}
+		  			//System.out.println("]");
+		  			//create a list of possible moves
+			 		
+		  			
+		  			int whichMove = (int) (Math.floor(Math.random()*numPossibleMoves));
+
+		  			
+		  			//for(int i=0; i < numPossibleMoves; i++){
+		  				//System.out.println(movePossibilities[i]);
+		  			//}
+		  			
+
+		  			//
+		  			int site_add = possibilities.get(whichMove);
+		  			breakPoints.setParameterValue(I_selected, site_add);
+		  			
+		  			
+		  			howmanyStepsMultiStep = numStepsFromOrigin[site_add];
+		  		   
+		  			
+		  	//    	System.out.println("Chose " + site_add + "  (steps=" + howmanyStepsMultiStep + ")");  		
+		//  			System.out.println("propose from " + curElementNumber + " to " + site_add);
+		  		
+		  		selectedI = I_selected;
+		  		
+		  		
+		  		//calculate the MH requires me to know the (number of) backward moves...
+	  			//System.out.println("curElementNumber=" + curElementNumber);
+		  		
+	  			curElementNumber = site_add;
+	  			rootElementNumber = curElementNumber;
+	  			//System.out.println("curElementNumber=" + curElementNumber);
+	  			curElement = treeModel.getNode(curElementNumber); 
+	  			
+	  		    visitlist = new LinkedList<NodeRef>();
+	  		    fromlist = new LinkedList<NodeRef>();
+	  		    nodeLevel = new LinkedList<Integer>();
+	  		    
+	  		    possibilities = new LinkedList<Integer>();
+	  		    
+	  		    dummyNode = null;
+	  		    visitlist.add(curElement);
+	  		    fromlist.add(dummyNode);
+	  		    nodeLevel.add(new Integer(0));
+	  		    
+	  		    
+	  		  //System.out.println("root node " + curElement.getNumber());
+			    while(visitlist.size() > 0){
+			
+	  			if(treeModel.getParent(curElement) != null){
+	  				//add parent
+			  			NodeRef node= treeModel.getParent(curElement);	  		  			
+	  				if(fromlist.getFirst() != node){
+	  					if( nodeLevel.getFirst() <= maxNodeLevel){
+	  						visitlist.add(node);
+	  		  				fromlist.add(curElement);
+	  		  				nodeLevel.add(new Integer(nodeLevel.getFirst()+1));
+	  		  				//System.out.println("node " +  node.getNumber() + " added, parent of " + curElement.getNumber());
+	  					}
+	  				}
+	  			}
+
+				
+	  			for(int childNum=0; childNum < treeModel.getChildCount(curElement); childNum++){
+	  				NodeRef node= treeModel.getChild(curElement,childNum);
+	  				if(fromlist.getFirst() != node){
+	  					if( nodeLevel.getFirst() <= maxNodeLevel){
+	  						visitlist.add(node);
+	  						fromlist.add(curElement);
+	  						nodeLevel.add(new Integer(nodeLevel.getFirst()+1));
+	  						//System.out.println("node " +  node.getNumber() + " added, child of " + curElement.getNumber());
+	  					}
+	  				}
+	  	        }
+	  			
+
+	  			//System.out.println("visited " + curElement.getNumber());
+	  			//test if I can add curElement.getNumber()
+	  				int site_test = curElement.getNumber();
+		  			int hasBeenAdded=0;
+		  			for(int i=0; i < binSize; i++){
+		  				if( breakPoints.getParameterValue(i) == site_test){
+		  					hasBeenAdded=1;
+		  					break;
+		  				}
+		  			}
+		  			if(hasBeenAdded==0 || curElement.getNumber() == rootElementNumber ){
+		  				//System.out.println("to possibilities: add " + site_test);
+		  				possibilities.addLast(new Integer( site_test));
+		  			}
+		  			else{
+		  				//System.out.println("element " + curElement.getNumber() + " is already an excision point");
+		  			}
+		  			
+
+		  			visitlist.pop();
+		  			fromlist.pop();
+		  			nodeLevel.pop();
+
+	  			
+	  			if(visitlist.size() > 0){
+	  				curElement = visitlist.getFirst();
+	  			}
+	  			
+	  			
+			}
+
+			    
+			   // for(int i=0; i < possibilities.size(); i++){
+			   // 	System.out.println(possibilities.get(i));
+			    //}
+			    //System.out.println(possibilities.size());
+	//System.exit(0);
+		  		
+		  		int newStatenumPossibleMoves = possibilities.size();  // this number may be different from the max number because some moves may not be legal.
+	  			//System.out.println("num new states moves = " + newStatenumPossibleMoves);
+		  		//System.out.println("#  possibilities = " + numPossibleMoves);
+		  		//System.out.println("# new possibilities = " + newStatenumPossibleMoves);
+
+		  		
+		  		
+		  		
+	//System.exit(0);
+		  		
+		  		logHastingRatio = Math.log( (1/ (double)newStatenumPossibleMoves) / (1/ (double)numPossibleMoves)  );
+		  		//System.out.println("log hasting ratio = " + logHastingRatio);	
+	  			//System.exit(0);
+		  		
+		  		
+		  		
+		  		
+		  		
+		  		
+		  		
+	//logHastingRatio = 100000;	  		//for testing only - the move should always be accepted
+//		  		System.out.println("treeClusterAlg's MH ratio =" + Math.exp(logHastingRatio));
+ }
+		  		*/
+
diff --git a/src/dr/evomodel/antigenic/phyloClustering/MCMCOperators/muMeanTranslateInactiveMu1Operator.java b/src/dr/evomodel/antigenic/phyloClustering/MCMCOperators/muMeanTranslateInactiveMu1Operator.java
new file mode 100644
index 0000000..5fc360f
--- /dev/null
+++ b/src/dr/evomodel/antigenic/phyloClustering/MCMCOperators/muMeanTranslateInactiveMu1Operator.java
@@ -0,0 +1,219 @@
+package dr.evomodel.antigenic.phyloClustering.MCMCOperators;
+
+import java.io.FileReader;
+import java.io.IOException;
+import java.util.LinkedList;
+
+import dr.evolution.tree.NodeRef;
+import dr.evomodel.tree.TreeModel;
+import dr.inference.model.MatrixParameter;
+import dr.inference.model.Parameter;
+import dr.inference.operators.AbstractCoercableOperator;
+import dr.inference.operators.CoercionMode;
+import dr.inference.operators.MCMCOperator;
+import dr.inference.operators.OperatorFailedException;
+import dr.inference.operators.OperatorUtils;
+import dr.inference.operators.SimpleMCMCOperator;
+import dr.inference.operators.MCMCOperator.Utils;
+import dr.math.MathUtils;
+import dr.util.DataTable;
+import dr.xml.AbstractXMLObjectParser;
+import dr.xml.AttributeRule;
+import dr.xml.ElementRule;
+import dr.xml.XMLObject;
+import dr.xml.XMLObjectParser;
+import dr.xml.XMLParseException;
+import dr.xml.XMLSyntaxRule;
+
+public class muMeanTranslateInactiveMu1Operator extends AbstractCoercableOperator   {
+
+	
+    private MatrixParameter mu = null;
+    private Parameter muMean = null;   
+
+    private Parameter indicators;
+    
+	
+    private double windowSize = 0.5;
+
+	
+	public muMeanTranslateInactiveMu1Operator(double weight,  MatrixParameter mu, Parameter indicators, Parameter muMean, double windowSize){
+  
+        super(CoercionMode.COERCION_ON);
+
+		setWeight(weight);
+        this.mu = mu;
+        this.indicators = indicators;
+        this.muMean = muMean;
+		this.windowSize = windowSize;
+
+
+	}
+	
+	
+
+	public double doOperation() throws OperatorFailedException {
+       System.out.println("run here stop");
+       System.exit(0);
+		//unbounded walk
+        double change = (2.0 * MathUtils.nextDouble() - 1.0) * windowSize;
+        
+		//change mu1Scale
+
+		double original_muMean_Val = muMean.getParameterValue(0);
+   	    double new_muMean_Val = change + original_muMean_Val;
+		muMean.setParameterValue(0, new_muMean_Val);
+		
+		//translate all the inactive mean mu
+		int numNodes = mu.getColumnDimension();
+		//make sure all the active mu's first dimension stays intact 
+		for(int i=0; i < numNodes; i++){
+			if( (int) indicators.getParameterValue(i) == 0){
+				double oldValue = mu.getParameter(i).getParameterValue(0);
+				double newValue =  oldValue +change;
+				mu.getParameter(i).setParameterValue(0, newValue);
+			}
+		}
+		
+		return 0;
+
+	}
+	
+	
+	
+
+
+	 //MCMCOperator INTERFACE
+   public double getCoercableParameter() {
+       return Math.log(windowSize);
+   }
+
+   public void setCoercableParameter(double value) {
+       windowSize = Math.exp(value);
+   }
+
+   public double getRawParameter() {
+       return windowSize;
+   }
+
+   public double getTargetAcceptanceProbability() {
+       return 0.234;
+   }
+
+   public double getMinimumAcceptanceLevel() {
+       return 0.1;
+   }
+
+   public double getMaximumAcceptanceLevel() {
+       return 0.4;
+   }
+
+   public double getMinimumGoodAcceptanceLevel() {
+       return 0.20;
+   }
+
+   public double getMaximumGoodAcceptanceLevel() {
+       return 0.30;
+   }
+
+   public final String getPerformanceSuggestion() {
+
+       double prob = MCMCOperator.Utils.getAcceptanceProbability(this);
+       double targetProb = getTargetAcceptanceProbability();
+
+       double ws = OperatorUtils.optimizeWindowSize(windowSize, prob, targetProb);
+
+       if (prob < getMinimumGoodAcceptanceLevel()) {
+           return "Try decreasing windowSize to about " + ws;
+       } else if (prob > getMaximumGoodAcceptanceLevel()) {
+           return "Try increasing windowSize to about " + ws;
+       } else return "";
+   }
+
+	
+	
+    public final static String muMeanTranslateInactiveMu1OperatorStr = "muMeanTranslateInactiveMu1Operator";
+
+    public final String getOperatorName() {
+        return muMeanTranslateInactiveMu1OperatorStr;
+    }
+
+    
+
+
+    
+    
+    
+    public static XMLObjectParser PARSER = new AbstractXMLObjectParser() {
+    	
+
+
+    	public final static String  MU = "mu";
+    	public final static String  MUMEAN = "muMean";       
+    	public final static String INDICATORS = "indicators";
+    	public final static String WINDOWSIZE = "windowSize";
+
+        public String getParserName() {
+            return muMeanTranslateInactiveMu1OperatorStr;
+        }
+
+        /* (non-Javadoc)
+         * @see dr.xml.AbstractXMLObjectParser#parseXMLObject(dr.xml.XMLObject)
+         */
+        public Object parseXMLObject(XMLObject xo) throws XMLParseException {
+
+
+            double weight = xo.getDoubleAttribute(MCMCOperator.WEIGHT);
+            double windowSize = xo.getDoubleAttribute(WINDOWSIZE);
+            
+                XMLObject cxo = xo.getChild(MU);
+                MatrixParameter mu = (MatrixParameter) cxo.getChild(MatrixParameter.class);
+
+                cxo = xo.getChild(INDICATORS);
+                Parameter indicators = (Parameter) cxo.getChild(Parameter.class);
+
+                cxo = xo.getChild(MUMEAN);
+                Parameter muMean = (Parameter) cxo.getChild(Parameter.class);
+
+            return new muMeanTranslateInactiveMu1Operator(weight, mu, indicators, muMean, windowSize);
+        	
+
+
+        }
+
+        //************************************************************************
+        // AbstractXMLObjectParser implementation
+        //************************************************************************
+
+        public String getParserDescription() {
+            return "changes mu1Scale and make sure the first dimension of the active drifted mus stay the same";
+        }
+
+        public Class getReturnType() {
+            return muMeanTranslateInactiveMu1Operator.class;
+        }
+
+
+        public XMLSyntaxRule[] getSyntaxRules() {
+            return rules;
+        }
+
+        private final XMLSyntaxRule[] rules = {
+                AttributeRule.newDoubleRule(MCMCOperator.WEIGHT),
+                AttributeRule.newDoubleRule(WINDOWSIZE),
+                new ElementRule(MU, Parameter.class),
+               new ElementRule(INDICATORS, Parameter.class),
+               new ElementRule(MUMEAN, Parameter.class),
+
+        };
+    
+    };
+
+
+
+    public int getStepCount() {
+        return 1;
+    }
+    
+
+}
diff --git a/src/dr/evomodel/antigenic/phyloClustering/MCMCOperators/muPrecisionInactiveMuOperator.java b/src/dr/evomodel/antigenic/phyloClustering/MCMCOperators/muPrecisionInactiveMuOperator.java
new file mode 100644
index 0000000..a337626
--- /dev/null
+++ b/src/dr/evomodel/antigenic/phyloClustering/MCMCOperators/muPrecisionInactiveMuOperator.java
@@ -0,0 +1,224 @@
+
+package dr.evomodel.antigenic.phyloClustering.MCMCOperators;
+
+import java.io.FileReader;
+import java.io.IOException;
+import java.util.LinkedList;
+
+import dr.evolution.tree.NodeRef;
+import dr.evomodel.tree.TreeModel;
+import dr.inference.model.MatrixParameter;
+import dr.inference.model.Parameter;
+import dr.inference.operators.AbstractCoercableOperator;
+import dr.inference.operators.CoercionMode;
+import dr.inference.operators.MCMCOperator;
+import dr.inference.operators.OperatorFailedException;
+import dr.inference.operators.OperatorUtils;
+import dr.inference.operators.SimpleMCMCOperator;
+import dr.inference.operators.MCMCOperator.Utils;
+import dr.math.MathUtils;
+import dr.util.DataTable;
+import dr.xml.AbstractXMLObjectParser;
+import dr.xml.AttributeRule;
+import dr.xml.ElementRule;
+import dr.xml.XMLObject;
+import dr.xml.XMLObjectParser;
+import dr.xml.XMLParseException;
+import dr.xml.XMLSyntaxRule;
+
+public class muPrecisionInactiveMuOperator extends AbstractCoercableOperator {
+
+	
+   
+    private Parameter muPrecision = null;   
+    private MatrixParameter mu = null;
+    private Parameter indicators = null;
+    private double scaleFactor;
+    private Parameter muMean = null;
+
+	public muPrecisionInactiveMuOperator(double weight, MatrixParameter mu, Parameter muPrec,  double scale, Parameter indicators, Parameter muMean){
+    
+        super(CoercionMode.COERCION_ON);
+		
+		setWeight(weight);
+        this.mu = mu;
+        this.muPrecision = muPrec;
+		this.scaleFactor = scale;
+		this.indicators = indicators;
+		this.muMean = muMean;
+	
+	}
+	
+	
+
+	public double doOperation() throws OperatorFailedException {
+
+		
+        final double scale = (scaleFactor + (MathUtils.nextDouble() * ((1.0 / scaleFactor) - scaleFactor)));
+ 
+		double original_muPrec_Val = muPrecision.getParameterValue(0);
+		double new_muPrec_Val = scale * original_muPrec_Val;
+	   
+	   
+		muPrecision.setParameterValue(0, new_muPrec_Val);
+		 
+		
+		for(int i=0; i < mu.getColumnDimension(); i++){
+			
+			//only change the inactive mus
+			if(indicators.getParameterValue(i) == 0){
+				for(int j=0; j < 2; j++){
+					if(j==0){
+						double translatedValue = (mu.getParameter(i).getParameterValue(j) - muMean.getParameterValue(0) );				
+						double newValue =  translatedValue *  Math.sqrt( original_muPrec_Val/new_muPrec_Val) + muMean.getParameterValue(0); 
+						mu.getParameter(i).setParameterValue(j, newValue);
+					}
+					else{
+						double oldValue = mu.getParameter(i).getParameterValue(j);				
+						double newValue =  oldValue *  Math.sqrt( original_muPrec_Val/new_muPrec_Val); 
+						mu.getParameter(i).setParameterValue(j, newValue);
+					}
+				}
+			}
+
+		}
+		
+		
+        double logq = -Math.log(scale);
+        return logq;
+	}
+	
+	
+	
+
+
+	//copied from the original ScaleOperator
+    public double getCoercableParameter() {
+        return Math.log(1.0 / scaleFactor - 1.0);
+    }
+
+	//copied from the original ScaleOperator
+    public void setCoercableParameter(double value) {
+        scaleFactor = 1.0 / (Math.exp(value) + 1.0);
+    }
+
+	//copied from the original ScaleOperator
+    public double getRawParameter() {
+        return scaleFactor;
+    }
+
+	
+	
+	//copied from the original ScaleOperator
+    public double getTargetAcceptanceProbability() {
+        return 0.234;
+    }
+	//copied from the original ScaleOperator
+    public final String getPerformanceSuggestion() {
+
+        double prob = MCMCOperator.Utils.getAcceptanceProbability(this);
+        double targetProb = getTargetAcceptanceProbability();
+        dr.util.NumberFormatter formatter = new dr.util.NumberFormatter(5);
+        double sf = OperatorUtils.optimizeScaleFactor(scaleFactor, prob, targetProb);
+        if (prob < getMinimumGoodAcceptanceLevel()) {
+            return "Try setting scaleFactor to about " + formatter.format(sf);
+        } else if (prob > getMaximumGoodAcceptanceLevel()) {
+            return "Try setting scaleFactor to about " + formatter.format(sf);
+        } else return "";
+    }
+	
+	
+
+    
+    public final static String CLASSNAME = "muPrecisionInactiveMuOperator";
+
+    public final String getOperatorName() {
+        return CLASSNAME;
+    }
+
+    
+
+
+    
+    
+    
+    public static XMLObjectParser PARSER = new AbstractXMLObjectParser() {
+    	
+
+        public final static String MU = "mu";
+    	public final static String  MUPREC = "muPrec";       
+
+    	public final static String SCALE = "scaleFactor";
+
+    	public final static String MUMEAN = "muMean";
+    	public final static String INDICATORS = "indicators";
+
+
+        public String getParserName() {
+            return CLASSNAME;
+        }
+
+        /* (non-Javadoc)
+         * @see dr.xml.AbstractXMLObjectParser#parseXMLObject(dr.xml.XMLObject)
+         */
+        public Object parseXMLObject(XMLObject xo) throws XMLParseException {
+
+
+            double weight = xo.getDoubleAttribute(MCMCOperator.WEIGHT);
+            double scale = xo.getDoubleAttribute(SCALE);
+
+            
+            XMLObject cxo = xo.getChild(MU);
+                MatrixParameter mu = (MatrixParameter) cxo.getChild(MatrixParameter.class);
+               
+    
+                cxo = xo.getChild(MUPREC);
+                Parameter muPrec = (Parameter) cxo.getChild(Parameter.class);
+
+                cxo = xo.getChild(INDICATORS);
+                Parameter indicators = (Parameter) cxo.getChild(Parameter.class);
+                
+                cxo = xo.getChild(MUMEAN);
+                Parameter muMean = (Parameter) cxo.getChild(Parameter.class);
+                
+            return new muPrecisionInactiveMuOperator(weight, mu, muPrec,  scale, indicators, muMean);
+            
+
+        }
+
+        //************************************************************************
+        // AbstractXMLObjectParser implementation
+        //************************************************************************
+
+        public String getParserDescription() {
+            return "changes serum drift and make sure the first dimension of the active drifted mus stay the same";
+        }
+
+        public Class getReturnType() {
+            return muPrecisionInactiveMuOperator.class;
+        }
+
+
+        public XMLSyntaxRule[] getSyntaxRules() {
+            return rules;
+        }
+
+        private final XMLSyntaxRule[] rules = {
+                AttributeRule.newDoubleRule(MCMCOperator.WEIGHT),
+                AttributeRule.newDoubleRule(SCALE),
+                new ElementRule(MU, Parameter.class),
+               new ElementRule(MUPREC, Parameter.class),
+               new ElementRule(INDICATORS, Parameter.class),
+               new ElementRule(MUMEAN, Parameter.class)
+        };
+    
+    };
+
+
+
+    public int getStepCount() {
+        return 1;
+    }
+    
+
+}
diff --git a/src/dr/evomodel/antigenic/phyloClustering/MCMCOperators/randomWalkSerumDriftAndMu.java b/src/dr/evomodel/antigenic/phyloClustering/MCMCOperators/randomWalkSerumDriftAndMu.java
new file mode 100644
index 0000000..b1266ec
--- /dev/null
+++ b/src/dr/evomodel/antigenic/phyloClustering/MCMCOperators/randomWalkSerumDriftAndMu.java
@@ -0,0 +1,232 @@
+package dr.evomodel.antigenic.phyloClustering.MCMCOperators;
+
+
+import java.io.BufferedReader;
+import java.io.FileNotFoundException;
+import java.io.FileReader;
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.Map;
+
+import dr.evolution.tree.NodeRef;
+import dr.evomodel.tree.TreeModel;
+import dr.inference.model.MatrixParameter;
+import dr.inference.model.Parameter;
+import dr.inference.operators.MCMCOperator;
+import dr.inference.operators.SimpleMCMCOperator;
+import dr.inference.operators.MCMCOperator.Utils;
+import dr.math.MathUtils;
+import dr.math.distributions.MultivariateNormalDistribution;
+import dr.util.DataTable;
+import dr.xml.*;
+
+
+/**
+ * An operator to cluster viruses using a phylogenetic tree
+ *
+ * @author Charles Cheung
+ * @author Trevor Bedford
+ */
+public class randomWalkSerumDriftAndMu extends SimpleMCMCOperator  {
+	
+	
+    public static final String SERUMDRIFT_AND_MU_OPERATOR = "SerumDriftAndMuOperator";
+    
+    
+	//Variables
+    Parameter indicators;
+    MatrixParameter mu; //mu - means
+    Parameter serumDrift;	
+    private TreeModel treeModel;
+
+    private double maxWalkSize;
+	 
+    //Constructor
+    public randomWalkSerumDriftAndMu(	MatrixParameter mu, 
+    									double weight, 
+    									Parameter indicatorsParameter, 
+    									Parameter serumDrift_in,
+    									double max_walk_size_in,
+    									TreeModel treeModel_in) {
+    	this.mu = mu;
+    	this.indicators = indicatorsParameter;
+       	this.serumDrift = serumDrift_in;
+       	this.maxWalkSize = max_walk_size_in;
+		this.treeModel= treeModel_in;
+
+        setWeight(weight);
+        
+        System.out.println("Finished loading the constructor for SERUMDRIFT_AND_MU_OPERATOR");
+    	
+    }
+    
+
+    
+ 
+    /**
+     * change the parameter and return the log hastings ratio.
+     */
+    public final double doOperation() {
+
+    	double logHastingRatio = 0; //initiate the log Metropolis Hastings ratio of the MCMC
+
+    	
+    	int rootNode  = treeModel.getRoot().getNumber();
+    	//perform proposal
+    	
+    	//random walk serum drift 1
+		double change = Math.random()*maxWalkSize- maxWalkSize/2 ; 
+		double originalValue = serumDrift.getParameterValue(0);
+		double newValue = originalValue + change;
+		serumDrift.setParameterValue(0, newValue);
+		
+		for(int i=0; i < mu.getParameterCount(); i++){
+			if( (int) indicators.getParameterValue(i) == 1  && i != rootNode ){
+				Parameter curMu = mu.getParameter(i);
+				double originalMu0 = curMu.getParameterValue(0);
+				double newMu0 = originalMu0 * newValue/originalValue;
+				curMu.setParameterValue(0, newMu0);
+			}
+		}
+    	
+			
+    	return(logHastingRatio);    	
+    }
+    	
+
+	  
+	public void accept(double deviation) {
+    	super.accept(deviation);         	
+    }
+    
+    public void reject(){
+    	super.reject();
+    }
+    
+	
+
+             
+            //MCMCOperator INTERFACE
+            public final String getOperatorName() {
+                return SERUMDRIFT_AND_MU_OPERATOR;
+            }
+
+            public final void optimize(double targetProb) {
+
+                throw new RuntimeException("This operator cannot be optimized!");
+            }
+
+            public boolean isOptimizing() {
+                return false;
+            }
+
+            public void setOptimizing(boolean opt) {
+                throw new RuntimeException("This operator cannot be optimized!");
+            }
+
+            public double getMinimumAcceptanceLevel() {
+                return 0.1;
+            }
+
+            public double getMaximumAcceptanceLevel() {
+                return 0.4;
+            }
+
+            public double getMinimumGoodAcceptanceLevel() {
+                return 0.20;
+            }
+
+            public double getMaximumGoodAcceptanceLevel() {
+                return 0.30;
+            }
+
+            public String getPerformanceSuggestion() {
+                if (Utils.getAcceptanceProbability(this) < getMinimumAcceptanceLevel()) {
+                    return "";
+                } else if (Utils.getAcceptanceProbability(this) > getMaximumAcceptanceLevel()) {
+                    return "";
+                } else {
+                    return "";
+                }
+            }
+
+        
+           
+        
+
+            public static XMLObjectParser PARSER = new AbstractXMLObjectParser() {
+            	
+
+            	public final static String  MU = "mu";
+            	public final static String SERUMDRIFT = "serumDrift";
+            	public final static String INDICATORS = "indicators";
+            	public final static String WALKSIZE = "walkSize";
+                
+                public String getParserName() {
+                    return SERUMDRIFT_AND_MU_OPERATOR;
+                }
+
+                /* (non-Javadoc)
+                 * @see dr.xml.AbstractXMLObjectParser#parseXMLObject(dr.xml.XMLObject)
+                 */
+                public Object parseXMLObject(XMLObject xo) throws XMLParseException {
+
+                    double weight = xo.getDoubleAttribute(MCMCOperator.WEIGHT);
+
+                    double walk_size = 0.05;
+                	if (xo.hasAttribute(WALKSIZE)) {
+                		walk_size = xo.getDoubleAttribute(WALKSIZE);
+                	}
+                    
+                    XMLObject cxo =  xo.getChild(MU);
+                        MatrixParameter mu = (MatrixParameter) cxo.getChild(MatrixParameter.class);
+
+                        cxo = xo.getChild(SERUMDRIFT);
+                        Parameter serumDrift = (Parameter) cxo.getChild(Parameter.class);
+
+                        cxo = xo.getChild(INDICATORS);
+                        Parameter indicators = (Parameter) cxo.getChild(Parameter.class);
+
+                        TreeModel treeModel = (TreeModel) xo.getChild(TreeModel.class);
+
+
+                        return new randomWalkSerumDriftAndMu(mu,  weight,  indicators, serumDrift, walk_size, treeModel);
+                    
+
+                }
+
+                //************************************************************************
+                // AbstractXMLObjectParser implementation
+                //************************************************************************
+
+                public String getParserDescription() {
+                    return "An operator that picks a new allocation of an item to a cluster under the Dirichlet process.";
+                }
+
+                public Class getReturnType() {
+                    return TreeClusterAlgorithmOperator.class;
+                }
+
+
+                public XMLSyntaxRule[] getSyntaxRules() {
+                    return rules;
+                }
+
+                private final XMLSyntaxRule[] rules = {
+                        AttributeRule.newDoubleRule(MCMCOperator.WEIGHT),
+                        AttributeRule.newDoubleRule(WALKSIZE),
+                        new ElementRule(MU, Parameter.class),
+                        new ElementRule(SERUMDRIFT, Parameter.class),
+                       new ElementRule(INDICATORS, Parameter.class),
+                       new ElementRule(TreeModel.class),
+                };
+            
+            };
+
+
+        }
+
+
+
+        
diff --git a/src/dr/evomodel/antigenic/phyloClustering/MCMCOperators/serumDriftActiveScaledMu1Operator.java b/src/dr/evomodel/antigenic/phyloClustering/MCMCOperators/serumDriftActiveScaledMu1Operator.java
new file mode 100644
index 0000000..965cf27
--- /dev/null
+++ b/src/dr/evomodel/antigenic/phyloClustering/MCMCOperators/serumDriftActiveScaledMu1Operator.java
@@ -0,0 +1,289 @@
+
+package dr.evomodel.antigenic.phyloClustering.MCMCOperators;
+
+import java.io.FileReader;
+import java.io.IOException;
+import java.util.LinkedList;
+
+import dr.evolution.tree.NodeRef;
+import dr.evomodel.antigenic.phyloClustering.Tree_Clustering_Shared_Routines;
+import dr.evomodel.tree.TreeModel;
+import dr.inference.model.MatrixParameter;
+import dr.inference.model.Parameter;
+import dr.inference.operators.AbstractCoercableOperator;
+import dr.inference.operators.CoercionMode;
+import dr.inference.operators.MCMCOperator;
+import dr.inference.operators.OperatorFailedException;
+import dr.inference.operators.OperatorUtils;
+import dr.inference.operators.SimpleMCMCOperator;
+import dr.inference.operators.MCMCOperator.Utils;
+import dr.math.MathUtils;
+import dr.util.DataTable;
+import dr.xml.AbstractXMLObjectParser;
+import dr.xml.AttributeRule;
+import dr.xml.ElementRule;
+import dr.xml.XMLObject;
+import dr.xml.XMLObjectParser;
+import dr.xml.XMLParseException;
+import dr.xml.XMLSyntaxRule;
+
+public class serumDriftActiveScaledMu1Operator extends AbstractCoercableOperator {
+
+	
+    private MatrixParameter mu = null;
+    private Parameter serumDrift = null;   
+    private MatrixParameter virusLocations = null;
+    private MatrixParameter virusLocationsTreeNode = null;
+    private Parameter indicators;
+    private TreeModel treeModel;
+    
+    private int numdata;   
+    private int numNodes;
+	private int []correspondingTreeIndexForVirus = null; //relates treeModels's indexing system to cluster label's indexing system of viruses. Gets assigned
+
+	private double scaleFactor;
+
+	
+	public serumDriftActiveScaledMu1Operator(double weight, MatrixParameter virusLocations, MatrixParameter mu, Parameter indicators, Parameter serumDrift, TreeModel treeModel_in, double scale, MatrixParameter virusLocationsTreeNode_in){
+    
+        super(CoercionMode.COERCION_ON);
+		
+		setWeight(weight);
+        this.virusLocations = virusLocations;
+        this.mu = mu;
+        this.indicators = indicators;
+        this.serumDrift = serumDrift;
+		this.treeModel= treeModel_in;
+		this.scaleFactor = scale;
+		this.virusLocationsTreeNode = virusLocationsTreeNode_in;
+		
+		numNodes = treeModel.getNodeCount();
+		numdata = virusLocations.getColumnDimension();
+
+		correspondingTreeIndexForVirus = Tree_Clustering_Shared_Routines.setMembershipTreeToVirusIndexes(numdata, virusLocations, numNodes, treeModel);
+    	Tree_Clustering_Shared_Routines.updateUndriftedVirusLocations(numNodes, numdata, treeModel, virusLocationsTreeNode, indicators, mu, virusLocations, correspondingTreeIndexForVirus);
+	}
+	
+	
+
+	public double doOperation() throws OperatorFailedException {
+
+		
+        final double scale = (scaleFactor + (MathUtils.nextDouble() * ((1.0 / scaleFactor) - scaleFactor)));
+
+ 
+		//change serum drift
+		//double WALK_SIZE = 0.2; //when the walk size becomes 0.5, .... things become weird.. too big
+		//double change = Math.random()*WALK_SIZE- WALK_SIZE/2 ;
+		double original_serumDrift_Val = serumDrift.getParameterValue(0);
+	//	System.out.println("original_serumDrift_Val = " + original_serumDrift_Val);
+		//double new_serumDrift_Val = change + original_serumDrift_Val;
+	   double new_serumDrift_Val = scale * original_serumDrift_Val;
+	   
+	   
+		serumDrift.setParameterValue(0, new_serumDrift_Val);
+		
+	//	System.out.println("new_serumDrift_Val=" + serumDrift.getParameterValue(0));
+		//make sure all the active mu's first dimension stays intact 
+		for(int i=0; i < numNodes; i++){
+			if( (int) indicators.getParameterValue(i) == 1){
+				double oldValue = mu.getParameter(i).getParameterValue(0);				
+				double newValue =  oldValue * new_serumDrift_Val/original_serumDrift_Val;
+				mu.getParameter(i).setParameterValue(0, newValue);
+	//			System.out.println("indicator" + i + "  oldValue = " + oldValue + " and newValue=" + mu.getParameter(i).getParameterValue(0));
+			}
+		}
+		
+		//the virus location needs to be updated because the mu's are updated 	  				
+    	Tree_Clustering_Shared_Routines.updateUndriftedVirusLocations(numNodes, numdata, treeModel, virusLocationsTreeNode, indicators, mu, virusLocations, correspondingTreeIndexForVirus);
+		
+		
+        double logq = -Math.log(scale);
+    //    System.out.println("logq=" + logq);
+    //    System.out.println("================================================");
+		//return 0;
+        return logq;
+	}
+	
+	
+	
+
+
+	//copied from the original ScaleOperator
+    public double getCoercableParameter() {
+        return Math.log(1.0 / scaleFactor - 1.0);
+    }
+
+	//copied from the original ScaleOperator
+    public void setCoercableParameter(double value) {
+        scaleFactor = 1.0 / (Math.exp(value) + 1.0);
+    }
+
+	//copied from the original ScaleOperator
+    public double getRawParameter() {
+        return scaleFactor;
+    }
+
+	
+	
+	//copied from the original ScaleOperator
+    public double getTargetAcceptanceProbability() {
+        return 0.234;
+    }
+	//copied from the original ScaleOperator
+    public final String getPerformanceSuggestion() {
+
+        double prob = MCMCOperator.Utils.getAcceptanceProbability(this);
+        double targetProb = getTargetAcceptanceProbability();
+        dr.util.NumberFormatter formatter = new dr.util.NumberFormatter(5);
+        double sf = OperatorUtils.optimizeScaleFactor(scaleFactor, prob, targetProb);
+        if (prob < getMinimumGoodAcceptanceLevel()) {
+            return "Try setting scaleFactor to about " + formatter.format(sf);
+        } else if (prob > getMaximumGoodAcceptanceLevel()) {
+            return "Try setting scaleFactor to about " + formatter.format(sf);
+        } else return "";
+    }
+	
+	
+/*	
+    public String getPerformanceSuggestion() {
+        if (Utils.getAcceptanceProbability(this) < getMinimumAcceptanceLevel()) {
+            return "";
+        } else if (Utils.getAcceptanceProbability(this) > getMaximumAcceptanceLevel()) {
+            return "";
+        } else {
+            return "";
+        }
+    }
+
+    public final void optimize(double targetProb) {
+
+        throw new RuntimeException("This operator cannot be optimized!");
+    }
+
+    public boolean isOptimizing() {
+        return false;
+    }
+
+    public void setOptimizing(boolean opt) {
+        throw new RuntimeException("This operator cannot be optimized!");
+    }
+
+    public double getMinimumAcceptanceLevel() {
+        return 0.1;
+    }
+
+    public double getMaximumAcceptanceLevel() {
+        return 0.4;
+    }
+
+    public double getMinimumGoodAcceptanceLevel() {
+        return 0.20;
+    }
+
+    public double getMaximumGoodAcceptanceLevel() {
+        return 0.30;
+    }
+*/
+    
+    
+    
+    public final static String SERUMDRIFTACTIVESCALEDMU1Operator = "serumDriftActiveScaledMu1Operator";
+
+    public final String getOperatorName() {
+        return SERUMDRIFTACTIVESCALEDMU1Operator;
+    }
+
+    
+
+
+    
+    
+    
+    public static XMLObjectParser PARSER = new AbstractXMLObjectParser() {
+    	
+
+        public final static String VIRUSLOCATIONS = "virusLocations";
+        public final static String VIRUSLOCATIONSTREENODE = "virusLocationsTreeNodes";
+    	public final static String  MU = "mu";
+    	public final static String  SERUMDRIFT = "serumDrift";       
+    	public final static String INDICATORS = "indicators";
+    	public final static String SCALE = "scaleFactor";
+
+
+        public String getParserName() {
+            return SERUMDRIFTACTIVESCALEDMU1Operator;
+        }
+
+        /* (non-Javadoc)
+         * @see dr.xml.AbstractXMLObjectParser#parseXMLObject(dr.xml.XMLObject)
+         */
+        public Object parseXMLObject(XMLObject xo) throws XMLParseException {
+
+
+            double weight = xo.getDoubleAttribute(MCMCOperator.WEIGHT);
+            double scale = xo.getDoubleAttribute(SCALE);
+
+            
+            XMLObject cxo = xo.getChild(VIRUSLOCATIONS);
+                MatrixParameter virusLocations = (MatrixParameter) cxo.getChild(MatrixParameter.class);
+               
+                cxo = xo.getChild(VIRUSLOCATIONSTREENODE);
+                MatrixParameter virusLocationsTreeNode = (MatrixParameter) cxo.getChild(MatrixParameter.class);
+               
+                
+                cxo = xo.getChild(MU);
+                MatrixParameter mu = (MatrixParameter) cxo.getChild(MatrixParameter.class);
+
+                cxo = xo.getChild(INDICATORS);
+                Parameter indicators = (Parameter) cxo.getChild(Parameter.class);
+
+                cxo = xo.getChild(SERUMDRIFT);
+                Parameter serumDrift = (Parameter) cxo.getChild(Parameter.class);
+
+                TreeModel treeModel = (TreeModel) xo.getChild(TreeModel.class);
+
+            return new serumDriftActiveScaledMu1Operator(weight, virusLocations, mu, indicators, serumDrift, treeModel, scale, virusLocationsTreeNode);
+            
+
+        }
+
+        //************************************************************************
+        // AbstractXMLObjectParser implementation
+        //************************************************************************
+
+        public String getParserDescription() {
+            return "changes serum drift and make sure the first dimension of the active drifted mus stay the same";
+        }
+
+        public Class getReturnType() {
+            return serumDriftActiveScaledMu1Operator.class;
+        }
+
+
+        public XMLSyntaxRule[] getSyntaxRules() {
+            return rules;
+        }
+
+        private final XMLSyntaxRule[] rules = {
+                AttributeRule.newDoubleRule(MCMCOperator.WEIGHT),
+                AttributeRule.newDoubleRule(SCALE),
+                new ElementRule(VIRUSLOCATIONS, Parameter.class),
+                new ElementRule(VIRUSLOCATIONSTREENODE, MatrixParameter.class),
+                new ElementRule(MU, Parameter.class),
+               new ElementRule(INDICATORS, Parameter.class),
+               new ElementRule(SERUMDRIFT, Parameter.class),
+               new ElementRule(TreeModel.class),
+
+        };
+    
+    };
+
+
+
+    public int getStepCount() {
+        return 1;
+    }
+    
+
+}
diff --git a/src/dr/evomodel/antigenic/phyloClustering/MCMCOperators/serumPrecisionSerumLocOperator.java b/src/dr/evomodel/antigenic/phyloClustering/MCMCOperators/serumPrecisionSerumLocOperator.java
new file mode 100644
index 0000000..3be85e6
--- /dev/null
+++ b/src/dr/evomodel/antigenic/phyloClustering/MCMCOperators/serumPrecisionSerumLocOperator.java
@@ -0,0 +1,199 @@
+
+package dr.evomodel.antigenic.phyloClustering.MCMCOperators;
+
+import java.io.FileReader;
+import java.io.IOException;
+import java.util.LinkedList;
+
+import dr.evolution.tree.NodeRef;
+import dr.evomodel.tree.TreeModel;
+import dr.inference.model.MatrixParameter;
+import dr.inference.model.Parameter;
+import dr.inference.operators.AbstractCoercableOperator;
+import dr.inference.operators.CoercionMode;
+import dr.inference.operators.MCMCOperator;
+import dr.inference.operators.OperatorFailedException;
+import dr.inference.operators.OperatorUtils;
+import dr.inference.operators.SimpleMCMCOperator;
+import dr.inference.operators.MCMCOperator.Utils;
+import dr.math.MathUtils;
+import dr.util.DataTable;
+import dr.xml.AbstractXMLObjectParser;
+import dr.xml.AttributeRule;
+import dr.xml.ElementRule;
+import dr.xml.XMLObject;
+import dr.xml.XMLObjectParser;
+import dr.xml.XMLParseException;
+import dr.xml.XMLSyntaxRule;
+
+public class serumPrecisionSerumLocOperator extends AbstractCoercableOperator {
+
+	
+   
+    private Parameter serumPrecision = null;   
+    private MatrixParameter serumLocations = null;
+
+    private double scaleFactor;
+
+	public serumPrecisionSerumLocOperator(double weight, MatrixParameter serumLocations, Parameter serumPrec,  double scale){
+    
+        super(CoercionMode.COERCION_ON);
+		
+		setWeight(weight);
+        this.serumLocations = serumLocations;
+        this.serumPrecision = serumPrec;
+		this.scaleFactor = scale;
+	
+	}
+	
+	
+
+	public double doOperation() throws OperatorFailedException {
+
+		
+        final double scale = (scaleFactor + (MathUtils.nextDouble() * ((1.0 / scaleFactor) - scaleFactor)));
+ 
+		double original_serumPrec_Val = serumPrecision.getParameterValue(0);
+		double new_serumPrec_Val = scale * original_serumPrec_Val;
+	   
+	   
+		serumPrecision.setParameterValue(0, new_serumPrec_Val);
+		 
+		
+		for(int i=0; i < serumLocations.getColumnDimension(); i++){
+				for(int j=0; j < 2; j++){
+					double oldValue = serumLocations.getParameter(i).getParameterValue(j);				
+					double newValue =  oldValue *  Math.sqrt( original_serumPrec_Val/new_serumPrec_Val); 
+					serumLocations.getParameter(i).setParameterValue(j, newValue);
+				}
+
+		}
+		
+		
+        double logq = -Math.log(scale);
+        return logq;
+	}
+	
+	
+	
+
+
+	//copied from the original ScaleOperator
+    public double getCoercableParameter() {
+        return Math.log(1.0 / scaleFactor - 1.0);
+    }
+
+	//copied from the original ScaleOperator
+    public void setCoercableParameter(double value) {
+        scaleFactor = 1.0 / (Math.exp(value) + 1.0);
+    }
+
+	//copied from the original ScaleOperator
+    public double getRawParameter() {
+        return scaleFactor;
+    }
+
+	
+	
+	//copied from the original ScaleOperator
+    public double getTargetAcceptanceProbability() {
+        return 0.234;
+    }
+	//copied from the original ScaleOperator
+    public final String getPerformanceSuggestion() {
+
+        double prob = MCMCOperator.Utils.getAcceptanceProbability(this);
+        double targetProb = getTargetAcceptanceProbability();
+        dr.util.NumberFormatter formatter = new dr.util.NumberFormatter(5);
+        double sf = OperatorUtils.optimizeScaleFactor(scaleFactor, prob, targetProb);
+        if (prob < getMinimumGoodAcceptanceLevel()) {
+            return "Try setting scaleFactor to about " + formatter.format(sf);
+        } else if (prob > getMaximumGoodAcceptanceLevel()) {
+            return "Try setting scaleFactor to about " + formatter.format(sf);
+        } else return "";
+    }
+	
+	
+
+    
+    public final static String SERUMPRECSCALEALLSERUMLOC = "SerumPrecScaleAllSerumLoc";
+
+    public final String getOperatorName() {
+        return SERUMPRECSCALEALLSERUMLOC;
+    }
+
+    
+
+
+    
+    
+    
+    public static XMLObjectParser PARSER = new AbstractXMLObjectParser() {
+    	
+
+        public final static String SERUMLOCATIONS = "serumLocations";
+    	public final static String  SERUMPREC = "serumPrec";       
+
+    	public final static String SCALE = "scaleFactor";
+
+
+        public String getParserName() {
+            return SERUMPRECSCALEALLSERUMLOC;
+        }
+
+        /* (non-Javadoc)
+         * @see dr.xml.AbstractXMLObjectParser#parseXMLObject(dr.xml.XMLObject)
+         */
+        public Object parseXMLObject(XMLObject xo) throws XMLParseException {
+
+
+            double weight = xo.getDoubleAttribute(MCMCOperator.WEIGHT);
+            double scale = xo.getDoubleAttribute(SCALE);
+
+            
+            XMLObject cxo = xo.getChild(SERUMLOCATIONS);
+                MatrixParameter serumLocations = (MatrixParameter) cxo.getChild(MatrixParameter.class);
+               
+    
+                cxo = xo.getChild(SERUMPREC);
+                Parameter serumPrec = (Parameter) cxo.getChild(Parameter.class);
+
+            return new serumPrecisionSerumLocOperator(weight, serumLocations, serumPrec,  scale);
+            
+
+        }
+
+        //************************************************************************
+        // AbstractXMLObjectParser implementation
+        //************************************************************************
+
+        public String getParserDescription() {
+            return "changes serum drift and make sure the first dimension of the active drifted mus stay the same";
+        }
+
+        public Class getReturnType() {
+            return serumPrecisionSerumLocOperator.class;
+        }
+
+
+        public XMLSyntaxRule[] getSyntaxRules() {
+            return rules;
+        }
+
+        private final XMLSyntaxRule[] rules = {
+                AttributeRule.newDoubleRule(MCMCOperator.WEIGHT),
+                AttributeRule.newDoubleRule(SCALE),
+                new ElementRule(SERUMLOCATIONS, Parameter.class),
+               new ElementRule(SERUMPREC, Parameter.class),
+        };
+    
+    };
+
+
+
+    public int getStepCount() {
+        return 1;
+    }
+    
+
+}
diff --git a/src/dr/evomodel/antigenic/phyloClustering/TreeClusteringVirusesPrior.java b/src/dr/evomodel/antigenic/phyloClustering/TreeClusteringVirusesPrior.java
new file mode 100644
index 0000000..cc4c009
--- /dev/null
+++ b/src/dr/evomodel/antigenic/phyloClustering/TreeClusteringVirusesPrior.java
@@ -0,0 +1,1778 @@
+package dr.evomodel.antigenic.phyloClustering;
+
+import java.io.BufferedReader;
+import java.io.FileNotFoundException;
+import java.io.FileReader;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.ListIterator;
+import java.util.Set;
+import java.util.logging.Logger;
+
+import dr.evolution.datatype.Nucleotides;
+import dr.evolution.tree.Tree;
+import dr.evolution.tree.NodeRef;
+import dr.evomodel.antigenic.phyloClustering.misc.obsolete.TiterImporter;
+import dr.evomodel.tree.TreeModel;
+import dr.evomodelxml.treelikelihood.TreeTraitParserUtilities;
+import dr.inference.model.AbstractModelLikelihood;
+import dr.inference.model.CompoundParameter;
+import dr.inference.model.MatrixParameter;
+import dr.inference.model.Model;
+import dr.inference.model.Parameter;
+import dr.inference.model.Variable;
+import dr.inference.operators.MCMCOperator;
+import dr.math.GammaFunction;
+import dr.math.MathUtils;
+import dr.math.distributions.MultivariateNormalDistribution;
+import dr.math.matrixAlgebra.SymmetricMatrix;
+import dr.xml.AbstractXMLObjectParser;
+import dr.xml.AttributeRule;
+import dr.xml.ElementRule;
+import dr.xml.StringAttributeRule;
+import dr.xml.XMLObject;
+import dr.xml.XMLObjectParser;
+import dr.xml.XMLParseException;
+import dr.xml.XMLSyntaxRule;
+
+/**
+ * @author Charles Cheung
+ * @author Trevor Bedford
+ */
+
+
+public class TreeClusteringVirusesPrior extends AbstractModelLikelihood {
+	
+
+    public static final String TREE_CLUSTER_VIRUSES = "TreeClusterViruses";
+    
+    private Parameter muPrecision;
+    private Parameter p_on;
+    private double initialK;
+    
+    private int startBase;
+    private int endBase;
+    private int numSites;
+    
+	//Variables
+    private Parameter indicators;
+    private Parameter probSites;
+    private MatrixParameter mu; //mu - means
+    private Parameter clusterLabels;     //C
+    private Parameter clusterLabelsTreeNode;
+    private MatrixParameter virusLocations = null;
+    private MatrixParameter virusLocationsTreeNode = null;
+
+    private Parameter muMean = null; //obsolete - to be removed
+    boolean hasDrift;
+    TreeModel treeModel;
+    
+    int numdata;
+    int []membershipToClusterLabelIndexes = null;        
+	private double mostRecentTransformedValue = 0;  //keep a copy of the most recent version of transformFactor, to keep track of whether the transformFactor has changed
+	private boolean treeChanged = false; //a flag that becomes true when treeModel changes
+
+
+    private String[] mutationString;
+    private LinkedList<Integer>[] mutationList ;
+    private LinkedList<Integer>[] causalList ;
+    private int[] causalCount;
+    private int[] nonCausalCount;
+
+    private Parameter siteIndicators;
+    private String gp_prior;
+    private double prob00=0.95;
+    private double prob11=0.5;
+
+
+    public LinkedList<Integer>[] getMutationList(){
+    	return(mutationList);
+    }
+    public LinkedList<Integer>[] getCausalList(){
+    	return(causalList);
+    }
+        
+	public TreeClusteringVirusesPrior (TreeModel treeModel_in,  
+		 				Parameter indicators_in, 
+		 				Parameter clusterLabels_in,
+		 				Parameter clusterLabelsTreeNode_in,
+		 				MatrixParameter mu_in, 
+		 				Boolean hasDrift, 
+		 				MatrixParameter virusLocations_in,
+		 				MatrixParameter virusLocationsTreeNode_in,
+		 				Parameter muPrecision, 
+		 				Parameter p_onValue	,
+		 				double initialK_in,
+		 				Parameter muMean,
+		 				Parameter probSites_in,
+		 				Parameter siteIndicators_in,
+		 				int startBase_in,
+		 				int endBase_in,
+		 				String gp_prior_in,
+		 				double prob00_in,
+		 				double prob11_in,
+		 				double initial_probSiteValue
+						){
+	 
+		super(TREE_CLUSTER_VIRUSES);	
+		System.out.println("loading the constructor for TreeClusterViruses");
+		
+		if(probSites_in == null){
+			System.out.println("Antigenic Clustering only");
+		}
+		else{
+			System.out.println("Antigenic Genotype to Phenotype model");
+		}
+
+		this.treeModel= treeModel_in;
+		
+		if(probSites_in != null){
+			 this.startBase = startBase_in;
+			 this.endBase = endBase_in;
+			 this.gp_prior = gp_prior_in;
+			 
+	 		System.out.println("gp prior = " + gp_prior);
+	 		if(gp_prior.compareTo("generic") == 0 ||gp_prior.compareTo("saturated") == 0||
+	 				gp_prior.compareTo("shrinkage") == 0||gp_prior.compareTo("correlated") == 0){
+	 		}
+	 		else{
+	 			System.out.println("Prior is incorrectly specified - choose from [generic/saturated/shrinkage/correlated]");
+	 			System.exit(0);
+	 		}
+
+			treeMutations(); //this routine also calculates the number of sites and checks for error
+			this.probSites = probSites_in;
+			this.siteIndicators = siteIndicators_in;
+			this.prob00 = prob00_in;
+			this.prob11 = prob11_in;
+	
+		}
+		//this.K = K_in; //to be deleted
+		this.indicators = indicators_in;
+
+		this.clusterLabels = clusterLabels_in;
+		this.clusterLabelsTreeNode = clusterLabelsTreeNode_in;
+		this.mu = mu_in;
+		
+		this.hasDrift=hasDrift;
+		this.virusLocations = virusLocations_in;
+		this.virusLocationsTreeNode = virusLocationsTreeNode_in;
+        
+		this.muPrecision = muPrecision;
+		this.p_on = p_onValue; // this is shared by the clustering-only   and also the saturated prior in genotype to phenotype
+		this.initialK = initialK_in;
+		this.muMean = muMean;
+				
+		System.out.println("sigmaSq = " + 1/muPrecision.getParameterValue(0));
+		System.out.println("p_on = " + p_onValue.getParameterValue(0));
+		
+		//numdata = offsets.getSize();
+		 int numNodes = treeModel.getNodeCount();
+         numdata = virusLocations_in.getColumnDimension();      
+        //initialize clusterLabels
+        clusterLabels.setDimension(numdata);
+         for (int i = 0; i < numdata; i++) {      	
+            clusterLabels.setParameterValue(i, 0);
+        }
+         addVariable(clusterLabels);
+         
+         //initialize excision points
+          indicators.setDimension(treeModel.getNodeCount());
+          for(int i=0; i < treeModel.getNodeCount(); i++){
+        	  indicators.setParameterValue(i, 0);
+          }
+          
+          
+
+ 		 //initialize with the specified number of initial clusters.
+ 		 indicators.setParameterValue(  treeModel.getRoot().getNumber() , 1 );
+ 		 for(int k=0; k< (initialK -1); k++){
+ 				//sample another one.
+ 				int sampleNew = 1;
+ 				while(sampleNew ==1){
+ 					int rSiteIndex = (int) (Math.floor(Math.random()*numNodes));
+ 					if( (int) indicators.getParameterValue(rSiteIndex) == 0){
+ 						//clustering only
+ 						if(probSites_in == null){
+ 							//success sampling
+ 							indicators.setParameterValue(rSiteIndex, 1);
+ 							sampleNew = 0;
+ 						}
+ 						//genotype to phenotype
+ 						else{
+ 							//added condition to avoid getting 0 probability:
+ 							if(mutationList[rSiteIndex] != null){				
+ 								//success sampling
+ 								indicators.setParameterValue(rSiteIndex, 1);
+ 								sampleNew = 0;
+ 							}
+ 						}//else
+ 					}
+ 				}
+ 				
+ 		}
+          
+         addVariable(indicators);
+         
+         //genotype to phenotype
+ 		if(probSites_in != null){
+	         //int numSites = 330;  //HARDCODED RIGHT NOW
+ 			if(gp_prior.compareTo("generic") == 0 ){
+ 				probSites.setDimension(1);
+ 				probSites.setParameterValue(0, initial_probSiteValue);
+ 			}
+ 			else{
+ 				probSites.setDimension(numSites); //initialize dimension of probSites
+ 		         //initialize the probability of each site
+ 		         //double initial_p = 0.05;
+ 		         for(int k=0; k < numSites; k++){
+ 		        	 //probSites.setParameterValue(k, initial_p);
+ 		        	 probSites.setParameterValue(k, initial_probSiteValue );
+ 		         }  
+ 			}
+	         addVariable(probSites);
+	    
+	    
+	         //MAYBE ONLY INITIALIZE IT IF IT USES THE SHRINKAGE OR correlated PRIOR?
+	         siteIndicators.setDimension(numSites);
+	         for(int k=0; k < numSites; k++){
+	        	 siteIndicators.setParameterValue(k, 1);
+	         }
+	         addVariable(siteIndicators);
+ 		}
+         
+          
+          clusterLabelsTreeNode.setDimension(treeModel.getNodeCount());
+          addVariable(clusterLabelsTreeNode);
+          
+         //initialize mu
+         mu.setColumnDimension(2);
+         mu.setRowDimension( treeModel.getNodeCount()  );  //have a fixed number, although in reality, only K of them are active
+
+         for(int i=0; i < (treeModel.getNodeCount() ); i++){
+        	 double zero=0;
+        	 mu.getParameter(i).setValue(0, zero);
+        	 mu.getParameter(i).setValue(1, zero);
+         }
+         
+         //adding the pre-clustering step.
+        // preClustering();
+
+		 addVariable(virusLocations);
+		 
+		 virusLocationsTreeNode.setColumnDimension(2);  //mds dimension is 2
+		 virusLocationsTreeNode.setRowDimension(numNodes);
+		 addVariable(virusLocationsTreeNode);
+		 
+		 addModel(treeModel);
+		 addVariable(mu);
+		 
+		 addVariable(muPrecision);
+		 addVariable(p_on);
+	
+		  numdata = virusLocations.getColumnDimension();
+	 
+		   //loadInitialMuLocations();
+		   //loadIndicators();
+	
+		   //int []membershipToClusterLabelIndexes = new int[numdata];
+		   //setMembershipToClusterLabelIndexes(); //run once in case the tree changes.
+		   //setClusterLabelsParameter();
+	 
+		  //loadIndicators();
+		 //System.out.println("Finished loading the constructor for TreeClusteringVirusesPrior");
+
+		 //genotype to phenotype
+		if(probSites_in != null){
+		 sampleCausativeStates();
+		 
+		 /*
+		 for(int i=0; i< treeModel.getNodeCount(); i++){
+			 System.out.print( (int)indicators.getParameterValue(i) + "\t");
+			 System.out.print("node i=" + i +":\t");
+			 if(mutationList[i] != null){
+		    	Iterator itr = mutationList[i].iterator();
+		    	Iterator itr2 = causalList[i].iterator();
+		    	while(itr.hasNext()){
+		    		int curMutation = ((Integer) itr.next()).intValue();
+		    		int curCausalState = ((Integer) itr2.next()).intValue();
+	 				System.out.print(curMutation + "(" + curCausalState +")" + " ");    		
+		    	}
+			 }
+			 System.out.println("");
+			 
+		 }
+		 */
+    	
+		}//genotype to phenotype
+
+		 //System.out.println("exit");
+		 //System.exit(0);
+		 
+		 
+	}    
+ 
+ 
+
+private void treeMutations() {
+
+ 	int numNodes = treeModel.getNodeCount();
+    // Get sequences
+    String[] sequence = new String[numNodes];
+    
+ // Universal
+	String GENETIC_CODE_TABLES ="KNKNTTTTRSRSIIMIQHQHPPPPRRRRLLLLEDEDAAAAGGGGVVVV*Y*YSSSS*CWCLFLF";
+    int numCodons = -1; //needs to set it, when it looks at the sequence
+
+    for(int curIndex = 0; curIndex < numNodes; curIndex ++){
+		String ns =  (String) treeModel.getNodeAttribute( treeModel.getNode(curIndex), "states");
+		
+		if(endBase == -1){
+			endBase = ns.length() -1;
+		}
+		 
+		
+		 if( (endBase - startBase) % 3 != 0){
+			System.out.println("Nucleotide sequence needs to be triplet to convert to codon - check your startbase and endbase");
+			System.exit(0);
+		 }
+		 if(endBase > (ns.length()-1 ) ){
+			 System.out.println("the last base cannot be greater than the length of the nucleotide. Exit now.");
+			 System.exit(0);
+		 }
+		 if(startBase > (ns.length()-1 ) ){
+			 System.out.println("the start base cannot be greater than the length of the nucleotide. Exit now.");
+			 System.exit(0);
+		 }
+		 if(startBase > endBase){
+			 System.out.println("Start base cannot be greater than the end base");
+			 System.exit(0);
+		 }
+		 
+		numSites = (endBase - startBase)/3;
+	    numCodons = numSites;
+   
+		//System.out.println("startbase = " + startBase);
+		//System.out.println("endbase = " + endBase);
+		//System.out.println("numSites = " + numSites);
+	   
+		ns = ns.substring(startBase, endBase );
+		//ns = ns.substring(3+27, ns.length() - 1);
+		//System.out.println(ns);
+		//System.exit(0);
+		
+		//numCodons = ns.length()/3;  // or do I care about only 330?
+
+		//System.out.println(numCodons);
+		String codonSequence = "";
+		for(int codon=0; codon< numCodons; codon++){
+			
+			int nuc1 =  Nucleotides.NUCLEOTIDE_STATES[ns.charAt(codon*3)];
+			int nuc2 =  Nucleotides.NUCLEOTIDE_STATES[ns.charAt(codon*3+1)];
+			int nuc3 =  Nucleotides.NUCLEOTIDE_STATES[ns.charAt(codon*3+2)];
+			
+			int canonicalState = (nuc1 * 16) + (nuc2 * 4) + nuc3;
+			
+			codonSequence = codonSequence + GENETIC_CODE_TABLES.charAt(canonicalState);
+		}
+		//System.out.println(codonSequence);
+        sequence[curIndex] = codonSequence;
+		
+    }
+
+    mutationList = new LinkedList[ numNodes];
+    mutationString = new String[treeModel.getNodeCount()];
+
+	NodeRef cNode = treeModel.getRoot();
+    LinkedList<NodeRef> visitlist = new LinkedList<NodeRef>();
+    
+    visitlist.add(cNode);
+    
+    int countProcessed=0;
+    while(visitlist.size() > 0){
+    	countProcessed++;
+    	//assign value to the current node...
+    	if(treeModel.getParent(cNode) == null){  //this means it is a root node
+    		//visiting the root
+    		//System.out.println(cNode.getNumber() + ":\t" + "root");
+    	}
+    	else{
+    		//visiting
+    		//System.out.print(cNode.getNumber() + ":\t");
+
+    		//String listMutations = "\"";
+    		mutationString[cNode.getNumber()]  = "\"";
+    		String nodeState =  sequence[cNode.getNumber()];
+    		String parentState =  sequence[treeModel.getParent(cNode).getNumber()];
+    		           
+    		int count = 0;
+    		for(int i=0; i < numCodons; i++){
+    			if(nodeState.charAt(i) != parentState.charAt(i)){
+    				count++;
+    				if(count>1){
+    					//System.out.print(",");
+    					mutationString[cNode.getNumber()] =  mutationString[cNode.getNumber()] + ",";
+    				}
+    				//System.out.print(i+1);
+    				mutationString[cNode.getNumber()] =  mutationString[cNode.getNumber()] + (i+1);  //i+1 so mutation starts from 1 - 330
+    				
+    			      // Make sure the list is initialized before adding to it
+    			      if (mutationList[cNode.getNumber()] == null) {
+    			    	  mutationList[cNode.getNumber()] = new LinkedList<Integer>();
+    			      }
+    			      mutationList[cNode.getNumber()].add((i+1));
+    				
+    			}
+    			
+    			//store in linked list
+    		}
+    		//System.out.println("");
+    		mutationString[cNode.getNumber()]  = mutationString[cNode.getNumber()]  + "\"";
+    	}
+    	
+		//System.out.println(cNode.getNumber() + "\t" +  treeModel.getNodeAttribute(cNode, "states") );
+
+    	
+    	//add all the children to the queue
+			for(int childNum=0; childNum < treeModel.getChildCount(cNode); childNum++){
+				NodeRef node= treeModel.getChild(cNode,childNum);
+				visitlist.add(node);
+	        }
+			
+  			
+  		visitlist.pop(); //now that we have finished visiting this node, pops it out of the queue
+
+			if(visitlist.size() > 0){
+				cNode = visitlist.getFirst(); //set the new first node in the queue to visit
+			}
+			
+		
+   	}
+
+	
+}
+
+
+
+//new version, with probSites determining the indicators
+public double getLogLikelihood() {
+	
+	double N_nodes = (double) treeModel.getNodeCount();
+	
+	int K_value = 0; //K_int gets updated
+	for(int i=0; i < indicators.getDimension(); i++){
+		K_value += (int) indicators.getParameterValue(i);
+	}
+	//System.out.println("K_value" + K_value);
+
+	double logL = 0;
+	
+	double muVariance = 1/ muPrecision.getParameterValue(0);
+	double p_onValue = p_on.getParameterValue(0);
+	
+	//sync with the current value of p_on
+    //for(int k=0; k < treeModel.getNodeCount(); k++){
+   	 //probSites.setParameterValue(k, initial_p);
+   	// probSites.setParameterValue(k, p_on.getParameterValue(0) );
+    //} 
+	
+	
+	double muMeanParameter = muMean.getParameterValue(0);
+	//System.out.println("muMeanParameter = " + muMeanParameter);
+	
+	for(int i=0; i < ( N_nodes ) ; i++){
+
+		double mu_i0 = mu.getParameter(i).getParameterValue(0);
+		double mu_i1 = mu.getParameter(i).getParameterValue(1);
+		
+		//if( (int) indicators.getParameterValue(i) == 1){   //Commented out because I am not using P(mu_i = 0 | I_i = 0) = 1
+			logL -=	0.5*(  (mu_i0 - muMeanParameter )*(mu_i0  - muMeanParameter ) + ( mu_i1 )*( mu_i1 )   )/muVariance;
+		//}
+		//System.out.println(logL);
+	}
+
+	
+	//clustering
+	if(probSites == null){
+		logL -= (K_value ) * ( Math.log(2)  + Math.log(Math.PI)+ Math.log(muVariance)  );
+	}
+	//genotype to phenotype
+	else{
+		logL -= (N_nodes ) * ( Math.log(2)  + Math.log(Math.PI)+ Math.log(muVariance)  );
+		
+		//4 priors:
+			//generic:  assumes that each $\pi_j = \pi$, where $\pi \sim Beta(\alpha,\beta)$. The posterior $\pi$ is estimated from the MCMC run.
+			//Saturated prior: allows each $\pi_j$ to be different. $\pi_j$ is assumed to follow the hierarchical $Beta(\alpha, \beta)$ distribution, where $\alpha$ and $\beta$ are fixed according to plausible prior belief. The $Beta$ distribution is chosen because it is conjugate to the categorical distribution, so Gibbs sampling can be accomplished for $\pi$ (See details in the Implementation). 
+			//Shrinkage Prior: models the belief about epitope and non-epitope sites. Here, we define a latent vector of binary indicators for amino acid positions $\delta = (\delta_1, \dots, \delta_L)$, where $\delta_k= 1$ if position $j$ is an epitope site and 0 otherwise. 		
+			//correlated prior: models the correlation among amino acid positions being epitopes or not (Figure \ref{correlationPrior}). This prior is motivated by the knowledge that groups of epitope sites tend to occur at adjacent positions.
+
+		double priorContribution = 0;
+		if(gp_prior.compareTo("correlated") == 0 ){
+			priorContribution = correlatedPriorComputation();
+		}
+		else if(gp_prior.compareTo("shrinkage") == 0 ){
+			priorContribution = shrinkagePriorComputation();
+		}
+		else if(gp_prior.compareTo("generic") == 0 ){
+			priorContribution = genericPriorComputation();
+		}
+		//saturated
+		else if(gp_prior.compareTo("saturated") == 0 ){
+			priorContribution = saturatedPriorComputation();
+		}
+		else{
+			System.out.println("Prior unknown. quit now");
+			System.exit(0);
+		}
+	
+		logL += priorContribution;
+//System.out.println(logL);
+// System.exit(0);
+	if(logL != Double.NEGATIVE_INFINITY){
+		sampleCausativeStates();
+	}
+
+	} //end of genotype to phenotype
+	return(logL);
+}
+
+
+private double saturatedPriorComputation() {
+	double contribution = 0;
+	double N_nodes = (double) treeModel.getNodeCount();
+	
+	double []probMutationSite = new double[numSites]; 
+	for(int i=0; i < numSites; i++){
+		probMutationSite[i] = probSites.getParameterValue(i);  
+ 	}
+	
+	//OMIT THE ROOT NODE BECAUSE THERE IS NO MUTATION ANYWAY
+	//(N_nodes - 1)
+	for(int i=0; i < (N_nodes-1) ; i ++){
+		double prob_Node_i_On = 1;
+		double prob_allMutationsOff = 1;
+    	if(mutationList[i] != null){
+	    	Iterator itr = mutationList[i].iterator();
+	    	while(itr.hasNext()){
+	    		int curMutation = ((Integer) itr.next()).intValue();
+				prob_allMutationsOff = prob_allMutationsOff * (1 - probMutationSite[curMutation -1] );  //offset of 1 
+	    	}
+    	}  
+		
+		prob_Node_i_On = 1 - prob_allMutationsOff;
+		if( (int) indicators.getParameterValue(i) == 1){
+			contribution += Math.log(prob_Node_i_On);
+		}
+		else{
+			contribution += Math.log(1-prob_Node_i_On);
+		}
+	}
+	return(contribution);
+}
+private double genericPriorComputation() {
+	double contribution = 0;
+	double N_nodes = (double) treeModel.getNodeCount();
+	
+	double probMutationSite = probSites.getParameterValue(0); // a single value.. not a vector
+	
+	//OMIT THE ROOT NODE BECAUSE THERE IS NO MUTATION ANYWAY
+	//(N_nodes - 1)
+	for(int i=0; i < (N_nodes-1) ; i ++){
+		double prob_Node_i_On = 1;
+		double prob_allMutationsOff = 1;
+    	if(mutationList[i] != null){
+	    	Iterator itr = mutationList[i].iterator();
+	    	while(itr.hasNext()){
+	    		int curMutation = ((Integer) itr.next()).intValue();
+				prob_allMutationsOff = prob_allMutationsOff * (1 - probMutationSite );  //offset of 1 
+	    	}
+    	}  
+		
+		prob_Node_i_On = 1 - prob_allMutationsOff;
+		if( (int) indicators.getParameterValue(i) == 1){
+			contribution += Math.log(prob_Node_i_On);
+		}
+		else{
+			contribution += Math.log(1-prob_Node_i_On);
+		}
+	}
+	return(contribution);
+}
+
+private double shrinkagePriorComputation() {
+	double contribution = 0;
+	double N_nodes = (double) treeModel.getNodeCount();
+	
+	double []probMutationSite = new double[numSites]; 
+	for(int i=0; i < numSites; i++){
+		probMutationSite[i] = probSites.getParameterValue(i) * siteIndicators.getParameterValue(i) ;  //with the null-ing out the probSites with siteIndicators
+ 	}
+	
+	//OMIT THE ROOT NODE BECAUSE THERE IS NO MUTATION ANYWAY
+	//(N_nodes - 1)
+	for(int i=0; i < (N_nodes-1) ; i ++){
+		double prob_Node_i_On = 1;
+		double prob_allMutationsOff = 1;
+    	if(mutationList[i] != null){
+	    	Iterator itr = mutationList[i].iterator();
+	    	while(itr.hasNext()){
+	    		int curMutation = ((Integer) itr.next()).intValue();
+				prob_allMutationsOff = prob_allMutationsOff * (1 - probMutationSite[curMutation -1] );  //offset of 1 
+	    	}
+    	}  
+		
+		prob_Node_i_On = 1 - prob_allMutationsOff;
+		if( (int) indicators.getParameterValue(i) == 1){
+			contribution += Math.log(prob_Node_i_On);
+		}
+		else{
+			contribution += Math.log(1-prob_Node_i_On);
+		}
+	}
+	
+	
+
+	int numSignificantSites = 0;
+	for(int i=0; i < numSites; i++){
+		if( (int) siteIndicators.getParameterValue(i) ==1){
+			numSignificantSites++;
+		}
+	}
+	int numNonSignificantSites = numSites - numSignificantSites;
+	//contribution +=  (numSignificantSites*Math.log( 0.45   ) + numNonSignificantSites*Math.log( 0.55 )  );
+	//contribution +=  (numSignificantSites*Math.log(0.2) + numNonSignificantSites*Math.log(0.8));
+	contribution += numSignificantSites*Math.log(p_on.getParameterValue(0)) + numNonSignificantSites*Math.log(1-p_on.getParameterValue(0));
+	//System.out.println("#sig= " + numSignificantSites + " #nonsig=" + numNonSignificantSites);
+		
+	
+	return(contribution);
+}
+private double correlatedPriorComputation() {
+	double contribution = 0;
+	double N_nodes = (double) treeModel.getNodeCount();
+	
+	// p^k (1-p)^(numNodes - k)
+	//logL += K_value*Math.log( p_onValue ) + (N_nodes - K_value)*Math.log( 1- p_onValue);
+	//int numSites = 330; //now have a private variable 
+	double []probMutationSite = new double[numSites]; 
+	for(int i=0; i < numSites; i++){
+		//probMutationSite[i] = probSites.getParameterValue(i);
+		probMutationSite[i] = probSites.getParameterValue(i) * siteIndicators.getParameterValue(i) ;  //with the null-ing out the probSites with siteIndicators
+ 	}
+	
+	//OMIT THE ROOT NODE BECAUSE THERE IS NO MUTATION ANYWAY
+	//(N_nodes - 1)
+	for(int i=0; i < (N_nodes-1) ; i ++){
+		double prob_Node_i_On = 1;
+		double prob_allMutationsOff = 1;
+//		for( each mutation in the node i){
+//			prob_allMutationsOff = prob_allMutationsOff * (1 - probSites.getParameterValue(curMutation_node_i)); 
+//		}
+    	if(mutationList[i] != null){
+	    	Iterator itr = mutationList[i].iterator();
+	    	while(itr.hasNext()){
+	    		int curMutation = ((Integer) itr.next()).intValue();
+				prob_allMutationsOff = prob_allMutationsOff * (1 - probMutationSite[curMutation -1] );  //offset of 1 
+	    	}
+    	}
+
+	    
+		
+		prob_Node_i_On = 1 - prob_allMutationsOff;
+		if( (int) indicators.getParameterValue(i) == 1){
+			contribution += Math.log(prob_Node_i_On);
+		}
+		else{
+			contribution += Math.log(1-prob_Node_i_On);
+		}
+		//System.out.println(logL);
+		//System.out.println("node=" + i + " prob=" + prob_Node_i_On);
+	}
+	
+	
+
+//	int numSignificantSites = 0;
+//	for(int i=0; i < numSites; i++){
+//		if( (int) siteIndicators.getParameterValue(i) ==1){
+//			numSignificantSites++;
+//		}
+//	}
+//	int numNonSignificantSites = numSites - numSignificantSites;
+//	logL +=  (numSignificantSites*Math.log( 0.45   ) 
+//					+ numNonSignificantSites*Math.log( 0.55 )  );
+	
+	//System.out.println("#sig= " + numSignificantSites + " #nonsig=" + numNonSignificantSites);
+
+	//logL +=  (numSignificantSites*Math.log(0.2) + numNonSignificantSites*Math.log(0.8));
+	
+	
+
+	//transition matrix:
+	
+	// p(0->0) = 0.9, p(0->1) = 0.2
+	// p(1->0) = 0.6  p(1->1) = 0.4
+
+	contribution += Math.log(0.5); //initial
+	int numSignificantSites = 0;
+	int num_00=0;
+	int num_01=0;
+	int num_10=0;
+	int num_11=0;
+	for(int i=1; i < numSites; i++){
+		if( (int) siteIndicators.getParameterValue(i-1) ==0){
+			if( (int) siteIndicators.getParameterValue(i) ==0){
+				num_00++;
+			}
+			else{
+				num_01++;
+			}
+		}
+		else{
+			if( (int) siteIndicators.getParameterValue(i) ==0){
+				num_10++;
+			}
+			else{
+				num_11++;
+			}
+		}
+	}
+	
+	double alpha = prob00; //0 stay as 0
+	double beta = prob11; // 1 stay as 1
+	//		0		1
+	//0 	0.9		0.1
+	//1		0.5		0.5
+	
+	contribution +=  ( num_00*Math.log( alpha ) + num_01*Math.log( 1-alpha )  
+			  +num_10*Math.log( 1-beta ) + num_11*Math.log(beta ) );
+	
+	return(contribution);
+}
+public int[] getCausalCount(){
+	return(causalCount);
+}
+
+public int[] getNonCausalCount(){
+	return(nonCausalCount);
+}
+
+public LinkedList<Integer>[] getMutationsPerNode(){
+	return(mutationList);
+}
+
+public LinkedList<Integer>[] getCausativeStatesPerNode(){
+	return(causalList);
+}
+
+
+//the thing is, if the log likelihood is negative inifinity, this shouldn't be called
+// because that means a siteInidicator gets flipped off... so then none of the mutation on the node is on..
+//this should produce a 0 likelihood.
+//so, no point of realizing the causal state if it is going to be rejected
+
+//as a matter of fact, I think causal state should only be updated upon an acceptance move 
+public void sampleCausativeStates(){
+	    
+	
+    causalCount = new int[numSites];  //HARD CODED
+    nonCausalCount = new int[numSites]; //HARD CODED
+    for(int i=0; i < numSites; i++){
+    	causalCount[i] = 0;
+    	nonCausalCount[i] = 0;
+    }
+	
+	int N_nodes = (int) treeModel.getNodeCount();
+	
+	//resample the whole set of causal states
+    causalList = new LinkedList[ N_nodes];
+	
+	for(int curNode=0; curNode < (N_nodes-1) ; curNode ++){
+		double prob_Node_i_On = 1;
+		double prob_allMutationsOff = 1;
+//		for( each mutation in the node i){
+//			prob_allMutationsOff = prob_allMutationsOff * (1 - probSites.getParameterValue(curMutation_node_i)); 
+//		}
+		
+		if((int) indicators.getParameterValue(curNode) == 0 ){
+	    	if(mutationList[curNode] != null){
+	    		causalList[curNode] = new LinkedList<Integer>();
+		    	Iterator itr = mutationList[curNode].iterator();
+		    	while(itr.hasNext()){
+		    		int curMutation = ((Integer) itr.next()).intValue();
+		    		causalList[curNode].add(new Integer(0));
+		    		nonCausalCount[curMutation -1]++;
+		    	}
+	    	}
+		}
+		else{   //if indicator is 1... then need to sample to get the causal indicator (the mutation(s) that give(s) causal state =1
+	    	if(mutationList[curNode] != null){
+	  
+	    		//System.out.println("cur node is " + curNode);
+	    		
+	    		//count the number of mutations that has nonzero probabilities
+		    	//Iterator itr_it = mutationList[curNode].iterator();
+		    	//int count = 0;
+	    	   	//while(itr_it.hasNext()){
+		    	//	int curMutation = ((Integer) itr_it.next()).intValue();
+		    	//	if(probSites.getParameterValue(curMutation -1) * siteIndicators.getParameterValue(curMutation -1) >0){
+		    	//		count++;
+		    	//	}
+		    	//}
+	    	   	//int numMutations = count;  		
+	    		int numMutations = mutationList[curNode].size();
+
+	    		causalList[curNode] = new LinkedList<Integer>();
+	    		double[] probM = new double[numMutations];
+		    	Iterator itr = mutationList[curNode].iterator();
+		    	int count = 0;
+		    	while(itr.hasNext()){
+		    		int curMutation = ((Integer) itr.next()).intValue();
+					//prob_allMutationsOff = prob_allMutationsOff * (1 - probSites.getParameterValue(curMutation));
+		    		//probM[count]=	probSites.getParameterValue(curMutation -1);
+		    		//if(probSites.getParameterValue(curMutation -1) * siteIndicators.getParameterValue(curMutation -1) >0){
+		    		  if(gp_prior.compareTo("generic") == 0 ){
+		    			   probM[count]=	probSites.getParameterValue(0);
+		    		   }
+		    		  else if(gp_prior.compareTo("saturated") == 0 ){
+		    			   probM[count]=	probSites.getParameterValue(curMutation -1) ;
+		    		   }
+		    		  else if(gp_prior.compareTo("correlated") == 0 ||gp_prior.compareTo("shrinkage") == 0   ){
+		    			   probM[count]=	probSites.getParameterValue(curMutation -1) * siteIndicators.getParameterValue(curMutation -1);
+		    		   }
+		    			count++;
+		    		//}
+		    	}
+		    	   	
+		    	
+		    	//System.out.println("numMutations = " + numMutations);
+		    	//generate all possibilities - the binary tuples (I think I actually don't have to realize it..)
+		    	//int numMutations = 3; //num mutations
+		    	double[] probPossibilities = new double[ (int) Math.pow(2,numMutations)];
+		        //double[] probM = {0.05, 0.1, 0.2};
+		        
+		    	for(int kk=0; kk < Math.pow(2, numMutations); kk++){
+		    	    int input = kk;
+		    		    
+		    	    int[] bits = new int[numMutations];
+		    	    for (int i = (numMutations-1); i >= 0; i--) {
+		    	        bits[i] = ( (input & (1 << i)) != 0  ) ? 1 : 0;;
+		    	    }
+		        	probPossibilities[kk] = 1;
+		    	    for(int curM=0; curM < numMutations; curM++){
+		    	    	if(bits[curM] == 1){
+		    	    		probPossibilities[kk] = probPossibilities[kk] * probM[curM];
+		    	    	}
+		    	    	else{
+		    	    		probPossibilities[kk] = probPossibilities[kk] * (1- probM[curM]);
+		    	    	}
+		    	    }
+		    	}
+		    	probPossibilities[0] = 0; //zero out the 0,0,0
+	    	
+		    	//sample from the possibilities.. and then reconstruct back the binary tuple
+		        int choice = MathUtils.randomChoicePDF(probPossibilities);
+		        //System.out.println("choice is " + choice);
+
+   		        int[] bits = new int[numMutations];
+		        for (int i = (numMutations-1); i >= 0; i--) {
+		            bits[i] = ( (choice & (1 << i)) != 0  ) ? 1 : 0;;
+			        causalList[curNode].add(new Integer(bits[i]));
+			        
+			        if(bits[i] ==1){
+			        	causalCount[mutationList[curNode].get(i).intValue() -1 ]++;
+			        }
+			        else{
+			        	nonCausalCount[mutationList[curNode].get(i).intValue()-1]++;
+			        }
+		        }
+		        //System.out.print(choice + " = " + Arrays.toString(bits) + "\t");
+		    	    
+		     
+		      
+		     
+		    	//System.exit(0);
+		    	
+		    	
+	    	}
+		}
+	}
+	
+	/*
+	for(int i=0; i < 330; i++){
+		System.out.println(i + "\t" + causalCount[i] + " " + nonCausalCount[i]);
+	}
+	System.out.println("=====================");
+	*/
+}
+    
+
+
+/*
+public double getLogLikelihood() {
+	
+	double N_nodes = (double) treeModel.getNodeCount();
+	
+	int K_value = 0; //K_int gets updated
+	for(int i=0; i < indicators.getDimension(); i++){
+		K_value += (int) indicators.getParameterValue(i);
+	}
+	//System.out.println("K_value" + K_value);
+
+	double logL = 0;
+	
+	double muVariance = 1/ muPrecision.getParameterValue(0);
+	double p_onValue = p_on.getParameterValue(0);
+	double muMeanParameter = muMean.getParameterValue(0);
+	//System.out.println("muMeanParameter = " + muMeanParameter);
+		//logL -= (K_value ) * ( Math.log(2)  + Math.log(Math.PI)+ Math.log(muVariance)  );
+	logL -= (N_nodes ) * ( Math.log(2)  + Math.log(Math.PI)+ Math.log(muVariance)  );
+	
+	for(int i=0; i < ( N_nodes ) ; i++){
+
+		double mu_i0 = mu.getParameter(i).getParameterValue(0);
+		double mu_i1 = mu.getParameter(i).getParameterValue(1);
+		
+		//if( (int) indicators.getParameterValue(i) == 1){   //Commented out because I am not using P(mu_i = 0 | I_i = 0) = 1
+			logL -=	0.5*(  (mu_i0 - muMeanParameter )*(mu_i0  - muMeanParameter ) + ( mu_i1 )*( mu_i1 )   )/muVariance;
+		//}
+		//System.out.println(logL);
+	}
+
+	// p^k (1-p)^(numNodes - k)
+	logL += K_value*Math.log( p_onValue ) + (N_nodes - K_value)*Math.log( 1- p_onValue);
+	return(logL);
+}
+*/
+   
+
+
+private void loadIndicators() {
+
+	FileReader fileReader2;
+	try {
+		//fileReader2 = new FileReader("/Users/charles/Documents/research/antigenic/GenoPheno/Gabriela/results/initialCondition/H3N2.serumLocs.log");
+		//fileReader2 = new FileReader("/Users/charles/Documents/researchData/clustering/output/test25/run64/H3N2_mds.breakpoints.log");
+	//	fileReader2 = new FileReader("/Users/charles/Documents/researchData/clustering/output/test25/run79/H3N2_mds.indicators.log");
+		fileReader2 = new FileReader("/Users/charles/Documents/researchData/clustering/output/test26/run20-test/H3N2_mds.indicatorsStat-120000.log");
+		
+	      BufferedReader bReader2 = new BufferedReader( fileReader2);
+
+	      String line = null;
+
+	      //skip to the last line
+	      String testLine;
+	      while ((testLine = bReader2.readLine()) != null){
+	    	  line = testLine;
+	      }
+
+	    //  System.out.println(line);
+	      
+	      String datavalue[] = line.split("\t");
+
+	      
+	       //   System.out.println(serumLocationsParameter.getParameterCount());
+	      for (int i = 0; i < treeModel.getNodeCount(); i++) {
+	    	  
+	    	  indicators.setParameterValue(i, Double.parseDouble(datavalue[i+1]));
+	    	 // System.out.println(datavalue[i*2+1]);
+//	    	  System.out.println("indicator=" + indicators.getParameterValue(i));
+	   	  
+	      }
+	      bReader2.close();
+	
+	} catch (FileNotFoundException e) {
+		// TODO Auto-generated catch block
+		e.printStackTrace();
+	} catch (IOException e) {
+		// TODO Auto-generated catch block
+		e.printStackTrace();
+	}        
+
+
+
+}
+
+
+
+
+
+private void loadInitialMuLocations() {
+
+	FileReader fileReader2;
+	try {
+		//fileReader2 = new FileReader("/Users/charles/Documents/research/antigenic/GenoPheno/Gabriela/results/initialCondition/H3N2.serumLocs.log");
+		//fileReader2 = new FileReader("/Users/charles/Documents/researchData/clustering/output/test25/run64/H3N2_mds.mu.log");
+		fileReader2 = new FileReader("/Users/charles/Documents/researchData/clustering/output/test25/run79/H3N2_mds.mu.log");
+		
+	      BufferedReader bReader2 = new BufferedReader( fileReader2);
+
+	      String line = null;
+
+	      //skip to the last line
+	      String testLine;
+	      while ((testLine = bReader2.readLine()) != null){
+	    	  line = testLine;
+	      }
+
+	     // System.out.println(line);
+	      
+	      String datavalue[] = line.split("\t");
+
+	      
+	       //   System.out.println(serumLocationsParameter.getParameterCount());
+	      for (int i = 0; i < mu.getParameterCount(); i++) {
+	    	  
+	    	  double dim1 = Double.parseDouble(datavalue[i*2+1]);
+	    	  double dim2 = Double.parseDouble(datavalue[i*2+2]);
+	    	  mu.getParameter(i).setParameterValue(0, dim1);
+	    	  mu.getParameter(i).setParameterValue(1, dim2);
+	    	 // System.out.println(datavalue[i*2+1]);
+//	    	  System.out.println("mu=" + mu.getParameter(i).getParameterValue(0) +"," + mu.getParameter(i).getParameterValue(1));
+	   	  
+	      }
+	      bReader2.close();
+	
+	} catch (FileNotFoundException e) {
+		e.printStackTrace();
+	} catch (IOException e) {
+		e.printStackTrace();
+	}        
+
+	
+
+}
+
+
+
+
+
+
+
+
+
+private void setMembershipToClusterLabelIndexes(){
+	 int numNodes = treeModel.getNodeCount();
+
+	   //I suspect this is an expensive operation, so I don't want to do it many times,
+	   //which is also unnecessary  - MAY have to update whenever a different tree is used.
+	  membershipToClusterLabelIndexes = new int[numdata]; 
+	  for(int i=0; i < numdata; i++){
+		   Parameter v = virusLocations.getParameter(i);
+		   String curName = v.getParameterName();
+		  // System.out.println(curName);
+		   int isFound = 0;
+		   for(int j=0; j < numNodes; j++){
+			   String treeId = treeModel.getTaxonId(j);
+			   if(curName.equals(treeId) ){
+			//	   System.out.println("  isFound at j=" + j);
+				   membershipToClusterLabelIndexes[i] = j;
+				   isFound=1;
+				   break;
+			   }	   
+		   }
+		   if(isFound ==0){
+			   System.out.println("not found. Exit now.");
+			   System.exit(0);
+		   }     	   
+	  }
+	}
+
+
+
+	private void setClusterLabelsParameter() {
+		int K_int = 0;
+	     for(int i=0; i < treeModel.getNodeCount(); i++){
+	   	   if( (int) indicators.getParameterValue( i ) ==1 ){
+	   		  K_int++; 
+	   	   }
+	     }
+	     
+	     int numNodes = treeModel.getNodeCount();
+	     int[] cutNodes = new int[K_int];
+		   int cutNum = 0;
+		   String content = "";
+	     for(int i=0; i < numNodes; i++){
+	  	   if( (int) indicators.getParameterValue( i ) ==1 ){
+	  		   cutNodes[cutNum] = i;
+	  		   content +=  i + ",";
+	  		   cutNum++;
+	  	   }  	  
+	     }
+	     
+	     int []membership = determine_membership(treeModel, cutNodes, K_int);
+	     
+	     for(int i=0; i < numdata; i++){     	   
+	  	   clusterLabels.setParameterValue( i, membership[membershipToClusterLabelIndexes[i]]);      	   //The assumption that the first nodes being external node corresponding to the cluster labels IS FALSE, so I have to search for the matching indexes
+	  	   //Parameter vloc = virusLocations.getParameter(i);
+	  	   //System.out.println(vloc.getParameterName() + " i="+ i + " membership=" + (int) clusterLabels.getParameterValue(i));
+	     }
+	     		
+	}
+
+
+	//traverse down the tree, top down, do calculation
+	static int[] determine_membership(TreeModel treeModel, int[] cutNodes, int numCuts){
+
+		
+		//TEMPORARY SOLUTION
+	//load in the titer, corresponding to the taxon #.
+		
+		 TiterImporter titer = null ;
+		 
+	FileReader fileReader;
+		try {
+			fileReader = new FileReader("/Users/charles/Documents/research/antigenic/GenoPheno/data/taxon_y_titer.txt");
+		     titer = new TiterImporter(fileReader);	
+
+		} catch (FileNotFoundException e) {
+			// TODO Auto-generated catch block
+			e.printStackTrace();
+		}
+		
+	NodeRef root = treeModel.getRoot();
+
+	int numClusters = 1;
+	LinkedList<NodeRef> list = new LinkedList<NodeRef>();
+	list.addFirst(root);
+
+	int[] membership = new int[treeModel.getNodeCount()];
+	for(int i=0; i < treeModel.getNodeCount(); i++){
+		membership[i] = -1;
+	}
+	membership[root.getNumber()] = 0; //root always given the first cluster
+	  
+	while(!list.isEmpty()){
+		//do things with the current object
+		NodeRef curElement = list.pop();
+		//String content = "node #" + curElement.getNumber() +", taxon=" + treeModel.getNodeTaxon(curElement) + " and parent is = " ;
+		String content = "node #" + curElement.getNumber() +", taxon= " ;
+		if(treeModel.getNodeTaxon(curElement)== null){
+			content += "internal node\t";
+		}
+		else{
+			content += treeModel.getNodeTaxon(curElement).getId() + "\t";
+			//content += treeModel.getTaxonIndex(treeModel.getNodeTaxon(curElement)) + "\t";
+		}
+		
+		if(treeModel.getParent(curElement)== null){
+			//content += "no parent";
+		}
+		else{
+			//content += "parent node#=" + treeModel.getParent(curElement).getNumber();
+		}
+		
+		//cluster assignment:
+		if(!treeModel.isRoot(curElement)){
+		 if(isCutNode(curElement.getNumber(), cutNodes, numCuts)){
+		//if(isCutNode(curElement.getNumber())){
+			numClusters++ ;
+			membership[ curElement.getNumber() ] = numClusters - 1; 
+		}
+		else{
+			//inherit from parent's cluster assignment
+			membership[curElement.getNumber()] = membership[treeModel.getParent(curElement).getNumber()]; 
+		 }
+		        	
+		}//is not Root
+		content += " cluster = " + membership[curElement.getNumber()] ; 
+		
+//		System.out.println(content);
+
+		
+	for(int childNum=0; childNum < treeModel.getChildCount(curElement); childNum++){
+		list.addFirst(treeModel.getChild(curElement,childNum));
+	}
+	}
+
+	return(membership);
+	}
+
+
+	private static boolean isCutNode(int number, int cutNodes[], int numCut) {
+		if(numCut > 0){
+			for(int i=0; i < numCut; i++){
+				if(number == cutNodes[i]){
+					return true;
+				}
+			}
+		}
+		return false;
+	}
+
+
+
+
+
+
+
+
+
+
+
+
+//=====================================================================================================================
+        
+        
+	public  int factorial(int n) {
+	    int fact = 1; // this  will be the result
+	    for (int i = 1; i <= n; i++) {
+	        fact *= i;
+	    }
+	    return fact;
+	}
+	
+
+    public Model getModel() {
+                return this;
+            }
+
+    public void makeDirty() {
+            }
+
+    public void acceptState() {
+        // DO NOTHING
+    }
+
+    public void restoreState() {
+        // DO NOTHING
+    }
+
+    public void storeState() {
+        // DO NOTHING
+    }
+
+    
+
+    protected void handleModelChangedEvent(Model model, Object object, int index) {
+    	if(model == treeModel){
+    		//System.out.println("==========Tree model changes!!!!!!!!=====");
+    		treeChanged = true;
+    	}
+        else{
+        }
+    }
+    
+    protected final void handleVariableChangedEvent(Variable variable, int index, Parameter.ChangeType type) {
+    }
+
+
+
+
+
+    public static XMLObjectParser PARSER = new AbstractXMLObjectParser() {
+    	   	
+    	public final static String EXCISIONPOINTS = "excisionPoints";
+    	public final static String CLUSTERLABELS = "clusterLabels";
+    	public final static String CLUSTERLABELSTREENODE = "clusterLabelsTreeNode";
+
+    	public final static String  MU = "mu";
+
+    	public final static String OFFSETS = "offsets";
+    	public final static String VIRUS_LOCATIONS = "virusLocations";
+    	public final static String VIRUS_LOCATIONSTREENODE = "virusLocationsTreeNodes";
+    	
+    	public final static String INDICATORS = "indicators";
+    	public final static String PROBSITES = "probSites";
+    	public final static String SITEINDICATORS = "siteIndicators";
+
+
+        boolean integrate = false;
+        
+        
+     //   public final static String MUVARIANCE = "muVariance";
+        public final static String MUPRECISION = "muPrecision";
+        public final static String PROBACTIVENODE = "probActiveNode";
+        
+        public final static String INITIALNUMCLUSTERS = "initialK";
+        public final static String MUMEAN = "muMean";
+        
+        public final static String STARTBASE = "startNucleotide";
+        public final static String ENDBASE = "endNucleotide";
+
+        public final static String PROB00 = "prob00";
+        public final static String PROB11 = "prob11";
+        
+
+        
+        public final static String INITIAL_PROBSITE_VALUE = "initialProbSite";
+        
+        public final static String GP_PRIOR_OPTION = "gp_prior";
+        
+        public String getParserName() {
+            return TREE_CLUSTER_VIRUSES;
+        }
+
+        public Object parseXMLObject(XMLObject xo) throws XMLParseException {
+        	
+        	
+        		double initialK = 10;
+            	if (xo.hasAttribute(INITIALNUMCLUSTERS)) {
+            		initialK = xo.getDoubleAttribute(INITIALNUMCLUSTERS);
+            	}
+
+        		double prob00 = 0.95;
+            	if (xo.hasAttribute(PROB00)) {
+            		prob00 = xo.getDoubleAttribute(PROB00);
+            	}
+        		double prob11 = 0.5;
+            	if (xo.hasAttribute(PROB11)) {
+            		prob11 = xo.getDoubleAttribute(PROB11);
+            	}
+
+            	double initial_probSiteValue = 0.05;
+            	if (xo.hasAttribute(INITIAL_PROBSITE_VALUE)) {
+            		initial_probSiteValue = xo.getDoubleAttribute(INITIAL_PROBSITE_VALUE);
+            	}            	
+
+            	
+
+
+            	
+            	
+        		int startBase = 0;
+            	if (xo.hasAttribute(STARTBASE)) {
+            		startBase = xo.getIntegerAttribute(STARTBASE) - 1; //minus 1 because index begins at 0
+            	}
+        		
+        		int endBase = -1;
+            	if (xo.hasAttribute(ENDBASE)) {
+            		endBase = xo.getIntegerAttribute(ENDBASE) -1 ; //minus 1 because index begins at 0
+            	}
+        		
+        		String gp_prior = "";
+            	if (xo.hasAttribute(GP_PRIOR_OPTION)) {
+            		gp_prior = xo.getStringAttribute(GP_PRIOR_OPTION) ; //minus 1 because index begins at 0
+            	}
+            	
+            	
+                TreeModel treeModel = (TreeModel) xo.getChild(TreeModel.class);
+
+                XMLObject cxo = xo.getChild(CLUSTERLABELS);
+                Parameter clusterLabels = (Parameter) cxo.getChild(Parameter.class);
+
+                cxo = xo.getChild(CLUSTERLABELSTREENODE);
+                Parameter clusterLabelsTreeNode = (Parameter) cxo.getChild(Parameter.class);
+
+                
+                cxo = xo.getChild(MU);
+                MatrixParameter mu = (MatrixParameter) cxo.getChild(MatrixParameter.class);
+                
+                cxo=xo.getChild(VIRUS_LOCATIONS);
+                MatrixParameter virusLocations =(MatrixParameter) cxo.getChild(MatrixParameter.class);
+                
+                cxo = xo.getChild(VIRUS_LOCATIONSTREENODE);
+                MatrixParameter virusLocationsTreeNode =(MatrixParameter) cxo.getChild(MatrixParameter.class);
+                
+                cxo = xo.getChild(INDICATORS);
+                Parameter indicators = (Parameter) cxo.getChild(Parameter.class);
+  
+                cxo = xo.getChild(SITEINDICATORS);
+                Parameter siteIndicators = null;
+                if(cxo != null){
+                  siteIndicators = (Parameter) cxo.getChild(Parameter.class);
+                }
+                
+                cxo = xo.getChild(PROBSITES);
+                Parameter probSites = null;
+                if(cxo != null){
+                	probSites = (Parameter) cxo.getChild(Parameter.class);
+                }
+                
+                cxo = xo.getChild(MUPRECISION);
+                Parameter muPrecision = (Parameter) cxo.getChild(Parameter.class);
+                
+                cxo = xo.getChild(PROBACTIVENODE);
+                Parameter probActiveNode = (Parameter) cxo.getChild(Parameter.class);
+                
+		        boolean hasDrift = false;
+		
+		        cxo = xo.getChild(MUMEAN);
+		        Parameter muMean = (Parameter) cxo.getChild(Parameter.class);
+		        
+		        return new TreeClusteringVirusesPrior(treeModel, indicators, clusterLabels, clusterLabelsTreeNode, mu, hasDrift, virusLocations, virusLocationsTreeNode, muPrecision, probActiveNode, initialK, muMean, probSites, siteIndicators,
+		        		startBase, endBase, gp_prior, prob00, prob11, initial_probSiteValue); 
+            }
+
+            //************************************************************************
+            // AbstractXMLObjectParser implementation
+            //************************************************************************
+
+            public String getParserDescription() {
+                return "tree clustering viruses";
+            }
+
+            public Class getReturnType() {
+                return TreeClusteringVirusesPrior.class;
+            }
+
+            public XMLSyntaxRule[] getSyntaxRules() {
+                return rules;
+            }
+            
+            
+            private final XMLSyntaxRule[] rules = {
+                    //AttributeRule.newDoubleRule(MUVARIANCE, true, "the variance of mu"),
+                    //AttributeRule.newDoubleRule(PROBACTIVENODE, true, "the prior probability of turning on a node"),
+            		AttributeRule.newDoubleRule(INITIALNUMCLUSTERS, true, "the initial number of clusters"),
+            		AttributeRule.newDoubleRule(STARTBASE, true, "the start base in the sequence to consider in the genotype to phenotype model"),
+            		AttributeRule.newIntegerRule(ENDBASE, true, "the end base in the sequence to consider in the genotype to phenotype model"),
+            		AttributeRule.newIntegerRule(STARTBASE, true, "the start base in the sequence to consider in the genotype to phenotype model"),
+            		AttributeRule.newDoubleRule(PROB00, true, "correlated prior - the probability of staying at state 0 for adjacent siteIndicator"),
+            		AttributeRule.newDoubleRule(PROB11, true, "correlated prior - the probability of staying at state 1 for adjacent siteIndicator"),
+            		AttributeRule.newStringRule(GP_PRIOR_OPTION, true, "specifying the prior for probSites (and siteIndicators)"),
+            		AttributeRule.newDoubleRule(INITIAL_PROBSITE_VALUE, true, "the initial value of the probSite"),
+            		
+                    new ElementRule(EXCISIONPOINTS, Parameter.class),
+                    new ElementRule(CLUSTERLABELS, Parameter.class),
+                    new ElementRule(CLUSTERLABELSTREENODE, Parameter.class),
+                    new ElementRule(VIRUS_LOCATIONSTREENODE, MatrixParameter.class),
+                    new ElementRule(MU, MatrixParameter.class),
+                 //   new ElementRule(OFFSETS, Parameter.class),
+                    new ElementRule(VIRUS_LOCATIONS, MatrixParameter.class), 
+                    new ElementRule(INDICATORS, Parameter.class),
+                    //make it so that it isn't required
+                    new ElementRule(SITEINDICATORS, Parameter.class, "the indicator of a site having probability greater than 0 of being associated with antigenic transition", true),
+                    new ElementRule(PROBSITES, Parameter.class, "the probability that mutation on a site is associated with antigenic transition", true),
+                    new ElementRule(TreeModel.class),
+                    new ElementRule(MUPRECISION, Parameter.class),
+                    new ElementRule(PROBACTIVENODE, Parameter.class),
+                    new ElementRule(MUMEAN, Parameter.class)
+            };
+            
+    };
+
+    String Atribute = null;
+
+
+	public int getNumSites() {
+		return numSites;
+	}
+	public int getStartBase() {
+		return startBase;
+	}
+	public int getEndBase() {
+		// TODO Auto-generated method stub
+		return endBase;
+	}
+        
+}
+
+
+
+
+
+
+
+
+
+
+
+
+//load initial serum location - load the last line
+//OBSOLETE WITH THE NEW indicators	
+/*
+private void loadBreakpoints() {
+
+		FileReader fileReader2;
+		try {
+			
+			//fileReader2 = new FileReader("/Users/charles/Documents/researchData/clustering/output/test23/run4/H3N2_mds.breakPoints.log");
+			
+			fileReader2 = new FileReader("/Users/charles/Documents/researchData/clustering/output/test25/run5/H3N2_mds.breakpoints.log");
+		      BufferedReader bReader2 = new BufferedReader( fileReader2);
+
+		      String line = null;
+	
+		      //skip to the last line
+		      String testLine;
+		      while ((testLine = bReader2.readLine()) != null){
+		    	  line = testLine;
+		      }
+
+		      System.out.println(line);
+		      
+		      String datavalue[] = line.split("\t");
+
+		      
+		       //   System.out.println(serumLocationsParameter.getParameterCount());
+		      for (int i = 0; i < treeModel.getNodeCount(); i++) {
+		    	  
+		    	  breakPoints.setParameterValue(i, Double.parseDouble(datavalue[i+1]));
+		    	 // System.out.println(datavalue[i*2+1]);
+		    	  System.out.println("indicators=" + breakPoints.getParameterValue(i));
+		   	  
+		      }
+		      bReader2.close();
+		
+		} catch (FileNotFoundException e) {
+			// TODO Auto-generated catch block
+			e.printStackTrace();
+		} catch (IOException e) {
+			// TODO Auto-generated catch block
+			e.printStackTrace();
+		}        
+  
+	
+
+}
+
+*/
+ 
+	
+//OBSOLETE WITH INDICATORS..
+//load initial serum location - load the last line
+/*
+private void loadStatus() {
+
+		FileReader fileReader2;
+		try {
+			//fileReader2 = new FileReader("/Users/charles/Documents/research/antigenic/GenoPheno/Gabriela/results/initialCondition/H3N2.serumLocs.log");
+			fileReader2 = new FileReader("/Users/charles/Documents/researchData/clustering/output/test23/run4/H3N2_mds.status.log");
+			
+		      BufferedReader bReader2 = new BufferedReader( fileReader2);
+
+		      String line = null;
+	
+		      //skip to the last line
+		      String testLine;
+		      while ((testLine = bReader2.readLine()) != null){
+		    	  line = testLine;
+		      }
+
+		      System.out.println(line);
+		      
+		      String datavalue[] = line.split("\t");
+
+		      
+		       //   System.out.println(serumLocationsParameter.getParameterCount());
+		      for (int i = 0; i < binSize; i++) {
+		    	  
+		    	  indicators.setParameterValue(i, Double.parseDouble(datavalue[i+1]));
+		    	 // System.out.println(datavalue[i*2+1]);
+		    	  System.out.println("excisionPoints=" + indicators.getParameterValue(i));
+		   	  
+		      }
+		      bReader2.close();
+		
+		} catch (FileNotFoundException e) {
+			// TODO Auto-generated catch block
+			e.printStackTrace();
+		} catch (IOException e) {
+			// TODO Auto-generated catch block
+			e.printStackTrace();
+		}        
+  
+	
+
+}
+*/
+
+
+
+/*
+
+
+
+
+
+
+
+  
+private void preClustering() {
+	
+	
+	int numNodes = treeModel.getNodeCount(); //need to re-evaluate
+	
+	int []isOccupied = new int[numNodes];
+	 //initial bag:
+	   //int numNonZeroIndicators = 0;
+	   //nonZeroIndexes = new int[numNodes]; //another variable for quick access of the indexes that are turned on - only the first numNonZeroIndicators are meaningful
+	   
+	   //assumption: numOn << than the numNodes
+	   for(int k=0; k< treeModel.getNodeCount(); k++){
+	   	//sample another one.
+	   	 int sampleNew = 1;
+	   	 while(sampleNew ==1){
+	   		int rSiteIndex = (int) Math.floor( Math.random()*numNodes );
+	   		if(isOccupied[rSiteIndex] == 0){
+	   			//success sampling
+	   			//System.out.println("rSiteIndex "+  rSiteIndex);
+	   			indicators.setParameterValue( rSiteIndex, 1);
+	   			isOccupied[rSiteIndex] = 1;
+	   			//nonZeroIndexes[numNonZeroIndicators] = rSiteIndex;
+	   			//numNonZeroIndicators++; // be filled to be equal to the numOn
+	   			sampleNew = 0;
+	   		}
+	   	 }
+	   }
+	   
+
+	   int K_int = (int) K.getParameterValue(0);
+	   for(int k=0; k< (K_int-1); k++){
+		   	//sample another one.
+		   	 int sampleNew = 1;
+		   	 while(sampleNew ==1){
+		   		int rSiteIndex = (int) Math.floor( Math.random()*numNodes );
+		   		if(indicators.getParameterValue(rSiteIndex) == 0){
+		   			//success sampling
+		   			indicators.setParameterValue(rSiteIndex , 1);
+		   			//nonZeroIndexes[numNonZeroIndicators] = rSiteIndex;
+		   			//numNonZeroIndicators++; // be filled to be equal to the numOn
+		   			sampleNew = 0;
+		   		}
+
+		   	 }
+		 }
+		   
+	   //for(int i=0; i < binSize; i++){
+		//   System.out.println("excision point = " + excisionPoints.getParameterValue(i));
+	   //}
+
+	   //for(int i=0; i < numNonZeroIndicators; i++){
+	   	//System.out.println(nonZeroIndexes[i]);	
+	   //}
+
+	
+//       NodeRef node = treeModel.getRoot();
+       
+      // if(treeModel.isExternal(node)){
+    //	   System.out.println("External node");
+     //  }
+      // else{
+    //	   System.out.println("Internal node");
+     //  }
+	
+       
+       int[] cutNodes = new int[K_int];
+	   int cutNum = 0;
+       for(int i=0; i < numNodes; i++){
+    	   if( (int) indicators.getParameterValue( i ) ==1 ){
+    		   cutNodes[cutNum] =  i;
+    		   cutNum++;
+    	   }
+    	  
+       }
+         
+   //    for(int i=0; i < K_int; i++){
+   // 	   System.out.println(cutNodes[i]);
+    //   }
+       
+       int []membership = determine_membership(treeModel, cutNodes, K_int-1);                           
+       
+    //   System.out.println("number of nodes = " + treeModel.getNodeCount());
+     //  for(int i=0; i < treeModel.getNodeCount(); i++){
+    //	   System.out.println(membership[i]);
+     //  }
+       
+       
+       //System.out.println("Done");
+       
+     //  for(int i=0; i < numdata; i++){
+	//	   Parameter v = virusLocations.getParameter(i);
+	//	   String curName = v.getParameterName();
+	//	   System.out.println("i=" + i + " = " + curName);       
+	//	}       
+       
+     //  for(int j=0; j < numdata; j++){
+    //	   System.out.println("j=" + j + " = " + treeModel.getTaxonId(j));
+     //  }
+       
+       
+	//   Parameter vv = virusLocations.getParameter(0);
+	 //  String curNamev = vv.getParameterName();
+	   
+	 //  System.out.println(curNamev + " and " +treeModel.getTaxonId(392) );
+	   //System.out.println(  curNamev.equals(treeModel.getTaxonId(392) )  );
+	   
+       
+       //System.exit(0);
+       
+	  // System.out.println("numNodes=" + numNodes);
+	  // System.exit(0);
+       //create dictionary:
+	   
+	   //I suspect this is an expensive operation, so I don't want to do it many times,
+	   //which is also unnecessary
+       int []membershipToClusterLabelIndexes = new int[numdata]; 
+       for(int i=0; i < numdata; i++){
+		   Parameter v = virusLocations.getParameter(i);
+		   String curName = v.getParameterName();
+		  // System.out.println(curName);
+		   int isFound = 0;
+    	   for(int j=0; j < numNodes; j++){
+    		   String treeId = treeModel.getTaxonId(j);
+    		   if(curName.equals(treeId) ){
+    		//	   System.out.println("  isFound at j=" + j);
+    			   membershipToClusterLabelIndexes[i] = j;
+    			   isFound=1;
+    			   break;
+    		   }
+    		   
+    	   }
+    	   if(isFound ==0){
+    		   System.out.println("not found. Exit now.");
+    		   System.exit(0);
+    	   }
+       }
+       
+       
+      // System.exit(0);
+       
+     //  for(int i=0; i < numdata; i++){
+    //	   System.out.println(membershipToClusterLabelIndexes[i]);
+     //  }
+      // System.exit(0);
+       
+       for(int i=0; i < numdata; i++){
+    	   //The assumption that the first nodes being external node corresponding to the cluster labels IS FALSE
+    	   //so I have to search for the matching indexes
+    	   Parameter vloc = virusLocations.getParameter(i);
+    	   
+    	   clusterLabels.setParameterValue( i, membership[membershipToClusterLabelIndexes[i]]);
+    //	   System.out.println(vloc.getParameterName() + " i="+ i + " membership=" + (int) clusterLabels.getParameterValue(i));
+    	   
+    	 //  Parameter v = virusLocations.getParameter(i);
+    	  // System.out.println(v.getParameterName());
+       }
+       
+       
+   //    System.out.println("Exit now");
+   //    System.exit(0);
+       
+
+	int numViruses = offsets.getSize();
+	System.out.println("# offsets = " + offsets.getSize());
+	//for(int i=0; i < offsets.getSize(); i++){
+		//System.out.println(offsets.getParameterValue(i));
+	//}
+	
+	
+	//Need a routine to convert the membership back to clusterlabels for external nodes..
+
+	
+	int maxLabel=0;
+	for(int i=0;i< numdata; i++){
+		if(maxLabel < (int) clusterLabels.getParameterValue(i)){
+			maxLabel = (int) clusterLabels.getParameterValue(i);
+		}
+	}
+	
+	
+	//now, change the mu..
+	for(int i=0; i <= maxLabel; i++){
+		//System.out.println(meanYear[i]*beta);
+		//mu.getParameter(i).setParameterValue(0, meanYear[i]*beta);//now separate out mu from virusLocation
+		mu.getParameter(i).setParameterValue(0, 0);
+		mu.getParameter(i).setParameterValue(1, 0);
+	}	
+	
+	
+	//System.exit(0);
+
+*/
\ No newline at end of file
diff --git a/src/dr/evomodel/antigenic/phyloClustering/Tree_Clustering_Shared_Routines.java b/src/dr/evomodel/antigenic/phyloClustering/Tree_Clustering_Shared_Routines.java
new file mode 100644
index 0000000..3666e8f
--- /dev/null
+++ b/src/dr/evomodel/antigenic/phyloClustering/Tree_Clustering_Shared_Routines.java
@@ -0,0 +1,224 @@
+package dr.evomodel.antigenic.phyloClustering;
+
+import java.util.LinkedList;
+
+import dr.evolution.tree.NodeRef;
+import dr.evomodel.tree.TreeModel;
+import dr.inference.model.MatrixParameter;
+import dr.inference.model.Parameter;
+
+
+
+public class Tree_Clustering_Shared_Routines {
+
+	
+	public static int[] setMembershipTreeToVirusIndexes(int numdata, MatrixParameter virusLocations, int numNodes, TreeModel treeModel ){
+
+	  	   //I suspect this is an expensive operation, so I don't want to do it many times,
+	  	   //which is also unnecessary  - MAY have to update whenever a different tree is used.
+	         int[] correspondingTreeIndexForVirus = new int[numdata]; 
+	         for(int i=0; i < numdata; i++){
+	  		   Parameter v = virusLocations.getParameter(i);
+	  		   String curName = v.getParameterName();
+	  		  // System.out.println(curName);
+	  		   int isFound = 0;
+	      	   for(int j=0; j < numNodes; j++){
+	      		   String treeId = treeModel.getTaxonId(j);
+	      		   if(curName.equals(treeId) ){
+	      		//	   System.out.println("  isFound at j=" + j);
+	      			   correspondingTreeIndexForVirus[i] = j;
+	      			   isFound=1;
+	      			   break;
+	      		   }	   
+	      	   }
+	      	   if(isFound ==0){
+	      		   System.out.println("not found. Exit now.");
+	      		   System.exit(0);
+	      	   }     	   
+	         }
+	         
+	         return(correspondingTreeIndexForVirus);
+	    }
+	
+	public static void updateUndriftedVirusLocations(int numNodes, int numdata, TreeModel treeModel, MatrixParameter virusLocationsTreeNode, Parameter indicators, MatrixParameter mu, 		    MatrixParameter virusLocations, int[] correspondingTreeIndexForVirus){
+			double[][] nodeloc = new double[numNodes][2];
+
+			//process the tree and get the vLoc of the viruses..
+			//breadth first depth first..
+			NodeRef cNode = treeModel.getRoot();
+		    LinkedList<NodeRef> visitlist = new LinkedList<NodeRef>();
+		    
+		    visitlist.add(cNode);
+		    
+		    int countProcessed=0;
+		    while(visitlist.size() > 0){
+		    	countProcessed++;
+		    	//assign value to the current node...
+		    	if(treeModel.getParent(cNode) == null){  //this means it is a root node
+		    		Parameter curMu = mu.getParameter( cNode.getNumber() );
+		    		//Parameter curMu = mu.getParameter(0);
+		    		nodeloc[cNode.getNumber()][0]  = curMu.getParameterValue(0);
+		    		nodeloc[cNode.getNumber() ][1] = curMu.getParameterValue(1);
+		    		
+		    		
+		    		Parameter curVirusLoc = virusLocationsTreeNode.getParameter(cNode.getNumber());
+		    		curVirusLoc.setParameterValue(0, curMu.getParameterValue(0) );
+		    		curVirusLoc.setParameterValue(1, curMu.getParameterValue(1) );
+		    	}
+		    	else{
+		    		nodeloc[cNode.getNumber()][0] =   nodeloc[treeModel.getParent(cNode).getNumber()][0];
+		    		nodeloc[cNode.getNumber()][1] =   nodeloc[treeModel.getParent(cNode).getNumber()][1];
+		    		
+		    		if( (int) indicators.getParameterValue(cNode.getNumber()) == 1){
+		    			Parameter curMu = mu.getParameter(cNode.getNumber() ); // no +1 because I don't need another mu- the root's mu takes care of the first cluster's mu 
+			    		//Parameter curMu = mu.getParameter(cNode.getNumber() +1); //+1 because mu0 is reserved for the root.
+		    			nodeloc[cNode.getNumber()][0] += curMu.getParameterValue(0);
+		    			nodeloc[cNode.getNumber()][1] += curMu.getParameterValue(1);	  			    			
+		    		}
+		    		
+		    		Parameter curVirusLoc = virusLocationsTreeNode.getParameter(cNode.getNumber());
+		    		curVirusLoc.setParameterValue(0, nodeloc[cNode.getNumber()][0] );
+		    		curVirusLoc.setParameterValue(1,nodeloc[cNode.getNumber()][1] );
+		    	}
+		    	
+		    	//add all the children to the queue
+	  			for(int childNum=0; childNum < treeModel.getChildCount(cNode); childNum++){
+	  				NodeRef node= treeModel.getChild(cNode,childNum);
+	  				visitlist.add(node);
+	  	        }
+	  			
+		  			
+		  		visitlist.pop(); //now that we have finished visiting this node, pops it out of the queue
+	
+	  			if(visitlist.size() > 0){
+	  				cNode = visitlist.getFirst(); //set the new first node in the queue to visit
+	  			}
+	  			
+  			
+		}
+
+		    //write the virus locations
+		    for(int i=0; i < numdata; i++){
+		    	Parameter vLocParameter = virusLocations.getParameter(i);
+		    	vLocParameter.setParameterValue(0, nodeloc[correspondingTreeIndexForVirus[i]][0]);
+		    	vLocParameter.setParameterValue(1, nodeloc[correspondingTreeIndexForVirus[i]][1]);
+		    }
+			
+		    
+		    //for(int i=0; i < numdata; i++){
+				//Parameter vLocP= virusLocations.getParameter(i);
+		    	//System.out.println("virus " + vLocP.getId() + "\t" + vLocP.getParameterValue(0) + "," + vLocP.getParameterValue(1)  );	  			    	
+		    //}
+		    	
+	}
+
+	
+	
+	
+	
+	//may be very inefficient
+	public static int findAnOnNodeIncludingRootRandomly(int numNodes, Parameter indicators) {
+    	int isOn= 0;
+    	int I_selected = -1;
+  		while(isOn ==0){
+  			I_selected = (int) (Math.floor(Math.random()*numNodes));
+  			isOn = (int) indicators.getParameterValue(I_selected);  			
+  		}    	  		
+  		
+    	return I_selected;
+	}
+
+	
+	
+	//Copied from TreeClusterAlgorithm  - should have put into the shared class...
+
+
+	public static LinkedList<Integer> findActiveBreakpointsChildren(int selectedNodeNumber, int numNodes, TreeModel treeModel, Parameter indicators) {
+			
+			//a list of breakpoints...
+			
+			LinkedList<Integer> linkedList = new LinkedList<Integer>();
+			int[] nodeBreakpointNumber = new int[numNodes];
+						
+			//int[] nodeStatus = new int[numNodes];
+			//for(int i=0; i < numNodes; i ++){
+			//	nodeStatus[i] = -1;
+			//}
+			
+			//convert to easy process format.
+			//for(int i=0; i < (binSize ); i++){
+			//	if((int) indicators.getParameterValue(i) ==1){
+			//		  nodeStatus[(int)breakPoints.getParameterValue(i)] = i;
+			//	}
+			//}
+			
+			//process the tree and get the vLoc of the viruses..
+			//breadth first depth first..
+			NodeRef cNode = treeModel.getRoot();
+		    LinkedList<NodeRef> visitlist = new LinkedList<NodeRef>();
+
+		    
+		    visitlist.add(cNode);
+		    
+		    
+		    //I am not sure if it still works......
+		    
+		    int countProcessed=0;
+		    while(visitlist.size() > 0){
+		    	
+		    	
+		    	countProcessed++;
+		    	//assign value to the current node...
+		    	if(treeModel.getParent(cNode) == null){
+		    		//Parameter curMu = mu.getParameter(0);
+		    		nodeBreakpointNumber[cNode.getNumber()] =   cNode.getNumber();
+		    	}
+		    	else{
+		    		nodeBreakpointNumber[cNode.getNumber()] =   nodeBreakpointNumber[treeModel.getParent(cNode).getNumber()];
+		    		//System.out.println("node#" + cNode.getNumber() + " is " + nodeBreakpointNumber[cNode.getNumber()]); 
+
+		    		if( (int) indicators.getParameterValue(cNode.getNumber()) == 1){
+		    			//System.out.println(cNode.getNumber() + " is a break point");
+			    		//Parameter curMu = mu.getParameter(cNode.getNumber() +1); //+1 because mu0 is reserved for the root.
+		    			//Parameter curMu = mu.getParameter(cNode.getNumber() ); //+1 because mu0 is reserved for the root.
+			    		
+			    		//see if parent's status is the same as the selectedIndex
+			    		if( nodeBreakpointNumber[cNode.getNumber()] ==   selectedNodeNumber ){
+			    			//System.out.println("hihi");
+			    			linkedList.add( cNode.getNumber() );
+			    		}
+			    		//now, replace this nodeBreakpointNumber with its own node number
+			    		nodeBreakpointNumber[cNode.getNumber()] = cNode.getNumber();
+			    				    			  			    			
+		    		}
+		    	}
+		    	
+		    	
+		    	//add all the children to the queue
+	  			for(int childNum=0; childNum < treeModel.getChildCount(cNode); childNum++){
+	  				NodeRef node= treeModel.getChild(cNode,childNum);
+	  				visitlist.add(node);
+	  	        }
+	  			
+		  			
+		  		visitlist.pop(); //now that we have finished visiting this node, pops it out of the queue
+
+	  			if(visitlist.size() > 0){
+	  				cNode = visitlist.getFirst(); //set the new first node in the queue to visit
+	  			}
+	  			
+				
+		    }
+		    
+		    //System.out.println("Now printing children of "  + selectedNodeNumber+":");
+			//for(int i=0; i < linkedList.size(); i++){
+			//	System.out.println( linkedList.get(i)  );
+			//}
+			
+			return linkedList;
+		}
+
+		
+
+	
+}
diff --git a/src/dr/evomodel/antigenic/phyloClustering/misc/mergeAdjacencyMatrixPlots.java b/src/dr/evomodel/antigenic/phyloClustering/misc/mergeAdjacencyMatrixPlots.java
new file mode 100644
index 0000000..7035213
--- /dev/null
+++ b/src/dr/evomodel/antigenic/phyloClustering/misc/mergeAdjacencyMatrixPlots.java
@@ -0,0 +1,276 @@
+package dr.evomodel.antigenic.phyloClustering.misc;
+
+import java.io.BufferedReader;
+import java.io.BufferedWriter;
+import java.io.FileNotFoundException;
+import java.io.FileReader;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.util.LinkedList;
+import java.util.ListIterator;
+
+public class mergeAdjacencyMatrixPlots {
+
+    public static void main(String[] args) {
+    	
+    	int NUM_BURNINs = 250;
+
+    	// 	    	/Users/charles/Documents/research/antigenic/GenoPheno/driver/clustering/analysisManuscript2-12-2015/H1N1/adjacencyMatrices/
+
+    	//int numdata = 363;
+
+ 		FileReader fileReader2;
+
+ 		try {	
+ 			
+ 	    	int numdata = 115; // H1N1
+
+ 			String input="/Users/charles/Documents/researchData/clustering/forManuscripts-moreReplicates/H1N1/mds0_8/H1N1_mds.clusterLabels.log";
+ 			String output="/Users/charles/Documents/research/antigenic/GenoPheno/driver/clustering/analysisManuscript7-31-2015/H1N1/adjacencyMatrices/H1N1-C3b-mds0_8-adjacencyMatrix.txt";
+ 	    	
+ 	    	
+ 		//	String input="/Users/charles/Documents/researchData/clustering/forManuscripts-moreReplicates/H1N1/mds0_7/H1N1_mds.clusterLabels.log";
+ 		//	String output="/Users/charles/Documents/research/antigenic/GenoPheno/driver/clustering/analysisManuscript7-31-2015/H1N1/adjacencyMatrices/H1N1-C3b-mds0_7-adjacencyMatrix.txt";
+ 	    	
+ 	    	
+ 		//	String input="/Users/charles/Documents/researchData/clustering/forManuscripts-moreReplicates/H1N1/mds0_6/H1N1_mds.clusterLabels.log";
+ 		//	String output="/Users/charles/Documents/research/antigenic/GenoPheno/driver/clustering/analysisManuscript7-31-2015/H1N1/adjacencyMatrices/H1N1-C3b-mds0_6-adjacencyMatrix.txt";
+
+ 	    	
+ 		//	String input="/Users/charles/Documents/researchData/clustering/forManuscripts-moreReplicates/H1N1/mds0_5/H1N1_mds.clusterLabels.log";
+ 		//	String output="/Users/charles/Documents/research/antigenic/GenoPheno/driver/clustering/analysisManuscript7-31-2015/H1N1/adjacencyMatrices/H1N1-C3b-mds0_5-adjacencyMatrix.txt";
+
+ 		//	String input="/Users/charles/Documents/researchData/clustering/forManuscripts-moreReplicates/H1N1/mds0_4/H1N1_mds.clusterLabels.log";
+ 		//	String output="/Users/charles/Documents/research/antigenic/GenoPheno/driver/clustering/analysisManuscript7-31-2015/H1N1/adjacencyMatrices/H1N1-C3b-mds0_4-adjacencyMatrix.txt";
+
+ 	    	
+ 			//String input="/Users/charles/Documents/researchData/clustering/forManuscript/H1N1/C3b-mds0_1/H1N1_mds.clusterLabels.log";
+ 			//String output="/Users/charles/Documents/research/antigenic/GenoPheno/driver/clustering/analysisManuscript2-12-2015/H1N1/adjacencyMatrices/H1N1-C3b-mds0_1-adjacencyMatrix.txt";
+ 			
+ 	    	//String input="/Users/charles/Documents/researchData/clustering/forManuscript/H1N1/C3b-mds0_3/H1N1_mds.clusterLabels.log";
+ 			//String output="/Users/charles/Documents/research/antigenic/GenoPheno/driver/clustering/analysisManuscript2-12-2015/H1N1/adjacencyMatrices/H1N1-C3b-mds0_3-adjacencyMatrix.txt";
+
+ 	    	//String input="/Users/charles/Documents/researchData/clustering/forManuscript/H1N1/C3b/H1N1_mds.clusterLabels.log";
+ 			//String output="/Users/charles/Documents/research/antigenic/GenoPheno/driver/clustering/analysisManuscript2-12-2015/H1N1/adjacencyMatrices/H1N1-C3b-adjacencyMatrix.txt";	    	
+
+ 	    	//String input="/Users/charles/Documents/researchData/clustering/forManuscript/H1N1/C3b-mds0_1-sample2/H1N1_mds.clusterLabels.log";
+ 			//String output="/Users/charles/Documents/research/antigenic/GenoPheno/driver/clustering/analysisManuscript2-12-2015/H1N1/adjacencyMatrices/H1N1-C3b-mds0_1-sample2-adjacencyMatrix.txt";
+ 			//String input="/Users/charles/Documents/researchData/clustering/forManuscript/H1N1/C3b-mds0_1-sample3/H1N1_mds.clusterLabels.log";
+ 			//String output="/Users/charles/Documents/research/antigenic/GenoPheno/driver/clustering/analysisManuscript2-12-2015/H1N1/adjacencyMatrices/H1N1-C3b-mds0_1-sample3-adjacencyMatrix.txt";
+ 			//String input="/Users/charles/Documents/researchData/clustering/forManuscript/H1N1/C3b-mds0_1-sample4/H1N1_mds.clusterLabels.log";
+ 			//String output="/Users/charles/Documents/research/antigenic/GenoPheno/driver/clustering/analysisManuscript2-12-2015/H1N1/adjacencyMatrices/H1N1-C3b-mds0_1-sample4-adjacencyMatrix.txt";
+ 	    	//String input="/Users/charles/Documents/researchData/clustering/forManuscript/H1N1/C3b-mds0_1-sample5/H1N1_mds.clusterLabels.log";
+ 			//String output="/Users/charles/Documents/research/antigenic/GenoPheno/driver/clustering/analysisManuscript2-12-2015/H1N1/adjacencyMatrices/H1N1-C3b-mds0_1-sample5-adjacencyMatrix.txt";
+ 	    	//String input="/Users/charles/Documents/researchData/clustering/forManuscript/H1N1/C3b-mds0_1-sample6/H1N1_mds.clusterLabels.log";
+ 			//String output="/Users/charles/Documents/research/antigenic/GenoPheno/driver/clustering/analysisManuscript2-12-2015/H1N1/adjacencyMatrices/H1N1-C3b-mds0_1-sample6-adjacencyMatrix.txt";
+
+ 			
+ 	    //	int numdata = 402; //H3N2
+			//String input="/Users/charles/Documents/researchData/clustering/forManuscript/H3N2/C3b-mds0_1/H3N2_mds.clusterLabels.log";
+ 			//String output="/Users/charles/Documents/research/antigenic/GenoPheno/driver/clustering/analysisManuscript2-12-2015/H3N2/adjacencyMatrices/H3N2-C3b-mds0_1-adjacencyMatrix.txt";
+ 			
+ 			//String input="/Users/charles/Documents/researchData/clustering/forManuscript/H3N2/C3b-mds0_05/H3N2_mds.clusterLabels.log";
+ 			//String output="/Users/charles/Documents/research/antigenic/GenoPheno/driver/clustering/analysisManuscript2-12-2015/H3N2/adjacencyMatrices/H3N2-C3b-mds0_05-adjacencyMatrix.txt";
+			//String input="/Users/charles/Documents/researchData/clustering/forManuscript/H3N2/C3b-mds0_15/H3N2_mds.clusterLabels.log";
+ 			//String output="/Users/charles/Documents/research/antigenic/GenoPheno/driver/clustering/analysisManuscript2-12-2015/H3N2/adjacencyMatrices/H3N2-C3b-mds0_15-adjacencyMatrix.txt";
+			//String input="/Users/charles/Documents/researchData/clustering/forManuscript/H3N2/C3b/H3N2_mds.clusterLabels.log";
+ 			//String output="/Users/charles/Documents/research/antigenic/GenoPheno/driver/clustering/analysisManuscript2-12-2015/H3N2/adjacencyMatrices/H3N2-C3b-adjacencyMatrix.txt";
+
+ 		
+ 			
+ 			
+ 	    	//String input="/Users/charles/Documents/researchData/clustering/forManuscript/H3N2/C3b-mds0_05-sample2/H3N2_mds.clusterLabels.log";
+ 	    	//String output="/Users/charles/Documents/research/antigenic/GenoPheno/driver/clustering/analysisManuscript2-12-2015/H3N2/adjacencyMatrices/H3N2-C3b-mds0_05-sample2-adjacencyMatrix.txt";
+ 			//String input="/Users/charles/Documents/researchData/clustering/forManuscript/H3N2/C3b-mds0_05-sample3/H3N2_mds.clusterLabels.log";
+ 			//String output="/Users/charles/Documents/research/antigenic/GenoPheno/driver/clustering/analysisManuscript2-12-2015/H3N2/adjacencyMatrices/H3N2-C3b-mds0_05-sample3-adjacencyMatrix.txt";
+ 			//String input="/Users/charles/Documents/researchData/clustering/forManuscript/H3N2/C3b-mds0_05-sample4/H3N2_mds.clusterLabels.log";
+ 			//String output="/Users/charles/Documents/research/antigenic/GenoPheno/driver/clustering/analysisManuscript2-12-2015/H3N2/adjacencyMatrices/H3N2-C3b-mds0_05-sample4-adjacencyMatrix.txt";
+ 			//String input="/Users/charles/Documents/researchData/clustering/forManuscript/H3N2/C3b-mds0_05-sample5/H3N2_mds.clusterLabels.log";
+ 			//String output="/Users/charles/Documents/research/antigenic/GenoPheno/driver/clustering/analysisManuscript2-12-2015/H3N2/adjacencyMatrices/H3N2-C3b-mds0_05-sample5-adjacencyMatrix.txt";
+ 		//	String input="/Users/charles/Documents/researchData/clustering/forManuscript/H3N2/C3b-mds0_05-sample6/H3N2_mds.clusterLabels.log";
+ 		//	String output="/Users/charles/Documents/research/antigenic/GenoPheno/driver/clustering/analysisManuscript2-12-2015/H3N2/adjacencyMatrices/H3N2-C3b-mds0_05-sample6-adjacencyMatrix.txt";
+
+ 	    	
+ 			
+	
+ 	    	
+ 	    	
+ 	    	fileReader2 = new FileReader(input);
+ 			BufferedWriter outputWriter = new BufferedWriter(new FileWriter(output)); 
+
+
+ 			
+  			 // fileReader2 = new FileReader("/Users/charles/Documents/research/antigenic/GenoPheno/Gabriela/results/initialConditionWithInitialLocationDrift/H3N2ddCRP.log");
+ 			 //fileReader2 = new FileReader("/Users/charles/Documents/research/antigenic/GenoPheno/Gabriela/results/testModel/longRun/H3N2ddCRP.log");
+// 			fileReader2 = new FileReader("/Users/charles/Documents/research/antigenic/GenoPheno/Gabriela/results/testModel/30Mrun/H3N2ddCRP-1to6M.log");
+ 		//fileReader2 = new FileReader("/Users/charles/Documents/researchData/clustering/varyParameters/fixK20/H3N2_mds.clusterLabels.log");
+// 			  fileReader2 = new FileReader("/Users/charles/Documents/researchData/clustering/varyParameters-fullData/default/H3N2_mds.clusterLabels.log");
+ 			
+ 			 
+ 			//fileReader2 = new FileReader("/Users/charles/Documents/researchData/clustering/tmpDec18/H1N1/mds0_15/H1N1_mds.clusterLabels.log");
+ 			//BufferedWriter outputWriter = new BufferedWriter(new FileWriter("/Users/charles/Documents/research/antigenic/GenoPheno/driver/clustering/analysis/fluTypes/H1N1/summaryCooccurrence.txt")); 
+ 	    	//int numdata = 115; // H1N1
+
+ 			//fileReader2 = new FileReader("/Users/charles/Documents/researchData/clustering/tmpDec19/H1N1/mds0_3/H1N1_mds.clusterLabels.log");
+ 			//BufferedWriter outputWriter = new BufferedWriter(new FileWriter("/Users/charles/Documents/research/antigenic/GenoPheno/driver/clustering/analysis/fluTypes/H1N1/summaryCooccurrence-mds0_3.txt")); 
+ 	    	//int numdata = 115; // H1N1
+ 	    	
+ 	    	
+// 	    	fileReader2 = new FileReader("/Users/charles/Documents/researchData/clustering/tmpDec18/Vic/mds0_15-prior2/Vic_mds.clusterLabels.log");
+// 			BufferedWriter outputWriter = new BufferedWriter(new FileWriter("/Users/charles/Documents/research/antigenic/GenoPheno/driver/clustering/analysis/fluTypes/Vic/summaryCooccurrence.txt")); 
+// 	    	int numdata = 179; //Vic
+
+// 	    	fileReader2 = new FileReader("/Users/charles/Documents/researchData/clustering/tmpDec19/Vic/mds0_3-prior4/Vic_mds.clusterLabels.log");
+// 			BufferedWriter outputWriter = new BufferedWriter(new FileWriter("/Users/charles/Documents/research/antigenic/GenoPheno/driver/clustering/analysis/fluTypes/Vic/summaryCooccurrence-mds0_3-prior4.txt")); 
+// 	    	int numdata = 179; //Vic
+
+// 			fileReader2 = new FileReader("/Users/charles/Documents/researchData/clustering/tmpDec19/Vic/mds0_2-prior4/Vic_mds.clusterLabels.log");
+// 			BufferedWriter outputWriter = new BufferedWriter(new FileWriter("/Users/charles/Documents/research/antigenic/GenoPheno/driver/clustering/analysis/fluTypes/Vic/summaryCooccurrence-mds0_2-prior4.txt")); 
+// 	    	int numdata = 179; //Vic
+
+ 			
+ 			
+// 	    	fileReader2 = new FileReader("/Users/charles/Documents/researchData/clustering/tmpDec18/Yam/mds0_15-prior2/Yam_mds.clusterLabels.log");
+// 			BufferedWriter outputWriter = new BufferedWriter(new FileWriter("/Users/charles/Documents/research/antigenic/GenoPheno/driver/clustering/analysis/fluTypes/Yam/summaryCooccurrence.txt")); 
+// 	    	int numdata = 174; //Yam
+
+// 	    	fileReader2 = new FileReader("/Users/charles/Documents/researchData/clustering/tmpDec19/Yam/mds0_2-prior4/Yam_mds.clusterLabels.log");
+// 			BufferedWriter outputWriter = new BufferedWriter(new FileWriter("/Users/charles/Documents/research/antigenic/GenoPheno/driver/clustering/analysis/fluTypes/Yam/summaryCooccurrence-mds0_2-prior4.txt")); 
+// 	    	int numdata = 174; //Yam
+
+
+ 	    	
+
+// 	    	fileReader2 = new FileReader("/Users/charles/Documents/researchData/clustering/tmpDec18/H3N2/mds0_15/H3N2_mds.clusterLabels.log");
+// 			BufferedWriter outputWriter = new BufferedWriter(new FileWriter("/Users/charles/Documents/research/antigenic/GenoPheno/driver/clustering/analysis/fluTypes/H3N2/summaryCooccurrence_0_15.txt")); 
+// 	    	int numdata = 402; //H3N2
+	
+ 			
+//	    	fileReader2 = new FileReader("/Users/charles/Documents/researchData/clustering/tmpDec18/H3N2/mds0_1/H3N2_mds.clusterLabels.log");
+// 			BufferedWriter outputWriter = new BufferedWriter(new FileWriter("/Users/charles/Documents/research/antigenic/GenoPheno/driver/clustering/analysis/fluTypes/H3N2/summaryCooccurrence_0_1.txt")); 
+// 	    	int numdata = 402; //H3N2
+
+//	    	fileReader2 = new FileReader("/Users/charles/Documents/researchData/clustering/tmpDec18/H3N2/mds0_05/H3N2_mds.clusterLabels.log");
+// 			BufferedWriter outputWriter = new BufferedWriter(new FileWriter("/Users/charles/Documents/research/antigenic/GenoPheno/driver/clustering/analysis/fluTypes/H3N2/summaryCooccurrence_0_05.txt")); 
+// 	    	int numdata = 402; //H3N2
+			
+ 			
+ 	    	
+ 	    	//h3n2 full data doesn't fix mds
+	    	//fileReader2 = new FileReader("/Users/charles/Documents/researchData/clustering/varyParameters-fullData/default/H3N2_mds.clusterLabels.log");
+ 			//BufferedWriter outputWriter = new BufferedWriter(new FileWriter("/Users/charles/Documents/research/antigenic/GenoPheno/driver/clustering/analysis/fluTypes/H3N2/old_summaryCooccurrence.txt")); 
+ 	    	//int numdata = 402; //H3N2
+ 			
+ 			 
+// 			
+
+ 			  
+ 			  
+
+
+ 			 
+		      //print the matrix into a file
+		     
+		    //outputWriter = new BufferedWriter(new FileWriter("/Users/charles/Documents/research/antigenic/GenoPheno/Gabriela/results/initialConditionWithInitialLocationDrift/summarizeClusters.txt"));
+		    //outputWriter = new BufferedWriter(new FileWriter("/Users/charles/Documents/research/antigenic/GenoPheno/Gabriela/results/testModel/30Mrun/summarizeClusters1to6M.txt"));
+		    //outputWriter = new BufferedWriter(new FileWriter("/Users/charles/Documents/research/antigenic/GenoPheno/driver/clustering/analysis/fixK20/summaryCooccurence.txt"));
+		    //outputWriter = new BufferedWriter(new FileWriter("/Users/charles/Documents/research/antigenic/GenoPheno/driver/clustering/analysis/fullData-default/summaryCooccurence.txt"));
+
+ 			
+ 			 
+ 			 
+		      BufferedReader bReader2 = new BufferedReader( fileReader2); 		      
+ 		     int[][] coOccur = new int[numdata][numdata];
+ 		     //assume initializes to 0
+ 		      
+ 		     String line2;
+ 		      
+ 		      //this routine may give false results if there are extra lines with spaces
+ 		     
+ 		     //ignore the first few lines of the file
+ 		      line2 = bReader2.readLine();
+ 		      line2 = bReader2.readLine();
+ 		      line2 = bReader2.readLine();
+
+ 		      for(int i=0; i < NUM_BURNINs; i++){
+ 		    	 line2 = bReader2.readLine();
+ 		      }
+
+ 		      
+ 		      System.out.println(line2);
+ 		      
+ 		      while((line2 = bReader2.readLine())   != null){
+ 		      System.out.println(line2); 		      
+ 		      
+ 		      String datavalue2[] = line2.split("\t");
+ 		          
+ 		     LinkedList<Integer>[] clusterLL = new LinkedList[numdata];
+ 		    for (int i = 0; i < clusterLL.length; i++){
+ 		        clusterLL[i] = new LinkedList<Integer>();
+ 		    }
+ 		     
+ 		      for (int i = 0; i < numdata; i++) { 
+ 		    	 //put data into cluster bin linked list	
+ 		    	  int num =  (int) Double.parseDouble(datavalue2[i+2]);
+ 		    	 // System.out.println(num);
+ 		    	 clusterLL[num ].add(i);
+ 		      }
+ 		      for (int i = 0; i < numdata; i++) {
+ 		    	  //System.out.print("Bin=" + i + " ");
+ 		    	 int clusterSize = clusterLL[i].size();
+ 		    	 if(clusterSize >0){
+ 	 		    	int[] clusterArray = new int[clusterSize]; 
+ 		    		ListIterator<Integer> iter = clusterLL[i].listIterator();
+ 		    		for(int j=0; j < clusterSize; j++){
+ 		    			clusterArray[j] = iter.next();
+ 		    			//System.out.print(clusterArray[j] + " ");
+ 		    		}
+ 		    	 //System.out.println("");
+                 //for each cluster, increment each pair of points in the coocurrence matrix 		    	
+ 		    		for(int a=0; a< clusterSize; a++){
+ 		    		 for(int b=a; b< clusterSize; b++){
+ 		    			 coOccur[ clusterArray[a]][clusterArray[b]] ++;
+ 		    		 }
+ 		    		}
+ 		    	 }//clusterSize >0
+                //          
+
+                   
+ 		      }
+ 	 		 
+ 		      }//while
+ 		      bReader2.close();
+ 
+ 		      //write the lower triangle in the symmetric matrix
+ 		      for(int i=0; i< numdata; i++){
+ 		    	  for(int j=(i+1); j< numdata; j++){
+ 		 		     coOccur[j][i] = coOccur[i][j]; 		    		  
+ 		    	  }
+ 		      }
+
+ 		      
+
+ 		    for(int i=0; i < numdata; i++){
+ 		    	for(int j=0; j < numdata; j++){
+ 		    		outputWriter.write(coOccur[i][j] + " ");// Here I know i cant just write x[0] or anything. Do i need to loop in order to write the array?
+ 		    	}
+ 	 		    outputWriter.newLine();
+ 	 		    outputWriter.flush();  
+ 		    }
+
+ 		    outputWriter.close(); 
+ 		      
+ 		      
+ 		} catch (FileNotFoundException e) {
+ 			// TODO Auto-generated catch block
+ 			e.printStackTrace();
+ 		} catch (IOException e) {
+ 			// TODO Auto-generated catch block
+ 			e.printStackTrace();
+ 		}       
+    	
+
+    }
+	
+}
+
diff --git a/src/dr/evomodel/antigenic/AntigenicLikelihood.java b/src/dr/evomodel/antigenic/phyloClustering/misc/obsolete/AGLikelihoodCluster.java
similarity index 78%
copy from src/dr/evomodel/antigenic/AntigenicLikelihood.java
copy to src/dr/evomodel/antigenic/phyloClustering/misc/obsolete/AGLikelihoodCluster.java
index 351567f..cbaec30 100644
--- a/src/dr/evomodel/antigenic/AntigenicLikelihood.java
+++ b/src/dr/evomodel/antigenic/phyloClustering/misc/obsolete/AGLikelihoodCluster.java
@@ -1,29 +1,4 @@
-/*
- * AntigenicLikelihood.java
- *
- * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
- *
- * This file is part of BEAST.
- * See the NOTICE file distributed with this work for additional
- * information regarding copyright ownership and licensing.
- *
- * BEAST is free software; you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as
- * published by the Free Software Foundation; either version 2
- * of the License, or (at your option) any later version.
- *
- *  BEAST is distributed in the hope that it will be useful,
- *  but WITHOUT ANY WARRANTY; without even the implied warranty of
- *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
- *  GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public
- * License along with BEAST; if not, write to the
- * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
- * Boston, MA  02110-1301  USA
- */
-
-package dr.evomodel.antigenic;
+package dr.evomodel.antigenic.phyloClustering.misc.obsolete;
 
 import dr.evolution.util.*;
 import dr.inference.model.*;
@@ -33,12 +8,12 @@ import dr.math.distributions.NormalDistribution;
 import dr.util.*;
 import dr.xml.*;
 
-
 import java.io.*;
 import java.util.*;
 import java.util.logging.Logger;
 
 /**
+ * @author Charles Cheung
  * @author Andrew Rambaut
  * @author Trevor Bedford
  * @author Marc Suchard
@@ -50,12 +25,12 @@ import java.util.logging.Logger;
     Offset is set to 0 for the earliest virus and increasing with difference in date from earliest virus.
     This makes the raw virusLocations and serumLocations parameters not directly interpretable.
 */
-public class AntigenicLikelihood extends AbstractModelLikelihood implements Citable {
+public class AGLikelihoodCluster extends AbstractModelLikelihood implements Citable {
     private static final boolean CHECK_INFINITE = false;
     private static final boolean USE_THRESHOLDS = true;
     private static final boolean USE_INTERVALS = true;
 
-    public final static String ANTIGENIC_LIKELIHOOD = "antigenicLikelihood";
+    public final static String AG_LIKELIHOOD = "aglikelihoodcluster";
 
     // column indices in table
     private static final int VIRUS_ISOLATE = 0;
@@ -65,6 +40,8 @@ public class AntigenicLikelihood extends AbstractModelLikelihood implements Cita
     private static final int SERUM_STRAIN = 4;
     private static final int SERUM_DATE = 5;
     private static final int TITRE = 6;
+    
+
 
     public enum MeasurementType {
         INTERVAL,
@@ -73,7 +50,7 @@ public class AntigenicLikelihood extends AbstractModelLikelihood implements Cita
         MISSING
     }
 
-    public AntigenicLikelihood(
+    public AGLikelihoodCluster(
             int mdsDimension,
             Parameter mdsPrecisionParameter,
             Parameter locationDriftParameter,
@@ -90,16 +67,17 @@ public class AntigenicLikelihood extends AbstractModelLikelihood implements Cita
             DataTable<String[]> dataTable,
             boolean mergeSerumIsolates,
             double intervalWidth,
-            double driftInitialLocations) {
+            double driftInitialLocations, 
+            boolean clusterMeans,
+            Parameter clusterOffsetsParameter) {
 
-        super(ANTIGENIC_LIKELIHOOD);
+        super(AG_LIKELIHOOD);
 
         this.intervalWidth = intervalWidth;
         boolean useIntervals = USE_INTERVALS && intervalWidth > 0.0;
 
         int thresholdCount = 0;
-
-
+             
         double earliestDate = Double.POSITIVE_INFINITY;
         for (int i = 0; i < dataTable.getRowCount(); i++) {
 
@@ -127,7 +105,7 @@ public class AntigenicLikelihood extends AbstractModelLikelihood implements Cita
                 serumDates.add(serumDate);
                 serum = serumNames.size() - 1;
             }
-
+            
             boolean isThreshold = false;
             boolean isLowerThreshold = false;
             double rawTitre = Double.NaN;
@@ -250,8 +228,48 @@ public class AntigenicLikelihood extends AbstractModelLikelihood implements Cita
         logLikelihoods = new double[measurements.size()];
         storedLogLikelihoods = new double[measurements.size()];
 
+       // driftInitialLocations = 1; //charles added - now specified in the xml
         setupInitialLocations(driftInitialLocations);
-
+   //     loadInitialLocations(virusNames, serumNames);
+        
+        //System.out.println("Print now!");
+		//      for (int i = 0; i < virusLocationsParameter.getParameterCount(); i++) {    	  
+		 //   	 System.out.print(virusLocationsParameter.getParameter(i).getParameterValue(0) + " ");
+		  //  	 System.out.print(virusLocationsParameter.getParameter(i).getParameterValue(1) + " ");  	  
+		   //   }
+		   //   System.out.println("");
+     
+
+		        if(clusterMeans){
+		        	this.clusterMeans = clusterMeans;
+		        	this.clusterOffsetsParameter = clusterOffsetsParameter;
+		        	
+		        	
+		        	//if(clusterOffsetsParameter != null){
+		        	//System.out.println("virusNames.size()="+ virusNames.size());
+		        	//clusterOffsetsParameter.setDimension( virusNames.size());  
+		        //    for (int i = 0; i < virusNames.size(); i++) {
+		           // 	clusterOffsetsParameter.setId(virusNames.get(i));
+		           // }
+		            //addVariable(clusterOffsetsParameter);
+		        	//}
+		        	
+		        	//stay null
+		           if (clusterOffsetsParameter == null) {
+		            //	clusterOffsetsParameter = new Parameter.Default("clusterOffsets");
+		            } else {
+		            	//clusterOffsetsParameter.addBounds(new Parameter.DefaultBounds(Double.MAX_VALUE, 0.0, 1000));
+		                addVariable(clusterOffsetsParameter);
+			            clusterOffsetsParameter.setDimension(virusNames.size());
+
+		            }
+		        	
+		        	
+		        	System.out.println(" clusterMeans = true");
+		        	//System.exit(0);
+		        }
+
+		      
         makeDirty();
     }
 
@@ -317,6 +335,7 @@ public class AntigenicLikelihood extends AbstractModelLikelihood implements Cita
     }
 
     private void setupOffsetsParameter(Parameter offsetsParameter, List<String> strainNames, List<Double> strainDates, double earliest) {
+
         offsetsParameter.setDimension(strainNames.size());
         String[] labelArray = new String[strainNames.size()];
         strainNames.toArray(labelArray);
@@ -378,13 +397,21 @@ public class AntigenicLikelihood extends AbstractModelLikelihood implements Cita
     }
 
     private void setupInitialLocations(double drift) {
+    	//System.out.println("hihi");
         for (int i = 0; i < virusLocationsParameter.getParameterCount(); i++) {
             double offset = 0.0;
             if (virusOffsetsParameter != null) {
+            	//System.out.print("virus Offset Parameter present"+ ": ");
+            	//System.out.print( virusOffsetsParameter.getParameterValue(i) + " ");
+            	//System.out.print(" drift= " + drift + " ");
                 offset = drift * virusOffsetsParameter.getParameterValue(i);
             }
+            else{
+            	System.out.println("virus Offeset Parameter NOT present");
+            }
             double r = MathUtils.nextGaussian() + offset;
             virusLocationsParameter.getParameter(i).setParameterValue(0, r);
+           // System.out.println (  virusLocationsParameter.getParameter(i).getParameterValue(0));
             if (mdsDimension > 1) {
                 for (int j = 1; j < mdsDimension; j++) {
                     r = MathUtils.nextGaussian();
@@ -407,6 +434,102 @@ public class AntigenicLikelihood extends AbstractModelLikelihood implements Cita
             }
         }
     }
+    
+    //load initial
+    private void loadInitialLocations(List<String> strainNames, List<String> serumNames) {
+
+		FileReader fileReader;
+		try {
+			//fileReader = new FileReader("/Users/charles/Documents/research/antigenic/GenoPheno/Gabriela/results/initialCondition/H3N2_mds.virusLocs.log");
+			fileReader = new FileReader("/Users/charles/Documents/research/antigenic/GenoPheno/Gabriela/results/initialConditionWithInitialLocationDrift/lastIteration/H3N2_mds.virusLocs.log");
+		     /**
+		       * Creating a buffered reader to read the file
+		       */
+		      BufferedReader bReader = new BufferedReader( fileReader);
+
+		      String line;
+
+		      
+		      //this routine may give false results if there are extra lines with spaces
+		      
+		      line = bReader.readLine();
+		      System.out.println(line);
+		      String namevalue[] = line.split("\t");
+
+		      
+		      line = bReader.readLine();
+		      System.out.println(line);
+		      
+		      String datavalue[] = line.split("\t");
+		          
+		      for (int i = 0; i < virusLocationsParameter.getParameterCount(); i++) {
+		    	  
+		    	  int index = findStrain( namevalue[i*2+1], strainNames);  //note. namevalue actually has the extra 1 or 2attached to it.. but it doesn't seem to matter
+		    //	  System.out.println("name: " + virusLocationsParameter.getParameter(i).getParameterName() + " :" + index);
+		    	 // System.out.println(datavalue[i*2+1]);
+		    	  virusLocationsParameter.getParameter(index).setParameterValue(0, Double.parseDouble(datavalue[i*2+1]));
+		    	  virusLocationsParameter.getParameter(index).setParameterValue(1, Double.parseDouble(datavalue[i*2+2]));
+		          //virusLocationsParameter.getParameter(i).setParameterValue(0, 1);
+			    	// System.out.print(virusLocationsParameter.getParameter(i).getParameterValue(0) + " ");
+			    	// System.out.print(virusLocationsParameter.getParameter(i).getParameterValue(1) + " ");  	  
+
+		      }
+		      bReader.close();
+		
+		} catch (FileNotFoundException e) {
+			// TODO Auto-generated catch block
+			e.printStackTrace();
+		} catch (IOException e) {
+			// TODO Auto-generated catch block
+			e.printStackTrace();
+		}          
+ 
+
+		FileReader fileReader2;
+		try {
+			//fileReader2 = new FileReader("/Users/charles/Documents/research/antigenic/GenoPheno/Gabriela/results/initialCondition/H3N2.serumLocs.log");
+			fileReader2 = new FileReader("/Users/charles/Documents/research/antigenic/GenoPheno/Gabriela/results/initialConditionWithInitialLocationDrift/lastIteration/H3N2.serumLocs.log");
+			
+		     /**
+		       * Creating a buffered reader to read the file
+		       */
+		      BufferedReader bReader2 = new BufferedReader( fileReader2);
+
+		      String line;
+		      
+		      line = bReader2.readLine();
+		      System.out.println(line);
+		      String namevalue[] = line.split("\t");
+
+		      
+		      line = bReader2.readLine();
+		      System.out.println(line);
+		      
+		      String datavalue[] = line.split("\t");
+		       //   System.out.println(serumLocationsParameter.getParameterCount());
+		      for (int i = 0; i < serumLocationsParameter.getParameterCount(); i++) {
+		    	  int index = findStrain( namevalue[i*2+1], serumNames);
+
+		    	 // System.out.println(datavalue[i*2+1]);
+		    	  serumLocationsParameter.getParameter(index).setParameterValue(0, Double.parseDouble(datavalue[i*2+1]));
+		    	  serumLocationsParameter.getParameter(index).setParameterValue(1, Double.parseDouble(datavalue[i*2+2]));
+		          //virusLocationsParameter.getParameter(i).setParameterValue(0, 1);
+		   	  
+		      }
+		      bReader2.close();
+		
+		} catch (FileNotFoundException e) {
+			// TODO Auto-generated catch block
+			e.printStackTrace();
+		} catch (IOException e) {
+			// TODO Auto-generated catch block
+			e.printStackTrace();
+		}        
+      
+    	
+
+    }
+    
 
     @Override
     protected void handleModelChangedEvent(Model model, Object object, int index) {
@@ -470,15 +593,20 @@ public class AntigenicLikelihood extends AbstractModelLikelihood implements Cita
     }
 
     public double getLogLikelihood() {
+ //uncommenting for testing only
+    		
         if (!likelihoodKnown) {
             logLikelihood = computeLogLikelihood();
         }
-
+        
+// logLikelihood=0;       //for testing purpose only
+//System.out.println("logLikelihood of AGLikelihoodCluster= " + logLikelihood);
         return logLikelihood;
     }
 
     // This function can be overwritten to implement other sampling densities, i.e. discrete ranks
     private double computeLogLikelihood() {
+    	    	
 
         double precision = mdsPrecisionParameter.getParameterValue(0);
         double sd = 1.0 / Math.sqrt(precision);
@@ -549,7 +677,6 @@ public class AntigenicLikelihood extends AbstractModelLikelihood implements Cita
 
     // offset virus and serum location when computing
     protected double computeDistance(int virus, int serum) {
-
         Parameter vLoc = virusLocationsParameter.getParameter(virus);
         Parameter sLoc = serumLocationsParameter.getParameter(serum);
         double sum = 0.0;
@@ -557,20 +684,45 @@ public class AntigenicLikelihood extends AbstractModelLikelihood implements Cita
         // first dimension is shifted
         double vxOffset = 0.0;
         double sxOffset = 0.0;
-        if (locationDriftParameter != null && virusOffsetsParameter != null && serumOffsetsParameter != null) {
-            vxOffset = locationDriftParameter.getParameterValue(0) * virusOffsetsParameter.getParameterValue(virus);
-            sxOffset = locationDriftParameter.getParameterValue(0) * serumOffsetsParameter.getParameterValue(serum);
-        }
-        if (virusDriftParameter != null && virusOffsetsParameter != null) {
-            vxOffset = virusDriftParameter.getParameterValue(0) * virusOffsetsParameter.getParameterValue(virus);
+        if(clusterMeans == true){      	
+        	
+        	
+        	if(virusDriftParameter!= null && virusOffsetsParameter != null && serumOffsetsParameter != null && clusterOffsetsParameter!=null){
+                vxOffset = virusDriftParameter.getParameterValue(0)* clusterOffsetsParameter.getParameterValue(virus);
+        		sxOffset = virusDriftParameter.getParameterValue(0) * serumOffsetsParameter.getParameterValue(serum);
+                //vxOffset = locationDriftParameter.getParameterValue(0)*  ;               
+           //     System.out.println("clusterOffset =" + clusterOffsetsParameter.getParameterValue(virus));
+                 	//System.out.println("offset = " + vxOffset);
+                 
+        	}
+        	
+        	//overwrite serum drift
+	        if (serumDriftParameter != null && serumOffsetsParameter != null) {
+	        //	System.out.println("hihi ya");
+	            sxOffset = serumDriftParameter.getParameterValue(0) * serumOffsetsParameter.getParameterValue(serum);
+	        }
+	        
         }
-        if (serumDriftParameter != null && serumOffsetsParameter != null) {
-            sxOffset = serumDriftParameter.getParameterValue(0) * serumOffsetsParameter.getParameterValue(serum);
+        else{
+	        if (locationDriftParameter != null && virusOffsetsParameter != null && serumOffsetsParameter != null) {
+	            vxOffset = locationDriftParameter.getParameterValue(0) * virusOffsetsParameter.getParameterValue(virus);
+                sxOffset = locationDriftParameter.getParameterValue(0) * serumOffsetsParameter.getParameterValue(serum);
+	        }
+	        if (virusDriftParameter != null && virusOffsetsParameter != null) {
+	            vxOffset = virusDriftParameter.getParameterValue(0) * virusOffsetsParameter.getParameterValue(virus);
+	        }
+	        if (serumDriftParameter != null && serumOffsetsParameter != null) {
+	            sxOffset = serumDriftParameter.getParameterValue(0) * serumOffsetsParameter.getParameterValue(serum);
+	        }
         }
 
         double vxLoc = vLoc.getParameterValue(0) + vxOffset;
         double sxLoc = sLoc.getParameterValue(0) + sxOffset;
 
+       // if(virus ==1){
+        //	System.out.println("virus " + virus + " has vxLoc of " + vxLoc + " = " + vLoc.getParameterValue(0) + "+" + vxOffset);
+        //}
+        
         double difference = vxLoc - sxLoc;
         sum += difference * difference;
 
@@ -720,6 +872,9 @@ public class AntigenicLikelihood extends AbstractModelLikelihood implements Cita
     private final boolean[] virusEffectChanged;
     private double[] logLikelihoods;
     private double[] storedLogLikelihoods;
+    
+    private boolean clusterMeans = false;
+    private Parameter clusterOffsetsParameter;
 
 // **************************************************************
 // XMLObjectParser
@@ -744,9 +899,11 @@ public class AntigenicLikelihood extends AbstractModelLikelihood implements Cita
         public static final String SERUM_BREADTHS = "serumBreadths";
         public final static String VIRUS_OFFSETS = "virusOffsets";
         public final static String SERUM_OFFSETS = "serumOffsets";
+        public final static String CLUSTER_MEANS = "clusterMeans";
+        public final static String CLUSTER_OFFSETS = "clusterOffsetsParameter";
 
         public String getParserName() {
-            return ANTIGENIC_LIKELIHOOD;
+            return AG_LIKELIHOOD;
         }
 
         public Object parseXMLObject(XMLObject xo) throws XMLParseException {
@@ -761,6 +918,8 @@ public class AntigenicLikelihood extends AbstractModelLikelihood implements Cita
             System.out.println("Loaded HI table file: " + fileName);
 
             boolean mergeSerumIsolates = xo.getAttribute(MERGE_SERUM_ISOLATES, false);
+            
+            boolean cluster_means = xo.getAttribute(CLUSTER_MEANS, false);
 
             int mdsDimension = xo.getIntegerAttribute(MDS_DIMENSION);
             double intervalWidth = 0.0;
@@ -797,12 +956,12 @@ public class AntigenicLikelihood extends AbstractModelLikelihood implements Cita
 
             Parameter virusDrift = null;
             if (xo.hasChildNamed(VIRUS_DRIFT)) {
-                virusDrift = (Parameter) xo.getElementFirstChild(VIRUS_DRIFT);
+            	virusDrift = (Parameter) xo.getElementFirstChild(VIRUS_DRIFT);
             }
 
             Parameter serumDrift = null;
             if (xo.hasChildNamed(SERUM_DRIFT)) {
-                serumDrift = (Parameter) xo.getElementFirstChild(SERUM_DRIFT);
+            	serumDrift = (Parameter) xo.getElementFirstChild(SERUM_DRIFT);
             }
 
             Parameter virusOffsetsParameter = null;
@@ -829,8 +988,14 @@ public class AntigenicLikelihood extends AbstractModelLikelihood implements Cita
             if (xo.hasChildNamed(VIRUS_AVIDITIES)) {
                 virusAviditiesParameter = (Parameter) xo.getElementFirstChild(VIRUS_AVIDITIES);
             }
+            
+            Parameter clusterOffsetsParameter = null;
+            if (xo.hasChildNamed(CLUSTER_OFFSETS)) {
+            	clusterOffsetsParameter = (Parameter) xo.getElementFirstChild(CLUSTER_OFFSETS);
+            }
+
 
-            AntigenicLikelihood AGL = new AntigenicLikelihood(
+            AGLikelihoodCluster AGL = new AGLikelihoodCluster(
                     mdsDimension,
                     mdsPrecision,
                     locationDrift,
@@ -847,7 +1012,10 @@ public class AntigenicLikelihood extends AbstractModelLikelihood implements Cita
                     assayTable,
                     mergeSerumIsolates,
                     intervalWidth,
-                    driftInitialLocations);
+                    driftInitialLocations, 
+                    cluster_means, 
+                    clusterOffsetsParameter);
+                        
 
             Logger.getLogger("dr.evomodel").info("Using EvolutionaryCartography model. Please cite:\n" + Utils.getCitationString(AGL));
 
@@ -884,11 +1052,13 @@ public class AntigenicLikelihood extends AbstractModelLikelihood implements Cita
                 new ElementRule(MDS_PRECISION, Parameter.class, "Parameter for precision of MDS embedding"),
                 new ElementRule(LOCATION_DRIFT, Parameter.class, "Optional parameter for drifting locations with time", true),
                 new ElementRule(VIRUS_DRIFT, Parameter.class, "Optional parameter for drifting only virus locations, overrides locationDrift", true),
-                new ElementRule(SERUM_DRIFT, Parameter.class, "Optional parameter for drifting only serum locations, overrides locationDrift", true)
+                new ElementRule(SERUM_DRIFT, Parameter.class, "Optional parameter for drifting only serum locations, overrides locationDrift", true),
+                AttributeRule.newBooleanRule(CLUSTER_MEANS, true, "Should we use cluster means to control the virus locations"),
+               new ElementRule(CLUSTER_OFFSETS, Parameter.class, "Parameter of cluster offsets of all virus"),                
         };
 
         public Class getReturnType() {
-            return AntigenicLikelihood.class;
+            return AGLikelihoodCluster.class;
         }
     };
 
@@ -914,16 +1084,5 @@ public class AntigenicLikelihood extends AbstractModelLikelihood implements Cita
         return citations;
     }
 
-    public static void main(String[] args) {
-        double[] titres = {0.0, 2.0, 4.0, 6.0, 8.0, 10.0, 12.0, 14.0};
 
-        System.out.println("titre\tpoint\tinterval(tail)\tinterval(cdf)\tthreshold");
-        for (double titre : titres) {
-            double point = AntigenicLikelihood.computeMeasurementLikelihood(titre, 0.0, 1.0);
-            double interval = AntigenicLikelihood.computeMeasurementIntervalLikelihood(titre + 1.0, titre, 0.0, 1.0);
-            double threshold = AntigenicLikelihood.computeMeasurementThresholdLikelihood(titre, 0.0, 1.0);
-
-            System.out.println(titre + "\t" + point + "\t" + interval + "\t" + threshold);
-        }
-    }
 }
diff --git a/src/dr/evomodel/antigenic/AntigenicLikelihood.java b/src/dr/evomodel/antigenic/phyloClustering/misc/obsolete/AGLikelihoodTreeCluster.java
similarity index 57%
copy from src/dr/evomodel/antigenic/AntigenicLikelihood.java
copy to src/dr/evomodel/antigenic/phyloClustering/misc/obsolete/AGLikelihoodTreeCluster.java
index 351567f..423be5f 100644
--- a/src/dr/evomodel/antigenic/AntigenicLikelihood.java
+++ b/src/dr/evomodel/antigenic/phyloClustering/misc/obsolete/AGLikelihoodTreeCluster.java
@@ -1,29 +1,4 @@
-/*
- * AntigenicLikelihood.java
- *
- * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
- *
- * This file is part of BEAST.
- * See the NOTICE file distributed with this work for additional
- * information regarding copyright ownership and licensing.
- *
- * BEAST is free software; you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as
- * published by the Free Software Foundation; either version 2
- * of the License, or (at your option) any later version.
- *
- *  BEAST is distributed in the hope that it will be useful,
- *  but WITHOUT ANY WARRANTY; without even the implied warranty of
- *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
- *  GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public
- * License along with BEAST; if not, write to the
- * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
- * Boston, MA  02110-1301  USA
- */
-
-package dr.evomodel.antigenic;
+package dr.evomodel.antigenic.phyloClustering.misc.obsolete;
 
 import dr.evolution.util.*;
 import dr.inference.model.*;
@@ -33,12 +8,12 @@ import dr.math.distributions.NormalDistribution;
 import dr.util.*;
 import dr.xml.*;
 
-
 import java.io.*;
 import java.util.*;
 import java.util.logging.Logger;
 
 /**
+ * @author Charles Cheung
  * @author Andrew Rambaut
  * @author Trevor Bedford
  * @author Marc Suchard
@@ -50,12 +25,12 @@ import java.util.logging.Logger;
     Offset is set to 0 for the earliest virus and increasing with difference in date from earliest virus.
     This makes the raw virusLocations and serumLocations parameters not directly interpretable.
 */
-public class AntigenicLikelihood extends AbstractModelLikelihood implements Citable {
+public class AGLikelihoodTreeCluster extends AbstractModelLikelihood implements Citable {
     private static final boolean CHECK_INFINITE = false;
     private static final boolean USE_THRESHOLDS = true;
     private static final boolean USE_INTERVALS = true;
 
-    public final static String ANTIGENIC_LIKELIHOOD = "antigenicLikelihood";
+    public final static String AG_LIKELIHOOD = "aglikelihoodtreecluster";
 
     // column indices in table
     private static final int VIRUS_ISOLATE = 0;
@@ -65,6 +40,9 @@ public class AntigenicLikelihood extends AbstractModelLikelihood implements Cita
     private static final int SERUM_STRAIN = 4;
     private static final int SERUM_DATE = 5;
     private static final int TITRE = 6;
+    
+
+    private  double oldLogLikelihood =0;
 
     public enum MeasurementType {
         INTERVAL,
@@ -73,7 +51,7 @@ public class AntigenicLikelihood extends AbstractModelLikelihood implements Cita
         MISSING
     }
 
-    public AntigenicLikelihood(
+    public AGLikelihoodTreeCluster(
             int mdsDimension,
             Parameter mdsPrecisionParameter,
             Parameter locationDriftParameter,
@@ -90,16 +68,17 @@ public class AntigenicLikelihood extends AbstractModelLikelihood implements Cita
             DataTable<String[]> dataTable,
             boolean mergeSerumIsolates,
             double intervalWidth,
-            double driftInitialLocations) {
+            double driftInitialLocations, 
+            boolean clusterMeans,
+            Parameter clusterOffsetsParameter) {
 
-        super(ANTIGENIC_LIKELIHOOD);
+        super(AG_LIKELIHOOD);
 
         this.intervalWidth = intervalWidth;
         boolean useIntervals = USE_INTERVALS && intervalWidth > 0.0;
 
         int thresholdCount = 0;
-
-
+             
         double earliestDate = Double.POSITIVE_INFINITY;
         for (int i = 0; i < dataTable.getRowCount(); i++) {
 
@@ -127,7 +106,7 @@ public class AntigenicLikelihood extends AbstractModelLikelihood implements Cita
                 serumDates.add(serumDate);
                 serum = serumNames.size() - 1;
             }
-
+            
             boolean isThreshold = false;
             boolean isLowerThreshold = false;
             double rawTitre = Double.NaN;
@@ -250,11 +229,140 @@ public class AntigenicLikelihood extends AbstractModelLikelihood implements Cita
         logLikelihoods = new double[measurements.size()];
         storedLogLikelihoods = new double[measurements.size()];
 
-        setupInitialLocations(driftInitialLocations);
-
+       // driftInitialLocations = 1; //charles added - now specified in the xml
+   //     setupInitialLocations(driftInitialLocations);
+   //     loadInitialLocations(virusNames, serumNames);
+        
+        //System.out.println("Print now!");
+		//      for (int i = 0; i < virusLocationsParameter.getParameterCount(); i++) {    	  
+		 //   	 System.out.print(virusLocationsParameter.getParameter(i).getParameterValue(0) + " ");
+		  //  	 System.out.print(virusLocationsParameter.getParameter(i).getParameterValue(1) + " ");  	  
+		   //   }
+		   //   System.out.println("");
+     
+
+		        if(clusterMeans){
+		        	this.clusterMeans = clusterMeans;
+		        	this.clusterOffsetsParameter = clusterOffsetsParameter;
+		        	
+		        	
+		        	//if(clusterOffsetsParameter != null){
+		        	//System.out.println("virusNames.size()="+ virusNames.size());
+		        	//clusterOffsetsParameter.setDimension( virusNames.size());  
+		        //    for (int i = 0; i < virusNames.size(); i++) {
+		           // 	clusterOffsetsParameter.setId(virusNames.get(i));
+		           // }
+		            //addVariable(clusterOffsetsParameter);
+		        	//}
+		        	
+		        	//stay null
+		           if (clusterOffsetsParameter == null) {
+		            //	clusterOffsetsParameter = new Parameter.Default("clusterOffsets");
+		            } else {
+		            	//clusterOffsetsParameter.addBounds(new Parameter.DefaultBounds(Double.MAX_VALUE, 0.0, 1000));
+		                addVariable(clusterOffsetsParameter);
+			            clusterOffsetsParameter.setDimension(virusNames.size());
+
+		            }
+		        	
+		        	
+		        	System.out.println(" clusterMeans = true");
+		        	//System.exit(0);
+		        }
+
+		      
         makeDirty();
+        
+        loadInitialSerumLocations(serumNames);
+        
+        
+        System.out.println("======================================");
+    	double sum=0;
+    	double s2 = 0;
+		for(int i=0; i <  serumLocationsParameter.getParameterCount(); i++){
+			sum+=serumLocationsParameter.getParameter(i).getParameterValue(0);
+			s2+=serumLocationsParameter.getParameter(i).getParameterValue(1);
+		}
+		System.out.println("sum sera location dimension 1 = " + sum);
+		System.out.println("sum sera location dimension 2 = " + s2);
+
+        
+        
+        System.out.println("======================================");
+       // System.exit(0);
+        
+        
     }
 
+    
+    
+    //load initial serum location - load the last line
+    private void loadInitialSerumLocations(List<String> serumNames) {
+
+		FileReader fileReader2;
+		try {
+//			fileReader2 = new FileReader("/Users/charles/Documents/researchData/clustering/output/test23/run4/H3N2_mds.serumLocs.log");
+			//fileReader2 = new FileReader("/Users/charles/Documents/researchData/clustering/output/test25/run64/H3N2_mds.serumLocs.log");
+			fileReader2 = new FileReader("/Users/charles/Documents/researchData/clustering/output/test25/run79/H3N2_mds.serumLocs.log");
+		     /**
+		       * Creating a buffered reader to read the file
+		       */
+		      BufferedReader bReader2 = new BufferedReader( fileReader2);
+
+		      String line;
+		      line = bReader2.readLine();
+		      line = bReader2.readLine();
+		      
+		      line = bReader2.readLine();
+		      System.out.println(line);
+		      String namevalue[] = line.split("\t");
+
+		      
+		      //skip to the last line
+		      String testLine;
+		      while ((testLine = bReader2.readLine()) != null){
+		    	  line = testLine;
+		      }
+
+		      System.out.println(line);
+		      
+		      String datavalue[] = line.split("\t");
+
+		   //   double sumDim2=0;
+		       //   System.out.println(serumLocationsParameter.getParameterCount());
+		      for (int i = 0; i < serumLocationsParameter.getParameterCount(); i++) {
+		    	  //int index = findStrain( namevalue[i*2+1], serumNames);  //don't enable this.. this will cause a bug because the serumNames are not unique.
+		    	  //System.out.println("index=" + index);
+		    	  
+		    	  double dim1 = Double.parseDouble(datavalue[i*2+1])- serumDriftParameter.getParameterValue(0)*serumOffsetsParameter.getParameterValue(i);
+		    	  double dim2 = Double.parseDouble(datavalue[i*2+2]);
+		    	 // System.out.println(datavalue[i*2+1]);
+		    	  serumLocationsParameter.getParameter(i).setParameterValue(0, dim1);
+		    	  serumLocationsParameter.getParameter(i).setParameterValue(1, dim2);
+		    //	  sumDim2+= dim2;
+		    //	  System.out.print(dim2 + "\t");
+		          //virusLocationsParameter.getParameter(i).setParameterValue(0, 1);
+		   	  
+		      }
+		  //    System.out.println("\n sum dim2 = " + sumDim2);
+//	    	  System.exit(0);
+
+		      bReader2.close();
+		
+		} catch (FileNotFoundException e) {
+			// TODO Auto-generated catch block
+			e.printStackTrace();
+		} catch (IOException e) {
+			// TODO Auto-generated catch block
+			e.printStackTrace();
+		}        
+      
+    	
+
+    }
+        
+    
+    
     private Parameter setupVirusAvidities(Parameter virusAviditiesParameter) {
         // If no row parameter is given, then we will only use the serum effects
         if (virusAviditiesParameter != null) {
@@ -317,6 +425,7 @@ public class AntigenicLikelihood extends AbstractModelLikelihood implements Cita
     }
 
     private void setupOffsetsParameter(Parameter offsetsParameter, List<String> strainNames, List<Double> strainDates, double earliest) {
+
         offsetsParameter.setDimension(strainNames.size());
         String[] labelArray = new String[strainNames.size()];
         strainNames.toArray(labelArray);
@@ -378,13 +487,21 @@ public class AntigenicLikelihood extends AbstractModelLikelihood implements Cita
     }
 
     private void setupInitialLocations(double drift) {
+    	//System.out.println("hihi");
         for (int i = 0; i < virusLocationsParameter.getParameterCount(); i++) {
             double offset = 0.0;
             if (virusOffsetsParameter != null) {
+            	//System.out.print("virus Offset Parameter present"+ ": ");
+            	//System.out.print( virusOffsetsParameter.getParameterValue(i) + " ");
+            	//System.out.print(" drift= " + drift + " ");
                 offset = drift * virusOffsetsParameter.getParameterValue(i);
             }
+            else{
+            	System.out.println("virus Offeset Parameter NOT present");
+            }
             double r = MathUtils.nextGaussian() + offset;
             virusLocationsParameter.getParameter(i).setParameterValue(0, r);
+           // System.out.println (  virusLocationsParameter.getParameter(i).getParameterValue(0));
             if (mdsDimension > 1) {
                 for (int j = 1; j < mdsDimension; j++) {
                     r = MathUtils.nextGaussian();
@@ -408,6 +525,104 @@ public class AntigenicLikelihood extends AbstractModelLikelihood implements Cita
         }
     }
 
+    
+    
+    //load initial
+    private void loadInitialLocations(List<String> strainNames, List<String> serumNames) {
+
+		FileReader fileReader;
+		try {
+			//fileReader = new FileReader("/Users/charles/Documents/research/antigenic/GenoPheno/Gabriela/results/initialCondition/H3N2_mds.virusLocs.log");
+			fileReader = new FileReader("/Users/charles/Documents/research/antigenic/GenoPheno/Gabriela/results/initialConditionWithInitialLocationDrift/lastIteration/H3N2_mds.virusLocs.log");
+		     /**
+		       * Creating a buffered reader to read the file
+		       */
+		      BufferedReader bReader = new BufferedReader( fileReader);
+
+		      String line;
+
+		      
+		      //this routine may give false results if there are extra lines with spaces
+		      
+		      line = bReader.readLine();
+		      System.out.println(line);
+		      String namevalue[] = line.split("\t");
+
+		      
+		      line = bReader.readLine();
+		      System.out.println(line);
+		      
+		      String datavalue[] = line.split("\t");
+		          
+		      for (int i = 0; i < virusLocationsParameter.getParameterCount(); i++) {
+		    	  
+		    	  int index = findStrain( namevalue[i*2+1], strainNames);  //note. namevalue actually has the extra 1 or 2attached to it.. but it doesn't seem to matter
+		    //	  System.out.println("name: " + virusLocationsParameter.getParameter(i).getParameterName() + " :" + index);
+		    	 // System.out.println(datavalue[i*2+1]);
+		    	  virusLocationsParameter.getParameter(index).setParameterValue(0, Double.parseDouble(datavalue[i*2+1]));
+		    	  virusLocationsParameter.getParameter(index).setParameterValue(1, Double.parseDouble(datavalue[i*2+2]));
+		          //virusLocationsParameter.getParameter(i).setParameterValue(0, 1);
+			    	// System.out.print(virusLocationsParameter.getParameter(i).getParameterValue(0) + " ");
+			    	// System.out.print(virusLocationsParameter.getParameter(i).getParameterValue(1) + " ");  	  
+
+		      }
+		      bReader.close();
+		
+		} catch (FileNotFoundException e) {
+			// TODO Auto-generated catch block
+			e.printStackTrace();
+		} catch (IOException e) {
+			// TODO Auto-generated catch block
+			e.printStackTrace();
+		}          
+ 
+
+		FileReader fileReader2;
+		try {
+			//fileReader2 = new FileReader("/Users/charles/Documents/research/antigenic/GenoPheno/Gabriela/results/initialCondition/H3N2.serumLocs.log");
+			fileReader2 = new FileReader("/Users/charles/Documents/research/antigenic/GenoPheno/Gabriela/results/initialConditionWithInitialLocationDrift/lastIteration/H3N2.serumLocs.log");
+			
+		     /**
+		       * Creating a buffered reader to read the file
+		       */
+		      BufferedReader bReader2 = new BufferedReader( fileReader2);
+
+		      String line;
+		      
+		      line = bReader2.readLine();
+		      System.out.println(line);
+		      String namevalue[] = line.split("\t");
+
+		      
+		      line = bReader2.readLine();
+		      System.out.println(line);
+		      
+		      String datavalue[] = line.split("\t");
+		       //   System.out.println(serumLocationsParameter.getParameterCount());
+		      for (int i = 0; i < serumLocationsParameter.getParameterCount(); i++) {
+		    	  int index = findStrain( namevalue[i*2+1], serumNames);
+
+		    	 // System.out.println(datavalue[i*2+1]);
+		    	  serumLocationsParameter.getParameter(index).setParameterValue(0, Double.parseDouble(datavalue[i*2+1]));
+		    	  serumLocationsParameter.getParameter(index).setParameterValue(1, Double.parseDouble(datavalue[i*2+2]));
+		          //virusLocationsParameter.getParameter(i).setParameterValue(0, 1);
+		   	  
+		      }
+		      bReader2.close();
+		
+		} catch (FileNotFoundException e) {
+			// TODO Auto-generated catch block
+			e.printStackTrace();
+		} catch (IOException e) {
+			// TODO Auto-generated catch block
+			e.printStackTrace();
+		}        
+      
+    	
+
+    }
+    
+
     @Override
     protected void handleModelChangedEvent(Model model, Object object, int index) {
     }
@@ -470,19 +685,31 @@ public class AntigenicLikelihood extends AbstractModelLikelihood implements Cita
     }
 
     public double getLogLikelihood() {
-        if (!likelihoodKnown) {
+ //uncommenting for testing only
+
+    		
+    		//System.exit(0);
+      //  if (!likelihoodKnown) {
             logLikelihood = computeLogLikelihood();
-        }
+     //   }
+        
+    	//System.out.println("		logL of AGLikelihoodTreeCluster =" + logLikelihood);
+    	//System.out.println(" 	p(new)/p(old) = " + Math.exp(logLikelihood - oldLogLikelihood) );
+         //oldLogLikelihood = logLikelihood;
+
 
+// logLikelihood=0;       //for testing purpose only
+//System.out.println("logLikelihood of AGLikelihoodCluster= " + logLikelihood);
         return logLikelihood;
     }
 
     // This function can be overwritten to implement other sampling densities, i.e. discrete ranks
     private double computeLogLikelihood() {
+    	    	
 
         double precision = mdsPrecisionParameter.getParameterValue(0);
         double sd = 1.0 / Math.sqrt(precision);
-
+        
         logLikelihood = 0.0;
         int i = 0;
 
@@ -491,7 +718,7 @@ public class AntigenicLikelihood extends AbstractModelLikelihood implements Cita
             if (virusLocationChanged[measurement.virus] || serumLocationChanged[measurement.serum] || virusEffectChanged[measurement.virus] || serumEffectChanged[measurement.serum]) {
 
                 double expectation = calculateBaseline(measurement.virus, measurement.serum) - computeDistance(measurement.virus, measurement.serum);
-
+                
                 switch (measurement.type) {
                     case INTERVAL: {
                         double minTitre = measurement.log2Titre;
@@ -516,7 +743,7 @@ public class AntigenicLikelihood extends AbstractModelLikelihood implements Cita
             logLikelihood += logLikelihoods[i];
             i++;
         }
-
+//System.out.println("\nlogLikelihood sum = " + logLikelihood);
         likelihoodKnown = true;
 
         setLocationChangedFlags(false);
@@ -549,7 +776,6 @@ public class AntigenicLikelihood extends AbstractModelLikelihood implements Cita
 
     // offset virus and serum location when computing
     protected double computeDistance(int virus, int serum) {
-
         Parameter vLoc = virusLocationsParameter.getParameter(virus);
         Parameter sLoc = serumLocationsParameter.getParameter(serum);
         double sum = 0.0;
@@ -557,20 +783,44 @@ public class AntigenicLikelihood extends AbstractModelLikelihood implements Cita
         // first dimension is shifted
         double vxOffset = 0.0;
         double sxOffset = 0.0;
-        if (locationDriftParameter != null && virusOffsetsParameter != null && serumOffsetsParameter != null) {
-            vxOffset = locationDriftParameter.getParameterValue(0) * virusOffsetsParameter.getParameterValue(virus);
-            sxOffset = locationDriftParameter.getParameterValue(0) * serumOffsetsParameter.getParameterValue(serum);
+        if(clusterMeans == true){      	
+        	
+        	if(virusDriftParameter!= null && virusOffsetsParameter != null && serumOffsetsParameter != null && clusterOffsetsParameter!=null){
+//                vxOffset = virusDriftParameter.getParameterValue(0)* clusterOffsetsParameter.getParameterValue(virus);
+        		sxOffset = serumDriftParameter.getParameterValue(0) * serumOffsetsParameter.getParameterValue(serum);
+                //vxOffset = locationDriftParameter.getParameterValue(0)*  ;               
+           //     System.out.println("clusterOffset =" + clusterOffsetsParameter.getParameterValue(virus));
+                 	//System.out.println("offset = " + vxOffset);
+                 
+        	}
+        	
+        	//overwrite serum drift
+	        if (serumDriftParameter != null && serumOffsetsParameter != null) {
+	        //	System.out.println("hihi ya");
+	            sxOffset = serumDriftParameter.getParameterValue(0) * serumOffsetsParameter.getParameterValue(serum);
+	        }
+	        
         }
-        if (virusDriftParameter != null && virusOffsetsParameter != null) {
-            vxOffset = virusDriftParameter.getParameterValue(0) * virusOffsetsParameter.getParameterValue(virus);
-        }
-        if (serumDriftParameter != null && serumOffsetsParameter != null) {
-            sxOffset = serumDriftParameter.getParameterValue(0) * serumOffsetsParameter.getParameterValue(serum);
+        else{
+	        if (locationDriftParameter != null && virusOffsetsParameter != null && serumOffsetsParameter != null) {
+	            vxOffset = locationDriftParameter.getParameterValue(0) * virusOffsetsParameter.getParameterValue(virus);
+                sxOffset = locationDriftParameter.getParameterValue(0) * serumOffsetsParameter.getParameterValue(serum);
+	        }
+	        if (virusDriftParameter != null && virusOffsetsParameter != null) {
+	            vxOffset = virusDriftParameter.getParameterValue(0) * virusOffsetsParameter.getParameterValue(virus);
+	        }
+	        if (serumDriftParameter != null && serumOffsetsParameter != null) {
+	            sxOffset = serumDriftParameter.getParameterValue(0) * serumOffsetsParameter.getParameterValue(serum);
+	        }
         }
 
         double vxLoc = vLoc.getParameterValue(0) + vxOffset;
         double sxLoc = sLoc.getParameterValue(0) + sxOffset;
 
+       // if(virus ==1){
+        //	System.out.println("virus " + virus + " has vxLoc of " + vxLoc + " = " + vLoc.getParameterValue(0) + "+" + vxOffset);
+        //}
+        
         double difference = vxLoc - sxLoc;
         sum += difference * difference;
 
@@ -586,9 +836,89 @@ public class AntigenicLikelihood extends AbstractModelLikelihood implements Cita
             double serumBreadth = serumBreadthsParameter.getParameterValue(serum);
             dist /= serumBreadth;
         }
+        
+        
+        
+        //if(serum ==0){
+        	//System.out.println("The serum location is " + sxLoc +"," + sLoc.getParameterValue(1));
+       // }
+        
+        return(dist);
+    }
+    
+    
+    
+    
+    // offset virus and serum location when computing
+    protected double computeDistanceBasedOnArray(int virus, int serum, double[][] virusLocArray) {
+       // Parameter vLoc = virusLocationsParameter.getParameter(virus);
+        Parameter sLoc = serumLocationsParameter.getParameter(serum);
+        double sum = 0.0;
+
+        // first dimension is shifted
+        double vxOffset = 0.0;
+        double sxOffset = 0.0;
+        if(clusterMeans == true){      	
+        	
+        	
+        	if(virusDriftParameter!= null && virusOffsetsParameter != null && serumOffsetsParameter != null && clusterOffsetsParameter!=null){
+                vxOffset = virusDriftParameter.getParameterValue(0)* clusterOffsetsParameter.getParameterValue(virus);
+        		sxOffset = virusDriftParameter.getParameterValue(0) * serumOffsetsParameter.getParameterValue(serum);
+                //vxOffset = locationDriftParameter.getParameterValue(0)*  ;               
+           //     System.out.println("clusterOffset =" + clusterOffsetsParameter.getParameterValue(virus));
+                 	//System.out.println("offset = " + vxOffset);
+                 
+        	}
+        	
+        	//overwrite serum drift
+	        if (serumDriftParameter != null && serumOffsetsParameter != null) {
+	        //	System.out.println("hihi ya");
+	            sxOffset = serumDriftParameter.getParameterValue(0) * serumOffsetsParameter.getParameterValue(serum);
+	        }
+	        
+        }
+        else{
+	        if (locationDriftParameter != null && virusOffsetsParameter != null && serumOffsetsParameter != null) {
+	            vxOffset = locationDriftParameter.getParameterValue(0) * virusOffsetsParameter.getParameterValue(virus);
+                sxOffset = locationDriftParameter.getParameterValue(0) * serumOffsetsParameter.getParameterValue(serum);
+	        }
+	        if (virusDriftParameter != null && virusOffsetsParameter != null) {
+	            vxOffset = virusDriftParameter.getParameterValue(0) * virusOffsetsParameter.getParameterValue(virus);
+	        }
+	        if (serumDriftParameter != null && serumOffsetsParameter != null) {
+	            sxOffset = serumDriftParameter.getParameterValue(0) * serumOffsetsParameter.getParameterValue(serum);
+	        }
+        }
+
+        //double vxLoc = vLoc.getParameterValue(0) + vxOffset;
+        double vxLoc = virusLocArray[virus][0] + vxOffset;
+        double sxLoc = sLoc.getParameterValue(0) + sxOffset;
+
+       // if(virus ==1){
+        //	System.out.println("virus " + virus + " has vxLoc of " + vxLoc + " = " + vLoc.getParameterValue(0) + "+" + vxOffset);
+        //}
+        
+        double difference = vxLoc - sxLoc;
+        sum += difference * difference;
+
+        // other dimensions are not
+        for (int i = 1; i < mdsDimension; i++) {
+           // difference = vLoc.getParameterValue(i) - sLoc.getParameterValue(i);
+        	 difference = virusLocArray[virus][i] - sLoc.getParameterValue(i);
+            sum += difference * difference;
+        }
+
+        double dist = Math.sqrt(sum);
+
+        if (serumBreadthsParameter != null) {
+            double serumBreadth = serumBreadthsParameter.getParameterValue(serum);
+            dist /= serumBreadth;
+        }
 
         return dist;
     }
+    
+    
 
 
     // Calculates the expected log2 titre when mapDistance = 0
@@ -720,6 +1050,9 @@ public class AntigenicLikelihood extends AbstractModelLikelihood implements Cita
     private final boolean[] virusEffectChanged;
     private double[] logLikelihoods;
     private double[] storedLogLikelihoods;
+    
+    private boolean clusterMeans = false;
+    private Parameter clusterOffsetsParameter;
 
 // **************************************************************
 // XMLObjectParser
@@ -744,9 +1077,11 @@ public class AntigenicLikelihood extends AbstractModelLikelihood implements Cita
         public static final String SERUM_BREADTHS = "serumBreadths";
         public final static String VIRUS_OFFSETS = "virusOffsets";
         public final static String SERUM_OFFSETS = "serumOffsets";
+        public final static String CLUSTER_MEANS = "clusterMeans";
+        public final static String CLUSTER_OFFSETS = "clusterOffsetsParameter";
 
         public String getParserName() {
-            return ANTIGENIC_LIKELIHOOD;
+            return AG_LIKELIHOOD;
         }
 
         public Object parseXMLObject(XMLObject xo) throws XMLParseException {
@@ -761,6 +1096,8 @@ public class AntigenicLikelihood extends AbstractModelLikelihood implements Cita
             System.out.println("Loaded HI table file: " + fileName);
 
             boolean mergeSerumIsolates = xo.getAttribute(MERGE_SERUM_ISOLATES, false);
+            
+            boolean cluster_means = xo.getAttribute(CLUSTER_MEANS, false);
 
             int mdsDimension = xo.getIntegerAttribute(MDS_DIMENSION);
             double intervalWidth = 0.0;
@@ -797,12 +1134,12 @@ public class AntigenicLikelihood extends AbstractModelLikelihood implements Cita
 
             Parameter virusDrift = null;
             if (xo.hasChildNamed(VIRUS_DRIFT)) {
-                virusDrift = (Parameter) xo.getElementFirstChild(VIRUS_DRIFT);
+            	virusDrift = (Parameter) xo.getElementFirstChild(VIRUS_DRIFT);
             }
 
             Parameter serumDrift = null;
             if (xo.hasChildNamed(SERUM_DRIFT)) {
-                serumDrift = (Parameter) xo.getElementFirstChild(SERUM_DRIFT);
+            	serumDrift = (Parameter) xo.getElementFirstChild(SERUM_DRIFT);
             }
 
             Parameter virusOffsetsParameter = null;
@@ -829,8 +1166,14 @@ public class AntigenicLikelihood extends AbstractModelLikelihood implements Cita
             if (xo.hasChildNamed(VIRUS_AVIDITIES)) {
                 virusAviditiesParameter = (Parameter) xo.getElementFirstChild(VIRUS_AVIDITIES);
             }
+            
+            Parameter clusterOffsetsParameter = null;
+            if (xo.hasChildNamed(CLUSTER_OFFSETS)) {
+            	clusterOffsetsParameter = (Parameter) xo.getElementFirstChild(CLUSTER_OFFSETS);
+            }
 
-            AntigenicLikelihood AGL = new AntigenicLikelihood(
+
+            AGLikelihoodTreeCluster AGL = new AGLikelihoodTreeCluster(
                     mdsDimension,
                     mdsPrecision,
                     locationDrift,
@@ -847,7 +1190,10 @@ public class AntigenicLikelihood extends AbstractModelLikelihood implements Cita
                     assayTable,
                     mergeSerumIsolates,
                     intervalWidth,
-                    driftInitialLocations);
+                    driftInitialLocations, 
+                    cluster_means, 
+                    clusterOffsetsParameter);
+                        
 
             Logger.getLogger("dr.evomodel").info("Using EvolutionaryCartography model. Please cite:\n" + Utils.getCitationString(AGL));
 
@@ -884,11 +1230,13 @@ public class AntigenicLikelihood extends AbstractModelLikelihood implements Cita
                 new ElementRule(MDS_PRECISION, Parameter.class, "Parameter for precision of MDS embedding"),
                 new ElementRule(LOCATION_DRIFT, Parameter.class, "Optional parameter for drifting locations with time", true),
                 new ElementRule(VIRUS_DRIFT, Parameter.class, "Optional parameter for drifting only virus locations, overrides locationDrift", true),
-                new ElementRule(SERUM_DRIFT, Parameter.class, "Optional parameter for drifting only serum locations, overrides locationDrift", true)
+                new ElementRule(SERUM_DRIFT, Parameter.class, "Optional parameter for drifting only serum locations, overrides locationDrift", true),
+                AttributeRule.newBooleanRule(CLUSTER_MEANS, true, "Should we use cluster means to control the virus locations"),
+               new ElementRule(CLUSTER_OFFSETS, Parameter.class, "Parameter of cluster offsets of all virus"),                
         };
 
         public Class getReturnType() {
-            return AntigenicLikelihood.class;
+            return AGLikelihoodTreeCluster.class;
         }
     };
 
@@ -914,16 +1262,318 @@ public class AntigenicLikelihood extends AbstractModelLikelihood implements Cita
         return citations;
     }
 
-    public static void main(String[] args) {
-        double[] titres = {0.0, 2.0, 4.0, 6.0, 8.0, 10.0, 12.0, 14.0};
+    
+    
+    
+    public double getLogLikelihoodBasedOnPrecompute(int[] clusterLabel, int numClusters, int[] oldObservationCluster, double[] oldContribution, int[] newObservationCluster, double[] newContribution) {
+	   	
+        double precision = mdsPrecisionParameter.getParameterValue(0);
+        double sd = 1.0 / Math.sqrt(precision);
+    			
+	     for (int i=0; i < measurements.size(); i++) {
+	    	 newObservationCluster[i] = clusterLabel[measurements.get(i).virus];
+	     }
+	     
+		double logL=0;
+		for(int i=0; i< measurements.size(); i++ ){
+			if(oldObservationCluster != null && newObservationCluster[i] == oldObservationCluster[i]){
+				
+				double newC = computeContribution(measurements.get(i), precision, sd);
+				if(newC != oldContribution[i]){
+					System.out.println("newObservationCluster[i]=" + newObservationCluster[i] + " and old =" + oldObservationCluster[i]);
+					System.out.println("old contribution of i=" + i + " is " + oldContribution[i] + " but new is " + newC);
+					
+					System.out.println("They should be the same. Why are they different?");
+					System.exit(0);
+				}
+				
+			//	System.out.println("run!");
+				newContribution[i] = oldContribution[i];				
+			}
+			else{
+				//recompute.
+				newContribution[i] = computeContribution(measurements.get(i), precision, sd);
+			}
+			logL += newContribution[i];
+			
+		}
+		
+		
+			
+		return(logL);
+	}
+    
+    
+    
+    public double computeContribution(Measurement measurement, double precision, double sd){
+    		double curLogL=0;
+        
+
+            double expectation = calculateBaseline(measurement.virus, measurement.serum) - computeDistance(measurement.virus, measurement.serum);
+            switch (measurement.type) {
+                case INTERVAL: {
+
+                    double minTitre = measurement.log2Titre;
+                    double maxTitre = measurement.log2Titre + intervalWidth;
+                    curLogL = computeMeasurementIntervalLikelihood(minTitre, maxTitre, expectation, sd);
+                } break;
+                case POINT: {
+
+                	curLogL = computeMeasurementLikelihood(measurement.log2Titre, expectation, sd);
+                } break;
+                case THRESHOLD: {
+
+                	if(measurement.isLowerThreshold){
+                		curLogL = computeMeasurementThresholdLikelihood(measurement.log2Titre, expectation, sd);
+                	}
+                	else{
+                		curLogL = computeMeasurementUpperThresholdLikelihood(measurement.log2Titre, expectation, sd);                  		
+                	}
+                } break;
+                case MISSING:
+                    break;
+            }
+        
+
+    return curLogL;
+    }
+    
+    
+    
+    
+    
+    
+    public double getClusterLogLikelihoodUpdate(int[] clusterLabel, int numClusters, int[] oldObservationCluster, double[] oldContribution, int[] newObservationCluster, double[] newContribution, double[] oldClusterSum, double[] newClusterSum) {
+
+        double precision = mdsPrecisionParameter.getParameterValue(0);
+        double sd = 1.0 / Math.sqrt(precision);
+    	
+       	
+		List<Measurement>[] partition = new List[clusterLabel.length]; // the size may be an overkill, but it's ok
+		for(int i=0; i < clusterLabel.length; i++){
+			partition[i] = new LinkedList();
+		}        
+    
+		int []needUpdateCluster = new int[numClusters];
+		
+		if(oldObservationCluster == null){
+			for (int i=0; i < measurements.size(); i++) {
+		    	Measurement m = measurements.get(i);
+		    	newObservationCluster[i] = clusterLabel[m.virus];
+		    	partition[clusterLabel[m.virus]].add(m);  // this is correct
+		     }
+			for(int i=0; i<numClusters; i++){
+				needUpdateCluster[i] = 1;
+			}
+		}
+		else{
+	     for (int i=0; i < measurements.size(); i++) {
+	    	 Measurement m = measurements.get(i);
+	    	 newObservationCluster[i] = clusterLabel[m.virus];
+	    	 partition[clusterLabel[m.virus]].add(m);  // this is correct
+	    	 
+	    	 if(newObservationCluster[i] != oldObservationCluster[i]){
+	    		 needUpdateCluster[newObservationCluster[i]] = 1;
+	    		 needUpdateCluster[oldObservationCluster[i]] = 1;
+	    	 }
+	     }
+		}
+		
+		
+		//for(int i=0; i < needUpdateCluster.length; i++){
+		//	System.out.println("update cluster " +i + "="+ needUpdateCluster[i] );
+		//}
+	     //System.out.println("==================");
+		
+		//Method 1
+		/*
+		double logL=0;
+		for(int i=0; i< measurements.size(); i++ ){
+			if(oldContribution != null && needUpdateCluster[newObservationCluster[i]]==0){
+				//System.out.println("don't need to update cluster");
+				newContribution[i] = oldContribution[i];				
+			}
+			else{
+				//recompute.
+				newContribution[i] = computeContribution(measurements.get(i), precision, sd);
+			}
+			logL += newContribution[i];
+			
+		}
+		*/
+		
+		
+		//Method 2
+		
+		double logL=0;
+   		for(int i=0; i < numClusters; i++ ){			
+			if(partition[i].size() >0){
+				if(needUpdateCluster[i] == 0 && oldClusterSum != null){
+					//update the cluster i's contribution
+					newClusterSum[i] += oldClusterSum[i];
+				}
+				else{
+					newClusterSum[i] += computeLikelihoodBasedOnClusters(partition[i]);
+				}
+				logL += newClusterSum[i];
+			}
+			else{
+				newClusterSum[i] = 0;
+			}
+    		
+       	}//for	
+		
+		
+		return(logL);
+	}
+        
+        
+    
+    
+    
+    public double getClusterLogLikelihood(int[] clusterLabel, int numClusters) {
+	
+	//public double getClusterLogLikelihood(double[][] vLoc, double[] mu0_offset,
+		//	int[] clusterLabel, int numClusters, int[] needUpdateCluster) {
+		
+		//double logL = computeLogLikelihood();
+    	
+		List<Measurement>[] partition = new List[clusterLabel.length]; // the size may be an overkill, but it's ok
+		for(int i=0; i < clusterLabel.length; i++){
+			partition[i] = new LinkedList();
+		}
+		//partition measurements into clusters using viruses
+	//	int j=0;
+	//	for(Measurement measurement: measurements){
+		//	if(j < 5000){
+			//	partition[0].add(measurement);
+			//}
+			//else{
+			//	partition[1].add(measurement);
+		//	}
+		//	j++;
+		//}
+
+		
+		
+	     for (Measurement measurement : measurements) {
+	    	 //System.out.println("partition #: " + clusterLabel[measurement.virus]);
+	    	 partition[clusterLabel[measurement.virus]].add(measurement);  // this is correct
+	     }
+	     
+		
+		//System.exit(0);
+		
+		double logL=0;
+		// TODO Auto-generated method stub
+		for(int i=0; i < numClusters; i++ ){
+		//for(int i=0; i < clusterLabel.length; i++ ){
+			if(partition[i].size() >0){
+				//System.out.println("partition size is = " + partition[i].size());
+				logL += computeLikelihoodBasedOnClusters(partition[i]);
+			}
+			//if(needUpdateCluster[i] == 1){
+				//update the cluster i's contribution
+				//logClusterL[i] = 1;
+			//}
+			//calculate logClusterL[i]
+			
+			
+			
+			//logL += logClusterL[i]; 
+		}
+		
+		
+		
+		return(logL);
+	}
+    
+    
 
-        System.out.println("titre\tpoint\tinterval(tail)\tinterval(cdf)\tthreshold");
-        for (double titre : titres) {
-            double point = AntigenicLikelihood.computeMeasurementLikelihood(titre, 0.0, 1.0);
-            double interval = AntigenicLikelihood.computeMeasurementIntervalLikelihood(titre + 1.0, titre, 0.0, 1.0);
-            double threshold = AntigenicLikelihood.computeMeasurementThresholdLikelihood(titre, 0.0, 1.0);
+    
+    
 
-            System.out.println(titre + "\t" + point + "\t" + interval + "\t" + threshold);
+	private double computeLikelihoodBasedOnClusters(
+			List<Measurement> linkedList) {
+  	
+
+        double precision = mdsPrecisionParameter.getParameterValue(0);
+        double sd = 1.0 / Math.sqrt(precision);
+
+        double logL = 0.0;
+        int i = 0;
+
+        for (Measurement measurement : linkedList) {
+        		double curLogL=0;
+            
+
+                double expectation = calculateBaseline(measurement.virus, measurement.serum) - computeDistance(measurement.virus, measurement.serum);
+                switch (measurement.type) {
+                    case INTERVAL: {
+
+                        double minTitre = measurement.log2Titre;
+                        double maxTitre = measurement.log2Titre + intervalWidth;
+                        curLogL = computeMeasurementIntervalLikelihood(minTitre, maxTitre, expectation, sd);
+                    } break;
+                    case POINT: {
+
+                    	curLogL = computeMeasurementLikelihood(measurement.log2Titre, expectation, sd);
+                    } break;
+                    case THRESHOLD: {
+
+                    	if(measurement.isLowerThreshold){
+                    		curLogL = computeMeasurementThresholdLikelihood(measurement.log2Titre, expectation, sd);
+                    	}
+                    	else{
+                    		curLogL = computeMeasurementUpperThresholdLikelihood(measurement.log2Titre, expectation, sd);                  		
+                    	}
+                    } break;
+                    case MISSING:
+                        break;
+                }
+            
+            logL += curLogL;
+            i++;
+               // System.out.println("curLogL = " + curLogL);
         }
+
+//        likelihoodKnown = true;
+
+//        setLocationChangedFlags(false);
+ //       setSerumEffectChangedFlags(false);
+  //      setVirusEffectChangedFlags(false);
+//System.out.println("logL = " + logL);
+        return logL;
     }
+
+	public int getNumObservations() {
+		return measurements.size();
+	}
+
+
+
+	public double getNumSera() {
+		return serumLocationsParameter.getParameterCount();
+	}
+
+
+
+	public MatrixParameter getSerumLocationsParameter() {
+		return serumLocationsParameter;
+	}
+
+
+
+
+	public void printVirusLocations() {
+		
+		for(int i=0; i < virusLocationsParameter.getParameterCount(); i++){
+		 Parameter v = virusLocationsParameter.getParameter(i);
+		 System.out.println(v.getId() + " " + v.getParameterValue(0) + "," + v.getParameterValue(1));
+		}
+		
+	}
+	
+	
+
 }
diff --git a/src/dr/evomodel/antigenic/phyloClustering/misc/obsolete/ClusterAlgorithmOperator.java b/src/dr/evomodel/antigenic/phyloClustering/misc/obsolete/ClusterAlgorithmOperator.java
new file mode 100644
index 0000000..4b59feb
--- /dev/null
+++ b/src/dr/evomodel/antigenic/phyloClustering/misc/obsolete/ClusterAlgorithmOperator.java
@@ -0,0 +1,619 @@
+package dr.evomodel.antigenic.phyloClustering.misc.obsolete;
+
+import java.util.LinkedList;
+import java.util.logging.Logger;
+
+import dr.inference.model.Likelihood;
+import dr.inference.model.MatrixParameter;
+import dr.inference.model.Parameter;
+import dr.inference.operators.GibbsOperator;
+import dr.inference.operators.MCMCOperator;
+import dr.inference.operators.SimpleMCMCOperator;
+import dr.math.GammaFunction;
+import dr.math.MathUtils;
+import dr.math.distributions.MultivariateNormalDistribution;
+import dr.math.distributions.WishartDistribution;
+import dr.math.matrixAlgebra.Matrix;
+import dr.math.matrixAlgebra.SymmetricMatrix;
+import dr.xml.*;
+
+
+/**
+ * A Gibbs operator for allocation of items to clusters under a distance dependent Chinese restaurant process.
+ *
+ * @author Charles Cheung
+ * @author Trevor Bedford
+ */
+public class ClusterAlgorithmOperator extends SimpleMCMCOperator  {
+
+	//Parameter locationDrift;  // no longer need to know
+	Parameter virusOffsetsParameter;
+	private double sigmaSq =1;
+	private int numdata = 0; //NEED TO UPDATE
+	//private double[] groupSize; 
+    
+    private MatrixParameter mu = null;
+    private Parameter clusterLabels = null;
+    private Parameter K = null;
+    
+    private MatrixParameter virusLocations = null;
+    
+    private int maxLabel = 0;
+    private int[] muLabels = null;
+
+    private int[] groupSize;
+  //  public ClusterViruses clusterLikelihood = null;
+
+
+   private double numAcceptMoveMu = 0;
+   private double numProposeMoveMu = 0;
+   
+   private double numAcceptMoveC = 0;
+   private double numProposeMoveC = 0;
+   
+   private int isMoveMu = -1;
+    
+   
+	private double[] old_vLoc0 ;
+	private double[] old_vLoc1 ;
+
+    private Parameter clusterOffsetsParameter;
+   
+    private int groupSelectedChange = -1;
+    private int virusIndexChange = -1;
+    private double originalValueChange = -1;
+    private int dimSelectChange = -1;
+    
+    private double[] mu0_offset;
+	
+    //public ClusterAlgorithmOperator(MatrixParameter virusLocations, MatrixParameter mu, Parameter clusterLabels, Parameter K, double weight, Parameter virusOffsetsParameter, Parameter locationDrift_in, Parameter clusterOffsetsParameter) {
+    public ClusterAlgorithmOperator(MatrixParameter virusLocations, MatrixParameter mu, Parameter clusterLabels, Parameter K, double weight, Parameter virusOffsetsParameter, Parameter clusterOffsetsParameter) {
+    	
+      	
+    	System.out.println("Loading the constructor for ClusterAlgorithmOperator");
+    	this.mu = mu;
+    	this.K = K;
+    	this.clusterLabels = clusterLabels;    	
+    //	this.clusterLikelihood = clusterLikelihood;
+        this.virusLocations = virusLocations;
+        this.virusOffsetsParameter = virusOffsetsParameter;
+    //    this.locationDrift = locationDrift_in;  //no longer need
+        this.clusterOffsetsParameter = clusterOffsetsParameter;
+        
+        numdata = virusOffsetsParameter.getSize();
+        System.out.println("numdata="+ numdata);
+        
+        
+        int K_int = (int) K.getParameterValue(0);
+        
+        
+        System.out.println("K_int=" + K_int);
+        groupSize = new int[K_int];
+        for(int i=0; i < K_int; i++){
+        	groupSize[i] = 0;
+        }
+                
+        
+        for(int i=0; i < numdata; i++){
+        	//System.out.println("i="+ i);
+        	int index = (int) clusterLabels.getParameterValue(i);
+        	groupSize[ index]++;
+        }
+    	
+        for(int i=0; i < numdata;i++){
+    		if(maxLabel < (int) clusterLabels.getParameterValue(i)){
+    			maxLabel = (int) clusterLabels.getParameterValue(i);
+    		}
+    	}
+        
+        //NEED maxGROUP
+        
+        //for(int i=0; i < K_int; i++){
+        	//System.out.println("groupSize=" + groupSize[i]);
+        //}
+        
+        
+        muLabels = new int[K_int];
+        
+        for(int i=0; i < maxLabel; i++){
+        	int j=0;
+            if(groupSize[i] >0){
+            	muLabels[j] = i;
+            	j++;
+            }
+        }
+ 
+        //muLabels ...
+        
+        
+        setWeight(weight);
+        
+        System.out.println("Finished loading the constructor for ClusterAlgorithmOperator");
+   
+    }
+    
+
+ 
+    /**
+     * change the parameter and return the log hastings ratio.
+     */
+    public final double doOperation() {
+    	
+    	//System.out.println("Do operation!");
+    	
+    	
+  		double[] zeroVector2D = {0,0};
+  		double[][] identityMatrix2D = new double[][]{
+  			  { 1, 0 },
+  			  { 0, 1 }};
+  		
+  		double[][] sigmaSqMatrix2D = new double[][]{
+    			  { sigmaSq, 0 },
+    			  { 0, sigmaSq }};
+    	
+    	double logHastingRatio = 0;  	
+    	double chooseOperator = Math.random();
+    	
+    	int K_int = (int) K.getParameterValue(0);
+    	
+    	
+    	
+    	double[] original_groupSize = new double[groupSize.length];
+		//recalculate groupSize
+		for(int i=0; i < groupSize.length; i++){
+			original_groupSize[i] = 0;
+		}
+		for(int i=0; i < numdata; i++){
+    		int label =  (int) clusterLabels.getParameterValue(i);
+    		original_groupSize[label  ]++;
+		}
+
+		
+//		for(int i=0; i < K_int; i++){
+//			System.out.println("group " + i + " has size=" + original_groupSize[i]);
+//		}
+		
+  		
+			for(int i=0; i < K_int; i++){
+	      		double muk_0 = mu.getParameter(i).getParameterValue(0);
+	      		double muk_1 = mu.getParameter(i).getParameterValue(1);  				
+				//System.out.println("size=" +   groupSize[i] +   " mu_k_0=" + muk_0+ " , muk_1=" + muk_1);
+			}
+			
+    	    //	System.out.println("propose a change in mu only");
+			if(chooseOperator < 0.5){
+		//	if(chooseOperator < 1){
+	    		//change nothing
+	    		isMoveMu = 1;
+   	    		int groupSelect = (int) Math.floor( Math.random()* K_int );
+   	    			groupSelectedChange = groupSelect;
+  	    		int dimSelect = (int) Math.floor( Math.random()* 2 );   		
+  	    			dimSelectChange = dimSelect;
+	//    			 System.out.println("Group selected = " + groupSelectedChange + " mu=" + mu.getParameter(groupSelectedChange).getParameterValue(0)+ "\t" + mu.getParameter(groupSelectedChange).getParameterValue(1) + "    (before change...)"  );  	    		
+	    		double change = Math.random()*2-1 ; 	
+	    				//System.out.println(change);
+      			double originalValue = mu.getParameter(groupSelect).getParameterValue(dimSelect);
+      				originalValueChange = originalValue;
+    			 mu.getParameter(groupSelect).setParameterValue(dimSelect, originalValue + change);
+//   			 		 System.out.println("Group selected = " + groupSelectedChange + " mu=" + mu.getParameter(groupSelectedChange).getParameterValue(0)+ "\t" + mu.getParameter(groupSelectedChange).getParameterValue(1) + "    (propsed to...)");
+	    		
+	      		logHastingRatio = 0;
+	    	}
+
+			   // 		System.out.println("propose a change in both C and mu");
+	    	else{		
+	    		isMoveMu = 0;
+	    			    		
+	    		int virusIndex = (int) Math.floor( Math.random()*numdata );
+	    		virusIndexChange = virusIndex;	    		
+	    		int toBin = (int) Math.floor(Math.random()*K_int);
+	    			//			System.out.println("toBin=" + toBin);
+	    		int fromBin =  (int) clusterLabels.getParameterValue(virusIndex);
+	    			// 		System.out.println("fromBin=" + fromBin);
+	    	//	if(virusIndex < 5){
+	    		//	System.out.println("virus " + virusIndex + "  from bin=" + fromBin + " to bin " + toBin);
+	    	//	}
+	    		clusterLabels.setParameterValue( virusIndex, toBin);   //the proposal
+
+	    		
+	    		
+	    		
+	    		
+	    		
+	    		//recalculate groupSize
+	    		for(int i=0; i < groupSize.length; i++){
+	    			groupSize[i] = 0;
+	    		}
+	    		for(int i=0; i < numdata; i++){
+	        		int label =  (int) clusterLabels.getParameterValue(i);
+	    			groupSize[label  ]++;
+	    		}
+	   		
+	    		//special case that needs attention on the virus label
+	    		if( (original_groupSize[fromBin] > 0) && ( groupSize[fromBin] == 0)){
+	    			
+	    			K.setParameterValue(0, K_int - 1);
+	    			System.out.println("propose the fromBin " + fromBin + "becomes 0 in size - death of a bin");
+	    			//actually that label is no longer used..
+	    			double[] ranNormal =  MultivariateNormalDistribution.nextMultivariateNormalVariance( zeroVector2D, sigmaSqMatrix2D);
+	    			
+	    			mu.getParameter(fromBin).setParameterValue(0, ranNormal[0]);
+	    			mu.getParameter(fromBin).setParameterValue(1, ranNormal[1]);
+	    			//logHastingRatio += 0;  //this move doesn't change
+	    		}
+	    		
+	    		//birth of a new bin.. assign an offset to it
+	    		if( (original_groupSize[toBin] == 0) && (groupSize[toBin] == 1)){
+	    			
+	    			K.setParameterValue(0, K_int + 1);
+	    			
+	    			System.out.println("propose the birth of bin" + toBin);
+	       			double offset = 0;
+//	       			double drift = locationDrift.getParameterValue(0); // no longer need to do this here
+	    //   			System.out.println("drift=" + drift);
+	    	         if (virusOffsetsParameter != null) {
+	    	            //	System.out.print("virus Offeset Parameter present"+ ": ");
+	    	            //	System.out.print( virusOffsetsParameter.getParameterValue(i) + " ");
+	    	            //	System.out.print(" drift= " + drift + " ");
+	    	  //              offset = drift * virusOffsetsParameter.getParameterValue(virusIndex);
+	    	                		//make sure that it is equivalent to double offset  = year[virusIndex] - firstYear;
+	    	            }
+	    	            else{
+	    	            	System.out.println("virus Offeset Parameter NOT present. We expect one though. Something is wrong.");
+	    	            }
+	    			double[] ranNormal =  MultivariateNormalDistribution.nextMultivariateNormalVariance( zeroVector2D, sigmaSqMatrix2D);
+	    			mu.getParameter(toBin).setParameterValue(0, ranNormal[0] ); // no need to assign offset anymore.. it's getting taken care of in the ClusterViruses by default
+	    	//		mu.getParameter(toBin).setParameterValue(0, ranNormal[0] + offset);
+	    			mu.getParameter(toBin).setParameterValue(1, ranNormal[1]);	    			
+	    			
+	    			//this move should change the Hasting Ratio!
+	    			//CODE HERE
+	    			
+	    		}
+	    		
+	    	} //else    	 
+			
+	    	
+
+	    /*   	
+			for(int i=0; i < K_int; i++){
+    			double muValue = mu.getParameter(i).getParameterValue(0);
+    			double muValue2 = mu.getParameter(i).getParameterValue(1);
+    			System.out.println("Group " + i + "\t" + muValue + "\t" + muValue2);
+			}
+			System.out.println("=============================");
+			*/
+
+			
+			
+			//change the mu in the toBin and fromBIn
+			//borrow from getLogLikelihood:
+
+			double[] meanYear = new double[K_int];
+			double[] groupCount = new double[K_int];
+			for(int i=0; i < numdata; i++){
+				int label = (int) clusterLabels.getParameterValue(i);
+				double year  = 0;
+		        if (virusOffsetsParameter != null) {
+		            //	System.out.print("virus Offeset Parameter present"+ ": ");
+		            //	System.out.print( virusOffsetsParameter.getParameterValue(i) + " ");
+		            //	System.out.print(" drift= " + drift + " ");
+		                year = virusOffsetsParameter.getParameterValue(i);   //just want year[i]
+		                		//make sure that it is equivalent to double offset  = year[virusIndex] - firstYear;
+		            }
+		            else{
+		            	System.out.println("virus Offeset Parameter NOT present. We expect one though. Something is wrong.");
+		            }
+				meanYear[ label] = meanYear[ label] + year;
+				
+				groupCount[ label  ] = groupCount[ label ]  +1; 
+			}
+			int maxLabel=0;
+			for(int i=0;i< numdata; i++){
+				if(maxLabel < (int) clusterLabels.getParameterValue(i)){
+					maxLabel = (int) clusterLabels.getParameterValue(i);
+				}
+			}
+			
+			for(int i=0; i <= maxLabel; i++){
+				meanYear[i] = meanYear[i]/groupCount[i];
+				//System.out.println(meanYear[i]);
+			}
+			
+	
+			//System.out.println("beta=" + beta);
+			//beta = 1;
+
+			mu0_offset = new double[maxLabel+1];
+			//double[] mu1 = new double[maxLabel];
+					
+			
+			//System.out.println("maxLabel=" + maxLabel);
+			//now, change the mu..
+			for(int i=0; i <= maxLabel; i++){
+				//System.out.println(meanYear[i]*beta);
+				mu0_offset[i] =  meanYear[i];
+			//	System.out.println("group " + i + "\t" + mu0_offset[i]);
+			}	
+		//		System.out.println("=====================");
+			
+			
+						
+
+			//Set  the vLoc to be the corresponding mu values , and clusterOffsetsParameter to be the corresponding offsets
+	    	//virus in the same cluster has the same position
+	    	for(int i=0; i < numdata; i++){
+	        	int label = (int) clusterLabels.getParameterValue(i);
+	    		Parameter vLoc = virusLocations.getParameter(i);
+	    		
+	    		//setting the virus locs to be equal to the corresponding mu
+	    			double muValue = mu.getParameter(label).getParameterValue(0);
+	    			vLoc.setParameterValue(0, muValue);
+
+	    		  double	muValue2 = mu.getParameter(label).getParameterValue(1);
+	   				vLoc.setParameterValue(1, muValue2);
+		
+	   			
+	   			//if we want to apply the mean year virus cluster offset to the cluster
+	   			if(clusterOffsetsParameter != null){
+	   			//setting the clusterOffsets to be equal to the mean year of the virus cluster
+	   				// by doing this, the virus changes cluster AND updates the offset simultaneously
+	   				clusterOffsetsParameter.setParameterValue( i , mu0_offset[label]);
+	   			}
+     					//	System.out.println("mu0_offset[label]=" + mu0_offset[label]);
+     				//	System.out.println("clusterOffsets now becomes =" + clusterOffsetsParameter.getParameterValue(i) );   			
+	    	}
+	   // 	System.out.println("");
+	    	
+	    	
+	    	//Hasting's Ratio is p(old |new)/ p(new|old)
+
+	    	//System.out.println("Done doing operation!");
+			
+	    		    	
+	    	
+    	//return(logHastingRatio); //log hasting ratio
+    	return(logHastingRatio);
+    	
+    }
+    	
+    	
+    public void accept(double deviation) {
+    	super.accept(deviation);
+
+    	/*
+    	if(isMoveMu==1){
+    		numAcceptMoveMu++;
+    		numProposeMoveMu++;
+        	System.out.println("% accept move Mu = " + numAcceptMoveMu/(double)numProposeMoveMu);
+    	}
+    	else{    	   
+    		numAcceptMoveC++;
+    		numProposeMoveC++;
+        	System.out.println("% accept move C = " + numAcceptMoveC/(double)numProposeMoveC);
+    	}
+    	*/  
+    	        	
+    	//	if(virusIndexChange <5){
+    //		System.out.println("     -  Accepted!");
+    	//	}
+          	
+    }
+    
+    public void reject(){
+    	super.reject();
+ 
+    	
+    	/*
+    	//manually change mu back..
+    	if(isMoveMu==1){
+			 mu.getParameter(groupSelectedChange).setParameterValue(dimSelectChange, originalValueChange);
+    	}
+    	//manually change all the affected vLoc back...
+    	for(int i=0; i < numdata; i++){
+        	int label = (int) clusterLabels.getParameterValue(i);
+    		Parameter vLoc = virusLocations.getParameter(i);   		
+    		//	double muValue = mu.getParameter(label).getParameterValue(0);
+    		//	vLoc.setParameterValue(0, muValue);
+    		//  double	muValue2 = mu.getParameter(label).getParameterValue(1);
+   			//	vLoc.setParameterValue(1, muValue2);
+	
+ 			clusterOffsetsParameter.setParameterValue( i , mu0_offset[label]);   			
+    	}
+    	*/
+    	
+    	
+    	/*
+    	if(isMoveMu==1){
+    		numProposeMoveMu++;
+        	System.out.println("% accept move Mu = " + numAcceptMoveMu/(double)numProposeMoveMu);
+    	}
+    	else{    	   
+    		numProposeMoveC++;
+        	System.out.println("% accept move C = " + numAcceptMoveC/(double)numProposeMoveC);
+    	}
+    	*/
+    	//if(virusIndexChange < 5){
+//		System.out.println("     -      Rejected!");
+    	//}
+      	
+      	
+      	/*
+      	for(int i=0; i < numdata; i++){
+      		Parameter vLoc = virusLocations.getParameter(i);
+
+      		if( vLoc.getParameterValue(0) != old_vLoc0[i]){
+
+      			System.out.println("virus " + i + " is different: " + vLoc.getParameterValue(0) + " and " + old_vLoc0[i]);
+      		}
+      		
+      		//System.out.println(old_vLoc0[i] + ", " + old_vLoc1[i]);
+      		vLoc.setParameterValue(0, old_vLoc0[i]);
+      		vLoc.setParameterValue(1, old_vLoc1[i]);
+      		
+		}
+      	*/
+  		//System.exit(0);
+
+      	
+      	
+
+    }
+    
+	
+
+           public final static String CLUSTERALGORITHM_OPERATOR = "ClusterAlgorithmOperator";
+
+              
+            //MCMCOperator INTERFACE
+            public final String getOperatorName() {
+                return CLUSTERALGORITHM_OPERATOR;
+            }
+
+            public final void optimize(double targetProb) {
+
+                throw new RuntimeException("This operator cannot be optimized!");
+            }
+
+            public boolean isOptimizing() {
+                return false;
+            }
+
+            public void setOptimizing(boolean opt) {
+                throw new RuntimeException("This operator cannot be optimized!");
+            }
+
+            public double getMinimumAcceptanceLevel() {
+                return 0.1;
+            }
+
+            public double getMaximumAcceptanceLevel() {
+                return 0.4;
+            }
+
+            public double getMinimumGoodAcceptanceLevel() {
+                return 0.20;
+            }
+
+            public double getMaximumGoodAcceptanceLevel() {
+                return 0.30;
+            }
+
+            public String getPerformanceSuggestion() {
+                if (Utils.getAcceptanceProbability(this) < getMinimumAcceptanceLevel()) {
+                    return "";
+                } else if (Utils.getAcceptanceProbability(this) > getMaximumAcceptanceLevel()) {
+                    return "";
+                } else {
+                    return "";
+                }
+            }
+
+        
+           
+        
+
+            public static XMLObjectParser PARSER = new AbstractXMLObjectParser() {
+            	
+
+                public final static String VIRUSLOCATIONS = "virusLocations";
+            	public final static String  MU = "mu";
+            	public final static String CLUSTERLABELS = "clusterLabels";
+            	public final static String K = "k";
+            	public final static String OFFSETS = "offsets";
+      //     	public final static String LOCATION_DRIFT = "locationDrift"; //no longer need
+            	
+                public final static String CLUSTER_OFFSETS = "clusterOffsetsParameter";
+
+
+                public String getParserName() {
+                    return CLUSTERALGORITHM_OPERATOR;
+                }
+
+                /* (non-Javadoc)
+                 * @see dr.xml.AbstractXMLObjectParser#parseXMLObject(dr.xml.XMLObject)
+                 */
+                public Object parseXMLObject(XMLObject xo) throws XMLParseException {
+                	
+                	//System.out.println("Parser run. Exit now");
+                	//System.exit(0);
+
+                    double weight = xo.getDoubleAttribute(MCMCOperator.WEIGHT);
+
+                    
+                    XMLObject cxo = xo.getChild(VIRUSLOCATIONS);
+                        MatrixParameter virusLocations = (MatrixParameter) cxo.getChild(MatrixParameter.class);
+
+                        
+                        cxo = xo.getChild(MU);
+                        MatrixParameter mu = (MatrixParameter) cxo.getChild(MatrixParameter.class);
+
+                        cxo = xo.getChild(CLUSTERLABELS);
+                        Parameter clusterLabels = (Parameter) cxo.getChild(Parameter.class);
+
+                        cxo = xo.getChild(K);
+                        Parameter k = (Parameter) cxo.getChild(Parameter.class);
+                        
+                        cxo = xo.getChild(OFFSETS);
+                        Parameter offsets = (Parameter) cxo.getChild(Parameter.class);
+ 
+//                        cxo = xo.getChild(LOCATION_DRIFT);
+//                        Parameter locationDrift = (Parameter) cxo.getChild(Parameter.class);
+                        
+                        Parameter clusterOffsetsParameter = null;
+                        if (xo.hasChildNamed(CLUSTER_OFFSETS)) {
+                        	clusterOffsetsParameter = (Parameter) xo.getElementFirstChild(CLUSTER_OFFSETS);
+                        }
+
+                        
+                        
+                    //return new ClusterAlgorithmOperator(virusLocations, mu, clusterLabels, k, weight, offsets, locationDrift, clusterOffsetsParameter);
+                        return new ClusterAlgorithmOperator(virusLocations, mu, clusterLabels, k, weight, offsets,  clusterOffsetsParameter);
+
+                }
+
+                //************************************************************************
+                // AbstractXMLObjectParser implementation
+                //************************************************************************
+
+                public String getParserDescription() {
+                    return "An operator that picks a new allocation of an item to a cluster under the Dirichlet process.";
+                }
+
+                public Class getReturnType() {
+                    return ClusterAlgorithmOperator.class;
+                }
+
+
+                public XMLSyntaxRule[] getSyntaxRules() {
+                    return rules;
+                }
+
+                private final XMLSyntaxRule[] rules = {
+                        AttributeRule.newDoubleRule(MCMCOperator.WEIGHT),
+                        
+                        new ElementRule(VIRUSLOCATIONS, Parameter.class),
+                        new ElementRule(MU, Parameter.class),
+                        new ElementRule(CLUSTERLABELS, Parameter.class),
+                        new ElementRule(K, Parameter.class),
+                        new ElementRule(OFFSETS, Parameter.class),
+                  //      new ElementRule(LOCATION_DRIFT, Parameter.class), //no longer needed
+   //                    
+                       new ElementRule(CLUSTER_OFFSETS, Parameter.class, "Parameter of cluster offsets of all virus"),  // no longer REQUIRED
+                
+        
+            };
+            
+            };
+
+
+        
+            public int getStepCount() {
+                return 1;
+            }
+
+        }
+
+
+
+
diff --git a/src/dr/evomodel/antigenic/phyloClustering/misc/obsolete/ClusterComparison.java b/src/dr/evomodel/antigenic/phyloClustering/misc/obsolete/ClusterComparison.java
new file mode 100644
index 0000000..166ed93
--- /dev/null
+++ b/src/dr/evomodel/antigenic/phyloClustering/misc/obsolete/ClusterComparison.java
@@ -0,0 +1,8 @@
+package dr.evomodel.antigenic.phyloClustering.misc.obsolete;
+
+public class ClusterComparison {
+ public int minNumMismatchPerfectSeparation;
+ public int sampleSize;
+ public double SS;
+ public double scaledMinNumMismatchPerfectSeparation;
+}
diff --git a/src/dr/evomodel/antigenic/phyloClustering/misc/obsolete/ClusterOperator.java b/src/dr/evomodel/antigenic/phyloClustering/misc/obsolete/ClusterOperator.java
new file mode 100644
index 0000000..f05a6f5
--- /dev/null
+++ b/src/dr/evomodel/antigenic/phyloClustering/misc/obsolete/ClusterOperator.java
@@ -0,0 +1,443 @@
+package dr.evomodel.antigenic.phyloClustering.misc.obsolete;
+
+import dr.evomodel.antigenic.NPAntigenicLikelihood;
+import dr.inference.model.Bounds;
+import dr.inference.model.CompoundParameter;
+import dr.inference.model.Likelihood;
+import dr.inference.model.MatrixParameter;
+import dr.inference.model.Parameter;
+import dr.inference.operators.GibbsOperator;
+import dr.inference.operators.MCMCOperator;
+import dr.inference.operators.OperatorFailedException;
+import dr.inference.operators.SimpleMCMCOperator;
+import dr.inference.operators.RandomWalkOperator.BoundaryCondition;
+import dr.math.MathUtils;
+import dr.xml.*;
+
+
+
+public class ClusterOperator extends SimpleMCMCOperator  {
+
+
+
+            public final static String CLUSTER_OPERATOR = "clusterOperator";
+            public static final String WINDOW_SIZE = "windowSize";
+            
+    
+            public NPAntigenicLikelihood modelLikelihood;           
+            private Parameter assignments = null;
+            private Parameter links = null;
+            
+            private MatrixParameter virusLocations = null;
+            private double windowSize = 1;
+
+                    
+            
+            public ClusterOperator(Parameter assignments, MatrixParameter virusLocations, double weight, double windowSize, NPAntigenicLikelihood Likelihood, Parameter links) {
+            	//System.out.println("Constructor");
+                this.links = links;           	
+                this.assignments = assignments;
+                this.virusLocations = virusLocations;
+                this.windowSize = windowSize;
+                this.modelLikelihood = Likelihood;
+                
+                setWeight(weight);
+                
+                
+                //load in the virusLocations
+                System.out.println((int)assignments.getParameterValue(0));
+                Parameter vLoc = virusLocations.getParameter(0);
+        
+        		//double d0=  vLoc.getParameterValue(0);
+        		//double d1= vLoc.getParameterValue(1);
+        		//System.out.println( d0 + "," + d1);
+        		
+                
+                //System.exit(0);
+
+			}
+
+
+
+
+            //assume no boundary
+            /**
+             * change the parameter and return the hastings ratio.
+             */
+            public final double doOperation() {
+            	
+            	double moveWholeClusterOrChangeOnePoint = MathUtils.nextDouble();
+            	            	
+            	//double moveWholeClusterProb = 0.2;
+            	double moveWholeClusterProb = 0.5;
+            	
+            	int numSamples = assignments.getDimension();           			
+
+//           	if(moveWholeClusterOrChangeOnePoint <moveWholeClusterProb){
+      //      	System.out.print("Group changes: ");	
+            	//System.out.println("Move a group of points in a cluster together");
+            	int target = MathUtils.nextInt(numSamples);
+            	int targetCluster = (int) assignments.getParameterValue(target);
+
+ 
+            	//for(int i=0; i < numSamples; i++){
+                   //Parameter vLoc = virusLocations.getParameter(i);
+            		//double d0=  vLoc.getParameterValue(0);
+            		//double d1= vLoc.getParameterValue(1);
+            		//System.out.println( d0 + "," + d1);
+            	//}
+ 
+                // a random dimension to perturb
+                int ranDim= MathUtils.nextInt(virusLocations.getParameter(0).getDimension());
+                // a random point around old value within windowSize * 2
+                double draw = (2.0 * MathUtils.nextDouble() - 1.0) * windowSize;
+                
+                
+                
+                for(int i=0; i < numSamples; i++){
+                	if((int) assignments.getParameterValue(i)== targetCluster ){
+                		//System.out.print("update "+ i +" from ");
+                		Parameter vLoc = virusLocations.getParameter(i);
+                		//System.out.print(vLoc.getParameterValue(index));
+                		double newValue = vLoc.getParameterValue(ranDim) + draw;
+                		vLoc.setParameterValue(ranDim, newValue);
+                		//System.out.println(" to " + vLoc.getParameterValue(index));
+                	}
+                }
+               
+  //          }
+            	
+    /*        	
+            	//change cluster assignment!
+            	else{
+            		System.out.print("Cluster assignment & Group move:");
+            		//System.out.println("Change a single point to another cluster");
+            		//moving the point without coupling the point is useless!                 
+                    
+            		
+            		
+            		//randomly pick a point to change cluster assignment
+            		//move the point and couple it with change in cluster:
+            		
+                    int index = MathUtils.nextInt(numSamples);   		
+                    int oldGroup = (int)assignments.getParameterValue(index);
+                   
+                  // Set index customer link to index and all connected to it to a new assignment (min value empty)                  
+                    int minEmp = minEmpty(modelLikelihood.getLogLikelihoodsVector());
+                    links.setParameterValue(index, index);
+                    int[] visited = connected(index, links);
+                    
+                    int ii = 0;
+                    while (visited[ii]!=0){
+                            assignments.setParameterValue(visited[ii]-1, minEmp);
+                            ii++;
+                    }
+
+                   //Adjust likvector for group separated
+                   modelLikelihood.setLogLikelihoodsVector(oldGroup,modelLikelihood.getLogLikGroup(oldGroup) );
+                   modelLikelihood.setLogLikelihoodsVector(minEmp,modelLikelihood.getLogLikGroup(minEmp) );
+                   int maxFull = maxFull( modelLikelihood.getLogLikelihoodsVector());
+
+                   
+                   //pick new link
+                   int k = MathUtils.nextInt(numSamples); //wonder if there is a way to make this more efficient at choosing good moves             
+                   
+                   links.setParameterValue(index, k);                  
+                   int newGroup = (int)assignments.getParameterValue(k);
+                   int countNumChanged = 0;
+                   ii = 0;
+                   while (visited[ii]!=0){
+                           assignments.setParameterValue(visited[ii]-1, newGroup);
+                           ii++;
+                           countNumChanged++;
+                   }
+                   
+                   System.out.print("changed="+countNumChanged+ " ");
+                   
+                   int targetCluster = (int) assignments.getParameterValue(k);                  
+                   
+                  
+                   // updating conditional likelihood vector 
+                   modelLikelihood.setLogLikelihoodsVector(newGroup, modelLikelihood.getLogLikGroup(newGroup));
+                   if (newGroup!=minEmp){
+                            modelLikelihood.setLogLikelihoodsVector(minEmp, 0);
+                          
+                   }
+
+                         
+            		
+            		
+            	//change the location
+               // a random dimension to perturb
+               int ranDim= MathUtils.nextInt(virusLocations.getParameter(0).getDimension());
+               
+               // a random point around old value within windowSize * 2
+               double draw = (2.0 * MathUtils.nextDouble() - 1.0) * windowSize;
+               
+               
+               int countNumLocChanged = 0;
+               for(int i=0; i < numSamples; i++){
+               	if((int) assignments.getParameterValue(i)== targetCluster ){
+               		//System.out.print("update "+ i +" from ");
+               		Parameter vLoc = virusLocations.getParameter(i);
+               		//System.out.print(vLoc.getParameterValue(index));
+               		double newValue = vLoc.getParameterValue(ranDim) + draw;
+               		vLoc.setParameterValue(ranDim, newValue);
+               		//System.out.println(" to " + vLoc.getParameterValue(index));
+               		countNumLocChanged++;
+               	}
+               }
+               System.out.print("loc changed="+countNumLocChanged+ " ");
+ 
+            		
+              // sampleMeans(maxFull); // I decide not to resample the means here.
+           		
+            }
+            //Want to check that virusLocations WON't get updated if rejected
+                
+                
+//            	System.out.println(target);
+ //           	System.out.println(targetCluster);
+            	//for(int i=0; i < assignments.getDimension();)
+      */      	
+            	
+                return 0.0;
+        
+            }
+
+
+            
+            /*
+             * find min Empty
+             */
+            
+            public int minEmpty(double[] logLikVector){
+            int isEmpty=0;
+            int i =0;
+                while (isEmpty==0){
+            if(logLikVector[i]==0){
+                isEmpty=1;}
+            else { 
+                if(i==logLikVector.length-1){isEmpty=1;}
+                   i++;}
+                }
+            return i;
+            }
+           
+        
+            
+            /*
+             * find max Full
+             */
+            
+            
+            public int maxFull(double[] logLikVector){
+                    int isEmpty=1;
+                    int i =logLikVector.length-1;
+                        while (isEmpty==1){
+                    if(logLikVector[i]!=0){
+                        isEmpty=0;}
+                    else {i--;}
+                        }
+                    return i;
+                    }
+         /*
+          * find customers connected to i
+          */
+            
+            public int[] connected(int i, Parameter clusteringParameter){
+                int n =  clusteringParameter.getDimension();
+                int[] visited = new int[n+1]; 
+                    visited[0]=i+1;
+                    int tv=1;
+                    
+                    for(int j=0;j<n;j++){
+                        if(visited[j]!=0){
+                                int curr = visited[j]-1;
+                                
+                                /*look forward
+                                */
+                                
+                        int forward = (int) clusteringParameter.getParameterValue(curr);
+                        visited[tv] = forward+1;
+                        tv++;
+                            // Check to see if is isn't already on the list
+
+                        for(int ii=0; ii<tv-1; ii++){
+                        if(visited[ii]==forward+1){
+                                tv--;
+                                visited[tv]=0;
+                        }
+                        }
+
+
+                        /*look back
+                        */
+                        for (int jj=0; jj<n;jj++){
+                                if((int)clusteringParameter.getParameterValue(jj)==curr){ 
+                                        visited[tv]= jj+1;
+                                        tv++;
+                                        
+                                        for(int ii=0; ii<tv-1; ii++){
+                                        if(visited[ii]==jj+1){
+                                                tv--;
+                                                visited[tv]=0;
+                                        }               
+                                }
+          
+                        }
+                        }
+                        
+                        }}
+                return visited;
+                
+            }
+                        
+            
+            
+            public void accept(double deviation) {
+            	super.accept(deviation);
+  //          	System.out.println("Accepted!");
+            	
+            }
+            
+            public void reject(){
+            	super.reject();
+    //        	System.out.println("Rejected");
+            }
+            
+            
+            
+            //MCMCOperator INTERFACE
+            public final String getOperatorName() {
+                return CLUSTER_OPERATOR;
+            }
+
+            public final void optimize(double targetProb) {
+
+                throw new RuntimeException("This operator cannot be optimized!");
+            }
+
+            public boolean isOptimizing() {
+                return false;
+            }
+
+            public void setOptimizing(boolean opt) {
+                throw new RuntimeException("This operator cannot be optimized!");
+            }
+
+            public double getMinimumAcceptanceLevel() {
+                return 0.1;
+            }
+
+            public double getMaximumAcceptanceLevel() {
+                return 0.4;
+            }
+
+            public double getMinimumGoodAcceptanceLevel() {
+                return 0.20;
+            }
+
+            public double getMaximumGoodAcceptanceLevel() {
+                return 0.30;
+            }
+
+            public String getPerformanceSuggestion() {
+                if (Utils.getAcceptanceProbability(this) < getMinimumAcceptanceLevel()) {
+                    return "";
+                } else if (Utils.getAcceptanceProbability(this) > getMaximumAcceptanceLevel()) {
+                    return "";
+                } else {
+                    return "";
+                }
+            }
+
+
+            
+
+            public static XMLObjectParser PARSER = new AbstractXMLObjectParser() {
+                public final static String ASSIGNMENTS = "assignments";
+                public final static String VIRUS_LOCATIONS = "virusLocations";
+
+                public final static String LIKELIHOOD = "likelihood";
+                public final static String LINKS = "links";
+                
+                
+                public String getParserName() {
+                    return CLUSTER_OPERATOR;
+                }
+
+                /* (non-Javadoc)
+                 * @see dr.xml.AbstractXMLObjectParser#parseXMLObject(dr.xml.XMLObject)
+                 */
+                public Object parseXMLObject(XMLObject xo) throws XMLParseException {
+
+                    double weight = xo.getDoubleAttribute(MCMCOperator.WEIGHT);
+              //      double windowSize = xo.getDoubleAttribute(WINDOW_SIZE);
+
+                    
+                    XMLObject cxo =  xo.getChild(ASSIGNMENTS);
+                        Parameter assignments = (Parameter) cxo.getChild(Parameter.class);  
+                        
+                        MatrixParameter virusLocationsParameter = null;
+                        if (xo.hasChildNamed(VIRUS_LOCATIONS)) {
+                            virusLocationsParameter = (MatrixParameter) xo.getElementFirstChild(VIRUS_LOCATIONS);
+                        }
+                   
+                        cxo = xo.getChild(LINKS);
+                        Parameter links = (Parameter) cxo.getChild(Parameter.class);
+
+                        cxo = xo.getChild(LIKELIHOOD);
+                        NPAntigenicLikelihood likelihood = (NPAntigenicLikelihood)cxo.getChild(NPAntigenicLikelihood.class);
+                                                
+                        //set window size to be 1
+                    return new ClusterOperator(assignments, virusLocationsParameter, weight, 1.0, likelihood, links);
+          //          public ClusterOperator(Parameter assignments, MatrixParameter virusLocations, double weight, double windowSize, NPAntigenicLikelihood Likelihood, Parameter links) {
+
+
+                }
+
+                //************************************************************************
+                // AbstractXMLObjectParser implementation
+                //************************************************************************
+
+                public String getParserDescription() {
+                    return "An operator that picks a new allocation of an item to a cluster under the Dirichlet process.";
+                }
+
+                public Class getReturnType() {
+                    return ClusterOperator.class;
+                }
+
+
+                public XMLSyntaxRule[] getSyntaxRules() {
+                    return rules;
+                }
+
+                private final XMLSyntaxRule[] rules = {
+                        AttributeRule.newDoubleRule(MCMCOperator.WEIGHT),
+                   //     AttributeRule.newDoubleRule(WINDOW_SIZE),                        
+                        new ElementRule(ASSIGNMENTS,
+                                new XMLSyntaxRule[]{new ElementRule(Parameter.class)}),
+                       new ElementRule(VIRUS_LOCATIONS, MatrixParameter.class, "Parameter of locations of all virus"),
+                       new ElementRule(LINKS,
+                               new XMLSyntaxRule[]{new ElementRule(Parameter.class)}), 
+                        new ElementRule(LIKELIHOOD, new XMLSyntaxRule[] {
+                                       new ElementRule(Likelihood.class),}, true),                              
+        
+            };
+            
+            };            
+            
+
+
+       
+            public int getStepCount() {
+                return 1;
+            }
+
+        }
+
+
+
+
diff --git a/src/dr/evomodel/antigenic/phyloClustering/misc/obsolete/ClusterViruses.java b/src/dr/evomodel/antigenic/phyloClustering/misc/obsolete/ClusterViruses.java
new file mode 100644
index 0000000..e238d4d
--- /dev/null
+++ b/src/dr/evomodel/antigenic/phyloClustering/misc/obsolete/ClusterViruses.java
@@ -0,0 +1,619 @@
+package dr.evomodel.antigenic.phyloClustering.misc.obsolete;
+
+import java.io.BufferedReader;
+import java.io.FileNotFoundException;
+import java.io.FileReader;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.ListIterator;
+import java.util.Set;
+import java.util.logging.Logger;
+
+import dr.evolution.tree.Tree;
+import dr.evolution.tree.NodeRef;
+//import dr.evomodel.antigenic.driver.OrderDouble;
+
+import dr.evomodel.tree.TreeModel;
+import dr.evomodelxml.treelikelihood.TreeTraitParserUtilities;
+import dr.inference.model.AbstractModelLikelihood;
+import dr.inference.model.CompoundParameter;
+import dr.inference.model.MatrixParameter;
+import dr.inference.model.Model;
+import dr.inference.model.Parameter;
+import dr.inference.model.Variable;
+import dr.inference.operators.MCMCOperator;
+import dr.math.GammaFunction;
+import dr.math.distributions.MultivariateNormalDistribution;
+import dr.math.matrixAlgebra.SymmetricMatrix;
+import dr.xml.AbstractXMLObjectParser;
+import dr.xml.AttributeRule;
+import dr.xml.ElementRule;
+import dr.xml.StringAttributeRule;
+import dr.xml.XMLObject;
+import dr.xml.XMLObjectParser;
+import dr.xml.XMLParseException;
+import dr.xml.XMLSyntaxRule;
+
+/**
+ * @author Charles Cheung
+ * @author Trevor Bedford
+ */
+
+//Some suggestion to speed up the code from Charles Cheung 
+//(please scroll through to places marked by to see the places of changes   //---------- suggestion from cykc----------------
+
+		
+public class ClusterViruses extends AbstractModelLikelihood {
+	
+	//---------- suggestion from cykc----------------
+	private double mostRecentTransformedValue = 0;  //keep a copy of the most recent version of transformFactor, to keep track of whether the transformFactor has changed
+	//private boolean ShouldUpdateDepMatrix = true;  // this way of flagging for change is not used anymore
+	private boolean treeChanged = false; //a flag that becomes true when treeModel changes
+	//---------- End of suggestion from cykc----------------		
+        
+    public static final String CLUSTER_VIRUSES = "ClusterViruses";
+    
+    
+    
+    
+    //==============================================================================================================================
+    //variables
+    
+    
+    double lambda = 10;
+	double sigmaSq = 9;
+   // double sigmaSq = 100; //when offset is off
+
+    
+    //K - number of parameters
+    Parameter K;   // for now, there is no move to change K 
+    
+    //E|K - excision points
+    Parameter excisionPoints;
+    
+    //C
+    Parameter clusterLabels;
+    
+    //mu - means
+    MatrixParameter mu;
+    
+    
+    double[] muLabels;
+    
+    
+   MatrixParameter virusLocations = null;
+
+    
+    
+    
+   // Parameter virusOffsetsParameter; //need to read it from AntigenicLIkelihood   (the year ) // use offsets instead
+    
+    //need to stop the virus from 
+    
+    
+    
+    
+    
+        
+ public ClusterViruses (TreeModel treeModel_in, 
+		 				Parameter K_in, 
+		 				Parameter excisionPoints_in, 
+		 				Parameter clusterLabels_in, 
+		 				MatrixParameter mu_in, 
+		 				Boolean hasDrift, 
+		 		//		Parameter locationDrift_in, 
+		 				Parameter offsets_in,
+		 				MatrixParameter virusLocations_in){
+	 
+		super(CLUSTER_VIRUSES);
+		
+		this.treeModel= treeModel_in;
+		this.K = K_in;
+		this.excisionPoints = excisionPoints_in;
+		this.clusterLabels = clusterLabels_in;
+		this.mu = mu_in;
+		
+		  this.hasDrift=hasDrift;
+  //      this.locationDrift=locationDrift_in;
+        this.offsets=offsets_in;
+		//this.hasDrift=false;
+        this.virusLocations = virusLocations_in;
+        numdata = offsets.getSize();
+        
+        System.out.println("numdata = " + numdata);
+        
+        //initialize clusterLabels
+        clusterLabels.setDimension(numdata);
+         for (int i = 0; i < numdata; i++) {      	
+            clusterLabels.setParameterValue(i, 0);
+        }
+         addVariable(clusterLabels);
+         
+        
+         
+         //initialize mu
+         mu.setColumnDimension(2);
+         //mu.setRowDimension(numdata);  //in reality, only K of them are used
+         int K_int =  (int) K.getParameterValue(0);
+         mu.setRowDimension(K_int);  //in reality, only K of them are used
+         //System.out.println((int) K.getParameterValue(0));
+         //System.exit(0);
+         
+         for(int i=0; i < K_int; i++){
+        	 //can I set initial condition to be like this? and then let the algorithm set it properly later?
+        	 double zero=0;
+        	 mu.getParameter(i).setValue(0, zero);
+        	 mu.getParameter(i).setValue(1, zero);
+         }
+         
+         //adding the pre-clustering step.
+         preClustering();
+         
+        /*
+        this.abc.setColumnDimension(2);  //set dimension equal to 2
+        abc.setRowDimension(strains.size());
+        for (int i = 0; i < strains.size(); i++) {
+            abc.getParameter(i).setId(strains.get(i));
+        }
+        */
+        
+         
+		addVariable(virusLocations);
+		addModel(treeModel);
+		//addVariable(locationDrift);
+		addVariable(offsets);
+		 addVariable(K);
+		 addVariable(excisionPoints);
+		 addVariable(mu);
+		 System.out.println("Finished loading the constructor for ClusterViruses");
+
+ }    
+    
+    
+ 
+private void preClustering() {
+
+	int numViruses = offsets.getSize();
+	System.out.println("# offsets = " + offsets.getSize());
+	//for(int i=0; i < offsets.getSize(); i++){
+		//System.out.println(offsets.getParameterValue(i));
+	//}
+	
+	
+	List<OrderDouble> list = new ArrayList<OrderDouble>();
+	for(int i=0; i < numViruses; i++){ 
+		 list.add(new OrderDouble(i,  offsets.getParameterValue(i)));  //offset of 1
+	}
+	Collections.sort(list, new OrderDouble());	
+
+	int initialEqualBinSize = numViruses/(int)(K.getParameterValue(0) -1);
+	System.out.println("initial bin size = " + initialEqualBinSize);
+	System.out.println("Initial cluster assignment:");
+//	System.out.println("virus index\tOffset\tCluster label");
+	for(int i=0; i < numViruses; i++){
+//		  System.out.println(list.get(i).getIndex() + "\t" + list.get(i).getValue()  +"\t"+ i/ initialEqualBinSize   );
+		  
+		  int label = i/initialEqualBinSize;
+		  clusterLabels.setParameterValue(list.get(i).getIndex() , label);
+	}
+	
+	
+	
+	
+	/*
+	//borrow from getLogLikelihood:
+	int K_int = (int) K.getParameterValue(0);
+
+	double[] meanYear = new double[K_int];
+	double[] groupCount = new double[K_int];
+	for(int i=0; i < numdata; i++){
+		int label = (int) clusterLabels.getParameterValue(i);
+		double year  = 0;
+        if (offsets != null) {
+            //	System.out.print("virus Offeset Parameter present"+ ": ");
+            //	System.out.print( virusOffsetsParameter.getParameterValue(i) + " ");
+            //	System.out.print(" drift= " + drift + " ");
+                year = offsets.getParameterValue(i);   //just want year[i]
+                		//make sure that it is equivalent to double offset  = year[virusIndex] - firstYear;
+            }
+            else{
+            	System.out.println("virus Offeset Parameter NOT present. We expect one though. Something is wrong.");
+            }
+		meanYear[ label] += year;
+		
+		groupCount[ label  ] = groupCount[ label ]  +1; 
+	}
+	*/
+	int maxLabel=0;
+	for(int i=0;i< numdata; i++){
+		if(maxLabel < (int) clusterLabels.getParameterValue(i)){
+			maxLabel = (int) clusterLabels.getParameterValue(i);
+		}
+	}
+	
+	
+	/*
+	for(int i=0; i <= maxLabel; i++){
+		meanYear[i] = meanYear[i]/groupCount[i];
+		//System.out.println(meanYear[i]);
+	}
+	*/
+	
+	//double beta = locationDrift.getParameterValue(0);
+	
+	
+	//now, change the mu..
+	for(int i=0; i <= maxLabel; i++){
+		//System.out.println(meanYear[i]*beta);
+		//mu.getParameter(i).setParameterValue(0, meanYear[i]*beta);//now separate out mu from virusLocation
+		mu.getParameter(i).setParameterValue(0, 0);
+		mu.getParameter(i).setParameterValue(1, 0);
+	}	
+	
+	//now change the clusterOffsets
+	//... is it necessary?
+	
+	//System.exit(0);
+}
+
+
+
+public double getLogLikelihood() {
+	
+	//System.out.println("getLogLikelihood of ClusterViruses");
+	
+	double logL = 0;
+
+	
+	int maxLabel=0;
+	for(int i=0;i< numdata; i++){
+		if(maxLabel < (int) clusterLabels.getParameterValue(i)){
+			maxLabel = (int) clusterLabels.getParameterValue(i);
+		}
+	}
+
+	//P(K=k)
+	int K_int = (int) K.getParameterValue(0);
+	//logL += Math.log(K.getParameterValue(0)) - lambda*K.getParameterValue(0) - Math.log( (double) factorial(K_int));
+	logL += -lambda + K.getParameterValue(0)*Math.log(lambda) - Math.log( (double) factorial(K_int));
+
+	
+	// p(C | K= k)
+	logL -= numdata * Math.log(K.getParameterValue(0));
+	
+	//p(mu_j | C, years) ~ N( theta , sigma^2)
+	//logL -= Math.log(2*Math.PI);
+	//logL -= 0.5*Math.log( sigmaSq*sigmaSq);
+	
+	
+	logL -= K.getParameterValue(0) * ( Math.log(2)  + Math.log(Math.PI)+ 0.5*Math.log(sigmaSq) +  0.5*Math.log(sigmaSq) );
+	
+	//double[] meanYear = new double[K_int];
+	double[] groupCount = new double[K_int];
+	for(int i=0; i < numdata; i++){
+		int label = (int) clusterLabels.getParameterValue(i);
+		groupCount[ label  ] = groupCount[ label ]  +1; 
+	}
+	
+	//for(int i=0; i <= maxLabel; i++){
+		//meanYear[i] = meanYear[i]/groupCount[i];
+		//System.out.println(meanYear[i]);
+	//}
+	
+	
+	for(int i=0; i <= maxLabel; i++){
+		double mu_i0 = mu.getParameter(i).getParameterValue(0);
+		double mu_i1 = mu.getParameter(i).getParameterValue(1);
+		
+		//double beta = locationDrift.getParameterValue(0);
+		if( groupCount[i] >0){
+			//System.out.println("meanYear = " + meanYear[i]);
+			//logL -=	0.5*(  (mu_i0 - beta*meanYear[i] )*(mu_i0 - beta*meanYear[i] ) + ( mu_i1 -0)*( mu_i1 - 0)   )/sigmaSq;
+			logL -=	0.5*(  (mu_i0  )*(mu_i0  ) + ( mu_i1 )*( mu_i1 )   )/sigmaSq;
+		}
+	}
+	
+	//System.out.println(logL);
+
+//	System.out.println("logL=" + logL);	
+	//System.out.println("done getLogLikelihood of ClusterViruses");
+
+	//System.out.println("logL=" + logL);
+
+	
+	//double logL = 0; // for testing purpose only
+	return(logL);
+/*	
+// if treeModel changes, compute the depMatrix from scratch
+// if only the transformFactor change, go back to the latest copy of the untransformed deptMatrix,
+//and transform it, so it doesn't have to go through the treeModel to get the distance.
+if(treeChanged==true){
+  // setDepMatrix();   //the super slow step          
+ if(treeChanged ==true){
+	  treeChanged = false;
+ }
+ 
+}
+
+
+ double logL = 0.0;
+         for (int j=0 ; j<logLikelihoodsVector.length;j++){
+                  logLikelihoodsVector[j]=getLogLikGroup(j);
+
+                         logL +=logLikelihoodsVector[j];
+
+         }
+         
+         for (int j=0 ; j<links.getDimension();j++){
+       if(links.getParameterValue(j)==j){
+               logL += Math.log(alpha.getParameterValue(0));
+       }
+       else{logL += Math.log(depMatrix[j][(int) links.getParameterValue(j)]);
+               
+       }
+       
+       double sumDist=0.0;
+       for (int i=0;i<numdata;i++){
+               if(i!=j){sumDist += depMatrix[i][j];
+               }
+               }
+       
+         logL-= Math.log(alpha.getParameterValue(0)+sumDist);
+         }
+     
+        
+        return logL;
+      */
+ }
+
+    
+    
+//=====================================================================================================================
+        
+        
+public  int factorial(int n) {
+    int fact = 1; // this  will be the result
+    for (int i = 1; i <= n; i++) {
+        fact *= i;
+    }
+    return fact;
+}
+	
+
+
+
+
+    public Model getModel() {
+                return this;
+            }
+
+          
+
+
+
+
+    public void makeDirty() {
+            }
+
+            public void acceptState() {
+                // DO NOTHING
+            }
+
+            public void restoreState() {
+                // DO NOTHING
+            }
+
+            public void storeState() {
+                // DO NOTHING
+            }
+
+            
+
+            protected void handleModelChangedEvent(Model model, Object object, int index) {
+                //---------- suggestion from cykc----------------
+                // I am making an assumption that whenever treeModel changes, the changes get caught here.             	
+            	if(model == treeModel){
+            		//System.out.println("==========Tree model changes!!!!!!!!=====");
+            		treeChanged = true;
+            	}
+                else{
+                }
+            	//---------- End of suggestion from cykc----------------      	
+            }
+
+            
+          //---------- suggestion from cykc----------------
+            //I tried to catch the transformedFactor changes through this routine, but it seems that it doesn't catch all,
+            //so I abandoned this routine and now use 'mostRecentTransformedValue' to directly test if transformedValue has changed.
+            //This ShouldUpdateDepMatrix never gets used and is now an obsolete variable.
+            
+            //I am noticing that handleVariableChangedEvent doesn't always catch when transformFactor changes.
+            //I am observing that sometimes transformFactor can change more than once within a single MCMC sample - I don't know why,
+            //if getLogLikelihood() gets called after the transformFactor changes but ShouldUpdateDepMatrix flag doesn't catch it,
+            //then the getLogLikelihood() will not be calculated correctly.
+            //Hence, this way of catching when transformFactor changes now becomes obsolete
+            //
+            protected final void handleVariableChangedEvent(Variable variable, int index, Parameter.ChangeType type) {
+            	/*
+            	if (variable == transformFactor) {
+            	//System.out.println("TransformFactor gets updated and is caught here!!");
+            		ShouldUpdateDepMatrix = true;
+
+                } else {
+                	//has to change at another sample instead of setting it to false right after
+                	//updating the matrix because the transformFactor value 
+                	//can update twice within a sample, so setDepMatrix has to update twice
+                	if(ShouldUpdateDepMatrix == true){
+                		//System.out.println("ShouldUpdateDepMatrix changes from true to false");
+                		ShouldUpdateDepMatrix = false;
+                	}
+                }
+            	 */
+            }
+            //---------- End of suggestion from cykc----------------
+
+        Set<NodeRef> allTips;  
+        CompoundParameter traitParameter;  
+        Parameter alpha;
+        Parameter clusterPrec ;
+        Parameter priorPrec ;
+        Parameter priorMean ;
+        Parameter assignments;
+    Parameter links;
+    Parameter means2;
+    Parameter means1;
+    Parameter locationDrift;
+    Parameter offsets;
+    boolean hasDrift;
+
+    TreeModel treeModel;
+        String traitName;
+        double[][] data;
+        double[][] depMatrix;
+        double[][] logDepMatrix;
+        double[][] cur_untransformedMatrix; //---------- suggestion from cykc----------------
+        
+        double[] logLikelihoodsVector;
+        int numdata;
+        Parameter transformFactor;
+        double k0;
+    double v0;
+    double[][] T0Inv;
+        double[] m;
+    double logDetT0;
+    
+    LinkedList<Integer>[] assignmentsLL; 
+    int seqLength;
+    
+    public int getSeqLength() {
+		return seqLength;
+	}
+
+    char[][] seqData;
+
+
+
+
+    public static XMLObjectParser PARSER = new AbstractXMLObjectParser() {
+    	
+    	
+    	public final static String TREEMODEL = "treeModel";
+    	public final static String K = "k";
+    	public final static String EXCISIONPOINTS = "excisionPoints";
+    	public final static String CLUSTERLABELS = "clusterLabels";
+    	public final static String  MU = "mu";
+    	//public final static String HASDRIFT = ??
+ //   	public final static String LOCATION_DRIFT = "locationDrift";
+    	public final static String OFFSETS = "offsets";
+    	public final static String VIRUS_LOCATIONS = "virusLocations";
+
+        boolean integrate = false;
+        
+        
+        public String getParserName() {
+            return CLUSTER_VIRUSES;
+        }
+
+        public Object parseXMLObject(XMLObject xo) throws XMLParseException {
+     
+                TreeModel treeModel = (TreeModel) xo.getChild(TreeModel.class);
+                //String traitName = (String) xo.getAttribute(TRAIT_NAME);
+                
+                XMLObject cxo = xo.getChild(K);
+                Parameter k = (Parameter) cxo.getChild(Parameter.class);
+                
+
+                cxo = xo.getChild(EXCISIONPOINTS);
+                Parameter excisionPoints = (Parameter) cxo.getChild(Parameter.class);
+
+                cxo = xo.getChild(CLUSTERLABELS);
+                Parameter clusterLabels = (Parameter) cxo.getChild(Parameter.class);
+
+                cxo = xo.getChild(MU);
+                MatrixParameter mu = (MatrixParameter) cxo.getChild(MatrixParameter.class);
+
+                //alternative way to load in the MatrixParameter?
+              //  MatrixParameter serumLocationsParameter = null;
+               // if (xo.hasChildNamed(SERUM_LOCATIONS)) {
+                //    serumLocationsParameter = (MatrixParameter) xo.getElementFirstChild(SERUM_LOCATIONS);
+                //}
+
+                
+                
+      //          cxo=xo.getChild(LOCATION_DRIFT) ;
+   //             Parameter locationDrift= (Parameter) cxo.getChild(Parameter.class);
+
+                cxo=xo.getChild(OFFSETS);
+                Parameter offsets =(Parameter) cxo.getChild(Parameter.class);
+
+                cxo=xo.getChild(VIRUS_LOCATIONS);
+                MatrixParameter virusLocations =(MatrixParameter) cxo.getChild(MatrixParameter.class);
+                
+            boolean hasDrift = false;
+            if (offsets.getDimension()>1){
+                hasDrift=true;
+            }
+
+            
+           // TreeTraitParserUtilities utilities = new TreeTraitParserUtilities();
+           // String traitName = TreeTraitParserUtilities.DEFAULT_TRAIT_NAME;
+         
+            
+          //  TreeTraitParserUtilities.TraitsAndMissingIndices returnValue =
+         //           utilities.parseTraitsFromTaxonAttributes(xo, traitName, treeModel, integrate);
+         //  // traitName = returnValue.traitName;
+         //   CompoundParameter traitParameter = returnValue.traitParameter;
+                
+ 
+			//return new 	ClusterViruses (treeModel,traitParameter ,  K, excisionPoints, clusterLabels, mu,  hasDrift); 
+		//	return new ClusterViruses(treeModel, k, excisionPoints, clusterLabels, mu, hasDrift, locationDrift, offsets, virusLocations);
+        	return new ClusterViruses(treeModel, k, excisionPoints, clusterLabels, mu, hasDrift, offsets, virusLocations);
+
+					
+            }
+
+            //************************************************************************
+            // AbstractXMLObjectParser implementation
+            //************************************************************************
+
+            public String getParserDescription() {
+                return "clustering viruses";
+            }
+
+            public Class getReturnType() {
+                return ClusterViruses.class;
+            }
+
+            public XMLSyntaxRule[] getSyntaxRules() {
+                return rules;
+            }
+            
+            
+            private final XMLSyntaxRule[] rules = {
+                   // new StringAttributeRule(TreeTraitParserUtilities.TRAIT_NAME, "The name of the trait for which a likelihood should be calculated"),
+                   // new ElementRule(TREEMODEL, Parameter.class),
+                    new ElementRule(K, Parameter.class),
+                    new ElementRule(EXCISIONPOINTS, Parameter.class),
+                    new ElementRule(CLUSTERLABELS, Parameter.class),
+                    new ElementRule(MU, MatrixParameter.class),
+      //              new ElementRule(LOCATION_DRIFT, Parameter.class),
+                    new ElementRule(OFFSETS, Parameter.class),
+                    new ElementRule(VIRUS_LOCATIONS, MatrixParameter.class)
+            };
+            
+    };
+
+
+    
+  
+
+    String Atribute = null;
+
+        
+}
+
+
diff --git a/src/dr/evomodel/antigenic/phyloClustering/misc/obsolete/ClusterWalkOperator.java b/src/dr/evomodel/antigenic/phyloClustering/misc/obsolete/ClusterWalkOperator.java
new file mode 100644
index 0000000..ce8ec4e
--- /dev/null
+++ b/src/dr/evomodel/antigenic/phyloClustering/misc/obsolete/ClusterWalkOperator.java
@@ -0,0 +1,225 @@
+package dr.evomodel.antigenic.phyloClustering.misc.obsolete;
+
+
+import dr.inference.model.Bounds;
+import dr.inference.model.Parameter;
+import dr.inference.operators.AbstractCoercableOperator;
+import dr.inference.operators.CoercionMode;
+import dr.inference.operators.MCMCOperator;
+import dr.inference.operators.OperatorFailedException;
+import dr.inference.operators.OperatorUtils;
+import dr.math.MathUtils;
+
+import java.util.ArrayList;
+import java.util.List;
+
+
+public class ClusterWalkOperator extends AbstractCoercableOperator {
+
+
+
+    public enum BoundaryCondition {
+        reflecting,
+        absorbing
+    }
+
+    public ClusterWalkOperator(Parameter parameter, double windowSize, BoundaryCondition bc, double weight, CoercionMode mode) {
+        this(parameter, null, windowSize, bc, weight, mode);
+    }
+
+    public ClusterWalkOperator(Parameter parameter, Parameter updateIndex, double windowSize, BoundaryCondition bc,
+                              double weight, CoercionMode mode) {
+        this(parameter, updateIndex, windowSize, bc, weight, mode, null, null);
+    }
+
+    public ClusterWalkOperator(Parameter parameter, Parameter updateIndex, double windowSize, BoundaryCondition bc,
+                              double weight, CoercionMode mode, Double lowerOperatorBound, Double upperOperatorBound) {
+        super(mode);
+        this.parameter = parameter;
+        this.windowSize = windowSize;
+        this.condition = bc;
+
+        setWeight(weight);
+        if (updateIndex != null) {
+            updateMap = new ArrayList<Integer>();
+            for (int i = 0; i < updateIndex.getDimension(); i++) {
+                if (updateIndex.getParameterValue(i) == 1.0)
+                    updateMap.add(i);
+            }
+        }
+
+        this.lowerOperatorBound = lowerOperatorBound;
+        this.upperOperatorBound = upperOperatorBound;
+    }
+
+    /**
+     * @return the parameter this operator acts on.
+     */
+    public Parameter getParameter() {
+        return parameter;
+    }
+
+    public final double getWindowSize() {
+        return windowSize;
+    }
+
+    /**
+     * change the parameter and return the hastings ratio.
+     */
+    public final double doOperation() throws OperatorFailedException {
+    	System.out.println("Walking cluster");
+
+        // a random dimension to perturb
+        int index;
+        if (updateMap == null) {
+            index = MathUtils.nextInt(parameter.getDimension());
+        } else {
+            index = updateMap.get(MathUtils.nextInt(updateMap.size()));
+        }
+
+        // a random point around old value within windowSize * 2
+        double draw = (2.0 * MathUtils.nextDouble() - 1.0) * windowSize;
+        double newValue = parameter.getParameterValue(index) + draw;
+
+        final Bounds<Double> bounds = parameter.getBounds();
+        final double lower = (lowerOperatorBound == null ? bounds.getLowerLimit(index) : Math.max(bounds.getLowerLimit(index), lowerOperatorBound));
+        final double upper = (upperOperatorBound == null ? bounds.getUpperLimit(index) : Math.min(bounds.getUpperLimit(index), upperOperatorBound));
+
+        if (condition == BoundaryCondition.reflecting) {
+            newValue = reflectValue(newValue, lower, upper);
+        } else if (newValue < lower || newValue > upper) {
+            throw new OperatorFailedException("proposed value outside boundaries");
+        }
+
+        parameter.setParameterValue(index, newValue);
+
+        return 0.0;
+    }
+
+    public double reflectValue(double value, double lower, double upper) {
+
+        double newValue = value;
+
+        if (value < lower) {
+            if (Double.isInfinite(upper)) {
+                // we are only going to reflect once as the upper bound is at infinity...
+                newValue = lower + (lower - value);
+            } else {
+                double remainder = lower - value;
+
+                double widths = Math.floor(remainder / (upper - lower));
+                remainder -= (upper - lower) * widths;
+
+                // even reflections
+                if (widths % 2 == 0) {
+                    newValue = lower + remainder;
+                    // odd reflections
+                } else {
+                    newValue = upper - remainder;
+                }
+            }
+        } else if (value > upper) {
+            if (Double.isInfinite(lower)) {
+                // we are only going to reflect once as the lower bound is at -infinity...
+                newValue = upper - (newValue - upper);
+            } else {
+
+                double remainder = value - upper;
+
+                double widths = Math.floor(remainder / (upper - lower));
+                remainder -= (upper - lower) * widths;
+
+                // even reflections
+                if (widths % 2 == 0) {
+                    newValue = upper - remainder;
+                    // odd reflections
+                } else {
+                    newValue = lower + remainder;
+                }
+            }
+        }
+
+        return newValue;
+    }
+
+    public double reflectValueLoop(double value, double lower, double upper) {
+        double newValue = value;
+
+        while (newValue < lower || newValue > upper) {
+            if (newValue < lower) {
+                newValue = lower + (lower - newValue);
+            }
+            if (newValue > upper) {
+                newValue = upper - (newValue - upper);
+
+            }
+        }
+
+        return newValue;
+    }
+
+    //MCMCOperator INTERFACE
+    public final String getOperatorName() {
+        return parameter.getParameterName();
+    }
+
+    public double getCoercableParameter() {
+        return Math.log(windowSize);
+    }
+
+    public void setCoercableParameter(double value) {
+        windowSize = Math.exp(value);
+    }
+
+    public double getRawParameter() {
+        return windowSize;
+    }
+
+    public double getTargetAcceptanceProbability() {
+        return 0.234;
+    }
+
+    public double getMinimumAcceptanceLevel() {
+        return 0.1;
+    }
+
+    public double getMaximumAcceptanceLevel() {
+        return 0.4;
+    }
+
+    public double getMinimumGoodAcceptanceLevel() {
+        return 0.20;
+    }
+
+    public double getMaximumGoodAcceptanceLevel() {
+        return 0.30;
+    }
+
+    public final String getPerformanceSuggestion() {
+
+        double prob = MCMCOperator.Utils.getAcceptanceProbability(this);
+        double targetProb = getTargetAcceptanceProbability();
+
+        double ws = OperatorUtils.optimizeWindowSize(windowSize, parameter.getParameterValue(0) * 2.0, prob, targetProb);
+
+        if (prob < getMinimumGoodAcceptanceLevel()) {
+            return "Try decreasing windowSize to about " + ws;
+        } else if (prob > getMaximumGoodAcceptanceLevel()) {
+            return "Try increasing windowSize to about " + ws;
+        } else return "";
+    }
+
+    public String toString() {
+        return ClusterWalkOperatorParser.CLUSTER_WALK_OPERATOR + "(" + parameter.getParameterName() + ", " + windowSize + ", " + getWeight() + ")";
+    }
+
+    //PRIVATE STUFF
+
+    private Parameter parameter = null;
+    private double windowSize = 0.01;
+    private List<Integer> updateMap = null;
+    private final BoundaryCondition condition;
+
+    private final Double lowerOperatorBound;
+    private final Double upperOperatorBound;
+}
diff --git a/src/dr/evomodel/antigenic/phyloClustering/misc/obsolete/ClusterWalkOperatorParser.java b/src/dr/evomodel/antigenic/phyloClustering/misc/obsolete/ClusterWalkOperatorParser.java
new file mode 100644
index 0000000..949d0d7
--- /dev/null
+++ b/src/dr/evomodel/antigenic/phyloClustering/misc/obsolete/ClusterWalkOperatorParser.java
@@ -0,0 +1,92 @@
+package dr.evomodel.antigenic.phyloClustering.misc.obsolete;
+
+
+
+import dr.inference.model.Parameter;
+import dr.inference.operators.CoercableMCMCOperator;
+import dr.inference.operators.CoercionMode;
+import dr.inference.operators.MCMCOperator;
+import dr.xml.*;
+
+
+public class ClusterWalkOperatorParser  extends AbstractXMLObjectParser {
+
+
+    public static final String CLUSTER_WALK_OPERATOR = "ClusterWalkOperator";
+    public static final String WINDOW_SIZE = "windowSize";
+    public static final String UPDATE_INDEX = "updateIndex";
+    public static final String UPPER = "upper";
+    public static final String LOWER = "lower";
+
+    public static final String BOUNDARY_CONDITION = "boundaryCondition";
+
+        public String getParserName() {
+        	System.out.println("Yo!");
+
+            return CLUSTER_WALK_OPERATOR;
+        }
+
+        public Object parseXMLObject(XMLObject xo) throws XMLParseException {
+
+        	CoercionMode mode = CoercionMode.parseMode(xo);
+
+            double weight = xo.getDoubleAttribute(MCMCOperator.WEIGHT);
+            double windowSize = xo.getDoubleAttribute(WINDOW_SIZE);
+            Parameter parameter = (Parameter) xo.getChild(Parameter.class);
+
+            Double lower = null;
+            Double upper = null;
+
+            if (xo.hasAttribute(LOWER)) {
+                lower = xo.getDoubleAttribute(LOWER);
+            }
+
+            if (xo.hasAttribute(UPPER)) {
+                upper = xo.getDoubleAttribute(UPPER);
+            }
+
+            ClusterWalkOperator.BoundaryCondition condition = ClusterWalkOperator.BoundaryCondition.valueOf(
+                    xo.getAttribute(BOUNDARY_CONDITION, ClusterWalkOperator.BoundaryCondition.reflecting.name()));
+
+            if (xo.hasChildNamed(UPDATE_INDEX)) {
+                XMLObject cxo = xo.getChild(UPDATE_INDEX);
+                Parameter updateIndex = (Parameter) cxo.getChild(Parameter.class);
+                if (updateIndex.getDimension() != parameter.getDimension())
+                    throw new RuntimeException("Parameter to update and missing indices must have the same dimension");
+                return new ClusterWalkOperator(parameter, updateIndex, windowSize, condition,
+                        weight, mode, lower, upper);
+            }
+
+            return new ClusterWalkOperator(parameter, null, windowSize, condition, weight, mode, lower, upper);
+        }
+
+        //************************************************************************
+        // AbstractXMLObjectParser implementation
+        //************************************************************************
+
+        public String getParserDescription() {
+            return "This element returns a cluster walk operator on a given parameter.";
+        }
+
+        public Class getReturnType() {
+            return MCMCOperator.class;
+        }
+
+        public XMLSyntaxRule[] getSyntaxRules() {
+            return rules;
+        }
+
+        private final XMLSyntaxRule[] rules = {
+                AttributeRule.newDoubleRule(WINDOW_SIZE),
+                AttributeRule.newDoubleRule(MCMCOperator.WEIGHT),
+                AttributeRule.newDoubleRule(LOWER, true),
+                AttributeRule.newDoubleRule(UPPER, true),
+                AttributeRule.newBooleanRule(CoercableMCMCOperator.AUTO_OPTIMIZE, true),
+                new ElementRule(UPDATE_INDEX,
+                        new XMLSyntaxRule[] {
+                                new ElementRule(Parameter.class),
+                        },true),
+                new StringAttributeRule(BOUNDARY_CONDITION, null, ClusterWalkOperator.BoundaryCondition.values(), true),
+                new ElementRule(Parameter.class)
+        };
+}
diff --git a/src/dr/evomodel/antigenic/phyloClustering/misc/obsolete/OrderDouble.java b/src/dr/evomodel/antigenic/phyloClustering/misc/obsolete/OrderDouble.java
new file mode 100644
index 0000000..0af45e4
--- /dev/null
+++ b/src/dr/evomodel/antigenic/phyloClustering/misc/obsolete/OrderDouble.java
@@ -0,0 +1,52 @@
+package dr.evomodel.antigenic.phyloClustering.misc.obsolete;
+
+import java.util.Comparator;
+
+public class OrderDouble implements Comparator<OrderDouble>, Comparable<OrderDouble>{
+	   private Integer index;
+	   private double value;
+	   private double value2;
+	   
+	   OrderDouble(){
+	   }
+
+	   OrderDouble(Integer i, double v){
+	      index = i;
+	      value = v;
+	   }
+	   
+	   OrderDouble(Integer i, double v, double a){
+		      index = i;
+		      value = v;
+		      value2 = a;
+		   }
+
+	   public Integer getIndex(){
+	      return index;
+	   }
+
+	   public double getValue(){
+	      return value;
+	   }
+	   public double getValue2(){
+		   return value2;
+	   }
+	   // Overriding the compareTo method
+	   public int compareTo(OrderDouble d){
+	      return (this.index).compareTo(d.index);
+	   }
+
+	   // Overriding the compare method to sort the value
+	   //Returns a negative integer, zero, or a positive integer as the first argument is less than, equal to, or greater than the second.
+	   public int compare(OrderDouble d, OrderDouble d1){
+		  if(d.value - d1.value > 0){
+	      return 1;
+		  }
+		  else if(d.value - d1.value < 0){
+			  return -1;
+		  }
+		  else{
+			  return 0;
+		  }
+	   }
+	}
diff --git a/src/dr/evomodel/antigenic/phyloClustering/misc/obsolete/TiterImporter.java b/src/dr/evomodel/antigenic/phyloClustering/misc/obsolete/TiterImporter.java
new file mode 100644
index 0000000..5b7a868
--- /dev/null
+++ b/src/dr/evomodel/antigenic/phyloClustering/misc/obsolete/TiterImporter.java
@@ -0,0 +1,344 @@
+package dr.evomodel.antigenic.phyloClustering.misc.obsolete;
+/*
+ * NewickImporter.java
+ *
+ * Copyright (C) 2002-2010 Alexei Drummond and Andrew Rambaut
+ *
+ * This file is part of BEAST.
+ * See the NOTICE file distributed with this work for additional
+ * information regarding copyright ownership and licensing.
+ *
+ * BEAST is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+ *
+ * BEAST is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with BEAST; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA  02110-1301  USA
+ */
+
+//package dr.evolution.io;
+
+import dr.evolution.io.Importer;
+import dr.evolution.tree.FlexibleNode;
+import dr.evolution.tree.FlexibleTree;
+import dr.evolution.tree.Tree;
+import dr.evolution.tree.MutableTree.InvalidTreeException;
+import dr.evolution.util.Taxon;
+import dr.evolution.util.TaxonList;
+
+import java.io.EOFException;
+import java.io.IOException;
+import java.io.Reader;
+import java.io.StringReader;
+import java.util.ArrayList;
+
+/**
+ * Class for importing Newick tree file format
+ *
+ * @author Andrew Rambaut
+ * @author Alexei Drummond
+ * @version $Id: NewickImporter.java,v 1.20 2005/12/07 11:25:35 rambaut Exp $
+ */
+public class TiterImporter extends Importer  {
+    public static final String COMMENT = "comment";
+
+    public class BranchMissingException extends ImportException {
+        /**
+         *
+         */
+        private static final long serialVersionUID = 777435104809244693L;
+
+        public BranchMissingException() {
+            super();
+        }
+
+        public BranchMissingException(String msg) {
+            super("Branch missing: " + msg);
+            System.err.println(msg);
+        }
+    }
+
+    /**
+     * @param reader A reader to a source containing a tree in Newick format
+     */
+    public TiterImporter(Reader reader) {
+        super(reader);
+    }
+
+    /**
+     * @param treeString a string containing a tree in newick format
+     */
+    public TiterImporter(String treeString) {
+        this(new StringReader(treeString));
+    }
+
+    private String virusStrain[];
+    private String transformed_titer[];
+    private String titer[];
+
+    public void readTiter(int numNodes) throws IOException, ImportException{
+    	
+    	virusStrain = new String[numNodes];
+    	transformed_titer = new String[numNodes];
+    	titer = new String[numNodes];
+
+    	readToken();
+    	readToken();
+    	readToken();
+    	readToken();
+    	//System.out.println(readToken());
+    	//System.out.println(readToken());
+    	//System.out.println(readToken());
+    	//System.out.println(readToken());
+
+    	for(int i=0; i < (numNodes); i++){
+    		//System.out.println(readInteger()); // we just assume 1 to numNodes now
+    		readInteger();
+    		virusStrain[i] = readToken();
+    		//System.out.println(virusStrain[i]);
+ 		
+    		transformed_titer[i] = readToken();
+    		//System.out.println(transformed_titer[i]);
+  		
+    		try{
+    		//titer[i] = read() ;
+    			titer[i] = readLine();
+    			//System.out.println(titer[i]);
+    		} catch (EOFException e) {
+            //throw new RuntimeException(ite.getMessage());
+    			System.out.println("err");
+    		}
+    		
+
+    	}
+   	
+    }
+    
+    public String getVirusStrain(int i) {
+		return virusStrain[i];
+	}
+
+	public String getTransformed_titer(int i) {
+		return transformed_titer[i];
+	}
+
+	public String getTiter(int i) {
+		return titer[i];
+	}
+
+	/**
+     * importTree.
+     */
+    public Tree importTree(TaxonList taxonList) throws IOException, ImportException {
+        setCommentDelimiters('[', ']', '\0', '\0', '&');
+
+        try {
+            skipUntil("(");
+            unreadCharacter('(');
+
+            final FlexibleNode root = readInternalNode(taxonList);
+            if (getLastMetaComment() != null) {
+                root.setAttribute(COMMENT, getLastMetaComment());
+            }
+//			if (getLastDelimiter() != ';') {
+//				throw new BadFormatException("Expecting ';' after tree");
+//			}
+
+            return new FlexibleTree(root, false, true);
+
+        } catch (EOFException e) {
+            throw new ImportException("incomplete tree");
+        }
+    }
+
+    /**
+     * importTrees.
+     */
+    public Tree[] importTrees(TaxonList taxonList) throws IOException, ImportException {
+        boolean done = false;
+        ArrayList<FlexibleTree> array = new ArrayList<FlexibleTree>();
+
+        do {
+
+            try {
+
+                skipUntil("(");
+                unreadCharacter('(');
+
+                FlexibleNode root = readInternalNode(taxonList);
+                FlexibleTree tree = new FlexibleTree(root, false, true);
+                array.add(tree);
+
+                if (taxonList == null) {
+                    taxonList = tree;
+                }
+
+                if (readCharacter() != ';') {
+                    throw new BadFormatException("Expecting ';' after tree");
+                }
+
+            } catch (EOFException e) {
+                done = true;
+            }
+        } while (!done);
+
+        Tree[] trees = new Tree[array.size()];
+        array.toArray(trees);
+
+        return trees;
+    }
+
+    /**
+     * return whether another tree is available.
+     */
+    public boolean hasTree() throws IOException, ImportException {
+        try {
+            skipUntil("(");
+            unreadCharacter('(');
+        } catch (EOFException e) {
+            lastTree = null;
+            return false;
+        }
+
+        return true;
+    }
+
+    private Tree lastTree = null;
+
+    /**
+     * import the next tree.
+     * return the tree or null if no more trees are available
+     */
+    public Tree importNextTree() throws IOException, ImportException {
+        FlexibleTree tree = null;
+
+        try {
+            skipUntil("(");
+            unreadCharacter('(');
+
+            FlexibleNode root = readInternalNode(lastTree);
+
+            tree = new FlexibleTree(root, false, true);
+
+        } catch (EOFException e) {
+            //
+        }
+
+        lastTree = tree;
+
+        return tree;
+    }
+
+    /**
+     * Reads a branch in. This could be a node or a tip (calls readNode or readTip
+     * accordingly). It then reads the branch length and SimpleNode that will
+     * point at the new node or tip.
+     */
+    private FlexibleNode readBranch(TaxonList taxonList) throws IOException, ImportException {
+        double length = 0.0;
+        FlexibleNode branch;
+
+        if (nextCharacter() == '(') {
+            // is an internal node
+            branch = readInternalNode(taxonList);
+
+        } else {
+            // is an external node
+            branch = readExternalNode(taxonList);
+        }
+
+        final String comment = getLastMetaComment();
+        if (comment != null) {
+            branch.setAttribute(COMMENT, comment);
+            clearLastMetaComment();
+        }
+
+        if (getLastDelimiter() == ':') {
+            length = readDouble(",():;");
+        }
+
+        branch.setLength(length);
+
+        return branch;
+    }
+
+    /**
+     * Reads a node in. This could be a polytomy. Calls readBranch on each branch
+     * in the node.
+     */
+    private FlexibleNode readInternalNode(TaxonList taxonList) throws IOException, ImportException {
+        FlexibleNode node = new FlexibleNode();
+
+        // read the opening '('
+        final char ch = readCharacter();
+        assert ch == '(';
+
+        // read the first child
+        node.addChild(readBranch(taxonList));
+
+        // an internal node must have at least 2 children
+        if (getLastDelimiter() != ',') {
+            throw new BadFormatException("Expecting ',' in tree, but got '" + (char) getLastDelimiter() + "'");
+        }
+
+        // read subsequent children
+        do {
+            node.addChild(readBranch(taxonList));
+
+        } while (getLastDelimiter() == ',');
+
+        // should have had a closing ')'
+        if (getLastDelimiter() != ')') {
+            throw new BadFormatException("Missing closing ')' in tree");
+        }
+
+        // If there is a label before the colon, store it:
+        try {
+            String label = readToken(",():;");
+            if (label.length() > 0) {
+                node.setAttribute("label", label);
+            }
+        } catch (IOException ioe) {
+            // probably an end of file without a terminal ';'
+            // we are going to allow this and return the nodes...
+        }
+
+        return node;
+    }
+
+    /**
+     * Reads an external node in.
+     */
+    private FlexibleNode readExternalNode(TaxonList taxonList) throws IOException, ImportException {
+        FlexibleNode node = new FlexibleNode();
+
+        String label = readToken(":(),;");
+
+        Taxon taxon;
+
+        if (taxonList != null) {
+            // if a taxon list is given then the taxon must be in it...
+            int index = taxonList.getTaxonIndex(label);
+            if (index != -1) {
+                taxon = taxonList.getTaxon(index);
+            } else {
+                throw new UnknownTaxonException("Taxon in tree, '" + label + "' is unknown");
+            }
+        } else {
+            // No taxon list given so create new taxa
+            taxon = new Taxon(label);
+        }
+
+        node.setTaxon(taxon);
+        return node;
+    }
+
+}
diff --git a/src/dr/evomodel/antigenic/phyloClustering/misc/obsolete/TreeClusterGibbsOperator.java b/src/dr/evomodel/antigenic/phyloClustering/misc/obsolete/TreeClusterGibbsOperator.java
new file mode 100644
index 0000000..586870f
--- /dev/null
+++ b/src/dr/evomodel/antigenic/phyloClustering/misc/obsolete/TreeClusterGibbsOperator.java
@@ -0,0 +1,1090 @@
+package dr.evomodel.antigenic.phyloClustering.misc.obsolete;
+
+import java.io.FileNotFoundException;
+import java.io.FileReader;
+import java.util.LinkedList;
+import java.util.logging.Logger;
+
+import dr.evolution.tree.NodeRef;
+import dr.evomodel.tree.TreeModel;
+import dr.inference.model.Likelihood;
+import dr.inference.model.MatrixParameter;
+import dr.inference.model.Parameter;
+import dr.inference.operators.GibbsOperator;
+import dr.inference.operators.MCMCOperator;
+import dr.inference.operators.SimpleMCMCOperator;
+import dr.math.GammaFunction;
+import dr.math.MathUtils;
+import dr.math.distributions.MultivariateNormalDistribution;
+import dr.math.distributions.WishartDistribution;
+import dr.math.matrixAlgebra.Matrix;
+import dr.math.matrixAlgebra.SymmetricMatrix;
+import dr.xml.*;
+
+
+/**
+ * A Gibbs operator for allocation of items to clusters under a distance dependent Chinese restaurant process.
+ *
+ * @author Charles Cheung
+ * @author Trevor Bedford
+ */
+public class TreeClusterGibbsOperator extends SimpleMCMCOperator implements GibbsOperator{
+
+	//Parameter locationDrift;  // no longer need to know
+	Parameter virusOffsetsParameter;
+	private double sigmaSq =1;
+	private int numdata = 0; //NEED TO UPDATE
+	//private double[] groupSize; 
+    
+    private MatrixParameter mu = null;
+    private Parameter clusterLabels = null;
+    private Parameter K = null;
+    
+    private MatrixParameter virusLocations = null;
+    
+    private int maxLabel = 0;
+    private int[] muLabels = null;
+
+    private int[] groupSize;
+  //  public ClusterViruses clusterLikelihood = null;
+
+
+   private double numAcceptMoveMu = 0;
+   private double numProposeMoveMu = 0;
+   
+   private double numAcceptMoveC = 0;
+   private double numProposeMoveC = 0;
+   
+   private int isMoveMu = -1;
+    
+   
+	private double[] old_vLoc0 ;
+	private double[] old_vLoc1 ;
+
+    private Parameter clusterOffsetsParameter;
+    private AGLikelihoodTreeCluster clusterLikelihood = null;
+   
+    private int groupSelectedChange = -1;
+    private int virusIndexChange = -1;
+    private double originalValueChange = -1;
+    private int dimSelectChange = -1;
+    
+    private double[] mu0_offset;
+    
+    private Parameter indicators = null;
+
+    private int binSize=20;
+ 
+    
+    private Parameter excisionPoints;
+    
+    private TreeModel treeModel;
+
+    
+   // private int[] piIndicator = new int[numSites];
+
+	
+    //public ClusterAlgorithmOperator(MatrixParameter virusLocations, MatrixParameter mu, Parameter clusterLabels, Parameter K, double weight, Parameter virusOffsetsParameter, Parameter locationDrift_in, Parameter clusterOffsetsParameter) {
+    public TreeClusterGibbsOperator(MatrixParameter virusLocations, MatrixParameter mu, Parameter clusterLabels, Parameter K, double weight, Parameter virusOffsetsParameter, Parameter clusterOffsetsParameter, Parameter indicatorsParameter, Parameter excisionPointsParameter, TreeModel treeModel_in, AGLikelihoodTreeCluster clusterLikelihood_in) {
+    	
+      	
+    	System.out.println("Loading the constructor for ClusterGibbsOperator");
+    	
+    	this.clusterLikelihood = clusterLikelihood_in;
+		this.treeModel= treeModel_in;
+    	this.mu = mu;
+    	this.K = K;
+    	this.clusterLabels = clusterLabels;    	
+    //	this.clusterLikelihood = clusterLikelihood;
+        this.virusLocations = virusLocations;
+        this.virusOffsetsParameter = virusOffsetsParameter;
+    //    this.locationDrift = locationDrift_in;  //no longer need
+        this.clusterOffsetsParameter = clusterOffsetsParameter;
+    	this.indicators = indicatorsParameter;   
+    	
+    	this.excisionPoints = excisionPointsParameter;
+
+        
+        numdata = virusOffsetsParameter.getSize();
+        System.out.println("numdata="+ numdata);
+        
+        
+        int K_int = (int) K.getParameterValue(0);
+        
+        
+        System.out.println("K_int=" + K_int);
+        groupSize = new int[binSize];
+        for(int i=0; i < binSize; i++){
+        	groupSize[i] = 0;
+        }
+                
+        
+        for(int i=0; i < numdata; i++){
+        	//System.out.println("i="+ i);
+        	int index = (int) clusterLabels.getParameterValue(i);
+        	groupSize[ index]++;
+        }
+    	
+        for(int i=0; i < numdata;i++){
+    		if(maxLabel < (int) clusterLabels.getParameterValue(i)){
+    			maxLabel = (int) clusterLabels.getParameterValue(i);
+    		}
+    	}
+        
+        //NEED maxGROUP
+        
+        //for(int i=0; i < K_int; i++){
+        	//System.out.println("groupSize=" + groupSize[i]);
+        //}
+        
+        
+        muLabels = new int[binSize];
+        
+        for(int i=0; i < maxLabel; i++){
+        	int j=0;
+            if(groupSize[i] >0){
+            	muLabels[j] = i;
+            	j++;
+            }
+        }
+ 
+        //muLabels ...
+        
+        
+        setWeight(weight);
+        
+        System.out.println("Finished loading the constructor for ClusterAlgorithmOperator");
+   
+    }
+    
+
+ 
+    /**
+     * change the parameter and return the log hastings ratio.
+     */
+    public final double doOperation() {
+    	
+    	
+    	int numNodes = treeModel.getNodeCount(); 
+
+    	
+    	double logHastingRatio = 0;  	
+    	double chooseOperator = Math.random();
+    	
+    	int K_int = (int) K.getParameterValue(0);
+    	    	
+    	
+    	int selectedI = -1;
+
+    	
+    	//System.out.println("AG likelihood cluster loaded");
+    	//System.exit(0);
+    	
+    	//Gibbs move:
+		double []logNumeratorProb = new double[numNodes];
+        	
+
+  		int isOn = 0;
+  		int I_selected = -1;
+  		
+  		//obtain a random order of the "on" sites...
+  		
+  		while(isOn == 0){
+  			I_selected = (int) (Math.floor(Math.random()*binSize));
+  			isOn = (int) excisionPoints.getParameterValue(I_selected);
+  	    	//find an "on" excision point to move.
+  			if(isOn==1){
+  				
+//  				System.out.println("begin");
+  				
+  				int originalSite = (int) indicators.getParameterValue(I_selected);
+  //				System.out.println("original site is = " + originalSite);
+  				
+  				
+  				//Determining the number of steps from the original site
+  				int []numStepsFromOrigin = new int[numNodes];
+  				for(int i=0; i < numNodes; i++){
+  					numStepsFromOrigin[i] = 100000;
+  				}
+  				
+	  			int curElementNumber =(int) indicators.getParameterValue(I_selected);
+	  			int rootElementNumber = curElementNumber;
+	  			//System.out.println("curElementNumber=" + curElementNumber);
+	  			NodeRef curElement = treeModel.getNode(curElementNumber); 
+	  			
+	  		    LinkedList<NodeRef> visitlist = new LinkedList<NodeRef>();
+	  		    LinkedList<NodeRef> fromlist = new LinkedList<NodeRef>();
+	  		    LinkedList<Integer> nodeLevel = new LinkedList<Integer>();
+	  		    
+	  		    LinkedList<Integer> possibilities = new LinkedList<Integer>();
+	  		    
+	  		    NodeRef dummyNode = null;
+	  		    visitlist.add(curElement);
+	  		    fromlist.add(dummyNode);
+	  		    nodeLevel.add(new Integer(0));
+	  		    
+	  		    int maxNodeLevel = 1000;
+  	  		    
+  	  		  //System.out.println("root node " + curElement.getNumber());
+  			    while(visitlist.size() > 0){
+  					
+  		  			if(treeModel.getParent(curElement) != null){
+  		  				//add parent
+  				  			NodeRef node= treeModel.getParent(curElement);	  		  			
+  		  				if(fromlist.getFirst() != node){
+  		  					if( nodeLevel.getFirst() < maxNodeLevel){
+  		  						visitlist.add(node);
+  		  		  				fromlist.add(curElement);
+  		  		  				nodeLevel.add(new Integer(nodeLevel.getFirst()+1));
+  		  		  				//System.out.println("node " +  node.getNumber() + " added, parent of " + curElement.getNumber());
+  		  					}
+  		  				}
+  		  			}
+
+  					
+  		  			for(int childNum=0; childNum < treeModel.getChildCount(curElement); childNum++){
+  		  				NodeRef node= treeModel.getChild(curElement,childNum);
+  		  				if(fromlist.getFirst() != node){
+  		  					if( nodeLevel.getFirst() < maxNodeLevel){
+  		  						visitlist.add(node);
+  		  						fromlist.add(curElement);
+  		  						nodeLevel.add(new Integer(nodeLevel.getFirst()+1));
+  		  						//System.out.println("node " +  node.getNumber() + " added, child of " + curElement.getNumber());
+  		  					}
+  		  				}
+  		  	        }
+  		  			
+
+
+
+  		  			//System.out.println("visited " + curElement.getNumber());
+  		  			//test if I can add curElement.getNumber()
+  		  				int site_test = curElement.getNumber();
+  			  			int hasBeenAdded=0;
+  			  			for(int i=0; i < binSize; i++){
+  			  				if( indicators.getParameterValue(i) == site_test){
+  			  					hasBeenAdded=1;
+  			  					break;
+  			  				}
+  			  			}
+  			  			if(hasBeenAdded==0 || curElement.getNumber() == rootElementNumber ){
+  			  				//System.out.println("to possibilities: add " + site_test);
+  			  				numStepsFromOrigin[site_test] = nodeLevel.getFirst();
+  			  				possibilities.addLast(new Integer( site_test));
+  			  			}
+  			  			else{
+  			  				//System.out.println("element " + curElement.getNumber() + " is already an excision point");
+  			  			}
+  		  			
+  	  		  			visitlist.pop();
+  	  		  			fromlist.pop();
+  	  		  			nodeLevel.pop();
+  			  			
+  		  			if(visitlist.size() > 0){
+  		  				curElement = visitlist.getFirst();
+  		  			}
+  		  			
+  		  			
+  				}
+  		  			
+  				  				
+  				
+  				//Calculating the conditional probability
+  				for(int curSite = 0; curSite < numNodes; curSite++){
+
+					//check if a site has been added
+					int hasBeenAdded=0;
+		  			for(int i=0; i < binSize; i++){
+		  				if( indicators.getParameterValue(i) == curSite){
+		  					hasBeenAdded=1;
+		  					break;
+		  				}
+		  			}
+		  			//site that has been added will be zeroed out.
+		  			if(hasBeenAdded==1){
+		  				double inf = Double.NEGATIVE_INFINITY;
+		  				logNumeratorProb[curSite]  = inf;
+
+		  			}
+		  			else{
+		  				//calculate the numerator of the conditional probability
+
+		  	    		//select that node, change the Clusterlabels, and virus offsets, calculate loglikelihood from AGLikelihoodCluster and store
+		  	    		//perform Gibbs sampling
+		  	    	   	//change the cluster labels and virus offsets
+		  				int site_add = curSite; 
+		  				//set to new sample
+		  				indicators.setParameterValue(I_selected, site_add);
+
+
+		  				
+		  		    	//for each node that is not occupied,
+		  				//select that node, change the Clusterlabels, and virus offsets, calculate loglikelihood from AGLikelihoodCluster and store
+		  				//perform Gibbs sampling
+		  			   	//change the cluster labels and virus offsets
+		  			
+		  				
+		  		  		//swapped to the new on site
+		  		  		//nonZeroIndexes[I_off] = site_on; 
+		  		  		
+		  		  		
+		  		    	//K IS CHANGED ACCORDINGLY
+		  				int K_count = 0; //K_int gets updated
+		  				for(int i=0; i < binSize; i++){
+		  					K_count += (int) excisionPoints.getParameterValue(i);
+		  				}
+		  				//System.out.println("K now becomes " + K_count);
+		  				
+		  				
+		  				//Remove the commenting out later.
+		  				K.setParameterValue(0, K_count); //update 
+		  				K_int = K_count;
+		  				
+		  		    	//use the tree to re-partition according to the change.
+		  				setClusterLabels(K_int);
+
+		  					
+		  					//change the mu in the toBin and fromBIn
+		  					//borrow from getLogLikelihood:
+
+		  					double[] meanYear = new double[binSize];
+		  					double[] groupCount = new double[binSize];
+		  					for(int i=0; i < numdata; i++){
+		  						int label = (int) clusterLabels.getParameterValue(i);
+		  						double year  = 0;
+		  				        if (virusOffsetsParameter != null) {
+		  				            //	System.out.print("virus Offeset Parameter present"+ ": ");
+		  				            //	System.out.print( virusOffsetsParameter.getParameterValue(i) + " ");
+		  				            //	System.out.print(" drift= " + drift + " ");
+		  				                year = virusOffsetsParameter.getParameterValue(i);   //just want year[i]
+		  				                		//make sure that it is equivalent to double offset  = year[virusIndex] - firstYear;
+		  				            }
+		  				            else{
+		  				            	System.out.println("virus Offeset Parameter NOT present. We expect one though. Something is wrong.");
+		  				            }
+		  						meanYear[ label] = meanYear[ label] + year;
+		  						
+		  						groupCount[ label  ] = groupCount[ label ]  +1; 
+		  					}
+		  								
+		  					for(int i=0; i < binSize; i++){
+		  						if(groupCount[i] > 0){
+		  							meanYear[i] = meanYear[i]/groupCount[i];
+		  						}
+		  						//System.out.println(meanYear[i]);
+		  					}
+		  			
+
+		  					mu0_offset = new double[binSize];
+		  					//double[] mu1 = new double[maxLabel];
+		  							
+		  					
+		  					//System.out.println("maxLabel=" + maxLabel);
+		  					//now, change the mu..
+		  					for(int i=0; i < binSize; i++){
+		  						//System.out.println(meanYear[i]*beta);
+		  						mu0_offset[i] =  meanYear[i];
+		  						//System.out.println("group " + i + "\t" + mu0_offset[i]);
+		  					}	
+		  				//		System.out.println("=====================");
+		  					
+		  					
+		  					//Set  the vLoc to be the corresponding mu values , and clusterOffsetsParameter to be the corresponding offsets
+		  			    	//virus in the same cluster has the same position
+		  			    	for(int i=0; i < numdata; i++){
+		  			        	int label = (int) clusterLabels.getParameterValue(i);
+		  			    		Parameter vLoc = virusLocations.getParameter(i);
+		  			    		//setting the virus locs to be equal to the corresponding mu
+		  			    			double muValue = mu.getParameter(label).getParameterValue(0);    			
+		  			    			vLoc.setParameterValue(0, muValue);
+		  			    			double	muValue2 = mu.getParameter(label).getParameterValue(1);
+		  			   				vLoc.setParameterValue(1, muValue2);
+		  				   			//System.out.println("vloc="+ muValue + "," + muValue2);
+		  			    	}
+		  			    	
+		  			    	for(int i=0; i < numdata; i++){
+		  			        	int label = (int) clusterLabels.getParameterValue(i);
+		  			   			//if we want to apply the mean year virus cluster offset to the cluster
+		  			   			if(clusterOffsetsParameter != null){
+		  			   			//setting the clusterOffsets to be equal to the mean year of the virus cluster
+		  			   				// by doing this, the virus changes cluster AND updates the offset simultaneously
+		  			   				clusterOffsetsParameter.setParameterValue( i , mu0_offset[label]);
+		  			   			}
+		  		     				//		System.out.println("mu0_offset[label]=" + mu0_offset[label]);
+		  		     		//		System.out.println("clusterOffsets " +  i +" now becomes =" + clusterOffsetsParameter.getParameterValue(i) );   			
+		  			    	}
+
+		  	    	
+		  			    	logNumeratorProb[curSite] = clusterLikelihood.getLogLikelihood();
+		  			    	//System.out.println(clusterLikelihood.getLogLikelihood());
+		  				
+		  				
+		  			}
+
+  				}//close for loop
+  				
+  				selectedI = I_selected;
+  			  	
+  		  		
+  		  		
+  		  		double maxLogProb = logNumeratorProb[0];
+  		  		for(int i=0; i < numNodes; i++ ){
+  		  			if(logNumeratorProb[i] > maxLogProb){
+  		  				maxLogProb = logNumeratorProb[i];
+  		  			}
+  		  		}
+  		  		
+  		  		//System.out.println(maxLogProb);
+  		  		
+  		  		
+  		  		double sumLogDenominator = 0;
+  		  		 
+  		  		for(int i=0; i < numNodes; i++){
+  		  			if(logNumeratorProb[i] != Double.NEGATIVE_INFINITY){
+  		  				sumLogDenominator += Math.exp((logNumeratorProb[i]-maxLogProb));
+  		  			}
+  		  		}
+  		  		sumLogDenominator = Math.log(sumLogDenominator) + maxLogProb;
+  		  		
+  		  		double sumProb = 0;
+  		  		double []condProb = new double[numNodes]; 
+  		  		for(int i=0; i < numNodes; i++){
+  		  			condProb[i] = Math.exp( logNumeratorProb[i] - sumLogDenominator   );
+  					//System.out.println("condProb of site " + i + " = " + condProb[i]);
+  					sumProb +=condProb[i];
+  					if(condProb[i] > 0.01){
+  //						System.out.println("**site " + i + " with prob=" + condProb[i]  + "  steps from previous=" + numStepsFromOrigin[i]);
+  						
+  					}
+  		  		}
+  		  		
+  		  		//System.out.println("sum up to " + sumProb);
+  		  		
+  		  		
+  		  		//int site_add = MathUtils.randomChoicePDF(condProb); //seems to not be working properly
+  		  	int site_add = MathUtils.randomChoicePDF(condProb); //seems to not be working properly
+
+				if(numStepsFromOrigin[site_add] >0){
+					System.out.println("Gibbs move: indicator " + I_selected +" from site " + originalSite + " to "+  site_add + " , chosen with prob =" + condProb[site_add] + " steps from previous placement=" + numStepsFromOrigin[site_add] );
+				}
+				
+//  		  		System.out.println("indicator " + I_selected +" from site " + originalSite + " to "+  site_add + " , chosen with prob =" + condProb[site_add] + " steps from previous placement=" + numStepsFromOrigin[site_add] );
+  				indicators.setParameterValue(I_selected, site_add);
+  				
+
+  			} //close if isOn.
+  		}    	  		
+  		
+
+
+
+ // 		MathUtils.randomChoicePDF(pairwiseDivergenceCountPerSite[group1][group2])
+    	
+  		
+  	//	for(int curSite=0; curSite < numNodes; curSite++){
+  	//		System.out.println("log condProb of site " + curSite + " = " + logNumeratorProb[curSite] + " and scaled=" +  (logNumeratorProb[curSite]-maxLogProb) );
+  	//	}
+  		
+  		
+		//normalize the score into probability distribution.
+		
+		//(int) Math.floor( Math.random()*numNodes );
+//		int site_add = SAMPLE FROM THE DISTRIBUTION; 
+		//set to new sample
+	//	indicators.setParameterValue(I_selected, site_add);
+
+
+		
+    	//for each node that is not occupied,
+		//select that node, change the Clusterlabels, and virus offsets, calculate loglikelihood from AGLikelihoodCluster and store
+		//perform Gibbs sampling
+	   	//change the cluster labels and virus offsets
+	
+ 	
+    	
+		//====================================================================================================
+		//After finishing the proposal
+		//====================================================================================================
+		
+		
+		
+  		//swapped to the new on site
+  		//nonZeroIndexes[I_off] = site_on; 
+  		
+  		
+    	//K IS CHANGED ACCORDINGLY
+		int K_count = 0; //K_int gets updated
+		for(int i=0; i < binSize; i++){
+			K_count += (int) excisionPoints.getParameterValue(i);
+		}
+		//System.out.println("K now becomes " + K_count);
+		
+		
+		//Remove the commenting out later.
+		K.setParameterValue(0, K_count); //update 
+		K_int = K_count;
+		
+    	//use the tree to re-partition according to the change.
+		setClusterLabels(K_int);
+
+			
+			//change the mu in the toBin and fromBIn
+			//borrow from getLogLikelihood:
+
+			double[] meanYear = new double[binSize];
+			double[] groupCount = new double[binSize];
+			for(int i=0; i < numdata; i++){
+				int label = (int) clusterLabels.getParameterValue(i);
+				double year  = 0;
+		        if (virusOffsetsParameter != null) {
+		            //	System.out.print("virus Offeset Parameter present"+ ": ");
+		            //	System.out.print( virusOffsetsParameter.getParameterValue(i) + " ");
+		            //	System.out.print(" drift= " + drift + " ");
+		                year = virusOffsetsParameter.getParameterValue(i);   //just want year[i]
+		                		//make sure that it is equivalent to double offset  = year[virusIndex] - firstYear;
+		            }
+		            else{
+		            	System.out.println("virus Offeset Parameter NOT present. We expect one though. Something is wrong.");
+		            }
+				meanYear[ label] = meanYear[ label] + year;
+				
+				groupCount[ label  ] = groupCount[ label ]  +1; 
+			}
+						
+			for(int i=0; i < binSize; i++){
+				if(groupCount[i] > 0){
+					meanYear[i] = meanYear[i]/groupCount[i];
+				}
+				//System.out.println(meanYear[i]);
+			}
+	
+
+			mu0_offset = new double[binSize];
+			//double[] mu1 = new double[maxLabel];
+					
+			
+			//System.out.println("maxLabel=" + maxLabel);
+			//now, change the mu..
+			for(int i=0; i < binSize; i++){
+				//System.out.println(meanYear[i]*beta);
+				mu0_offset[i] =  meanYear[i];
+				//System.out.println("group " + i + "\t" + mu0_offset[i]);
+			}	
+		//		System.out.println("=====================");
+			
+			
+			//Set  the vLoc to be the corresponding mu values , and clusterOffsetsParameter to be the corresponding offsets
+	    	//virus in the same cluster has the same position
+	    	for(int i=0; i < numdata; i++){
+	        	int label = (int) clusterLabels.getParameterValue(i);
+	    		Parameter vLoc = virusLocations.getParameter(i);
+	    		//setting the virus locs to be equal to the corresponding mu
+	    			double muValue = mu.getParameter(label).getParameterValue(0);    			
+	    			vLoc.setParameterValue(0, muValue);
+	    			double	muValue2 = mu.getParameter(label).getParameterValue(1);
+	   				vLoc.setParameterValue(1, muValue2);
+		   			//System.out.println("vloc="+ muValue + "," + muValue2);
+	    	}
+	    	
+	    	for(int i=0; i < numdata; i++){
+	        	int label = (int) clusterLabels.getParameterValue(i);
+	   			//if we want to apply the mean year virus cluster offset to the cluster
+	   			if(clusterOffsetsParameter != null){
+	   			//setting the clusterOffsets to be equal to the mean year of the virus cluster
+	   				// by doing this, the virus changes cluster AND updates the offset simultaneously
+	   				clusterOffsetsParameter.setParameterValue( i , mu0_offset[label]);
+	   			}
+     				//		System.out.println("mu0_offset[label]=" + mu0_offset[label]);
+     		//		System.out.println("clusterOffsets " +  i +" now becomes =" + clusterOffsetsParameter.getParameterValue(i) );   			
+	    	}
+
+	    	
+
+	    	
+//	    	System.out.println("===The on nodes===");
+//	    	for(int i=0; i < binSize; i++){	    
+//	    		if((int) excisionPoints.getParameterValue(i) == 1){
+//	    			System.out.println("Cluster node " + i + " = " + (int) indicators.getParameterValue(i) + "\tstatus=" + (int) excisionPoints.getParameterValue(i));
+//	    		}
+//	    	}
+	    	
+	    	
+	    	
+	    	
+	    	//Hasting's Ratio is p(old |new)/ p(new|old)
+
+	    	//System.out.println("Done doing operation!");
+			
+	    		    	
+	    	
+    	//return(logHastingRatio); //log hasting ratio
+    	return(logHastingRatio);
+    	
+    }
+    	
+    	
+    private void setClusterLabels(int K_int) {
+
+        int numNodes = treeModel.getNodeCount();
+        int[] cutNodes = new int[K_int];
+ 	   int cutNum = 0;
+ 	   String content = "";
+        for(int i=0; i < binSize; i++){
+     	   if( (int) excisionPoints.getParameterValue( i ) ==1 ){
+     		   cutNodes[cutNum] = (int) indicators.getParameterValue(i);
+     		   content += (int) indicators.getParameterValue(i) + ",";
+     		   cutNum++;
+     	   }
+     	  
+        }
+       // System.out.println(content);
+        
+        if(cutNum != K_int){
+        	System.out.println("cutNum != K_int. we got a problem");
+        }
+          
+    //    for(int i=0; i < K_int; i++){
+    // 	   System.out.println(cutNodes[i]);
+     //   }
+        
+        //int []membership = determine_membership(treeModel, cutNodes, K_int-1);
+        int []membership = determine_membership(treeModel, cutNodes, K_int);
+        
+        double uniqueCode = 0;
+        for(int i=0; i < numNodes; i++){
+        	uniqueCode += membership[i]*i;
+        }
+      //  System.out.println(" sum = " + uniqueCode);
+        
+     //   System.out.println("number of nodes = " + treeModel.getNodeCount());
+      //  for(int i=0; i < treeModel.getNodeCount(); i++){
+     //	   System.out.println(membership[i]);
+      //  }
+        
+        
+        //System.out.println("Done");
+        
+      //  for(int i=0; i < numdata; i++){
+ 	//	   Parameter v = virusLocations.getParameter(i);
+ 	//	   String curName = v.getParameterName();
+ 	//	   System.out.println("i=" + i + " = " + curName);       
+ 	//	}       
+        
+      //  for(int j=0; j < numdata; j++){
+     //	   System.out.println("j=" + j + " = " + treeModel.getTaxonId(j));
+      //  }
+        
+        
+ 	//   Parameter vv = virusLocations.getParameter(0);
+ 	 //  String curNamev = vv.getParameterName();
+ 	   
+ 	 //  System.out.println(curNamev + " and " +treeModel.getTaxonId(392) );
+ 	   //System.out.println(  curNamev.equals(treeModel.getTaxonId(392) )  );
+ 	   
+        
+        //System.exit(0);
+        
+ 	  // System.out.println("numNodes=" + numNodes);
+ 	  // System.exit(0);
+        //create dictionary:
+ 	   
+ 	   //I suspect this is an expensive operation, so I don't want to do it many times,
+ 	   //which is also unnecessary  - MAY have to update whenever a different tree is used.
+        int []membershipToClusterLabelIndexes = new int[numdata]; 
+        for(int i=0; i < numdata; i++){
+ 		   Parameter v = virusLocations.getParameter(i);
+ 		   String curName = v.getParameterName();
+ 		  // System.out.println(curName);
+ 		   int isFound = 0;
+     	   for(int j=0; j < numNodes; j++){
+     		   String treeId = treeModel.getTaxonId(j);
+     		   if(curName.equals(treeId) ){
+     		//	   System.out.println("  isFound at j=" + j);
+     			   membershipToClusterLabelIndexes[i] = j;
+     			   isFound=1;
+     			   break;
+     		   }
+     		   
+     	   }
+     	   if(isFound ==0){
+     		   System.out.println("not found. Exit now.");
+     		   System.exit(0);
+     	   }     	   
+        }
+        
+        
+       // System.exit(0);
+        
+      //  for(int i=0; i < numdata; i++){
+     //	   System.out.println(membershipToClusterLabelIndexes[i]);
+      //  }
+       // System.exit(0);
+        
+        for(int i=0; i < numdata; i++){
+     	   //The assumption that the first nodes being external node corresponding to the cluster labels IS FALSE
+     	   //so I have to search for the matching indexes
+     	   Parameter vloc = virusLocations.getParameter(i);
+  
+     	   
+//must uncomment out because this sets the new partitioning ... now i am doing code testing.     	   
+     	   clusterLabels.setParameterValue( i, membership[membershipToClusterLabelIndexes[i]]);
+     	   //System.out.println(vloc.getParameterName() + " i="+ i + " membership=" + (int) clusterLabels.getParameterValue(i));
+     	   
+     	 //  Parameter v = virusLocations.getParameter(i);
+     	  // System.out.println(v.getParameterName());
+        }
+        
+
+    	
+	}
+
+    
+    
+    private static boolean isCutNode(int number, int cutNodes[], int numCut) {
+    	if(numCut > 0){
+    		for(int i=0; i < numCut; i++){
+    			if(number == cutNodes[i]){
+    				return true;
+    			}
+    		}
+    	}
+    	return false;
+    }
+    
+    
+
+  //traverse down the tree, top down, do calculation
+  static int[] determine_membership(TreeModel treeModel, int[] cutNodes, int numCuts){
+
+  	
+  	//TEMPORARY SOLUTION
+      //load in the titer, corresponding to the taxon #.
+  	
+  	 TiterImporter titer = null ;
+  	 
+      FileReader fileReader;
+  	try {
+  		fileReader = new FileReader("/Users/charles/Documents/research/antigenic/GenoPheno/data/taxon_y_titer.txt");
+  	     titer = new TiterImporter(fileReader);	
+
+  	} catch (FileNotFoundException e) {
+  		// TODO Auto-generated catch block
+  		e.printStackTrace();
+  	}
+  	
+  NodeRef root = treeModel.getRoot();
+
+  int numClusters = 1;
+  LinkedList<NodeRef> list = new LinkedList<NodeRef>();
+  list.addFirst(root);
+
+  int[] membership = new int[treeModel.getNodeCount()];
+  for(int i=0; i < treeModel.getNodeCount(); i++){
+  	membership[i] = -1;
+  }
+  membership[root.getNumber()] = 0; //root always given the first cluster
+        
+  while(!list.isEmpty()){
+  	//do things with the current object
+  	NodeRef curElement = list.pop();
+  	//String content = "node #" + curElement.getNumber() +", taxon=" + treeModel.getNodeTaxon(curElement) + " and parent is = " ;
+  	String content = "node #" + curElement.getNumber() +", taxon= " ;
+  	if(treeModel.getNodeTaxon(curElement)== null){
+  		content += "internal node\t";
+  	}
+  	else{
+  		content += treeModel.getNodeTaxon(curElement).getId() + "\t";
+  		//content += treeModel.getTaxonIndex(treeModel.getNodeTaxon(curElement)) + "\t";
+  	}
+  	
+     	if(treeModel.getParent(curElement)== null){
+  		//content += "no parent";
+  	}
+  	else{
+  		//content += "parent node#=" + treeModel.getParent(curElement).getNumber();
+  	}
+  	
+  	//cluster assignment:
+  	if(!treeModel.isRoot(curElement)){
+  	 if(isCutNode(curElement.getNumber(), cutNodes, numCuts)){
+  	//if(isCutNode(curElement.getNumber())){
+  		numClusters++ ;
+  		membership[ curElement.getNumber() ] = numClusters - 1; 
+    	}
+  	else{
+  		//inherit from parent's cluster assignment
+  		membership[curElement.getNumber()] = membership[treeModel.getParent(curElement).getNumber()]; 
+  	 }
+  	        	
+  	}//is not Root
+  	content += " cluster = " + membership[curElement.getNumber()] ; 
+  	
+  //	System.out.println(content);
+
+  	
+      for(int childNum=0; childNum < treeModel.getChildCount(curElement); childNum++){
+      	list.addFirst(treeModel.getChild(curElement,childNum));
+      }
+  }
+
+   return(membership);
+  }
+
+    
+    
+
+
+	public void accept(double deviation) {
+    	super.accept(deviation);
+
+    	/*
+    	if(isMoveMu==1){
+    		numAcceptMoveMu++;
+    		numProposeMoveMu++;
+        	System.out.println("% accept move Mu = " + numAcceptMoveMu/(double)numProposeMoveMu);
+    	}
+    	else{    	   
+    		numAcceptMoveC++;
+    		numProposeMoveC++;
+        	System.out.println("% accept move C = " + numAcceptMoveC/(double)numProposeMoveC);
+    	}
+    	*/  
+    	        	
+    	//	if(virusIndexChange <5){
+    //		System.out.println("     -  Accepted!");
+    	//	}
+          	
+    }
+    
+    public void reject(){
+    	super.reject();
+ 
+    	
+    	/*
+    	//manually change mu back..
+    	if(isMoveMu==1){
+			 mu.getParameter(groupSelectedChange).setParameterValue(dimSelectChange, originalValueChange);
+    	}
+    	//manually change all the affected vLoc back...
+    	for(int i=0; i < numdata; i++){
+        	int label = (int) clusterLabels.getParameterValue(i);
+    		Parameter vLoc = virusLocations.getParameter(i);   		
+    		//	double muValue = mu.getParameter(label).getParameterValue(0);
+    		//	vLoc.setParameterValue(0, muValue);
+    		//  double	muValue2 = mu.getParameter(label).getParameterValue(1);
+   			//	vLoc.setParameterValue(1, muValue2);
+	
+ 			clusterOffsetsParameter.setParameterValue( i , mu0_offset[label]);   			
+    	}
+    	*/
+    	
+    	
+    	/*
+    	if(isMoveMu==1){
+    		numProposeMoveMu++;
+        	System.out.println("% accept move Mu = " + numAcceptMoveMu/(double)numProposeMoveMu);
+    	}
+    	else{    	   
+    		numProposeMoveC++;
+        	System.out.println("% accept move C = " + numAcceptMoveC/(double)numProposeMoveC);
+    	}
+    	*/
+    	//if(virusIndexChange < 5){
+		System.out.println("        	*      Rejected!");
+    	//}
+      	
+      	
+      	/*
+      	for(int i=0; i < numdata; i++){
+      		Parameter vLoc = virusLocations.getParameter(i);
+
+      		if( vLoc.getParameterValue(0) != old_vLoc0[i]){
+
+      			System.out.println("virus " + i + " is different: " + vLoc.getParameterValue(0) + " and " + old_vLoc0[i]);
+      		}
+      		
+      		//System.out.println(old_vLoc0[i] + ", " + old_vLoc1[i]);
+      		vLoc.setParameterValue(0, old_vLoc0[i]);
+      		vLoc.setParameterValue(1, old_vLoc1[i]);
+      		
+		}
+      	*/
+  		//System.exit(0);
+
+      	
+      	
+
+    }
+    
+	
+
+           public final static String TREE_CLUSTERGIBBS_OPERATOR = "TreeClusterGibbsOperator";
+
+              
+            //MCMCOperator INTERFACE
+            public final String getOperatorName() {
+                return TREE_CLUSTERGIBBS_OPERATOR;
+            }
+
+            public final void optimize(double targetProb) {
+
+                throw new RuntimeException("This operator cannot be optimized!");
+            }
+
+            public boolean isOptimizing() {
+                return false;
+            }
+
+            public void setOptimizing(boolean opt) {
+                throw new RuntimeException("This operator cannot be optimized!");
+            }
+
+            public double getMinimumAcceptanceLevel() {
+                return 0.1;
+            }
+
+            public double getMaximumAcceptanceLevel() {
+                return 0.4;
+            }
+
+            public double getMinimumGoodAcceptanceLevel() {
+                return 0.20;
+            }
+
+            public double getMaximumGoodAcceptanceLevel() {
+                return 0.30;
+            }
+
+            public String getPerformanceSuggestion() {
+                if (Utils.getAcceptanceProbability(this) < getMinimumAcceptanceLevel()) {
+                    return "";
+                } else if (Utils.getAcceptanceProbability(this) > getMaximumAcceptanceLevel()) {
+                    return "";
+                } else {
+                    return "";
+                }
+            }
+
+        
+           
+        
+
+            public static XMLObjectParser PARSER = new AbstractXMLObjectParser() {
+            	
+
+                public final static String VIRUSLOCATIONS = "virusLocations";
+            	public final static String  MU = "mu";
+            	public final static String CLUSTERLABELS = "clusterLabels";
+            	public final static String K = "k";
+            	public final static String OFFSETS = "offsets";
+      //     	public final static String LOCATION_DRIFT = "locationDrift"; //no longer need
+            	
+                public final static String CLUSTER_OFFSETS = "clusterOffsetsParameter";
+                
+            	public final static String INDICATORS = "indicators";
+
+                public final static String EXCISION_POINTS = "excisionPoints";
+
+
+                public String getParserName() {
+                    return TREE_CLUSTERGIBBS_OPERATOR;
+                }
+
+                /* (non-Javadoc)
+                 * @see dr.xml.AbstractXMLObjectParser#parseXMLObject(dr.xml.XMLObject)
+                 */
+                public Object parseXMLObject(XMLObject xo) throws XMLParseException {
+                	
+                	//System.out.println("Parser run. Exit now");
+                	//System.exit(0);
+
+                    double weight = xo.getDoubleAttribute(MCMCOperator.WEIGHT);
+
+                    
+                    XMLObject cxo = xo.getChild(VIRUSLOCATIONS);
+                        MatrixParameter virusLocations = (MatrixParameter) cxo.getChild(MatrixParameter.class);
+
+                        
+                        cxo = xo.getChild(MU);
+                        MatrixParameter mu = (MatrixParameter) cxo.getChild(MatrixParameter.class);
+
+                        cxo = xo.getChild(CLUSTERLABELS);
+                        Parameter clusterLabels = (Parameter) cxo.getChild(Parameter.class);
+
+                        cxo = xo.getChild(K);
+                        Parameter k = (Parameter) cxo.getChild(Parameter.class);
+                        
+                        cxo = xo.getChild(OFFSETS);
+                        Parameter offsets = (Parameter) cxo.getChild(Parameter.class);
+ 
+//                        cxo = xo.getChild(LOCATION_DRIFT);
+//                        Parameter locationDrift = (Parameter) cxo.getChild(Parameter.class);
+                        
+                        Parameter clusterOffsetsParameter = null;
+                        if (xo.hasChildNamed(CLUSTER_OFFSETS)) {
+                        	clusterOffsetsParameter = (Parameter) xo.getElementFirstChild(CLUSTER_OFFSETS);
+                        }
+
+                        cxo = xo.getChild(INDICATORS);
+                        Parameter indicators = (Parameter) cxo.getChild(Parameter.class);
+                      
+                        cxo = xo.getChild(EXCISION_POINTS);
+                        Parameter excisionPoints = (Parameter) cxo.getChild(Parameter.class);
+                      
+                    TreeModel treeModel = (TreeModel) xo.getChild(TreeModel.class);
+ 
+                    AGLikelihoodTreeCluster agLikelihood = (AGLikelihoodTreeCluster) xo.getChild(AGLikelihoodTreeCluster.class);
+                        
+                    //return new ClusterAlgorithmOperator(virusLocations, mu, clusterLabels, k, weight, offsets, locationDrift, clusterOffsetsParameter);
+                        return new TreeClusterGibbsOperator(virusLocations, mu, clusterLabels, k, weight, offsets,  clusterOffsetsParameter, indicators, excisionPoints, treeModel, agLikelihood);
+
+                }
+
+                //************************************************************************
+                // AbstractXMLObjectParser implementation
+                //************************************************************************
+
+                public String getParserDescription() {
+                    return "An operator that picks a new allocation of an item to a cluster under the Dirichlet process.";
+                }
+
+                public Class getReturnType() {
+                    return TreeClusterGibbsOperator.class;
+                }
+
+
+                public XMLSyntaxRule[] getSyntaxRules() {
+                    return rules;
+                }
+
+                private final XMLSyntaxRule[] rules = {
+                        AttributeRule.newDoubleRule(MCMCOperator.WEIGHT),
+                        
+                        new ElementRule(VIRUSLOCATIONS, Parameter.class),
+                        new ElementRule(MU, Parameter.class),
+                        new ElementRule(CLUSTERLABELS, Parameter.class),
+                        new ElementRule(K, Parameter.class),
+                        new ElementRule(OFFSETS, Parameter.class),
+                  //      new ElementRule(LOCATION_DRIFT, Parameter.class), //no longer needed
+   //                    
+                       new ElementRule(CLUSTER_OFFSETS, Parameter.class, "Parameter of cluster offsets of all virus"),  // no longer REQUIRED
+                       new ElementRule(INDICATORS, Parameter.class),
+                       new ElementRule(EXCISION_POINTS, Parameter.class),
+                       new ElementRule(TreeModel.class),
+
+        
+            };
+            
+            };
+
+
+        
+            public int getStepCount() {
+                return 1;
+            }
+
+        }
+
+
+
+
diff --git a/src/dr/evomodel/antigenic/phyloClustering/misc/obsolete/TreeClusterSequentialSampling.java b/src/dr/evomodel/antigenic/phyloClustering/misc/obsolete/TreeClusterSequentialSampling.java
new file mode 100644
index 0000000..654a0de
--- /dev/null
+++ b/src/dr/evomodel/antigenic/phyloClustering/misc/obsolete/TreeClusterSequentialSampling.java
@@ -0,0 +1,1482 @@
+
+package dr.evomodel.antigenic.phyloClustering.misc.obsolete;
+
+import java.io.FileNotFoundException;
+import java.io.FileReader;
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.Map;
+import java.util.logging.Logger;
+
+import dr.evolution.tree.NodeRef;
+import dr.evomodel.tree.TreeModel;
+import dr.inference.model.Likelihood;
+import dr.inference.model.MatrixParameter;
+import dr.inference.model.Parameter;
+import dr.inference.operators.GibbsOperator;
+import dr.inference.operators.MCMCOperator;
+import dr.inference.operators.SimpleMCMCOperator;
+import dr.math.GammaFunction;
+import dr.math.MathUtils;
+import dr.math.distributions.MultivariateNormalDistribution;
+import dr.math.distributions.WishartDistribution;
+import dr.math.matrixAlgebra.Matrix;
+import dr.math.matrixAlgebra.SymmetricMatrix;
+import dr.xml.*;
+
+
+/**
+ * A Gibbs operator for allocation of items to clusters under a distance dependent Chinese restaurant process.
+ *
+ * @author Charles Cheung
+ * @author Trevor Bedford
+ */
+
+public class TreeClusterSequentialSampling extends SimpleMCMCOperator implements GibbsOperator{
+	//Parameter locationDrift;  // no longer need to know
+	Parameter virusOffsetsParameter;
+	private double sigmaSq =1;
+	private int numdata = 0; //NEED TO UPDATE
+	//private double[] groupSize; 
+    
+    private MatrixParameter mu = null;
+    private Parameter clusterLabels = null;
+    private Parameter K = null;
+    
+    private MatrixParameter virusLocations = null;
+    
+    private int maxLabel = 0;
+    private int[] muLabels = null;
+
+    private int[] groupSize;
+  //  public ClusterViruses clusterLikelihood = null;
+
+
+   private double numAcceptMoveMu = 0;
+   private double numProposeMoveMu = 0;
+   
+   private double numAcceptMoveC = 0;
+   private double numProposeMoveC = 0;
+   
+   private int isMoveMu = -1;
+    
+   
+	private double[] old_vLoc0 ;
+	private double[] old_vLoc1 ;
+
+    private Parameter clusterOffsetsParameter;
+    private AGLikelihoodTreeCluster clusterLikelihood = null;
+   
+    private int groupSelectedChange = -1;
+    private int virusIndexChange = -1;
+    private double originalValueChange = -1;
+    private int dimSelectChange = -1;
+    
+    private double[] mu0_offset;
+    
+    private Parameter breakPoints = null;
+
+    private int binSize=20;
+ 
+    
+    private Parameter status;
+    
+    private TreeModel treeModel;
+
+    int []membershipToClusterLabelIndexes = null;
+    
+    private int numNodes;
+    
+    
+   // private int[] piIndicator = new int[numSites];
+
+	
+    //public ClusterAlgorithmOperator(MatrixParameter virusLocations, MatrixParameter mu, Parameter clusterLabels, Parameter K, double weight, Parameter virusOffsetsParameter, Parameter locationDrift_in, Parameter clusterOffsetsParameter) {
+    public TreeClusterSequentialSampling(MatrixParameter virusLocations, MatrixParameter mu, Parameter clusterLabels, Parameter K, double weight, Parameter virusOffsetsParameter, Parameter clusterOffsetsParameter, Parameter breakPointsParameter, Parameter statusParameter, TreeModel treeModel_in, AGLikelihoodTreeCluster clusterLikelihood_in) {
+    	
+      	
+    	System.out.println("Loading the constructor for Sequential sampler");
+    	
+    	this.clusterLikelihood = clusterLikelihood_in;
+		this.treeModel= treeModel_in;
+    	this.mu = mu;
+    	this.K = K;
+    	this.clusterLabels = clusterLabels;    	
+    //	this.clusterLikelihood = clusterLikelihood;
+        this.virusLocations = virusLocations;
+        this.virusOffsetsParameter = virusOffsetsParameter;
+    //    this.locationDrift = locationDrift_in;  //no longer need
+        this.clusterOffsetsParameter = clusterOffsetsParameter;
+    	this.breakPoints= breakPointsParameter;   
+    	
+    	this.status = statusParameter;
+
+        
+        numdata = virusOffsetsParameter.getSize();
+        System.out.println("numdata="+ numdata);
+        
+        
+        int K_int = (int) K.getParameterValue(0);
+        
+        
+        System.out.println("K_int=" + K_int);
+        groupSize = new int[binSize];
+        for(int i=0; i < binSize; i++){
+        	groupSize[i] = 0;
+        }
+                
+        
+        for(int i=0; i < numdata; i++){
+        	//System.out.println("i="+ i);
+        	int index = (int) clusterLabels.getParameterValue(i);
+        	groupSize[ index]++;
+        }
+    	
+        for(int i=0; i < numdata;i++){
+    		if(maxLabel < (int) clusterLabels.getParameterValue(i)){
+    			maxLabel = (int) clusterLabels.getParameterValue(i);
+    		}
+    	}
+        
+        //NEED maxGROUP
+        
+        //for(int i=0; i < K_int; i++){
+        	//System.out.println("groupSize=" + groupSize[i]);
+        //}
+        
+        
+        muLabels = new int[binSize];
+        
+        for(int i=0; i < maxLabel; i++){
+        	int j=0;
+            if(groupSize[i] >0){
+            	muLabels[j] = i;
+            	j++;
+            }
+        }
+ 
+        //muLabels ...
+    
+        
+        
+        setWeight(weight);
+        
+        System.out.println("Finished loading the constructor for ClusterAlgorithmOperator");
+        
+                
+   
+    }
+    
+
+ 
+    /**
+     * change the parameter and return the log hastings ratio.
+     */
+    public final double doOperation() {
+    	
+    	System.out.println("do operation of sequential sampling");
+    	
+    	setMembershipToClusterLabelIndexes(); //run once in case the tree changes.
+    	
+    	numNodes = treeModel.getNodeCount(); 
+    	
+    	updateK();
+    	int K_int = (int) K.getParameterValue(0); 
+    	
+    	//System.out.println("K_int is " + K_int);
+    	
+    	int[] cutNode = new int[K_int]; //to store which nodes to add...
+   
+        int[] oldclusterLabelArray = new int[numNodes]; //everything 0.
+        int[] clusterLabelArray = null;
+        
+        int[] onPoints = new int[K_int];
+        int numOn=0;
+        for(int i=0; i < binSize; i++){
+        	if( (int)status.getParameterValue(i) ==1 ){
+        		onPoints[numOn] = i;
+        		numOn++;
+        	}
+        }
+        
+        
+        //Verify relationship 
+        //P(H|E0=a, Y, mu) = sum P(H|E0=a, E1=e1, Y, mu) x P(E1=e1| E0=a)
+        
+
+        cutNode = new int[1];
+    	cutNode[0] = 785;
+    	onPoints = new int[1];
+    	onPoints[0] = 0;
+       	//now translate the cutNodes into the breakpoints that are on [although here we don't use the indicators - elsewhere does
+    	resetStatusAndBreakpointsGivenCutNodes(cutNode, onPoints);//need to reset the status breakpoints by the testCutNode
+ 
+    	
+    	//use the tree to re-partition according to the change.
+		clusterLabelArray = setClusterLabelsByTestCutNodeByNodeOrder(cutNode);
+		relabelClusterLabels(clusterLabelArray, oldclusterLabelArray); //will move it out
+		for(int i=0; i < numdata; i++){
+			clusterLabels.setParameterValue(i, clusterLabelArray[i]); 	//set cluster label parameter 					
+		}
+		
+		//setVirusLocationAndOffsets(); //set virus locations, given the clusterlabels parameter
+		setVirusLocationAutoCorrelatedModel();		
+		
+		double topLogProb = clusterLikelihood.getLogLikelihood();
+		System.out.println(topLogProb);
+		   		
+    			
+
+		//now test the sum...
+        cutNode = new int[2];
+    	cutNode[0] = 785;
+    	onPoints = new int[2];
+    	onPoints[0] = 0;
+    	onPoints[1] = 1;
+ 
+		double []logNumerator = new double[numNodes];
+		//calculate the distribution for calculating introducing an excision point in each node
+		for(int curTest=0; curTest < numNodes; curTest++){	
+			
+			int hasBeenAdded = checkSiteHasBeenAddedToOnIndicators(curTest); 			//check if a site has already been added
+  			if(hasBeenAdded ==0){
+  				
+  				cutNode = new int[2];
+  				cutNode[0] = 785;
+  				cutNode[1] = curTest;
+
+  		    	resetStatusAndBreakpointsGivenCutNodes(cutNode, onPoints);//need to reset the status breakpoints by the testCutNode
+  		    	//use the tree to re-partition according to the change.
+  				clusterLabelArray = setClusterLabelsByTestCutNodeByNodeOrder(cutNode); //note that instead of using the indicators, it uses the testCutNodes directly
+  				relabelClusterLabels(clusterLabelArray, oldclusterLabelArray); //will move it out
+  				
+					//set cluster label parameter for testing 					
+  				for(int i=0; i < numdata; i++){
+  					clusterLabels.setParameterValue(i, clusterLabelArray[i]);
+  				}
+		  				  				
+   				//setVirusLocationAndOffsets();  //this uses the clusterLabels parameter
+   				setVirusLocationAutoCorrelatedModel(); //which depends on the status and breakpoints
+   				
+   				   							    		  					  				
+   				logNumerator[curTest] += clusterLikelihood.getLogLikelihood() ; 	//Calculate likelihood
+  			}
+  			else{
+	  			logNumerator[curTest] = Double.NEGATIVE_INFINITY;
+  				System.out.println("Don't calculate for node" + cutNode[0]);
+  			}
+
+		} //finished curTest
+		
+		
+		double answer = verifyAssumption(topLogProb, logNumerator);
+		
+		
+		System.out.println("the ratio is " + answer);
+		
+
+  		    	
+  		    	
+  		    	
+  		    	
+		
+		
+    	
+    	System.exit(0);
+
+        
+        
+        
+        
+        
+        
+        
+        
+        //switch
+        
+        double tmpMu1 = mu.getParameter(onPoints[7] +1).getParameterValue(0);
+        double tmpMu2 = mu.getParameter(onPoints[7] +1).getParameterValue(1);
+        
+        mu.getParameter(onPoints[7] +1).setParameterValue(0, mu.getParameter(onPoints[8] +1).getParameterValue(0));
+        mu.getParameter(onPoints[7] +1).setParameterValue(1, mu.getParameter(onPoints[8] +1).getParameterValue(1));
+        
+        mu.getParameter(onPoints[8] +1).setParameterValue(0, tmpMu1);
+        mu.getParameter(onPoints[8] +1).setParameterValue(1, tmpMu2);
+        
+        
+    	for(int curNode=8; curNode < K_int; curNode++){
+    		
+    		cutNode[0] = 785;
+    		cutNode[1] = 775;
+    		cutNode[2] = 763;
+    		cutNode[3] = 697;
+    		cutNode[4] = 747;
+    		cutNode[5] = 679;
+    		cutNode[6] = 662;
+    		//cutNode[7] = 521;  //although 526 will be better than 521
+    		cutNode[7] = 638;
+
+    		
+    		
+    		//calculate the conditional distribution of the curNode, given the current set of nodes
+    		double []logNumeratorProb = new double[numNodes];
+    		
+    		//calculate the distribution for calculating introducing an excision point in each node
+    		for(int curTest=0; curTest < numNodes; curTest++){	
+    			
+    			int hasBeenAdded = checkSiteHasBeenAddedToOnIndicators(curTest); 			//check if a site has already been added
+	  			if(hasBeenAdded ==0){
+	  	  		  	int[] testCutNode = new int[curNode+1];	    			//create the testCutNode, with adding the current test node
+	  	  		  	for(int i=0; i < curNode; i++){
+	  	  		  		testCutNode[i] = cutNode[i];
+	  	  		  	}
+	  	    		testCutNode[curNode] = curTest;
+
+	  		    	resetStatusAndBreakpointsGivenCutNodes(testCutNode, onPoints);//need to reset the status breakpoints by the testCutNode
+
+	  		    	
+	  		    	/*
+	  		    	System.out.print("Currently selected: [");
+	  		    	for(int i=0; i < binSize; i++){
+	  		    		if((int) status.getParameterValue(i) ==1){
+	  		    			System.out.print(  (int) breakPoints.getParameterValue(i) + ",");
+	  		    		}
+	  		    	}
+	  		    	System.out.println("]");
+	  		    	*/
+	  	    		//I suspect I need to change this as I modify the code..
+	  		       	//set the indicators, based on the cutnodes
+	  		    	
+	  		    	/*
+	  		    	int addCount=0;
+	  		    	for(int i=0; i < binSize; i++){
+	  		    		if( (int)status.getParameterValue(i) == 1){
+	  		    			breakPoints.setParameterValue(i, testCutNode[addCount]);
+	  		    			addCount++;
+	  		    		}
+	  		    		if(addCount == (curNode+1)){
+	  		    			break;
+	  		    		}
+	  		    	}
+	  		    	*/
+	  		    	//System.out.println("currently added " + addCount + " nodes");
+
+	  		    	
+	  	    		
+	  		    	//use the tree to re-partition according to the change.
+	  				clusterLabelArray = setClusterLabelsByTestCutNodeByNodeOrder(testCutNode); //note that instead of using the indicators, it uses the testCutNodes directly
+	  				relabelClusterLabels(clusterLabelArray, oldclusterLabelArray); //will move it out
+	  				
+  					//set cluster label parameter for testing 					
+	  				for(int i=0; i < numdata; i++){
+	  					clusterLabels.setParameterValue(i, clusterLabelArray[i]);
+	  				}
+	  					  				
+	  				
+	   				//setVirusLocationAndOffsets();  //this uses the clusterLabels parameter
+	   				setVirusLocationAutoCorrelatedModel(); //which depends on the status and breakpoints
+	   				
+	   				
+	   				if(curNode == 0 && curTest == 0){
+			    		for(int i=0; i < numdata; i++){
+			    			Parameter v = virusLocations.getParameter(i);
+			    			//v.setParameterValue(0, 0);
+			    			//v.setParameterValue(1, 0);
+			    		}
+	   				}
+	   							    		  					  				
+   			    	logNumeratorProb[curTest] = clusterLikelihood.getLogLikelihood(); 	//Calculate likelihood
+   			    	
+   			    	//if(curNode == 0 && curTest == 0){
+   			    		
+   			    		
+   			    		//for(int i=0; i < numdata; i++){
+   			    		//	Parameter v = virusLocations.getParameter(i);
+   			    		//	System.out.print(v.getParameterValue(0) +"," + v.getParameterValue(1)+"\t");
+   			    		//}
+   			    		//System.out.println("");
+   			    		
+   			    		//System.out.println(" * " + logNumeratorProb[curTest]);
+   			    		//System.exit(0);
+   			    	//}
+	  			}
+	  			else{
+		  			logNumeratorProb[curTest]  = Double.NEGATIVE_INFINITY; //dummy probability
+	  			}
+
+    		} //finished curTest
+    		
+    		 double []condDistribution = calculateConditionalProbabilityGivenLogNumeratorProb(logNumeratorProb);
+    		
+    		 
+    		 for(int i=0; i < numNodes; i++){
+    			// if(condDistribution[i] > 0.0000001){
+    				 System.out.println("node " + i + " p=" + condDistribution[i]);
+    			// }
+    		 }
+    		 System.out.println("===============================");
+    		 
+    		 
+    		// System.exit(0);
+    		 
+    		 int site_add = MathUtils.randomChoicePDF(condDistribution); //sample a site, given the conditioanl distribution
+    		
+  		  	//update the cutNode using a temporary array newCutNode - first copy the existing element, then add the new site
+  		  	int[] newCutNode = new int[curNode+1];
+  		  	for(int i=0; i < curNode; i++){
+  		  		newCutNode[i] = cutNode[i];
+  		  	}
+    		newCutNode[curNode] = site_add; //add the new site.
+    		cutNode = newCutNode;
+    								
+						
+			//now , after adding the node to cutNode, we need to update clusterLabel such that the labeling is consistent with the old cluster labels.. 
+				// because ie. cluster i always get mu i.
+			clusterLabelArray = setClusterLabelsByTestCutNodeByNodeOrder(newCutNode);
+			relabelClusterLabels(clusterLabelArray, oldclusterLabelArray); //will move it out		
+			
+			oldclusterLabelArray = clusterLabelArray; // keep the oldcluster label to build on top of it..			
+    	} //curNode    	
+
+    	
+    	
+    	
+    	//====================================================================================================
+		//After finishing the proposal
+		//====================================================================================================
+
+    	//Display:
+    	printCutNode(cutNode);
+
+       	//now translate the cutNodes into the breakpoints that are on [although here we don't use the indicators - elsewhere does
+    	resetStatusAndBreakpointsGivenCutNodes(cutNode, onPoints);//need to reset the status breakpoints by the testCutNode
+    	/*
+    	int addCount=0;
+    	for(int i=0; i < binSize; i++){
+    		if( (int)status.getParameterValue(i) == 1){
+    			breakPoints.setParameterValue(i, cutNode[addCount]);
+    			addCount++;
+    		}
+    	}
+    	*/
+ 
+    	
+    	//use the tree to re-partition according to the change.
+		clusterLabelArray = setClusterLabelsByTestCutNodeByNodeOrder(cutNode);
+		relabelClusterLabels(clusterLabelArray, oldclusterLabelArray); //will move it out
+		for(int i=0; i < numdata; i++){
+			clusterLabels.setParameterValue(i, clusterLabelArray[i]); 	//set cluster label parameter 					
+		}
+		
+		//setVirusLocationAndOffsets(); //set virus locations, given the clusterlabels parameter
+		setVirusLocationAutoCorrelatedModel();
+		
+		
+		
+		System.out.println(clusterLikelihood.getLogLikelihood());
+		
+		
+		
+		
+		
+		
+		
+		
+		//add manually to test...
+		cutNode[0] = 785;
+		cutNode[1] = 775;
+		cutNode[2] = 763;
+		cutNode[3] = 697;
+		cutNode[4] = 747;
+		cutNode[5] = 679;
+		cutNode[6] = 662;
+		//cutNode[7] = 521;
+		//cutNode[8] = 638;
+		
+		cutNode[7] = 638;
+		cutNode[8] = 521;
+		
+    	resetStatusAndBreakpointsGivenCutNodes(cutNode, onPoints);//need to reset the status breakpoints by the testCutNode
+    	//use the tree to re-partition according to the change.
+		clusterLabelArray = setClusterLabelsByTestCutNodeByNodeOrder(cutNode);
+		relabelClusterLabels(clusterLabelArray, oldclusterLabelArray); //will move it out
+		for(int i=0; i < numdata; i++){
+			clusterLabels.setParameterValue(i, clusterLabelArray[i]); 	//set cluster label parameter 					
+		}
+		
+		//setVirusLocationAndOffsets(); //set virus locations, given the clusterlabels parameter
+		setVirusLocationAutoCorrelatedModel();
+	
+		System.out.println(clusterLikelihood.getLogLikelihood());
+		System.exit(0);
+		
+		
+		
+		
+		//NEED TO SET THE OTHER BREAKPOINTS TO MAKE SURE THERE ARE NO DUPLICATES AT THE END...
+		//SO OTHER PROPOSALS WOULD FUNCTION OK OK
+		
+ 
+		
+		return(Double.POSITIVE_INFINITY); //it should be anything... always accept for the Gibbs move.    	
+    }
+    	
+   
+
+
+
+
+
+	private void resetStatusAndBreakpointsGivenCutNodes(int[] testCutNode, int[] onPoints) {
+		
+		for(int i=0; i < binSize; i++){
+			status.setParameterValue(i, 0);
+			breakPoints.setParameterValue(i, -1);
+		}
+		
+    	int numOn = testCutNode.length;
+    	int countOn=0;
+    	for(int i=0; i < binSize; i++){
+   			if(countOn < numOn){
+   				status.setParameterValue(onPoints[countOn], 1);
+    			breakPoints.setParameterValue(onPoints[countOn],testCutNode[countOn]); //reset breakPoints accordingly
+    			countOn++;
+    		}
+    		
+    	}
+	}
+
+
+
+	private void updateK() {
+
+    	//K is changed accordingly..
+		int K_count = 0; //K_int gets updated
+		for(int i=0; i < binSize; i++){
+			K_count += (int) status.getParameterValue(i);
+		}
+		//System.out.println("K now becomes " + K_count);
+		K.setParameterValue(0, K_count); //update 
+ 			
+				
+	}
+
+	
+	private void printCutNode(int[] cutNode) {
+    	System.out.print("sampled:\t[");
+		for(int i=0; i < cutNode.length ; i++){
+    		System.out.print(cutNode[i] + ",");
+    	}	
+    	System.out.println("]");
+
+	}
+
+
+
+	private double[] calculateConditionalProbabilityGivenLogNumeratorProb(
+			double[] logNumeratorProb) {
+		int numNodes = logNumeratorProb.length;
+  		double maxLogProb = logNumeratorProb[0];
+  		for(int i=0; i < numNodes; i++ ){
+  			if(logNumeratorProb[i] > maxLogProb){
+  				maxLogProb = logNumeratorProb[i];
+  			}
+  		}  		
+  		
+  		double sumLogDenominator = 0;
+  		for(int i=0; i < numNodes; i++){
+  			if(logNumeratorProb[i] != Double.NEGATIVE_INFINITY){
+  				sumLogDenominator += Math.exp((logNumeratorProb[i]-maxLogProb));
+  			}
+  		}
+  		sumLogDenominator = Math.log(sumLogDenominator) + maxLogProb;
+  		
+  		double sumProb = 0;
+  		double []condProb = new double[numNodes]; 
+  		for(int i=0; i < numNodes; i++){
+  			condProb[i] = Math.exp( logNumeratorProb[i] - sumLogDenominator   );
+			//System.out.println("condProb of site " + i + " = " + condProb[i]);
+					sumProb +=condProb[i];
+				if(condProb[i] > 0.01){
+//					System.out.println("**site " + i + " with prob=" + condProb[i]  + "  steps from previous=" + numStepsFromOrigin[i]);
+			}
+  		}
+  		return(condProb);
+	}
+	
+	
+	
+	//Expect the ratio to be 1.
+	private double verifyAssumption( double topLogMarginal,double[] logNumeratorProb) {
+		
+		
+		for(int i=0; i < numNodes; i++){
+			System.out.println(logNumeratorProb[i]);
+		}
+		int numNodes = logNumeratorProb.length;
+  		double maxLogProb = logNumeratorProb[0];
+  		for(int i=0; i < numNodes; i++ ){
+  			if(logNumeratorProb[i] > maxLogProb){
+  				maxLogProb = logNumeratorProb[i];
+  			}
+  		}  		
+  		System.out.println("maxLogProb = " + maxLogProb);
+  		
+  		double sumLogDenominator = 0;
+  		for(int i=0; i < numNodes; i++){
+  			if(logNumeratorProb[i] != Double.NEGATIVE_INFINITY){
+  				sumLogDenominator += Math.exp((logNumeratorProb[i]-maxLogProb));
+  			}
+  		}
+  		System.out.println("tmp sum = " + sumLogDenominator);
+  		sumLogDenominator = Math.log(sumLogDenominator) + maxLogProb;
+  		
+  		System.out.println("topLogMarginal = " + topLogMarginal);
+  		System.out.println("sumLogDenominator = " + sumLogDenominator);
+  		double ratio = Math.exp( Math.log(numNodes-1) + topLogMarginal - sumLogDenominator   );
+  		
+  		return(ratio);
+	}
+
+
+
+
+	private int checkSiteHasBeenAddedToOnIndicators(int curTest){
+		int hasBeenAdded=0;
+		for(int i=0; i < binSize; i++){
+			if((int)status.getParameterValue(i) == 1 ){
+				if( (int) breakPoints.getParameterValue(i) == curTest){
+					hasBeenAdded=1;
+					break;
+				}
+			}
+		}
+		return(hasBeenAdded);
+	}
+
+
+	private void setVirusLocationAndOffsets() {
+		
+		//change the mu in the toBin and fromBIn
+		//borrow from getLogLikelihood:
+
+		double[] meanYear = new double[binSize];
+		double[] groupCount = new double[binSize];
+		for(int i=0; i < numdata; i++){
+			int label = (int) clusterLabels.getParameterValue(i);
+			double year  = 0;
+	        if (virusOffsetsParameter != null) {
+	            //	System.out.print("virus Offeset Parameter present"+ ": ");
+	            //	System.out.print( virusOffsetsParameter.getParameterValue(i) + " ");
+	            //	System.out.print(" drift= " + drift + " ");
+	                year = virusOffsetsParameter.getParameterValue(i);   //just want year[i]
+	                		//make sure that it is equivalent to double offset  = year[virusIndex] - firstYear;
+	            }
+	            else{
+	            	System.out.println("virus Offeset Parameter NOT present. We expect one though. Something is wrong.");
+	            }
+			meanYear[ label] = meanYear[ label] + year;
+			
+			groupCount[ label  ] = groupCount[ label ]  +1; 
+		}
+					
+		for(int i=0; i < binSize; i++){
+			if(groupCount[i] > 0){
+				meanYear[i] = meanYear[i]/groupCount[i];
+			}
+			//System.out.println(meanYear[i]);
+		}
+
+
+		mu0_offset = new double[binSize];
+		//double[] mu1 = new double[maxLabel];
+				
+		
+		//System.out.println("maxLabel=" + maxLabel);
+		//now, change the mu..
+		for(int i=0; i < binSize; i++){
+			//System.out.println(meanYear[i]*beta);
+			mu0_offset[i] =  meanYear[i];
+			//System.out.println("group " + i + "\t" + mu0_offset[i]);
+		}	
+	//		System.out.println("=====================");
+		
+		
+		//Set  the vLoc to be the corresponding mu values , and clusterOffsetsParameter to be the corresponding offsets
+    	//virus in the same cluster has the same position
+    	for(int i=0; i < numdata; i++){
+        	int label = (int) clusterLabels.getParameterValue(i);
+    		Parameter vLoc = virusLocations.getParameter(i);
+    		//setting the virus locs to be equal to the corresponding mu
+    			double muValue = mu.getParameter(label).getParameterValue(0);    			
+    			vLoc.setParameterValue(0, muValue);
+    			double	muValue2 = mu.getParameter(label).getParameterValue(1);
+   				vLoc.setParameterValue(1, muValue2);
+	   			//System.out.println("vloc="+ muValue + "," + muValue2);
+    	}
+    	
+    	for(int i=0; i < numdata; i++){
+        	int label = (int) clusterLabels.getParameterValue(i);
+   			//if we want to apply the mean year virus cluster offset to the cluster
+   			if(clusterOffsetsParameter != null){
+   			//setting the clusterOffsets to be equal to the mean year of the virus cluster
+   				// by doing this, the virus changes cluster AND updates the offset simultaneously
+   				clusterOffsetsParameter.setParameterValue( i , mu0_offset[label]);
+   			}
+ 				//		System.out.println("mu0_offset[label]=" + mu0_offset[label]);
+ 		//		System.out.println("clusterOffsets " +  i +" now becomes =" + clusterOffsetsParameter.getParameterValue(i) );   			
+    	}
+
+    	
+
+    	
+//    	System.out.println("===The on nodes===");
+//    	for(int i=0; i < binSize; i++){	    
+//    		if((int) excisionPoints.getParameterValue(i) == 1){
+//    			System.out.println("Cluster node " + i + " = " + (int) indicators.getParameterValue(i) + "\tstatus=" + (int) excisionPoints.getParameterValue(i));
+//    		}
+//    	}
+    	
+		
+	}
+
+
+
+
+	private void setVirusLocationAutoCorrelatedModel() {
+			int numNodes = treeModel.getNodeCount();
+			double[][] nodeloc = new double[numNodes][2];
+			
+			//new - Trevor's autocorrelated model.
+		//	System.out.println("Autocorrelated tree model");
+			
+			//given mu, excision points, and which ones are on...
+
+			
+			int[] nodeStatus = new int[numNodes];
+			for(int i=0; i < numNodes; i ++){
+				nodeStatus[i] = -1;
+			}
+			//convert to easy process format.
+			for(int i=0; i < (binSize ); i++){
+				if((int) status.getParameterValue(i) ==1){
+					  nodeStatus[(int)breakPoints.getParameterValue(i)] = i;
+				}
+			}
+
+			
+//Testing:
+//muValue[0] = 1;
+//muValue2[0] = 1.5;	  
+//nodeStatus[696] = 0;  
+//muValue[1] = 10;
+//muValue2[1] = 20;
+//nodeStatus[607] = 1;  
+//muValue[2] = 200;
+//muValue2[2] = 300;
+
+			
+			//process the tree and get the vLoc of the viruses..
+			//breadth first depth first..
+			NodeRef cNode = treeModel.getRoot();
+		    LinkedList<NodeRef> visitlist = new LinkedList<NodeRef>();
+		    
+		    visitlist.add(cNode);
+		    
+		    int countProcessed=0;
+		    while(visitlist.size() > 0){
+		    	
+		    	
+		    	countProcessed++;
+		    	//assign value to the current node...
+		    	if(treeModel.getParent(cNode) == null){
+		    		Parameter curMu = mu.getParameter(0);
+		    		nodeloc[cNode.getNumber()][0] =   curMu.getParameterValue(0);
+		    		nodeloc[cNode.getNumber() ][1] = curMu.getParameterValue(1);
+		    	}
+		    	else{
+		    		nodeloc[cNode.getNumber()][0] =   nodeloc[treeModel.getParent(cNode).getNumber()][0];
+		    		nodeloc[cNode.getNumber()][1] =   nodeloc[treeModel.getParent(cNode).getNumber()][1];
+		    		
+		    		if(nodeStatus[cNode.getNumber()] != -1){
+		    			//System.out.println("Run new location");
+			    		Parameter curMu = mu.getParameter(nodeStatus[cNode.getNumber()] +1);
+		    			nodeloc[cNode.getNumber()][0] += curMu.getParameterValue(0);
+		    			nodeloc[cNode.getNumber()][1] += curMu.getParameterValue(1);	  			    			
+		    		}
+		    	}
+		    	
+		    	
+		    	//add all the children to the queue
+	  			for(int childNum=0; childNum < treeModel.getChildCount(cNode); childNum++){
+	  				NodeRef node= treeModel.getChild(cNode,childNum);
+	  				visitlist.add(node);
+	  	        }
+	  			
+		  			
+		  		visitlist.pop(); //now that we have finished visiting this node, pops it out of the queue
+	
+	  			if(visitlist.size() > 0){
+	  				cNode = visitlist.getFirst(); //set the new first node in the queue to visit
+	  			}
+	  			
+  			
+		}
+		    
+		    //write the virus locations
+		    for(int i=0; i < numdata; i++){
+		    	Parameter vLocParameter = virusLocations.getParameter(i);
+		    	vLocParameter.setParameterValue(0, nodeloc[membershipToClusterLabelIndexes[i]][0]);
+		    	vLocParameter.setParameterValue(1, nodeloc[membershipToClusterLabelIndexes[i]][1]);
+		    }
+			
+		    
+		    //for(int i=0; i < numdata; i++){
+				//Parameter vLocP= virusLocations.getParameter(i);
+		    	//System.out.println("virus " + vLocP.getId() + "\t" + vLocP.getParameterValue(0) + "," + vLocP.getParameterValue(1)  );	  			    	
+		    //}
+		    
+		    
+		    
+			//System.out.println("Processed " + countProcessed + " nodes");
+			
+			//System.out.println("Done");			
+			
+			//System.exit(0);		
+	}
+
+
+
+
+
+	private void relabelClusterLabels(int[] clusterLabel, int[] oldclusterLabel) {
+
+    	int maxOldLabel = 0;
+    	for(int i=0; i < oldclusterLabel.length; i++){
+    		if(maxOldLabel < oldclusterLabel[i]){
+    			maxOldLabel = oldclusterLabel[i];
+    		}
+    	}
+    	
+    	
+        Map<Integer, Integer> m = new HashMap<Integer, Integer>();
+        int[] isOldUsed = new int[ clusterLabel.length  ]; //an overkill - basically just need the max label in the old cluster
+        
+        for(int i=0; i < clusterLabel.length; i++){
+        	
+        	
+    		if(m.get(new Integer(clusterLabel[i])) == null ){
+    			if(isOldUsed[oldclusterLabel[i]] == 0){
+    				m.put(new Integer(clusterLabel[i]), new Integer(oldclusterLabel[i]));
+    				isOldUsed[oldclusterLabel[i]] = 1;
+    			}
+    			else{
+    				maxOldLabel++;
+    				m.put(new Integer(clusterLabel[i]), new Integer(maxOldLabel));
+    			}
+    			
+    		}
+
+    		clusterLabel[i] = m.get(new Integer( clusterLabel[i])).intValue();
+    		
+    	}
+	}
+
+
+
+	private int[] setClusterLabelsByTestCutNodeByNodeOrder(int[] testCutNode) {
+        int []membership = determine_membershipByNodeOrder(treeModel, testCutNode, testCutNode.length);  // the time consuming step here.
+        
+  	   //The assumption that the first nodes being external node corresponding to the cluster labels IS FALSE
+  	   //so I have to search for the matching indexes
+       // for(int i=0; i < numdata; i++){
+     	//   clusterLabels.setParameterValue( i, membership[membershipToClusterLabelIndexes[i]]);
+       //}
+
+        //to speed up the code
+		int[] clusterLabel = new int[numdata];
+
+        for(int i=0; i < numdata; i++){
+        	clusterLabel[i] =  membership[membershipToClusterLabelIndexes[i]];
+        }
+        return(clusterLabel);
+	}
+
+
+
+	private void setMembershipToClusterLabelIndexes(){
+        int numNodes = treeModel.getNodeCount();
+
+  	   //I suspect this is an expensive operation, so I don't want to do it many times,
+  	   //which is also unnecessary  - MAY have to update whenever a different tree is used.
+         membershipToClusterLabelIndexes = new int[numdata]; 
+         for(int i=0; i < numdata; i++){
+  		   Parameter v = virusLocations.getParameter(i);
+  		   String curName = v.getParameterName();
+  		  // System.out.println(curName);
+  		   int isFound = 0;
+      	   for(int j=0; j < numNodes; j++){
+      		   String treeId = treeModel.getTaxonId(j);
+      		   if(curName.equals(treeId) ){
+      		//	   System.out.println("  isFound at j=" + j);
+      			   membershipToClusterLabelIndexes[i] = j;
+      			   isFound=1;
+      			   break;
+      		   }	   
+      	   }
+      	   if(isFound ==0){
+      		   System.out.println("not found. Exit now.");
+      		   System.exit(0);
+      	   }     	   
+         }
+    }
+    
+    //private void setClusterLabelsByTestCutNode(int[] testCutNode) {
+    private int[] setClusterLabelsByTestCutNode(int[] testCutNode) {
+
+        int []membership = determine_membership(treeModel, testCutNode, testCutNode.length);  // the time consuming step here.
+        
+  	   //The assumption that the first nodes being external node corresponding to the cluster labels IS FALSE
+  	   //so I have to search for the matching indexes
+       // for(int i=0; i < numdata; i++){
+     	//   clusterLabels.setParameterValue( i, membership[membershipToClusterLabelIndexes[i]]);
+       //}
+
+        //to speed up the code
+		int[] clusterLabel = new int[numdata];
+
+        for(int i=0; i < numdata; i++){
+        	clusterLabel[i] =  membership[membershipToClusterLabelIndexes[i]];
+        }    
+        
+        return(clusterLabel);
+		
+	}
+
+
+
+    
+    //This function uses the breakPoints, but the doOperation doesn't use this anymore..
+    //instead, it uses the cutNodes explicitly.
+	private void setClusterLabels(int K_int) {
+
+        int numNodes = treeModel.getNodeCount();
+        int[] cutNodes = new int[K_int];
+ 	   int cutNum = 0;
+ 	   String content = "";
+        for(int i=0; i < binSize; i++){
+     	   if( (int) status.getParameterValue( i ) ==1 ){
+     		   cutNodes[cutNum] = (int) breakPoints.getParameterValue(i);
+     		   content += (int) breakPoints.getParameterValue(i) + ",";
+     		   cutNum++;
+     	   }
+     	  
+        }
+       // System.out.println(content);
+        
+        if(cutNum != K_int){
+        	System.out.println("cutNum != K_int. we got a problem");
+        }
+          
+    //    for(int i=0; i < K_int; i++){
+    // 	   System.out.println(cutNodes[i]);
+     //   }
+        
+        //int []membership = determine_membership(treeModel, cutNodes, K_int-1);
+        int []membership = determine_membership(treeModel, cutNodes, K_int);
+        
+        double uniqueCode = 0;
+        for(int i=0; i < numNodes; i++){
+        	uniqueCode += membership[i]*i;
+        }
+      //  System.out.println(" sum = " + uniqueCode);
+        
+     //   System.out.println("number of nodes = " + treeModel.getNodeCount());
+      //  for(int i=0; i < treeModel.getNodeCount(); i++){
+     //	   System.out.println(membership[i]);
+      //  }
+        
+        
+        //System.out.println("Done");
+        
+      //  for(int i=0; i < numdata; i++){
+ 	//	   Parameter v = virusLocations.getParameter(i);
+ 	//	   String curName = v.getParameterName();
+ 	//	   System.out.println("i=" + i + " = " + curName);       
+ 	//	}       
+        
+      //  for(int j=0; j < numdata; j++){
+     //	   System.out.println("j=" + j + " = " + treeModel.getTaxonId(j));
+      //  }
+        
+        
+ 	//   Parameter vv = virusLocations.getParameter(0);
+ 	 //  String curNamev = vv.getParameterName();
+ 	   
+ 	 //  System.out.println(curNamev + " and " +treeModel.getTaxonId(392) );
+ 	   //System.out.println(  curNamev.equals(treeModel.getTaxonId(392) )  );
+ 	   
+        
+        //System.exit(0);
+        
+ 	  // System.out.println("numNodes=" + numNodes);
+ 	  // System.exit(0);
+        //create dictionary:
+ 	   
+ 	   //I suspect this is an expensive operation, so I don't want to do it many times,
+ 	   //which is also unnecessary  - MAY have to update whenever a different tree is used.
+        int []membershipToClusterLabelIndexes = new int[numdata]; 
+        for(int i=0; i < numdata; i++){
+ 		   Parameter v = virusLocations.getParameter(i);
+ 		   String curName = v.getParameterName();
+ 		  // System.out.println(curName);
+ 		   int isFound = 0;
+     	   for(int j=0; j < numNodes; j++){
+     		   String treeId = treeModel.getTaxonId(j);
+     		   if(curName.equals(treeId) ){
+     		//	   System.out.println("  isFound at j=" + j);
+     			   membershipToClusterLabelIndexes[i] = j;
+     			   isFound=1;
+     			   break;
+     		   }
+     		   
+     	   }
+     	   if(isFound ==0){
+     		   System.out.println("not found. Exit now.");
+     		   System.exit(0);
+     	   }     	   
+        }
+        
+        
+       // System.exit(0);
+        
+      //  for(int i=0; i < numdata; i++){
+     //	   System.out.println(membershipToClusterLabelIndexes[i]);
+      //  }
+       // System.exit(0);
+        
+        for(int i=0; i < numdata; i++){
+     	   //The assumption that the first nodes being external node corresponding to the cluster labels IS FALSE
+     	   //so I have to search for the matching indexes
+     	   Parameter vloc = virusLocations.getParameter(i);
+  
+     	   
+//must uncomment out because this sets the new partitioning ... now i am doing code testing.     	   
+     	   clusterLabels.setParameterValue( i, membership[membershipToClusterLabelIndexes[i]]);
+     	   //System.out.println(vloc.getParameterName() + " i="+ i + " membership=" + (int) clusterLabels.getParameterValue(i));
+     	   
+     	 //  Parameter v = virusLocations.getParameter(i);
+     	  // System.out.println(v.getParameterName());
+        }
+        
+
+    	
+	}
+
+    
+    
+    private static boolean isCutNode(int number, int cutNodes[], int numCut) {
+    	if(numCut > 0){
+    		for(int i=0; i < numCut; i++){
+    			if(number == cutNodes[i]){
+    				return true;
+    			}
+    		}
+    	}
+    	return false;
+    }
+    
+    
+
+  //traverse down the tree, top down, do calculation
+  static int[] determine_membership(TreeModel treeModel, int[] cutNodes, int numCuts){
+
+  	
+  NodeRef root = treeModel.getRoot();
+
+  int numClusters = 1;
+  LinkedList<NodeRef> list = new LinkedList<NodeRef>();
+  list.addFirst(root);
+
+  int[] membership = new int[treeModel.getNodeCount()];
+  for(int i=0; i < treeModel.getNodeCount(); i++){
+  	membership[i] = -1;
+  }
+  membership[root.getNumber()] = 0; //root always given the first cluster
+        
+  while(!list.isEmpty()){
+  	//do things with the current object
+  	NodeRef curElement = list.pop();
+  	//String content = "node #" + curElement.getNumber() +", taxon=" + treeModel.getNodeTaxon(curElement) + " and parent is = " ;
+  	String content = "node #" + curElement.getNumber() +", taxon= " ;
+  	if(treeModel.getNodeTaxon(curElement)== null){
+  		content += "internal node\t";
+  	}
+  	else{
+  		content += treeModel.getNodeTaxon(curElement).getId() + "\t";
+  		//content += treeModel.getTaxonIndex(treeModel.getNodeTaxon(curElement)) + "\t";
+  	}
+  	
+     	if(treeModel.getParent(curElement)== null){
+  		//content += "no parent";
+  	}
+  	else{
+  		//content += "parent node#=" + treeModel.getParent(curElement).getNumber();
+  	}
+  	
+  	//cluster assignment:
+  	if(!treeModel.isRoot(curElement)){
+  	 if(isCutNode(curElement.getNumber(), cutNodes, numCuts)){
+  	//if(isCutNode(curElement.getNumber())){
+  		numClusters++ ;
+  		membership[ curElement.getNumber() ] = numClusters - 1; 
+    	}
+  	else{
+  		//inherit from parent's cluster assignment
+  		membership[curElement.getNumber()] = membership[treeModel.getParent(curElement).getNumber()]; 
+  	 }
+  	        	
+  	}//is not Root
+  	content += " cluster = " + membership[curElement.getNumber()] ; 
+  	
+  //	System.out.println(content);
+
+  	
+      for(int childNum=0; childNum < treeModel.getChildCount(curElement); childNum++){
+      	list.addFirst(treeModel.getChild(curElement,childNum));
+      }
+  }
+
+   return(membership);
+  }
+
+    
+  //traverse down the tree, top down, do calculation
+  static int[] determine_membershipByNodeOrder(TreeModel treeModel, int[] cutNodes, int numCuts){
+
+
+      Map<Integer, Integer> m = new HashMap<Integer, Integer>();
+      for(int i=0; i < numCuts; i++){
+    	  m.put(new Integer(cutNodes[i]), new Integer(i+1));
+    	  
+    	//  System.out.println(cutNodes[i] + "\t" + (i+1) );
+      }
+      
+  	
+  NodeRef root = treeModel.getRoot();
+
+  int numClusters = 1;
+  LinkedList<NodeRef> list = new LinkedList<NodeRef>();
+  list.addFirst(root);
+
+  int[] membership = new int[treeModel.getNodeCount()];
+  for(int i=0; i < treeModel.getNodeCount(); i++){
+  	membership[i] = -1;
+  }
+  membership[root.getNumber()] = 0; //root always given the first cluster
+        
+  while(!list.isEmpty()){
+  	//do things with the current object
+  	NodeRef curElement = list.pop();
+  	//String content = "node #" + curElement.getNumber() +", taxon=" + treeModel.getNodeTaxon(curElement) + " and parent is = " ;
+  	String content = "node #" + curElement.getNumber() +", taxon= " ;
+  	if(treeModel.getNodeTaxon(curElement)== null){
+  		content += "internal node\t";
+  	}
+  	else{
+  		content += treeModel.getNodeTaxon(curElement).getId() + "\t";
+  		//content += treeModel.getTaxonIndex(treeModel.getNodeTaxon(curElement)) + "\t";
+  	}
+  	
+     	if(treeModel.getParent(curElement)== null){
+  		//content += "no parent";
+  	}
+  	else{
+  		//content += "parent node#=" + treeModel.getParent(curElement).getNumber();
+  	}
+  	
+  	//cluster assignment:
+  	if(!treeModel.isRoot(curElement)){
+  	 if(isCutNode(curElement.getNumber(), cutNodes, numCuts)){
+  	//if(isCutNode(curElement.getNumber())){
+  		//numClusters++ ;
+  		//membership[ curElement.getNumber() ] = numClusters - 1;
+  		// System.out.println("get: curElement" + curElement.getNumber() + "\t" + m.get(new Integer( curElement.getNumber())));
+  		membership[ curElement.getNumber()] = m.get(new Integer( curElement.getNumber()));
+  		
+    }
+  	else{
+  		//inherit from parent's cluster assignment
+  		membership[curElement.getNumber()] = membership[treeModel.getParent(curElement).getNumber()]; 
+  	 }
+  	        	
+  	}//is not Root
+  	content += " cluster = " + membership[curElement.getNumber()] ; 
+  	
+  //	System.out.println(content);
+
+  	
+      for(int childNum=0; childNum < treeModel.getChildCount(curElement); childNum++){
+      	list.addFirst(treeModel.getChild(curElement,childNum));
+      }
+  }
+
+   return(membership);
+  }
+   
+
+
+	public void accept(double deviation) {
+    	super.accept(deviation);
+
+    	/*
+    	if(isMoveMu==1){
+    		numAcceptMoveMu++;
+    		numProposeMoveMu++;
+        	System.out.println("% accept move Mu = " + numAcceptMoveMu/(double)numProposeMoveMu);
+    	}
+    	else{    	   
+    		numAcceptMoveC++;
+    		numProposeMoveC++;
+        	System.out.println("% accept move C = " + numAcceptMoveC/(double)numProposeMoveC);
+    	}
+    	*/  
+    	        	
+    	//	if(virusIndexChange <5){
+    //		System.out.println("     -  Accepted!");
+    	//	}
+          	
+    }
+    
+    public void reject(){
+    	super.reject();
+ 
+    	
+    	/*
+    	//manually change mu back..
+    	if(isMoveMu==1){
+			 mu.getParameter(groupSelectedChange).setParameterValue(dimSelectChange, originalValueChange);
+    	}
+    	//manually change all the affected vLoc back...
+    	for(int i=0; i < numdata; i++){
+        	int label = (int) clusterLabels.getParameterValue(i);
+    		Parameter vLoc = virusLocations.getParameter(i);   		
+    		//	double muValue = mu.getParameter(label).getParameterValue(0);
+    		//	vLoc.setParameterValue(0, muValue);
+    		//  double	muValue2 = mu.getParameter(label).getParameterValue(1);
+   			//	vLoc.setParameterValue(1, muValue2);
+	
+ 			clusterOffsetsParameter.setParameterValue( i , mu0_offset[label]);   			
+    	}
+    	*/
+    	
+    	
+    	/*
+    	if(isMoveMu==1){
+    		numProposeMoveMu++;
+        	System.out.println("% accept move Mu = " + numAcceptMoveMu/(double)numProposeMoveMu);
+    	}
+    	else{    	   
+    		numProposeMoveC++;
+        	System.out.println("% accept move C = " + numAcceptMoveC/(double)numProposeMoveC);
+    	}
+    	*/
+    	//if(virusIndexChange < 5){
+		System.out.println("        	*      Rejected!");
+    	//}
+      	
+      	
+      	/*
+      	for(int i=0; i < numdata; i++){
+      		Parameter vLoc = virusLocations.getParameter(i);
+
+      		if( vLoc.getParameterValue(0) != old_vLoc0[i]){
+
+      			System.out.println("virus " + i + " is different: " + vLoc.getParameterValue(0) + " and " + old_vLoc0[i]);
+      		}
+      		
+      		//System.out.println(old_vLoc0[i] + ", " + old_vLoc1[i]);
+      		vLoc.setParameterValue(0, old_vLoc0[i]);
+      		vLoc.setParameterValue(1, old_vLoc1[i]);
+      		
+		}
+      	*/
+  		//System.exit(0);
+
+      	
+      	
+
+    }
+    
+	
+
+           public final static String TREE_CLUSTERSEQUENTIAL_OPERATOR = "TreeClusterSequentialSampling";
+
+              
+            //MCMCOperator INTERFACE
+            public final String getOperatorName() {
+                return TREE_CLUSTERSEQUENTIAL_OPERATOR;
+            }
+
+            public final void optimize(double targetProb) {
+
+                throw new RuntimeException("This operator cannot be optimized!");
+            }
+
+            public boolean isOptimizing() {
+                return false;
+            }
+
+            public void setOptimizing(boolean opt) {
+                throw new RuntimeException("This operator cannot be optimized!");
+            }
+
+            public double getMinimumAcceptanceLevel() {
+                return 0.1;
+            }
+
+            public double getMaximumAcceptanceLevel() {
+                return 0.4;
+            }
+
+            public double getMinimumGoodAcceptanceLevel() {
+                return 0.20;
+            }
+
+            public double getMaximumGoodAcceptanceLevel() {
+                return 0.30;
+            }
+
+            public String getPerformanceSuggestion() {
+                if (Utils.getAcceptanceProbability(this) < getMinimumAcceptanceLevel()) {
+                    return "";
+                } else if (Utils.getAcceptanceProbability(this) > getMaximumAcceptanceLevel()) {
+                    return "";
+                } else {
+                    return "";
+                }
+            }
+
+        
+           
+        
+
+            public static XMLObjectParser PARSER = new AbstractXMLObjectParser() {
+            	
+
+                public final static String VIRUSLOCATIONS = "virusLocations";
+            	public final static String  MU = "mu";
+            	public final static String CLUSTERLABELS = "clusterLabels";
+            	public final static String K = "k";
+            	public final static String OFFSETS = "offsets";
+      //     	public final static String LOCATION_DRIFT = "locationDrift"; //no longer need
+            	
+                public final static String CLUSTER_OFFSETS = "clusterOffsetsParameter";
+                
+            	public final static String INDICATORS = "indicators";
+
+                public final static String EXCISION_POINTS = "excisionPoints";
+
+
+                public String getParserName() {
+                    return TREE_CLUSTERSEQUENTIAL_OPERATOR;
+                }
+
+                /* (non-Javadoc)
+                 * @see dr.xml.AbstractXMLObjectParser#parseXMLObject(dr.xml.XMLObject)
+                 */
+                public Object parseXMLObject(XMLObject xo) throws XMLParseException {
+                	
+                	//System.out.println("Parser run. Exit now");
+                	//System.exit(0);
+
+                    double weight = xo.getDoubleAttribute(MCMCOperator.WEIGHT);
+
+                    
+                    XMLObject cxo = xo.getChild(VIRUSLOCATIONS);
+                        MatrixParameter virusLocations = (MatrixParameter) cxo.getChild(MatrixParameter.class);
+
+                        
+                        cxo = xo.getChild(MU);
+                        MatrixParameter mu = (MatrixParameter) cxo.getChild(MatrixParameter.class);
+
+                        cxo = xo.getChild(CLUSTERLABELS);
+                        Parameter clusterLabels = (Parameter) cxo.getChild(Parameter.class);
+
+                        cxo = xo.getChild(K);
+                        Parameter k = (Parameter) cxo.getChild(Parameter.class);
+                        
+                        cxo = xo.getChild(OFFSETS);
+                        Parameter offsets = (Parameter) cxo.getChild(Parameter.class);
+ 
+//                        cxo = xo.getChild(LOCATION_DRIFT);
+//                        Parameter locationDrift = (Parameter) cxo.getChild(Parameter.class);
+                        
+                        Parameter clusterOffsetsParameter = null;
+                        if (xo.hasChildNamed(CLUSTER_OFFSETS)) {
+                        	clusterOffsetsParameter = (Parameter) xo.getElementFirstChild(CLUSTER_OFFSETS);
+                        }
+
+                        cxo = xo.getChild(INDICATORS);
+                        Parameter indicators = (Parameter) cxo.getChild(Parameter.class);
+                      
+                        cxo = xo.getChild(EXCISION_POINTS);
+                        Parameter excisionPoints = (Parameter) cxo.getChild(Parameter.class);
+                      
+                    TreeModel treeModel = (TreeModel) xo.getChild(TreeModel.class);
+ 
+                    AGLikelihoodTreeCluster agLikelihood = (AGLikelihoodTreeCluster) xo.getChild(AGLikelihoodTreeCluster.class);
+                        
+                    //return new ClusterAlgorithmOperator(virusLocations, mu, clusterLabels, k, weight, offsets, locationDrift, clusterOffsetsParameter);
+                        return new TreeClusterSequentialSampling(virusLocations, mu, clusterLabels, k, weight, offsets,  clusterOffsetsParameter, indicators, excisionPoints, treeModel, agLikelihood);
+
+                }
+
+                //************************************************************************
+                // AbstractXMLObjectParser implementation
+                //************************************************************************
+
+                public String getParserDescription() {
+                    return "An operator that picks a new allocation of an item to a cluster under the Dirichlet process.";
+                }
+
+                public Class getReturnType() {
+                    return TreeClusterSequentialSampling.class;
+                }
+
+
+                public XMLSyntaxRule[] getSyntaxRules() {
+                    return rules;
+                }
+
+                private final XMLSyntaxRule[] rules = {
+                        AttributeRule.newDoubleRule(MCMCOperator.WEIGHT),
+                        
+                        new ElementRule(VIRUSLOCATIONS, Parameter.class),
+                        new ElementRule(MU, Parameter.class),
+                        new ElementRule(CLUSTERLABELS, Parameter.class),
+                        new ElementRule(K, Parameter.class),
+                        new ElementRule(OFFSETS, Parameter.class),
+                  //      new ElementRule(LOCATION_DRIFT, Parameter.class), //no longer needed
+   //                    
+                       new ElementRule(CLUSTER_OFFSETS, Parameter.class, "Parameter of cluster offsets of all virus"),  // no longer REQUIRED
+                       new ElementRule(INDICATORS, Parameter.class),
+                       new ElementRule(EXCISION_POINTS, Parameter.class),
+                       new ElementRule(TreeModel.class),
+
+        
+            };
+            
+            };
+
+
+        
+            public int getStepCount() {
+                return 1;
+            }
+
+        }
+
+
+
+
diff --git a/src/dr/evomodel/antigenic/phyloClustering/misc/obsolete/serumDriftScalingMuOperator.java b/src/dr/evomodel/antigenic/phyloClustering/misc/obsolete/serumDriftScalingMuOperator.java
new file mode 100644
index 0000000..263a780
--- /dev/null
+++ b/src/dr/evomodel/antigenic/phyloClustering/misc/obsolete/serumDriftScalingMuOperator.java
@@ -0,0 +1,214 @@
+
+package dr.evomodel.antigenic.phyloClustering.misc.obsolete;
+
+import java.io.FileReader;
+import java.io.IOException;
+import java.util.LinkedList;
+
+import dr.evolution.tree.NodeRef;
+import dr.evomodel.antigenic.phyloClustering.MCMCOperators.serumDriftActiveScaledMu1Operator;
+import dr.evomodel.tree.TreeModel;
+import dr.inference.model.MatrixParameter;
+import dr.inference.model.Parameter;
+import dr.inference.operators.AbstractCoercableOperator;
+import dr.inference.operators.CoercionMode;
+import dr.inference.operators.MCMCOperator;
+import dr.inference.operators.OperatorFailedException;
+import dr.inference.operators.OperatorUtils;
+import dr.inference.operators.SimpleMCMCOperator;
+import dr.inference.operators.MCMCOperator.Utils;
+import dr.math.MathUtils;
+import dr.util.DataTable;
+import dr.xml.AbstractXMLObjectParser;
+import dr.xml.AttributeRule;
+import dr.xml.ElementRule;
+import dr.xml.XMLObject;
+import dr.xml.XMLObjectParser;
+import dr.xml.XMLParseException;
+import dr.xml.XMLSyntaxRule;
+
+public class serumDriftScalingMuOperator extends AbstractCoercableOperator {
+
+	
+
+    private Parameter serumDrift = null;   
+
+    private MatrixParameter mu = null;
+    private Parameter muMean = null;
+    private Parameter muPrec = null;
+	private double scaleFactor;
+
+	
+	public serumDriftScalingMuOperator(double weight, MatrixParameter mu, Parameter muMean, Parameter muPrec, Parameter serumDrift, double scale){
+    
+        super(CoercionMode.COERCION_ON);
+		
+		setWeight(weight);
+        this.mu = mu;
+        this.muMean = muMean;
+        this.muPrec = muPrec;
+        this.serumDrift = serumDrift;
+		this.scaleFactor = scale;
+	}
+	
+	
+
+	public double doOperation() throws OperatorFailedException {
+
+		
+        final double scale = (scaleFactor + (MathUtils.nextDouble() * ((1.0 / scaleFactor) - scaleFactor)));
+
+     //   System.out.println("serumDriftScaling operator ran");
+      //  System.out.println("scale=" + scale);
+        //changing serum drift
+		serumDrift.setParameterValue(0, scale *serumDrift.getParameterValue(0) );
+		
+		//changing mu
+		//System.out.println("dimension=" + mu.getColumnDimension());
+		for(int i=0; i < mu.getColumnDimension(); i++){
+			Parameter m = mu.getParameter(i);
+			m.setParameterValue(0, scale*m.getParameterValue(0));
+		}
+		//changing muMean
+		muMean.setParameterValue(0, scale*muMean.getParameterValue(0));
+		
+		//changing muPrec
+		muPrec.setParameterValue(0, scale*scale*muPrec.getParameterValue(0));
+				
+        double logq = -Math.log(scale);
+        return logq;
+	}
+	
+	
+	
+
+
+	//copied from the original ScaleOperator
+    public double getCoercableParameter() {
+        return Math.log(1.0 / scaleFactor - 1.0);
+    }
+
+	//copied from the original ScaleOperator
+    public void setCoercableParameter(double value) {
+        scaleFactor = 1.0 / (Math.exp(value) + 1.0);
+    }
+
+	//copied from the original ScaleOperator
+    public double getRawParameter() {
+        return scaleFactor;
+    }
+
+	
+	
+	//copied from the original ScaleOperator
+    public double getTargetAcceptanceProbability() {
+        return 0.234;
+    }
+	//copied from the original ScaleOperator
+    public final String getPerformanceSuggestion() {
+
+        double prob = MCMCOperator.Utils.getAcceptanceProbability(this);
+        double targetProb = getTargetAcceptanceProbability();
+        dr.util.NumberFormatter formatter = new dr.util.NumberFormatter(5);
+        double sf = OperatorUtils.optimizeScaleFactor(scaleFactor, prob, targetProb);
+        if (prob < getMinimumGoodAcceptanceLevel()) {
+            return "Try setting scaleFactor to about " + formatter.format(sf);
+        } else if (prob > getMaximumGoodAcceptanceLevel()) {
+            return "Try setting scaleFactor to about " + formatter.format(sf);
+        } else return "";
+    }
+	
+	
+
+    
+    public final static String SERUMDRIFTSCALINGMUOperator = "serumDriftScalingMuOperator";
+
+    public final String getOperatorName() {
+        return SERUMDRIFTSCALINGMUOperator;
+    }
+
+    
+
+
+    
+    
+    
+    public static XMLObjectParser PARSER = new AbstractXMLObjectParser() {
+    	
+
+    	public final static String SCALE = "scaleFactor";
+    	public final static String  MU = "mu";
+    	public final static String  SERUMDRIFT = "serumDrift";       
+    	public final static String MUMEAN = "muMean";
+    	public final static String MUPREC = "muPrec";
+
+        public String getParserName() {
+            return SERUMDRIFTSCALINGMUOperator;
+        }
+
+        /* (non-Javadoc)
+         * @see dr.xml.AbstractXMLObjectParser#parseXMLObject(dr.xml.XMLObject)
+         */
+        public Object parseXMLObject(XMLObject xo) throws XMLParseException {
+
+
+            double weight = xo.getDoubleAttribute(MCMCOperator.WEIGHT);
+            double scale = xo.getDoubleAttribute(SCALE);              
+                
+            XMLObject cxo = xo.getChild(MU);
+                MatrixParameter mu = (MatrixParameter) cxo.getChild(MatrixParameter.class);
+
+                cxo = xo.getChild(SERUMDRIFT);
+                Parameter serumDrift = (Parameter) cxo.getChild(Parameter.class);
+
+                cxo = xo.getChild(MUMEAN);
+                Parameter muMean = (Parameter) cxo.getChild(Parameter.class);
+                
+                
+                cxo = xo.getChild(MUPREC);
+                Parameter muPrec = (Parameter) cxo.getChild(Parameter.class);
+                
+                
+            return new serumDriftScalingMuOperator(weight, mu, muMean, muPrec, serumDrift,  scale);
+            
+            //	public serumDriftScalingMuOperator(double weight, MatrixParameter mu, Parameter muMean, Parameter muPrec, Parameter serumDrift, double scale){
+
+
+        }
+
+        //************************************************************************
+        // AbstractXMLObjectParser implementation
+        //************************************************************************
+
+        public String getParserDescription() {
+            return "changes serum drift and make sure the first dimension of the active drifted mus stay the same";
+        }
+
+        public Class getReturnType() {
+            return serumDriftActiveScaledMu1Operator.class;
+        }
+
+
+        public XMLSyntaxRule[] getSyntaxRules() {
+            return rules;
+        }
+
+        private final XMLSyntaxRule[] rules = {
+                AttributeRule.newDoubleRule(MCMCOperator.WEIGHT),
+                AttributeRule.newDoubleRule(SCALE),
+                new ElementRule(MU, Parameter.class),
+                new ElementRule(MUMEAN, Parameter.class),
+                new ElementRule(MUPREC, Parameter.class),
+                new ElementRule(SERUMDRIFT, Parameter.class),
+        };
+    
+    };
+
+
+
+    public int getStepCount() {
+        return 1;
+    }
+    
+
+}
diff --git a/src/dr/evomodel/antigenic/phyloClustering/misc/simulateClusters.java b/src/dr/evomodel/antigenic/phyloClustering/misc/simulateClusters.java
new file mode 100644
index 0000000..74b8f95
--- /dev/null
+++ b/src/dr/evomodel/antigenic/phyloClustering/misc/simulateClusters.java
@@ -0,0 +1,689 @@
+package dr.evomodel.antigenic.phyloClustering.misc;
+
+import java.io.BufferedWriter;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.OutputStreamWriter;
+import java.io.Writer;
+import java.util.LinkedList;
+import java.util.Random;
+
+import dr.evolution.tree.NodeRef;
+import dr.evomodel.antigenic.AntigenicLikelihood;
+import dr.evomodel.antigenic.phyloClustering.TreeClusteringVirusesPrior;
+import dr.evomodel.antigenic.phyloClustering.Tree_Clustering_Shared_Routines;
+import dr.evomodel.tree.TreeModel;
+import dr.inference.model.AbstractModelLikelihood;
+import dr.inference.model.MatrixParameter;
+import dr.inference.model.Model;
+import dr.inference.model.Parameter;
+import dr.inference.model.Variable;
+import dr.inference.model.Variable.ChangeType;
+import dr.math.distributions.MultivariateNormalDistribution;
+import dr.xml.AbstractXMLObjectParser;
+import dr.xml.AttributeRule;
+import dr.xml.ElementRule;
+import dr.xml.XMLObject;
+import dr.xml.XMLObjectParser;
+import dr.xml.XMLParseException;
+import dr.xml.XMLSyntaxRule;
+
+public class simulateClusters  {
+	
+    public static final String CLASSNAME = "SimulateClustersAndHI";
+
+    private TreeModel treeModel;
+	private int[] clusterLabels;
+//	private Parameter clusterLabels;
+	private Parameter indicators;
+	private MatrixParameter virusLocations;
+    int []membershipToClusterLabelIndexes = null;
+    private Parameter virusOffsets;
+	private int []correspondingTreeIndexForVirus = null; //relates treeModels's indexing system to cluster label's indexing system of viruses. Gets assigned
+
+
+	private double[][] distCluster;
+	
+	//maybe run BEAST to load in the phylogeny.. and then introduce the cut
+	//do the cutting
+	//and then exit.
+	
+	//cut the tree
+	//simulate HI data
+
+
+	public simulateClusters(TreeModel treeModel_in, Parameter indicators_in ,int nClusters, int seraPerCluster, MatrixParameter vLoc_in,Parameter virusOffsets_in,
+			String HIFile, String distClusterFile, int seedNum, int minSize,
+	        double meanHomologousTiter,
+	        double unitPerTransition,
+	        double measurementNoiseVar){	
+		
+			Random generator = new Random(seedNum);
+			//Random generator = new Random();
+			
+		
+			System.out.println("loading the constructor for SimulateCluster and HI");
+			TreeModel treeModel = treeModel_in;
+
+			
+	        this.treeModel = treeModel_in;
+			this.indicators = indicators_in;
+			this.virusLocations = vLoc_in;
+			this.virusOffsets = virusOffsets_in;
+
+
+			System.out.println("sera per cluster = " + seraPerCluster);
+			System.out.println("nClusters = " + nClusters);
+			System.out.println("minimum cluster size = " + minSize);
+			System.out.println("meanHomologousTiter = " + meanHomologousTiter);
+			System.out.println("unit per transition = " + unitPerTransition);
+			System.out.println("measurement noise variance = " + measurementNoiseVar);
+			
+			System.out.println("seedNum = " + seedNum);
+			
+	        //initialize excision points
+	          //indicators.setDimension(treeModel.getNodeCount());
+	          for(int i=0; i < treeModel.getNodeCount(); i++){
+	        	  indicators.setParameterValue(i, 0);
+	          }
+
+
+	          
+
+	          //need to know which are the viruses
+	          
+	      	int numdata = virusLocations.getColumnDimension();
+	    	int numNodes = treeModel.getNodeCount();
+
+	      	
+//	      	correspondingTreeIndexForVirus = Tree_Clustering_Shared_Routines.setMembershipTreeToVirusIndexes(numdata, virusLocations, numNodes, treeModel);
+	        setMembershipToClusterLabelIndexes(); 
+
+   
+
+	        distCluster = new double[nClusters][];
+	        for(int i=0; i < nClusters; i++){
+	        	distCluster[i] = new double[nClusters];
+	        }
+	        
+	        int[] clusterSize = new int[1];
+	        double[][] serumYear = new double[1][1];
+	          
+	        //this.max_dim = max_dim_in;
+	        //this.virusLocations = virusLocations_in;
+	        
+	        indicators.setParameterValue( treeModel.getRoot().getNumber() ,1); 
+	        int numAddedClusters = 1;
+	        
+	        while(numAddedClusters < nClusters){
+			//Clustering routine:
+	        int I_selected;
+  			do{
+  				//I_selected = (int) (Math.floor(Math.random()*numNodes));
+  				I_selected = (int) (Math.floor(generator.nextDouble()*numNodes));
+  			}while( (int) indicators.getParameterValue(I_selected) == 1 );
+
+	        indicators.setParameterValue(I_selected, 1);
+  			numAddedClusters++;
+
+
+			
+	        
+	        //if each cluster has > 5 nodes, then this I_selected = 0
+	        
+	        //want to count the number of nodes in each cluster
+
+	        setClusterLabelsUsingIndicatorsAndCalculateDiff();
+	        
+	        clusterSize = new int[numAddedClusters];
+	        serumYear = new double[numAddedClusters][numdata];
+	        for(int i=0; i < numdata; i++){
+	        	serumYear[clusterLabels[i]][clusterSize[clusterLabels[i]]] = virusOffsets.getParameterValue(i);
+	        	clusterSize[ clusterLabels[i]] ++;
+	        	//System.out.println(clusterLabels[i] );
+	        }
+	        
+	        
+	        int reachMinClusterSize = 1;
+	        for(int i=0; i < numAddedClusters; i++){
+	        	if(clusterSize[i] < minSize){
+	        		reachMinClusterSize = 0;
+	        	}
+	        }
+	        
+	        if(reachMinClusterSize==1){
+		        for(int i=0; i < numAddedClusters; i++){
+		        	System.out.print("cluster " + i +  " size = " + clusterSize[i] + "\t");
+		        	for(int j=0; j < clusterSize[i]; j++){
+		        		System.out.print(serumYear[i][j]+",");
+		        	}
+		        	System.out.println("");
+		        }
+		        System.out.println("===========================================================");
+	        }
+	        
+	        if(reachMinClusterSize == 0){
+	        	indicators.setParameterValue(I_selected, 0);
+	        	numAddedClusters--;
+	        }
+	        
+	        
+	        }
+	        
+	        System.out.println("Cluster labels: ");
+	        for(int i=0; i < numdata; i++){
+	   		   Parameter v = virusLocations.getParameter(i);
+	  		   String curName = v.getParameterName();
+	        	System.out.println(  curName + "\t" + clusterLabels[i] );
+	        }
+	        
+	        
+	        /*
+	        try {
+	            //Whatever the file path is.
+	            File statText3 = new File("/Users/charles/Documents/researchData/clustering/simulations/H3N2_clusterAssignment.txt");
+	            FileOutputStream is3 = new FileOutputStream(statText3);
+	            OutputStreamWriter osw3 = new OutputStreamWriter(is3);    
+	            Writer w3 = new BufferedWriter(osw3);
+
+		        for(int i=0; i < numdata; i++){
+			   		   Parameter v = virusLocations.getParameter(i);
+			  		   String curName = v.getParameterName();
+			        	w3.write(  curName + "\t" + clusterLabels[i] +"\n");
+			     }
+	            
+		         w3.close();
+			    } catch (IOException e3) {
+			            System.err.println("Problem writing to the file");
+			    }
+			    */
+	        
+	        System.out.println("==============================================================");
+	        
+	        System.out.println("Indicators that are on:");
+	        for(int i=0; i < numNodes; i++){
+	        	if((int)indicators.getParameterValue(i) ==1){
+	        		System.out.println(i);
+	        	}
+	        }
+	        
+	        /*
+	        try {
+	            //Whatever the file path is.
+	            File statText2 = new File("/Users/charles/Documents/researchData/clustering/simulations/H3N2_HI_onIndicators.txt");
+	            FileOutputStream is2 = new FileOutputStream(statText2);
+	            OutputStreamWriter osw2 = new OutputStreamWriter(is2);    
+	            Writer w2 = new BufferedWriter(osw2);
+	            
+		        for(int i=0; i < numNodes; i++){
+		        	if((int)indicators.getParameterValue(i) ==1){
+		        		w2.write(i+"\n");
+		        	}
+		        }
+
+		         w2.close();
+			        } catch (IOException e2) {
+			            System.err.println("Problem writing to the file");
+			        }
+			   */ 
+	        
+	        
+			System.out.println("===============================================================");
+
+	        try {
+	        	System.out.println("distClusterFile = " + distClusterFile);
+	            //Whatever the file path is.
+	            File statText5 = new File(distClusterFile);
+	            FileOutputStream is5 = new FileOutputStream(statText5);
+	            OutputStreamWriter osw5 = new OutputStreamWriter(is5);    
+	            Writer w5 = new BufferedWriter(osw5);
+	   	        
+				for(int i=0; i < nClusters; i++){
+					for(int j=0; j < nClusters; j++){
+						System.out.print(distCluster[i][j] + " ");
+						w5.write(distCluster[i][j] + " ");
+					}
+					System.out.println("");
+					w5.write("\n");
+				}
+		         w5.close();
+			        } catch (IOException e5) {
+			            System.err.println("Problem writing to the file");
+			    
+			        
+			     }
+
+			
+			System.out.println("===============================================================");
+			
+	        //NEED
+	        //a way of telling how many cluster differences between 2 clusters..
+	        //this should be like a table.
+			
+			//When constructing clusters, also construct the distance table.
+	        	//just inherits from the parent + 1.
+	        //distClutser[i,j] = #
+	        
+
+	        
+	        
+	        
+	        try {
+	        	System.out.println("HIFile = " + HIFile);
+	            //Whatever the file path is.
+	            File statText = new File(HIFile);
+	            FileOutputStream is = new FileOutputStream(statText);
+	            OutputStreamWriter osw = new OutputStreamWriter(is);    
+	            Writer w = new BufferedWriter(osw);
+	            w.write("virusIsolate\tvirusStrain\tvirusYear\tserumIsolate\tserumStrain\tserumYear\ttiter\tsource\n");
+	   
+	        
+	        System.out.println("virusIsolate    virusStrain     virusYear       serumIsolate    serumStrain     serumYear       titer   source");
+	        for(int c=0; c< nClusters; c++){
+	        	for(int s=0; s<seraPerCluster; s++ ){
+	        		
+	        		//sample serum year
+	        		//int whichSample = (int) (Math.floor(Math.random()*clusterSize[c]));
+	        		int whichSample = (int) (Math.floor(generator.nextDouble()*clusterSize[c]));
+	        		double curSerumYear = serumYear[c][whichSample];
+	        		
+	        		for(int v=0; v< numdata; v++){
+		        		double[] meanTiter = new double[1];
+		        		meanTiter[0] = meanHomologousTiter - distCluster[c][clusterLabels[v]]*unitPerTransition;
+		        		double[][] prec = new double[1][1];
+		        		prec[0][0] = 1/measurementNoiseVar;
+		        
+		        		double[] values = MultivariateNormalDistribution.nextMultivariateNormalPrecision(meanTiter, prec);
+		        		
+		        		Parameter virus = virusLocations.getParameter(v);
+		        		String curName = virus.getParameterName();
+		        		//System.out.print("C" + clusterLabels[v] + "\t" + curName +"\t" + virusOffsets.getParameterValue(v) +"\t"  );
+		        		//System.out.print("C" + c +"\t" + "c" + c + "s" + (s+1) +"\t" + curSerumYear +"\t");
+		        		//System.out.println(Math.pow(2,values[0]) + "\tsimulation");
+	
+	//if(Math.random() < 0.1){
+		        		//if(distCluster[c][clusterLabels[v]] <= 3){
+		        		w.write("C" + clusterLabels[v] + "\t" + curName +"\t" + virusOffsets.getParameterValue(v) +"\t"  );
+		        		w.write("c" + c + "s" + (s+1) +"\t"+ "c" + c + "s" + (s+1) +"\t" + curSerumYear +"\t");
+		        		w.write(Math.pow(2,values[0]) + "\tsimulation\n");
+	//	}       		
+	        		}
+	        	}
+	        }
+
+	         w.close();
+		        } catch (IOException e) {
+		            System.err.println("Problem writing to the file");
+		    
+		        
+		     }
+		    
+	        
+	        
+	    	
+	    	//Make XML
+	    	//Run
+	    	
+			//System.exit(0);
+	}
+
+	
+	
+	
+
+	private void setMembershipToClusterLabelIndexes(){
+
+  	   //I suspect this is an expensive operation, so I don't want to do it many times,
+  	   //which is also unnecessary  - MAY have to update whenever a different tree is used.
+		int numdata = virusLocations.getColumnDimension();
+		int numNodes = treeModel.getNodeCount();
+         membershipToClusterLabelIndexes = new int[numdata]; 
+         clusterLabels = new int[numdata];
+         for(int i=0; i < numdata; i++){
+  		   Parameter v = virusLocations.getParameter(i);
+  		   String curName = v.getParameterName();
+  		  // System.out.println(curName);
+  		   int isFound = 0;
+      	   for(int j=0; j < numNodes; j++){
+      		   String treeId = treeModel.getTaxonId(j);
+      		   if(curName.equals(treeId) ){
+      		//	   System.out.println("  isFound at j=" + j);
+      			   membershipToClusterLabelIndexes[i] = j;
+      			   isFound=1;
+      			   break;
+      		   }	   
+      	   }
+      	   if(isFound ==0){
+      		   System.out.println("not found. Exit now.");
+      		   System.exit(0);
+      	   }     	   
+         }
+    }
+	
+	
+	private void setClusterLabelsUsingIndicatorsAndCalculateDiff(){
+
+        int []membership = determine_membership_v3(treeModel);
+		int numdata = virusLocations.getColumnDimension();
+        for(int i=0; i < numdata; i++){   
+        	clusterLabels[i] = membership[membershipToClusterLabelIndexes[i]] ;
+        }
+	}
+	
+	
+
+    //traverse down the tree, top down, do calculation
+     int[] determine_membership_v3(TreeModel treeModel){
+	    	
+	    NodeRef root = treeModel.getRoot();
+	
+	    int numClusters = 1;
+	    LinkedList<NodeRef> list = new LinkedList<NodeRef>();
+	    list.addFirst(root);
+	
+	    int[] membership = new int[treeModel.getNodeCount()];
+	    for(int i=0; i < treeModel.getNodeCount(); i++){
+	    	membership[i] = -1;
+	    }
+	    membership[root.getNumber()] = 0; //root always given the first cluster
+	          
+	    while(!list.isEmpty()){
+	    	//do things with the current object
+	    	NodeRef curElement = list.pop();
+	    	//String content = "node #" + curElement.getNumber() +", taxon=" + treeModel.getNodeTaxon(curElement) + " and parent is = " ;
+	    	String content = "node #" + curElement.getNumber() +", taxon= " ;
+	    	if(treeModel.getNodeTaxon(curElement)== null){
+	    		content += "internal node\t";
+	    	}
+	    	else{
+	    		content += treeModel.getNodeTaxon(curElement).getId() + "\t";
+	    		//content += treeModel.getTaxonIndex(treeModel.getNodeTaxon(curElement)) + "\t";
+	    	}
+	    	
+	       	if(treeModel.getParent(curElement)== null){
+	    		//content += "no parent";
+	    	}
+	    	else{
+	    		//content += "parent node#=" + treeModel.getParent(curElement).getNumber();
+	    	}
+	    	
+	    	//cluster assignment:
+	    	if(!treeModel.isRoot(curElement)){
+	    		if( (int) indicators.getParameterValue(curElement.getNumber() ) == 1) {
+	    			numClusters++ ;
+	    			membership[ curElement.getNumber() ] = numClusters - 1; 
+	    			
+	    			int parentClusterLabel = membership[treeModel.getParent(curElement).getNumber()] ; //parent cluster label
+	    			//assign distCluster
+	    			for(int i=0; i < (numClusters-1); i++){
+	    				distCluster[numClusters -1][i] = distCluster[ parentClusterLabel][i] +1;
+	    				distCluster[i][numClusters -1] = distCluster[i][ parentClusterLabel] +1; 
+
+	    			}
+	      	 	}
+	    		else{
+	    			//inherit from parent's cluster assignment
+	    			membership[curElement.getNumber()] = membership[treeModel.getParent(curElement).getNumber()]; 
+	    		}        	
+	    	}//is not Root
+	    	content += " cluster = " + membership[curElement.getNumber()] ; 
+	    	
+	    //	System.out.println(content);
+	
+	    	
+	        for(int childNum=0; childNum < treeModel.getChildCount(curElement); childNum++){
+	        	list.addFirst(treeModel.getChild(curElement,childNum));
+	        }
+	    }
+	
+	     return(membership);
+    }
+
+	
+
+
+
+    
+    
+    //traverse down the tree, top down, do calculation
+    int[] determine_from_membership_v2(TreeModel treeModel){
+	    	//note: I set MAX_DIM as the most I would print, but in order to avoid bug, I 
+    	//declare the number of nodes as the most active nodes I can have.
+    	int[] fromMembership = new int[treeModel.getNodeCount()];
+    	for(int i=0; i < treeModel.getNodeCount(); i++){
+    		fromMembership[i ] = -1;
+    	}
+    	
+	    NodeRef root = treeModel.getRoot();
+	
+	    int numClusters = 1;
+	    LinkedList<NodeRef> list = new LinkedList<NodeRef>();
+	    list.addFirst(root);
+	
+	    int[] membership = new int[treeModel.getNodeCount()];
+	    for(int i=0; i < treeModel.getNodeCount(); i++){
+	    	membership[i] = -1;
+	    }
+	    membership[root.getNumber()] = 0; //root always given the first cluster
+	          
+	    while(!list.isEmpty()){
+	    	//do things with the current object
+	    	NodeRef curElement = list.pop();
+	    	//String content = "node #" + curElement.getNumber() +", taxon=" + treeModel.getNodeTaxon(curElement) + " and parent is = " ;
+	    	String content = "node #" + curElement.getNumber() +", taxon= " ;
+	    	if(treeModel.getNodeTaxon(curElement)== null){
+	    		content += "internal node\t";
+	    	}
+	    	else{
+	    		content += treeModel.getNodeTaxon(curElement).getId() + "\t";
+	    		//content += treeModel.getTaxonIndex(treeModel.getNodeTaxon(curElement)) + "\t";
+	    	}
+	    	
+	       	if(treeModel.getParent(curElement)== null){
+	    		//content += "no parent";
+	    	}
+	    	else{
+	    		//content += "parent node#=" + treeModel.getParent(curElement).getNumber();
+	    	}
+	    	
+	    	//cluster assignment:
+	    	if(!treeModel.isRoot(curElement)){
+	    	 if( (int) indicators.getParameterValue(curElement.getNumber() ) == 1) {
+	    //		 System.out.print("indicator # " + curElement.getNumber()  + " ");
+	    		numClusters++ ;
+	    		membership[ curElement.getNumber() ] = numClusters - 1; 
+	    		fromMembership[numClusters -1] = membership[ treeModel.getParent(curElement).getNumber()];
+	    //		System.out.println("    membership " + (numClusters-1) + " assigned from " + membership[ treeModel.getParent(curElement).getNumber()] );
+	      	}
+	    	else{
+	    		//inherit from parent's cluster assignment
+	    		membership[curElement.getNumber()] = membership[treeModel.getParent(curElement).getNumber()]; 
+	    	 }
+	    	        	
+	    	}//is not Root
+	    	content += " cluster = " + membership[curElement.getNumber()] ; 
+	    	
+	    //	System.out.println(content);
+	
+	    	
+	        for(int childNum=0; childNum < treeModel.getChildCount(curElement); childNum++){
+	        	list.addFirst(treeModel.getChild(curElement,childNum));
+	        }
+	    }
+	
+	     return(fromMembership);
+   }
+
+	
+    
+    
+	
+	
+	
+    
+
+    public static XMLObjectParser PARSER = new AbstractXMLObjectParser() {
+    	   	
+    	public final static String EXCISIONPOINTS = "excisionPoints";
+    	public final static String CLUSTERLABELS = "clusterLabels";
+    	public final static String CLUSTERLABELSTREENODE = "clusterLabelsTreeNode";
+
+    	public final static String  MU = "mu";
+
+    	public final static String OFFSETS = "offsets";
+    	public final static String VIRUS_LOCATIONS = "virusLocations";
+    	public final static String VIRUS_LOCATIONSTREENODE = "virusLocationsTreeNodes";
+    	
+    	public final static String INDICATORS = "indicators";
+
+    	public final static String VIRUS_OFFSETS = "virusOffsets";
+        boolean integrate = false;
+        
+        
+     //   public final static String MUVARIANCE = "muVariance";
+        public final static String MUPRECISION = "muPrecision";
+        public final static String PROBACTIVENODE = "probActiveNode";
+        
+        public final static String INITIALNUMCLUSTERS = "numClusters";
+        public final static String NUMSERA = "numSeraPerCluster";
+
+        public final static String MUMEAN = "muMean";
+
+        public final static String FILE_NAME = "HIFile";
+        public final static String FILE_NAME2 = "distClusterFile";
+
+        public final static String SEEDNUM = "seed";
+        public final static String MINSIZECLUSTER = "minClusterSize";
+        
+        
+        public final static String MEANHOMOLOGOUSTITER = "meanHomologousLog2Titer";
+        public final static String UNITPERTRANSITION = "log2TiterDiffPerTransition";
+        public final static String MEASUREMENTNOISEVAR = "measurementNoiseVariance";
+        
+        public String getParserName() {
+            return CLASSNAME;
+        }
+
+        public Object parseXMLObject(XMLObject xo) throws XMLParseException {
+        	
+        	
+        		int initialK = 10;
+            	if (xo.hasAttribute(INITIALNUMCLUSTERS)) {
+            		initialK = xo.getIntegerAttribute(INITIALNUMCLUSTERS);
+            	}
+
+        		int numSera = 10;
+            	if (xo.hasAttribute(NUMSERA)) {
+            		numSera = xo.getIntegerAttribute(NUMSERA);
+            	}
+            	
+        		int seedNum = (int) Math.floor(Math.random()*100000);
+            	if (xo.hasAttribute(SEEDNUM)) {
+            		seedNum = xo.getIntegerAttribute(SEEDNUM);
+            	}
+            	
+            	int minSizeCluster = 5;
+            	if (xo.hasAttribute(MINSIZECLUSTER)) {
+            		minSizeCluster = xo.getIntegerAttribute(MINSIZECLUSTER);
+            	}
+            	
+
+    	        double meanHomologousTiter = 10; //log2
+    	        if(xo.hasAttribute(MEANHOMOLOGOUSTITER)){
+    	        	meanHomologousTiter = xo.getDoubleAttribute(MEANHOMOLOGOUSTITER);
+    	        }
+    	        double unitPerTransition = 2;
+    	        if(xo.hasAttribute(UNITPERTRANSITION)){
+    	        	unitPerTransition = xo.getDoubleAttribute(UNITPERTRANSITION);
+    	        }
+    	        double measurementNoiseVar = 1;
+            	if(xo.hasAttribute(MEASUREMENTNOISEVAR)){
+            		measurementNoiseVar = xo.getDoubleAttribute(MEASUREMENTNOISEVAR);
+            	}
+            	
+                TreeModel treeModel = (TreeModel) xo.getChild(TreeModel.class);
+                XMLObject cxo  = xo.getChild(INDICATORS);
+                Parameter indicators = (Parameter) cxo.getChild(Parameter.class);
+                cxo=xo.getChild(VIRUS_LOCATIONS);
+                MatrixParameter virusLocations =(MatrixParameter) cxo.getChild(MatrixParameter.class);                
+               
+                cxo=xo.getChild(VIRUS_OFFSETS);
+                Parameter virusOffsets =(Parameter) cxo.getChild(Parameter.class);      
+                
+                String fileName = xo.getStringAttribute(FILE_NAME);
+                String fileName2 = xo.getStringAttribute(FILE_NAME2);
+                
+                
+
+		        return new simulateClusters(treeModel, indicators, initialK, numSera, virusLocations, virusOffsets, fileName, fileName2, seedNum, minSizeCluster,
+		        		meanHomologousTiter, unitPerTransition, measurementNoiseVar); 
+            }
+        
+        	private final XMLSyntaxRule[] rules = {
+                AttributeRule.newStringRule(FILE_NAME, false, "The name of the file containing the assay table"),
+                AttributeRule.newStringRule(FILE_NAME2, false, "The name of the file containing the assay table"),
+                AttributeRule.newIntegerRule(INITIALNUMCLUSTERS, true, "the initial number of clusters"),
+                AttributeRule.newIntegerRule(SEEDNUM, true, "the initial number of clusters"),
+                AttributeRule.newIntegerRule(NUMSERA, true, "number of sera to simulate"),
+                AttributeRule.newIntegerRule(MINSIZECLUSTER, true, "The minimum cluster size of a virus"),
+                AttributeRule.newDoubleRule(MEANHOMOLOGOUSTITER, true, "the expected log2 titer of a homologous virus"),
+                AttributeRule.newDoubleRule(UNITPERTRANSITION, true, "the expected decrease in log2 titer value per major transition"),
+                AttributeRule.newDoubleRule(MEASUREMENTNOISEVAR, true, "the variance of the measurement noise in the log2 titer value"),
+                new ElementRule(TreeModel.class),
+                new ElementRule(INDICATORS, Parameter.class),
+                new ElementRule(VIRUS_LOCATIONS, MatrixParameter.class),
+                new ElementRule(VIRUS_OFFSETS, Parameter.class),                     
+        };
+
+            //************************************************************************
+            // AbstractXMLObjectParser implementation
+            //************************************************************************
+
+            public String getParserDescription() {
+                return "tree clustering viruses";
+            }
+
+            public Class getReturnType() {
+                return TreeClusteringVirusesPrior.class;
+            }
+
+            public XMLSyntaxRule[] getSyntaxRules() {
+                return rules;
+            }
+            
+            
+
+            
+    };
+
+    String Atribute = null;
+
+	public Model getModel() {
+		// TODO Auto-generated method stub
+		return null;
+	}
+
+
+
+
+
+	public double getLogLikelihood() {
+		// TODO Auto-generated method stub
+		return 0;
+	}
+
+
+
+
+
+	public void makeDirty() {
+		// TODO Auto-generated method stub
+		
+	}
+
+
+
+
+	
+}
diff --git a/src/dr/evomodel/antigenic/phyloClustering/statistics/ActiveIndicatorsStatistic.java b/src/dr/evomodel/antigenic/phyloClustering/statistics/ActiveIndicatorsStatistic.java
new file mode 100644
index 0000000..5e334e0
--- /dev/null
+++ b/src/dr/evomodel/antigenic/phyloClustering/statistics/ActiveIndicatorsStatistic.java
@@ -0,0 +1,125 @@
+
+package dr.evomodel.antigenic.phyloClustering.statistics;
+
+import java.util.LinkedList;
+
+import dr.inference.model.*;
+import dr.xml.*;
+
+/**
+ *  @author Charles Cheung
+ * @author Trevor Bedford
+ */
+
+public class ActiveIndicatorsStatistic extends Statistic.Abstract implements VariableListener {
+
+	private LinkedList<Double> activeNodes  = new LinkedList<Double>();
+	
+	private int max_dim;
+	
+    public static final String ACTIVE_INDICATORS_STATISTIC = "activeIndicatorsStatistic";
+
+    public ActiveIndicatorsStatistic(Parameter indicators, int maxDim_in) {
+        this.indicatorsParameter = indicators;
+        indicatorsParameter.addParameterListener(this);
+        max_dim = maxDim_in;
+    }
+    
+
+
+    public int getDimension() {
+        return max_dim;
+    }
+
+
+
+    //assume print in order... so before printing the first number, 
+    //determine all the nodes that are active.
+    public double getStatisticValue(int dim) {
+
+    	if(dim ==0){
+    		activeNodes  = new LinkedList<Double>();  // reset linkedlist	
+    	    //determine all the nodes that are active.
+    		for(int i=0; i < indicatorsParameter.getDimension(); i++){
+    			if( (int) indicatorsParameter.getParameterValue(i) == 1 ){
+    				activeNodes.addLast(new Double(i));
+    			}
+    		}
+    		//System.out.println("active node size is = " + activeNodes.size() );
+    	}
+    	
+    	
+        double val = -1;
+        if(dim < activeNodes.size()){
+        	val = activeNodes.get(dim).doubleValue();
+        }
+        
+        
+        //if the number of active nodes is more than the max number allowed, then for the last placeholder, print -9999999 instead
+        //to indicate that there are more than MAX_DIM of active nodes and we aren't able to print them all.
+        if(dim == (max_dim -1) && ( activeNodes.size() > max_dim) ){
+        	val = -9999999;
+        }
+
+       return val;
+
+    }
+
+    public String getDimensionName(int dim) {
+    	String name = "on_" + (dim+1);
+        return name;
+    }
+
+    public void variableChangedEvent(Variable variable, int index, Parameter.ChangeType type) {
+        // do nothing
+    	//System.out.println("hi got printed");
+    }
+
+    public static XMLObjectParser PARSER = new AbstractXMLObjectParser() {
+
+        public final static String INDICATORS = "indicators";
+        public final static String MAXDIMSTR = "maxDim";
+
+
+        public String getParserName() {
+            return ACTIVE_INDICATORS_STATISTIC;
+        }
+
+        public Object parseXMLObject(XMLObject xo) throws XMLParseException {
+            Parameter indicators = (Parameter) xo.getElementFirstChild(INDICATORS);
+            
+        	int maxDim = 30;
+        	if(xo.hasAttribute(MAXDIMSTR)){
+        		maxDim = xo.getIntegerAttribute(MAXDIMSTR);
+        	}
+
+            return new ActiveIndicatorsStatistic(indicators, maxDim);
+
+        }
+
+        //************************************************************************
+        // AbstractXMLObjectParser implementation
+        //************************************************************************
+
+        public String getParserDescription() {
+            return "This element returns a statistic that shifts a matrix of locations by location drift in the first dimension.";
+        }
+
+        public Class getReturnType() {
+            return ActiveIndicatorsStatistic.class;
+        }
+
+        public XMLSyntaxRule[] getSyntaxRules() {
+            return rules;
+        }
+
+        private XMLSyntaxRule[] rules = new XMLSyntaxRule[]{
+            new ElementRule(INDICATORS, Parameter.class),
+            AttributeRule.newDoubleRule(MAXDIMSTR, true, "the variance of mu"),
+
+        };
+    };
+
+    private Parameter indicatorsParameter;
+
+}
diff --git a/src/dr/evomodel/antigenic/phyloClustering/statistics/AnnotateLocationParameterTreeTrait.java b/src/dr/evomodel/antigenic/phyloClustering/statistics/AnnotateLocationParameterTreeTrait.java
new file mode 100644
index 0000000..31cb603
--- /dev/null
+++ b/src/dr/evomodel/antigenic/phyloClustering/statistics/AnnotateLocationParameterTreeTrait.java
@@ -0,0 +1,214 @@
+package dr.evomodel.antigenic.phyloClustering.statistics;
+
+
+import dr.app.beagle.evomodel.treelikelihood.AncestralStateTraitProvider;
+import dr.evolution.tree.NodeRef;
+import dr.evolution.tree.Tree;
+import dr.evolution.tree.TreeTrait;
+import dr.evolution.tree.TreeTraitProvider;
+import dr.evolution.tree.TreeTrait.Intent;
+import dr.evolution.tree.TreeTraitProvider.Helper;
+import dr.evomodel.tree.TreeModel;
+import dr.inference.model.MatrixParameter;
+import dr.inference.model.Parameter;
+import dr.xml.AbstractXMLObjectParser;
+import dr.xml.AttributeRule;
+import dr.xml.ElementRule;
+import dr.xml.XMLObject;
+import dr.xml.XMLObjectParser;
+import dr.xml.XMLParseException;
+import dr.xml.XMLSyntaxRule;
+
+
+public class AnnotateLocationParameterTreeTrait implements TreeTraitProvider {
+	
+    protected Helper treeTraits = new Helper();
+    private TreeModel treeModel;
+    public static final String ANNOTATELOCATIONPARAMETERTREETRAIT = "AnnotateLocationParameterTreeTrait";
+    public final static String locationParameterStr = "virusLocationsTreeNodes";
+    //public final static String serumDriftStr = "serumDrift";  //but maybe serum drift is off
+    public final static String indicatorsStr = "indicators";
+    public final static String hasDriftStr = "hasDrift";
+
+    public final static String MU1_SCALE_PARAMETER = "mu1Scale";
+    public final static String MU2_SCALE_PARAMETER = "mu2Scale";
+    public final static String MUMEAN_PARAMETER = "muMean";
+    
+    
+    private MatrixParameter locationParameter;
+    private Parameter indicators;
+   // private Parameter serumDrift;
+    private Parameter mu1ScaleParameter;
+    private Parameter mu2ScaleParameter;
+    private Parameter muMeanParameter;
+    private boolean hasDrift;
+
+    
+    
+	
+	// public AnnotateMuTreeTrait(TreeModel treeModel_in, MatrixParameter muTreeNode_in, Parameter indicators_in, Parameter serumDrift_in, boolean driftTreeClusterMu){
+    public AnnotateLocationParameterTreeTrait(TreeModel treeModel_in, MatrixParameter locationTreeNode_in, Parameter indicators_in, boolean driftTreeClusterMu, Parameter mu1Scale, Parameter mu2Scale, Parameter muMean) {	
+    
+		 this.treeModel = treeModel_in;
+		 this.locationParameter = locationTreeNode_in;
+		 this.indicators = indicators_in;
+		 //this.serumDrift = serumDrift_in;
+		 this.mu1ScaleParameter = mu1Scale;
+		 this.mu2ScaleParameter = mu2Scale;
+		 this.muMeanParameter = muMean;
+		 this.hasDrift = driftTreeClusterMu;
+		 
+	        treeTraits.addTrait(new TreeTrait.IA() {
+	        	
+	            public String getTraitName() {
+	            	//System.out.println("print label");
+	               // return tag;
+	            	return "antigenic";
+	            }
+	            
+	            
+	            
+	            public String getTraitString(Tree tree, NodeRef node) {
+	            	
+	            	if(tree != treeModel){
+	            		System.out.println("Something is wrong. Why is tree not equal to treeModel?");
+	            		System.exit(0);
+	            	}
+
+	            		double firstCoord = locationParameter.getParameter(node.getNumber()).getParameterValue(0);
+	            		double secondCoord =  locationParameter.getParameter(node.getNumber()).getParameterValue(1);
+	            	
+	            		//if(hasDrift && serumDrift != null ){
+	            		//	//multiply the first coordinate by the drift term
+	            		//	firstCoord = firstCoord * serumDrift.getParameterValue(0);
+	            		//}
+	               		if(hasDrift && mu1ScaleParameter != null && muMeanParameter != null ){
+	            			//multiply the first coordinate by the drift term
+	            			firstCoord = firstCoord * mu1ScaleParameter.getParameterValue(0);
+	            		}
+	               		if(hasDrift && mu2ScaleParameter != null ){
+	            			//multiply the first coordinate by the drift term
+	            			secondCoord = secondCoord * mu2ScaleParameter.getParameterValue(0);
+	            		}
+	            		
+	            		String outputStr = "{"+ firstCoord + "," + secondCoord +"}";	            	
+	            	
+	            	return outputStr;
+	            }
+	            
+            
+	            
+	            public Intent getIntent() {
+	            	//System.out.println("getIntent");
+	                return Intent.NODE;
+	            }
+
+	            public Class getTraitClass() {
+	            	System.out.println("getTraitClass ran. Not expected. Quit now");
+	            	System.exit(0);
+	                return int[].class;
+	            }
+
+	            
+	            public int[] getTrait(Tree tree, NodeRef node) {
+	              //  return getStatesForNode(tree, node);
+	            	System.out.println("getTrait ran. Not expected. Quit now");
+	            	System.exit(0);
+	            	//int x[] = new int[10];
+	            	return null;
+	            }
+
+
+	        });
+
+		 
+	 }
+	 
+	 
+
+	    public TreeTrait[] getTreeTraits() {
+	    	//System.out.println("hihi");
+	        return treeTraits.getTreeTraits();
+	    }
+	
+	
+	    public TreeTrait getTreeTrait(String key) {
+	    	System.out.println("not expected to run getTreeTrait. Quit now");
+	    	System.exit(0);
+	        return treeTraits.getTreeTrait(key);
+	    }
+	 
+
+	    
+
+	    public static XMLObjectParser PARSER = new AbstractXMLObjectParser() {
+
+
+	        public String getParserName() {
+	            return ANNOTATELOCATIONPARAMETERTREETRAIT;
+	        }
+
+	        public Object parseXMLObject(XMLObject xo) throws XMLParseException {
+	            TreeModel treeModel = (TreeModel) xo.getChild(TreeModel.class);
+
+	            XMLObject cxo = xo.getChild(locationParameterStr);
+                MatrixParameter locationParameter = (MatrixParameter) cxo.getChild(MatrixParameter.class);
+           
+                 cxo = xo.getChild(indicatorsStr);
+                Parameter indicators = (Parameter) cxo.getChild(Parameter.class);
+                
+              //   cxo = xo.getChild(serumDriftStr);
+               // Parameter serumDrift = (Parameter) cxo.getChild(Parameter.class);
+           
+                boolean hasDrift = xo.getAttribute(hasDriftStr, false);
+
+                Parameter mu1Scale = null;
+                if (xo.hasChildNamed(MU1_SCALE_PARAMETER)) {
+                	mu1Scale = (Parameter) xo.getElementFirstChild(MU1_SCALE_PARAMETER);
+                }
+                
+                Parameter mu2Scale = null;
+                if (xo.hasChildNamed(MU2_SCALE_PARAMETER)) {
+                	mu2Scale = (Parameter) xo.getElementFirstChild(MU2_SCALE_PARAMETER);
+                }  
+                
+                Parameter muMean = null;
+                if(xo.hasChildNamed(MUMEAN_PARAMETER)){
+                	muMean = (Parameter) xo.getElementFirstChild(MUMEAN_PARAMETER);
+                }
+	        	// return new AnnotateMuTreeTrait( treeModel, clusterLabelsTreeNode, indicators, serumDrift,driftTreeClusterMu);
+                return new AnnotateLocationParameterTreeTrait( treeModel, locationParameter, indicators, hasDrift, mu1Scale, mu2Scale, muMean);
+
+	        }
+
+	        //************************************************************************
+	        // AbstractXMLObjectParser implementation
+	        //************************************************************************
+
+	        public String getParserDescription() {
+	            return "Integrate mu coordinates into the tree.";
+	        }
+
+	        public Class getReturnType() {
+	            return AnnotateLocationParameterTreeTrait.class;
+	        }
+
+	        public XMLSyntaxRule[] getSyntaxRules() {
+	            return rules;
+	        }
+
+	        private XMLSyntaxRule[] rules = new XMLSyntaxRule[]{
+	            new ElementRule(TreeModel.class),
+                new ElementRule(locationParameterStr, MatrixParameter.class),
+                new ElementRule(indicatorsStr,Parameter.class),
+           //     new ElementRule(serumDriftStr, Parameter.class),
+                new ElementRule(MU1_SCALE_PARAMETER, Parameter.class, "Optional parameter for scaling the first dimension of mu"),
+                new ElementRule(MU2_SCALE_PARAMETER, Parameter.class, "Optional parameter for scaling the second dimension of mu"),
+                new ElementRule(MUMEAN_PARAMETER, Parameter.class),
+                AttributeRule.newBooleanRule(hasDriftStr, true, "whether to multiple the mu by the drift term"),
+
+	        };
+	    };
+	
+	
+}
diff --git a/src/dr/evomodel/antigenic/phyloClustering/statistics/AnnotateMuTreeTrait.java b/src/dr/evomodel/antigenic/phyloClustering/statistics/AnnotateMuTreeTrait.java
new file mode 100644
index 0000000..7c869e0
--- /dev/null
+++ b/src/dr/evomodel/antigenic/phyloClustering/statistics/AnnotateMuTreeTrait.java
@@ -0,0 +1,215 @@
+package dr.evomodel.antigenic.phyloClustering.statistics;
+
+
+import dr.app.beagle.evomodel.treelikelihood.AncestralStateTraitProvider;
+import dr.evolution.tree.NodeRef;
+import dr.evolution.tree.Tree;
+import dr.evolution.tree.TreeTrait;
+import dr.evolution.tree.TreeTraitProvider;
+import dr.evolution.tree.TreeTrait.Intent;
+import dr.evolution.tree.TreeTraitProvider.Helper;
+import dr.evomodel.tree.TreeModel;
+import dr.inference.model.MatrixParameter;
+import dr.inference.model.Parameter;
+import dr.xml.AbstractXMLObjectParser;
+import dr.xml.AttributeRule;
+import dr.xml.ElementRule;
+import dr.xml.XMLObject;
+import dr.xml.XMLObjectParser;
+import dr.xml.XMLParseException;
+import dr.xml.XMLSyntaxRule;
+
+
+public class AnnotateMuTreeTrait implements TreeTraitProvider {
+	
+    protected Helper treeTraits = new Helper();
+    private TreeModel treeModel;
+    public static final String ANNOTATEMUPARAMETERTREETRAIT = "AnnotateMuTreeTrait";
+    public final static String muParameterStr = "mu";
+    //public final static String serumDriftStr = "serumDrift";  //but maybe serum drift is off
+    public final static String indicatorsStr = "indicators";
+    public final static String hasDriftStr = "hasDrift";
+
+    public final static String MU1_SCALE_PARAMETER = "mu1Scale";
+    public final static String MU2_SCALE_PARAMETER = "mu2Scale";
+    
+    public final static String MUMEAN_PARAMETER = "muMean";
+    
+    
+    private MatrixParameter muParameter;
+    private Parameter indicators;
+   // private Parameter serumDrift;
+    private Parameter mu1ScaleParameter;
+    private Parameter mu2ScaleParameter;
+    private Parameter muMeanParameter;
+    private boolean hasDrift;
+
+    
+    
+	
+	// public AnnotateMuTreeTrait(TreeModel treeModel_in, MatrixParameter muTreeNode_in, Parameter indicators_in, Parameter serumDrift_in, boolean driftTreeClusterMu){
+    public AnnotateMuTreeTrait(TreeModel treeModel_in, MatrixParameter muTreeNode_in, Parameter indicators_in, boolean driftTreeClusterMu, Parameter mu1Scale, Parameter mu2Scale, Parameter muMean) {	
+    
+		 this.treeModel = treeModel_in;
+		 this.muParameter = muTreeNode_in;
+		 this.indicators = indicators_in;
+		 //this.serumDrift = serumDrift_in;
+		 this.mu1ScaleParameter = mu1Scale;
+		 this.mu2ScaleParameter = mu2Scale;
+		 this.muMeanParameter = muMean;
+		 this.hasDrift = driftTreeClusterMu;
+		 
+	        treeTraits.addTrait(new TreeTrait.IA() {
+	        	
+	            public String getTraitName() {
+	            	//System.out.println("print label");
+	               // return tag;
+	            	return "mu";
+	            }
+
+	            public String getTraitString(Tree tree, NodeRef node) {
+	            	
+	            	if(tree != treeModel){
+	            		System.out.println("Something is wrong. Why is tree not equal to treeModel?");
+	            		System.exit(0);
+	            	}
+	            	
+	            	String outputStr = "{0,0}";
+	            	if( (int)indicators.getParameterValue(node.getNumber() )  ==1 ){
+	            		double firstCoord = muParameter.getParameter(node.getNumber()).getParameterValue(0);
+	            		double secondCoord =  muParameter.getParameter(node.getNumber()).getParameterValue(1);
+	            	
+	            	
+	            		//if(hasDrift && serumDrift != null ){
+	            		//	//multiply the first coordinate by the drift term
+	            		//	firstCoord = firstCoord * serumDrift.getParameterValue(0);
+	            		//}
+	               		if(hasDrift && mu1ScaleParameter != null && muMeanParameter != null ){
+	            			//multiply the first coordinate by the drift term
+	            			firstCoord =  firstCoord * mu1ScaleParameter.getParameterValue(0);
+	            		}
+	               		if(hasDrift && mu2ScaleParameter != null ){
+	            			//multiply the first coordinate by the drift term
+	            			secondCoord = secondCoord * mu2ScaleParameter.getParameterValue(0);
+	            		}
+	            		
+	            		outputStr = "{"+ firstCoord + "," + secondCoord +"}";
+	            	}
+	            	return outputStr;
+	            }
+	            
+	            
+	            public Intent getIntent() {
+	            	//System.out.println("getIntent");
+	                return Intent.NODE;
+	            }
+
+	            public Class getTraitClass() {
+	            	System.out.println("getTraitClass ran. Not expected. Quit now");
+	            	System.exit(0);
+	                return int[].class;
+	            }
+
+	            
+	            public int[] getTrait(Tree tree, NodeRef node) {
+	              //  return getStatesForNode(tree, node);
+	            	System.out.println("getTrait ran. Not expected. Quit now");
+	            	System.exit(0);
+	            	//int x[] = new int[10];
+	            	return null;
+	            }
+
+
+	        });
+
+		 
+	 }
+	 
+	 
+
+	    public TreeTrait[] getTreeTraits() {
+	    	//System.out.println("hihi");
+	        return treeTraits.getTreeTraits();
+	    }
+	
+	
+	    public TreeTrait getTreeTrait(String key) {
+	    	System.out.println("not expected to run getTreeTrait. Quit now");
+	    	System.exit(0);
+	        return treeTraits.getTreeTrait(key);
+	    }
+	 
+
+	    
+
+	    public static XMLObjectParser PARSER = new AbstractXMLObjectParser() {
+
+
+	        public String getParserName() {
+	            return ANNOTATEMUPARAMETERTREETRAIT;
+	        }
+
+	        public Object parseXMLObject(XMLObject xo) throws XMLParseException {
+	            TreeModel treeModel = (TreeModel) xo.getChild(TreeModel.class);
+
+	            XMLObject cxo = xo.getChild(muParameterStr);
+                MatrixParameter muParameter = (MatrixParameter) cxo.getChild(MatrixParameter.class);
+           
+                 cxo = xo.getChild(indicatorsStr);
+                Parameter indicators = (Parameter) cxo.getChild(Parameter.class);
+                
+              //   cxo = xo.getChild(serumDriftStr);
+               // Parameter serumDrift = (Parameter) cxo.getChild(Parameter.class);
+           
+                boolean hasDrift = xo.getAttribute(hasDriftStr, false);
+
+                Parameter mu1Scale = null;
+                if (xo.hasChildNamed(MU1_SCALE_PARAMETER)) {
+                	mu1Scale = (Parameter) xo.getElementFirstChild(MU1_SCALE_PARAMETER);
+                }
+                
+                Parameter mu2Scale = null;
+                if (xo.hasChildNamed(MU2_SCALE_PARAMETER)) {
+                	mu2Scale = (Parameter) xo.getElementFirstChild(MU2_SCALE_PARAMETER);
+                }
+                
+                Parameter muMean = null;
+                if(xo.hasChildNamed(MUMEAN_PARAMETER)){
+                	muMean = (Parameter) xo.getElementFirstChild(MUMEAN_PARAMETER);
+                }
+	        	// return new AnnotateMuTreeTrait( treeModel, clusterLabelsTreeNode, indicators, serumDrift,driftTreeClusterMu);
+                return new AnnotateMuTreeTrait( treeModel, muParameter, indicators, hasDrift, mu1Scale, mu2Scale, muMean);
+
+	        }
+
+	        //************************************************************************
+	        // AbstractXMLObjectParser implementation
+	        //************************************************************************
+
+	        public String getParserDescription() {
+	            return "Integrate mu coordinates into the tree.";
+	        }
+
+	        public Class getReturnType() {
+	            return AnnotateMuTreeTrait.class;
+	        }
+
+	        public XMLSyntaxRule[] getSyntaxRules() {
+	            return rules;
+	        }
+
+	        private XMLSyntaxRule[] rules = new XMLSyntaxRule[]{
+	            new ElementRule(TreeModel.class),
+                new ElementRule(muParameterStr, MatrixParameter.class),
+                new ElementRule(indicatorsStr,Parameter.class),
+           //     new ElementRule(serumDriftStr, Parameter.class),
+                new ElementRule(MU1_SCALE_PARAMETER, Parameter.class, "Optional parameter for scaling the first dimension of mu"),
+                new ElementRule(MU2_SCALE_PARAMETER, Parameter.class, "Optional parameter for scaling the second dimension of mu"),
+                new ElementRule(MUMEAN_PARAMETER, Parameter.class),
+                AttributeRule.newBooleanRule(hasDriftStr, true, "whether to multiple the mu by the drift term"),
+
+	        };
+	    };
+	
+	
+}
diff --git a/src/dr/evomodel/antigenic/phyloClustering/statistics/CausalMutationsLogger.java b/src/dr/evomodel/antigenic/phyloClustering/statistics/CausalMutationsLogger.java
new file mode 100644
index 0000000..fcc139f
--- /dev/null
+++ b/src/dr/evomodel/antigenic/phyloClustering/statistics/CausalMutationsLogger.java
@@ -0,0 +1,133 @@
+package dr.evomodel.antigenic.phyloClustering.statistics;
+
+import java.util.Iterator;
+import java.util.LinkedList;
+
+import dr.evomodel.antigenic.phyloClustering.TreeClusteringVirusesPrior;
+import dr.evomodel.tree.TreeModel;
+import dr.inference.loggers.LogColumn;
+import dr.inference.loggers.Loggable;
+import dr.inference.loggers.NumberColumn;
+import dr.inference.model.Parameter;
+import dr.inference.model.Variable;
+import dr.xml.AbstractXMLObjectParser;
+import dr.xml.ElementRule;
+import dr.xml.XMLObject;
+import dr.xml.XMLObjectParser;
+import dr.xml.XMLParseException;
+import dr.xml.XMLSyntaxRule;
+
+public class CausalMutationsLogger implements Loggable {
+
+    /**
+     * @return the log columns.
+     */
+    public LogColumn[] getColumns() {
+    	 	
+    	
+    	int numDimension = treeModel.getNodeCount();
+        LogColumn[] columns = new LogColumn[numDimension];
+        for (int i = 0; i < numDimension; i++) {
+            //columns[i] = new StatisticColumn(getDimensionName(i), i);
+        	String traitName = "" + (i) + ":";
+	    	LinkedList<Integer>[] mutationList= clusterPrior.getMutationList();
+	    	if(mutationList[i] != null){
+		    	Iterator itr = mutationList[i].iterator();
+		    	int count = 0;
+		    	while(itr.hasNext()){
+		    		if(count > 0){
+		    			traitName +=",";
+		    		}
+		    		int curMutation = ((Integer) itr.next()).intValue();
+		    		traitName += curMutation;
+		    		count++;
+		    	}
+	    	}
+        	
+        	
+        	final int curNode = i;
+        	columns[i] = new LogColumn.Abstract(traitName){
+        		@Override
+        		protected String getFormattedValue(){
+        			//return "AAA";
+        	    	LinkedList<Integer>[] causalList= clusterPrior.getCausalList();
+        			return parseCausalList(causalList[curNode]);
+        		}
+        	};
+        }
+        return columns;
+    }
+    
+    
+    
+    public String parseCausalList(LinkedList<Integer> causalMutations){
+    	String stateList = "";
+    	if(causalMutations != null){
+	    	Iterator itr = causalMutations.iterator();
+	    	while(itr.hasNext()){
+	    		int curState = ((Integer) itr.next()).intValue();
+	    		stateList += curState;
+	    	}
+	    	stateList = "s" + new StringBuilder(stateList).reverse().toString(); //need to reverse printing the states to be correct.
+    	}
+    	else{
+    		stateList = "s";
+    	}
+    	 return(stateList);
+    }
+    
+    public static final String PARSER_NAME = "CausalMutationsLogger";
+    private TreeModel treeModel;
+    private TreeClusteringVirusesPrior clusterPrior;
+    
+    public CausalMutationsLogger(TreeModel tree, TreeClusteringVirusesPrior clusterPrior_in) {
+        this.treeModel = tree;
+		 this.clusterPrior = clusterPrior_in;
+    }
+    
+    public void variableChangedEvent(Variable variable, int index, Parameter.ChangeType type) {
+        // do nothing
+    	//System.out.println("hi got printed");
+    }
+
+    public static XMLObjectParser PARSER = new AbstractXMLObjectParser() {
+
+
+
+        public String getParserName() {
+            return PARSER_NAME;
+        }
+
+        public Object parseXMLObject(XMLObject xo) throws XMLParseException {
+            TreeModel treeModel = (TreeModel) xo.getChild(TreeModel.class);
+            TreeClusteringVirusesPrior clusterPrior = (TreeClusteringVirusesPrior) xo.getChild(TreeClusteringVirusesPrior.class);
+ 
+            return new CausalMutationsLogger( treeModel, clusterPrior);
+
+        }
+
+        //************************************************************************
+        // AbstractXMLObjectParser implementation
+        //************************************************************************
+
+        public String getParserDescription() {
+            return ".";
+        }
+
+        public Class getReturnType() {
+            return CausalMutationsLogger.class;
+        }
+
+        public XMLSyntaxRule[] getSyntaxRules() {
+            return rules;
+        }
+
+        private XMLSyntaxRule[] rules = new XMLSyntaxRule[]{
+	            new ElementRule(TreeModel.class),
+	            new ElementRule(TreeClusteringVirusesPrior.class),
+        };
+    };
+
+
+
+}
diff --git a/src/dr/evomodel/antigenic/phyloClustering/statistics/ClusterLabelsVirusesStatistic.java b/src/dr/evomodel/antigenic/phyloClustering/statistics/ClusterLabelsVirusesStatistic.java
new file mode 100644
index 0000000..ca38025
--- /dev/null
+++ b/src/dr/evomodel/antigenic/phyloClustering/statistics/ClusterLabelsVirusesStatistic.java
@@ -0,0 +1,345 @@
+
+package dr.evomodel.antigenic.phyloClustering.statistics;
+
+import java.util.LinkedList;
+
+import dr.evolution.tree.NodeRef;
+import dr.evomodel.tree.TreeModel;
+import dr.inference.model.*;
+import dr.xml.*;
+
+/**
+ *  @author Charles Cheung
+ * @author Trevor Bedford
+ */
+
+
+public class ClusterLabelsVirusesStatistic extends Statistic.Abstract implements VariableListener {
+
+	private  int max_dim ;
+	
+	private LinkedList<Double> path  = new LinkedList<Double>();
+//	private int[] fromMembership;
+	private int[] clusterLabels;
+//	private Parameter clusterLabels;
+	private Parameter indicators;
+	private MatrixParameter virusLocations;
+    private TreeModel treeModel;
+
+    public static final String CLUSTERLABELS_STATISTIC = "clusterLabelsVirusesStatistic";
+    
+    
+    int []membershipToClusterLabelIndexes = null;        
+
+
+    public ClusterLabelsVirusesStatistic(TreeModel tree, Parameter indicators, int max_dim_in, MatrixParameter virusLocations_in) {
+    //public PathStatistic(Parameter clusterLabels, TreeModel tree, Parameter indicators, int max_dim_in) {
+      //  this.clusterLabels = clusterLabels;
+        this.treeModel = tree;
+        this.indicators = indicators;
+        this.max_dim = max_dim_in;
+        this.virusLocations = virusLocations_in;
+        
+      //  clusterLabels.addParameterListener(this);
+        indicators.addParameterListener(this);
+        virusLocations.addParameterListener(this);
+        
+		setMembershipToClusterLabelIndexes(); // if the tree doesn't change, then I really don't have to do it all the time
+    }
+    
+
+
+    public int getDimension() {
+		int numdata = virusLocations.getColumnDimension();
+        return numdata;
+    }
+
+
+
+    //assume print in order... so before printing the first number, 
+    //determine all the nodes that are active.
+    public double getStatisticValue(int dim) {
+
+    	if(dim ==0){
+    		//Note: if the tree doesn't change, then I don't have to run setMembershipToClusterLabelIndexes every time
+    		setClusterLabelsUsingIndicators();
+    	}      
+       return ((double) clusterLabels[dim]);
+
+    }
+
+    
+    
+    
+
+	private void setMembershipToClusterLabelIndexes(){
+
+  	   //I suspect this is an expensive operation, so I don't want to do it many times,
+  	   //which is also unnecessary  - MAY have to update whenever a different tree is used.
+		int numdata = virusLocations.getColumnDimension();
+		int numNodes = treeModel.getNodeCount();
+         membershipToClusterLabelIndexes = new int[numdata]; 
+         clusterLabels = new int[numdata];
+         for(int i=0; i < numdata; i++){
+  		   Parameter v = virusLocations.getParameter(i);
+  		   String curName = v.getParameterName();
+  		  // System.out.println(curName);
+  		   int isFound = 0;
+      	   for(int j=0; j < numNodes; j++){
+      		   String treeId = treeModel.getTaxonId(j);
+      		   if(curName.equals(treeId) ){
+      		//	   System.out.println("  isFound at j=" + j);
+      			   membershipToClusterLabelIndexes[i] = j;
+      			   isFound=1;
+      			   break;
+      		   }	   
+      	   }
+      	   if(isFound ==0){
+      		   System.out.println("not found. Exit now.");
+      		   System.exit(0);
+      	   }     	   
+         }
+    }
+	
+	
+	private void setClusterLabelsUsingIndicators(){
+
+        int []membership = determine_membership_v2(treeModel);
+		int numdata = virusLocations.getColumnDimension();
+        for(int i=0; i < numdata; i++){   
+        	clusterLabels[i] = membership[membershipToClusterLabelIndexes[i]] ;
+        }
+	}
+	
+	
+
+    //traverse down the tree, top down, do calculation
+     int[] determine_membership_v2(TreeModel treeModel){
+	    	
+	    NodeRef root = treeModel.getRoot();
+	
+	    int numClusters = 1;
+	    LinkedList<NodeRef> list = new LinkedList<NodeRef>();
+	    list.addFirst(root);
+	
+	    int[] membership = new int[treeModel.getNodeCount()];
+	    for(int i=0; i < treeModel.getNodeCount(); i++){
+	    	membership[i] = -1;
+	    }
+	    membership[root.getNumber()] = 0; //root always given the first cluster
+	          
+	    while(!list.isEmpty()){
+	    	//do things with the current object
+	    	NodeRef curElement = list.pop();
+	    	//String content = "node #" + curElement.getNumber() +", taxon=" + treeModel.getNodeTaxon(curElement) + " and parent is = " ;
+	    	String content = "node #" + curElement.getNumber() +", taxon= " ;
+	    	if(treeModel.getNodeTaxon(curElement)== null){
+	    		content += "internal node\t";
+	    	}
+	    	else{
+	    		content += treeModel.getNodeTaxon(curElement).getId() + "\t";
+	    		//content += treeModel.getTaxonIndex(treeModel.getNodeTaxon(curElement)) + "\t";
+	    	}
+	    	
+	       	if(treeModel.getParent(curElement)== null){
+	    		//content += "no parent";
+	    	}
+	    	else{
+	    		//content += "parent node#=" + treeModel.getParent(curElement).getNumber();
+	    	}
+	    	
+	    	//cluster assignment:
+	    	if(!treeModel.isRoot(curElement)){
+	    		if( (int) indicators.getParameterValue(curElement.getNumber() ) == 1) {
+	    			numClusters++ ;
+	    			membership[ curElement.getNumber() ] = numClusters - 1; 
+	      	 	}
+	    		else{
+	    			//inherit from parent's cluster assignment
+	    			membership[curElement.getNumber()] = membership[treeModel.getParent(curElement).getNumber()]; 
+	    		}        	
+	    	}//is not Root
+	    	content += " cluster = " + membership[curElement.getNumber()] ; 
+	    	
+	    //	System.out.println(content);
+	
+	    	
+	        for(int childNum=0; childNum < treeModel.getChildCount(curElement); childNum++){
+	        	list.addFirst(treeModel.getChild(curElement,childNum));
+	        }
+	    }
+	
+	     return(membership);
+    }
+
+	
+
+
+
+    
+    
+    //traverse down the tree, top down, do calculation
+    int[] determine_from_membership_v2(TreeModel treeModel){
+	    	//note: I set MAX_DIM as the most I would print, but in order to avoid bug, I 
+    	//declare the number of nodes as the most active nodes I can have.
+    	int[] fromMembership = new int[treeModel.getNodeCount()];
+    	for(int i=0; i < treeModel.getNodeCount(); i++){
+    		fromMembership[i ] = -1;
+    	}
+    	
+	    NodeRef root = treeModel.getRoot();
+	
+	    int numClusters = 1;
+	    LinkedList<NodeRef> list = new LinkedList<NodeRef>();
+	    list.addFirst(root);
+	
+	    int[] membership = new int[treeModel.getNodeCount()];
+	    for(int i=0; i < treeModel.getNodeCount(); i++){
+	    	membership[i] = -1;
+	    }
+	    membership[root.getNumber()] = 0; //root always given the first cluster
+	          
+	    while(!list.isEmpty()){
+	    	//do things with the current object
+	    	NodeRef curElement = list.pop();
+	    	//String content = "node #" + curElement.getNumber() +", taxon=" + treeModel.getNodeTaxon(curElement) + " and parent is = " ;
+	    	String content = "node #" + curElement.getNumber() +", taxon= " ;
+	    	if(treeModel.getNodeTaxon(curElement)== null){
+	    		content += "internal node\t";
+	    	}
+	    	else{
+	    		content += treeModel.getNodeTaxon(curElement).getId() + "\t";
+	    		//content += treeModel.getTaxonIndex(treeModel.getNodeTaxon(curElement)) + "\t";
+	    	}
+	    	
+	       	if(treeModel.getParent(curElement)== null){
+	    		//content += "no parent";
+	    	}
+	    	else{
+	    		//content += "parent node#=" + treeModel.getParent(curElement).getNumber();
+	    	}
+	    	
+	    	//cluster assignment:
+	    	if(!treeModel.isRoot(curElement)){
+	    	 if( (int) indicators.getParameterValue(curElement.getNumber() ) == 1) {
+	    //		 System.out.print("indicator # " + curElement.getNumber()  + " ");
+	    		numClusters++ ;
+	    		membership[ curElement.getNumber() ] = numClusters - 1; 
+	    		fromMembership[numClusters -1] = membership[ treeModel.getParent(curElement).getNumber()];
+	    //		System.out.println("    membership " + (numClusters-1) + " assigned from " + membership[ treeModel.getParent(curElement).getNumber()] );
+	      	}
+	    	else{
+	    		//inherit from parent's cluster assignment
+	    		membership[curElement.getNumber()] = membership[treeModel.getParent(curElement).getNumber()]; 
+	    	 }
+	    	        	
+	    	}//is not Root
+	    	content += " cluster = " + membership[curElement.getNumber()] ; 
+	    	
+	    //	System.out.println(content);
+	
+	    	
+	        for(int childNum=0; childNum < treeModel.getChildCount(curElement); childNum++){
+	        	list.addFirst(treeModel.getChild(curElement,childNum));
+	        }
+	    }
+	
+	     return(fromMembership);
+   }
+
+	
+    
+    
+    
+    
+
+	//private LinkedList<Double> setPath() {
+	
+		    
+		 //return(0);
+		
+	//}
+
+
+
+    
+    
+    
+    
+    
+    
+    
+    
+    
+    
+    
+    public String getDimensionName(int dim) {
+    	
+		Parameter v = virusLocations.getParameter(dim);
+		String curName = v.getParameterName();
+    	
+        return curName;
+    }
+
+    public void variableChangedEvent(Variable variable, int index, Parameter.ChangeType type) {
+        // do nothing
+    	//System.out.println("hi got printed");
+    }
+
+    public static XMLObjectParser PARSER = new AbstractXMLObjectParser() {
+
+       // public final static String CLUSTERLABELS = "clusterLabels";
+        public final static String INDICATORS = "indicators";        
+        public final static String MAXDIMSTR = "maxDim";
+        public final static String VIRUSLOCATIONS = "virusLocations";        
+
+
+
+        public String getParserName() {
+            return CLUSTERLABELS_STATISTIC;
+        }
+
+        public Object parseXMLObject(XMLObject xo) throws XMLParseException {
+            //Parameter clusterLabels = (Parameter) xo.getElementFirstChild(CLUSTERLABELS);
+            TreeModel treeModel = (TreeModel) xo.getChild(TreeModel.class);
+            Parameter indicators = (Parameter) xo.getElementFirstChild(INDICATORS);
+            MatrixParameter virusLocations = (MatrixParameter) xo.getElementFirstChild(VIRUSLOCATIONS);
+
+        	int maxDim = 30;
+        	if(xo.hasAttribute(MAXDIMSTR)){
+        		maxDim = xo.getIntegerAttribute(MAXDIMSTR);
+        	}
+
+           // return new PathStatistic(clusterLabels, treeModel, indicators, maxDim);
+        	 return new ClusterLabelsVirusesStatistic( treeModel, indicators, maxDim, virusLocations);
+
+        }
+
+        //************************************************************************
+        // AbstractXMLObjectParser implementation
+        //************************************************************************
+
+        public String getParserDescription() {
+            return "This element returns a statistic that shifts a matrix of locations by location drift in the first dimension.";
+        }
+
+        public Class getReturnType() {
+            return ClusterLabelsVirusesStatistic.class;
+        }
+
+        public XMLSyntaxRule[] getSyntaxRules() {
+            return rules;
+        }
+
+        private XMLSyntaxRule[] rules = new XMLSyntaxRule[]{
+            //new ElementRule(CLUSTERLABELS, Parameter.class),
+            new ElementRule(TreeModel.class),
+            new ElementRule(INDICATORS, Parameter.class),
+            new ElementRule(VIRUSLOCATIONS, Parameter.class),
+            AttributeRule.newDoubleRule(MAXDIMSTR, true, "the variance of mu"),
+        };
+    };
+
+    
+
+}
diff --git a/src/dr/evomodel/antigenic/phyloClustering/statistics/ClusterLabelsVirusesTreeTrait.java b/src/dr/evomodel/antigenic/phyloClustering/statistics/ClusterLabelsVirusesTreeTrait.java
new file mode 100644
index 0000000..4eb017e
--- /dev/null
+++ b/src/dr/evomodel/antigenic/phyloClustering/statistics/ClusterLabelsVirusesTreeTrait.java
@@ -0,0 +1,158 @@
+package dr.evomodel.antigenic.phyloClustering.statistics;
+
+
+import dr.app.beagle.evomodel.treelikelihood.AncestralStateTraitProvider;
+import dr.evolution.tree.NodeRef;
+import dr.evolution.tree.Tree;
+import dr.evolution.tree.TreeTrait;
+import dr.evolution.tree.TreeTraitProvider;
+import dr.evolution.tree.TreeTrait.Intent;
+import dr.evolution.tree.TreeTraitProvider.Helper;
+import dr.evomodel.tree.TreeModel;
+import dr.inference.model.Parameter;
+import dr.xml.AbstractXMLObjectParser;
+import dr.xml.AttributeRule;
+import dr.xml.ElementRule;
+import dr.xml.XMLObject;
+import dr.xml.XMLObjectParser;
+import dr.xml.XMLParseException;
+import dr.xml.XMLSyntaxRule;
+
+public class ClusterLabelsVirusesTreeTrait implements TreeTraitProvider {
+	
+    protected Helper treeTraits = new Helper();
+    private TreeModel treeModel;
+    public static final String CLUSTERLABELSTREETRAIT = "ClusterLabelsVirusesTreeTrait";
+    public final static String CLUSTERLABELSTREENODE = "clusterLabelsTreeNodes";
+    
+    private Parameter clusterLabelsTreeNode;
+
+	
+	 public ClusterLabelsVirusesTreeTrait(TreeModel treeModel_in, Parameter clusterLabelsTreeNode_in){
+		 
+		 this.treeModel = treeModel_in;
+		 this.clusterLabelsTreeNode = clusterLabelsTreeNode_in;
+		 
+	        treeTraits.addTrait(new TreeTrait.IA() {
+	        	
+	            public String getTraitName() {
+	            	//System.out.println("print label");
+	               // return tag;
+	            	return "cluster";
+	            }
+
+	            public String getTraitString(Tree tree, NodeRef node) {
+	            	
+	            	if(tree != treeModel){
+	            		System.out.println("Something is wrong. Why is tree not equal to treeModel?");
+	            		System.exit(0);
+	            	}
+
+	            	//the problem is, I don't know how to only do the processing to get the clusterLabels
+	            	//right before this routine is run... 
+	            	//so I have to keep a parameter or variable somewhere to store the clusterLabels information of the tree nodes
+	            	//whenever the cluster assignment is changed.. 
+	            	//the states have to be precomputed so this only prints..
+	            	
+	            	//System.out.println("node=" + node.getNumber());
+	            	
+	            	String clusterLabelString = ((int) clusterLabelsTreeNode.getParameterValue(node.getNumber()) ) + "";
+	            	//String clusterLabelString = node.getNumber() + "";  //to get the node numbering of the tree.
+	            	
+	            	//if(node.getNumber() ==0){
+	            	//	//System.out.println("print ");
+	            	//	for(int i=0; i < treeModel.getNodeCount(); i++){
+	            	//		System.out.print(  clusterLabelsTreeNode.getParameterValue(i) + "\t");
+	            	//	}
+	            	//	System.out.println("");
+	            	//}
+	                //return formattedState(getStatesForNode(tree, node), dataType);
+	            	return clusterLabelString;
+	            }
+	            
+	            
+	            public Intent getIntent() {
+	            	//System.out.println("getIntent");
+	                return Intent.NODE;
+	            }
+
+	            public Class getTraitClass() {
+	            	System.out.println("getTraitClass ran. Not expected. Quit now");
+	            	System.exit(0);
+	                return int[].class;
+	            }
+
+	            
+	            public int[] getTrait(Tree tree, NodeRef node) {
+	              //  return getStatesForNode(tree, node);
+	            	System.out.println("getTrait ran. Not expected. Quit now");
+	            	System.exit(0);
+	            	//int x[] = new int[10];
+	            	return null;
+	            }
+
+
+	        });
+
+		 
+	 }
+	 
+	 
+
+	    public TreeTrait[] getTreeTraits() {
+	    	//System.out.println("hihi");
+	        return treeTraits.getTreeTraits();
+	    }
+	
+	
+	    public TreeTrait getTreeTrait(String key) {
+	    	System.out.println("not expected to run getTreeTrait. Quit now");
+	    	System.exit(0);
+	        return treeTraits.getTreeTrait(key);
+	    }
+	 
+
+	    
+
+	    public static XMLObjectParser PARSER = new AbstractXMLObjectParser() {
+
+
+	        public String getParserName() {
+	            return CLUSTERLABELSTREETRAIT;
+	        }
+
+	        public Object parseXMLObject(XMLObject xo) throws XMLParseException {
+	            TreeModel treeModel = (TreeModel) xo.getChild(TreeModel.class);
+
+	            XMLObject cxo = xo.getChild(CLUSTERLABELSTREENODE);
+                Parameter clusterLabelsTreeNode = (Parameter) cxo.getChild(Parameter.class);
+           
+	        	 return new ClusterLabelsVirusesTreeTrait( treeModel, clusterLabelsTreeNode);
+
+	        }
+
+	        //************************************************************************
+	        // AbstractXMLObjectParser implementation
+	        //************************************************************************
+
+	        public String getParserDescription() {
+	            return "Integrate ClusterLabels of viruses into the tree.";
+	        }
+
+	        public Class getReturnType() {
+	            return ClusterLabelsVirusesTreeTrait.class;
+	        }
+
+	        public XMLSyntaxRule[] getSyntaxRules() {
+	            return rules;
+	        }
+
+	        private XMLSyntaxRule[] rules = new XMLSyntaxRule[]{
+	            new ElementRule(TreeModel.class),
+                new ElementRule(CLUSTERLABELSTREENODE, Parameter.class),
+
+	        };
+	    };
+	
+	
+}
diff --git a/src/dr/evomodel/antigenic/phyloClustering/statistics/DriftedMuStatistic.java b/src/dr/evomodel/antigenic/phyloClustering/statistics/DriftedMuStatistic.java
new file mode 100644
index 0000000..087f906
--- /dev/null
+++ b/src/dr/evomodel/antigenic/phyloClustering/statistics/DriftedMuStatistic.java
@@ -0,0 +1,168 @@
+
+package dr.evomodel.antigenic.phyloClustering.statistics;
+
+import java.util.LinkedList;
+
+import dr.evolution.tree.NodeRef;
+import dr.evomodel.tree.TreeModel;
+import dr.inference.model.*;
+import dr.xml.*;
+
+/**
+ *  @author Charles Cheung
+ * @author Trevor Bedford
+ */
+
+public class DriftedMuStatistic extends Statistic.Abstract implements VariableListener {
+
+	static  int MAX_DIM = 30;
+	
+
+	private MatrixParameter mu;
+    private TreeModel treeModel;
+    private Parameter indicators;
+//    private Parameter locationDrift;
+    private Parameter mu1ScaleParameter;
+    private Parameter mu2ScaleParameter;
+    private Parameter muMeanParameter;
+    
+    public static final String DRIFTED_MU_STATISTIC = "DriftedMuStatistic";
+
+   // public DriftedMuStatistic( TreeModel tree, MatrixParameter mu, Parameter indicators, Parameter locationDrift) {
+    public DriftedMuStatistic( TreeModel tree, MatrixParameter mu, Parameter indicators,  Parameter mu1Scale, Parameter mu2Scale, Parameter muMean) {
+        
+        this.treeModel = tree;
+        this.mu = mu;
+        this.indicators = indicators;
+       // this.locationDrift = locationDrift;
+        this.mu1ScaleParameter = mu1Scale;
+        mu1ScaleParameter.addParameterListener(this);
+        this.mu2ScaleParameter = mu2Scale;
+        mu2ScaleParameter.addParameterListener(this);
+        
+        this.muMeanParameter = muMean;
+        muMeanParameter.addParameterListener(this);
+        
+        mu.addParameterListener(this);
+        indicators.addParameterListener(this);
+        //locationDrift.addParameterListener(this);
+    }
+    
+
+
+    public int getDimension() {
+        return treeModel.getNodeCount()*2;
+    }
+
+
+
+    //assume print in order... so before printing the first number, 
+    //determine all the nodes that are active.
+    public double getStatisticValue(int dim) {
+
+    	int curNode = dim/2;
+    	double value = mu.getParameter(curNode).getParameterValue(dim % 2);
+    	
+    	//if((int) indicators.getParameterValue(curNode)  == 0){
+    	//	value = 0;
+    	//}
+    	
+
+	    	if(  dim % 2 == 0 ){
+	    		//value = value * locationDrift.getParameterValue(0) ;
+	    		value =   value * mu1ScaleParameter.getParameterValue(0);
+	    	}
+	    	else{
+	    		value = value * mu2ScaleParameter.getParameterValue(0);
+			
+	    	}
+    
+    	
+          return (  value );
+
+    }
+
+    
+    
+     
+    
+    
+    public String getDimensionName(int dim) {
+    	String name = "mu_" +  ((dim/2) )  + "-" + ((dim %2 ) +1 );
+        return name;
+    }
+
+    public void variableChangedEvent(Variable variable, int index, Parameter.ChangeType type) {
+        // do nothing
+    	//System.out.println("hi got printed");
+    }
+
+    public static XMLObjectParser PARSER = new AbstractXMLObjectParser() {
+
+        public final static String MU_STRING = "mu";
+        public final static String INDICATORS_STRING = "indicators";
+       // public final static String LOCATION_DRIFT = "locationDrift";
+        public final static String MU1_SCALE_PARAMETER = "mu1Scale";
+        public final static String MU2_SCALE_PARAMETER = "mu2Scale";
+        public final static String MU_MEAN_PARAMETER = "muMean";
+        
+        public String getParserName() {
+            return DRIFTED_MU_STATISTIC;
+        }
+
+        public Object parseXMLObject(XMLObject xo) throws XMLParseException {
+            TreeModel treeModel = (TreeModel) xo.getChild(TreeModel.class);
+            MatrixParameter muParam = (MatrixParameter) xo.getElementFirstChild(MU_STRING);
+            Parameter indicators = (Parameter) xo.getElementFirstChild(INDICATORS_STRING);
+            //Parameter locationDrift = (Parameter) xo.getElementFirstChild(LOCATION_DRIFT);
+            Parameter mu1Scale = null;
+            if (xo.hasChildNamed(MU1_SCALE_PARAMETER)) {
+            	mu1Scale = (Parameter) xo.getElementFirstChild(MU1_SCALE_PARAMETER);
+            }
+            
+            Parameter mu2Scale = null;
+            if (xo.hasChildNamed(MU2_SCALE_PARAMETER)) {
+            	mu2Scale = (Parameter) xo.getElementFirstChild(MU2_SCALE_PARAMETER);
+            }  
+            
+            Parameter muMean = null;
+            if(xo.hasChildNamed(MU_MEAN_PARAMETER)){
+            	muMean = (Parameter) xo.getElementFirstChild(MU_MEAN_PARAMETER);
+            }
+            
+            //return new DriftedMuStatistic( treeModel, muParam, indicators, locationDrift);
+            return new DriftedMuStatistic( treeModel, muParam, indicators, mu1Scale, mu2Scale, muMean);
+            
+
+        }
+
+        //************************************************************************
+        // AbstractXMLObjectParser implementation
+        //************************************************************************
+
+        public String getParserDescription() {
+            return "This element returns a statistic that shifts a matrix of locations by location drift in the first dimension.";
+        }
+
+        public Class getReturnType() {
+            return DriftedMuStatistic.class;
+        }
+
+        public XMLSyntaxRule[] getSyntaxRules() {
+            return rules;
+        }
+
+        private XMLSyntaxRule[] rules = new XMLSyntaxRule[]{
+            new ElementRule(TreeModel.class),
+            new ElementRule(MU_STRING, Parameter.class),
+            new ElementRule(INDICATORS_STRING, Parameter.class),
+            //new ElementRule(LOCATION_DRIFT, Parameter.class)
+            new ElementRule(MU1_SCALE_PARAMETER, Parameter.class, "Optional parameter for scaling the first dimension of mu"),
+            new ElementRule(MU2_SCALE_PARAMETER, Parameter.class, "Optional parameter for scaling the second dimension of mu"), 
+            new ElementRule(MU_MEAN_PARAMETER, Parameter.class)
+        };
+    };
+
+    
+
+}
diff --git a/src/dr/evomodel/antigenic/phyloClustering/statistics/DriftedTreeClusterLocationsStatistic.java b/src/dr/evomodel/antigenic/phyloClustering/statistics/DriftedTreeClusterLocationsStatistic.java
new file mode 100644
index 0000000..3d6d047
--- /dev/null
+++ b/src/dr/evomodel/antigenic/phyloClustering/statistics/DriftedTreeClusterLocationsStatistic.java
@@ -0,0 +1,158 @@
+/*
+ * DriftedTreeClusterLocationsStatistic.java
+ *
+ *
+ * This file is part of BEAST.
+ * See the NOTICE file distributed with this work for additional
+ * information regarding copyright ownership and licensing.
+ *
+ * BEAST is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+ *
+ *  BEAST is distributed in the hope that it will be useful,
+ *  but WITHOUT ANY WARRANTY; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ *  GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with BEAST; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA  02110-1301  USA
+ */
+
+package dr.evomodel.antigenic.phyloClustering.statistics;
+
+import dr.inference.model.*;
+import dr.xml.*;
+
+/**
+ * @author Charles Cheung
+ * 
+ * adapted from the code:
+ * @author Trevor Bedford
+ * @author Marc A. Suchard
+ */
+public class DriftedTreeClusterLocationsStatistic extends Statistic.Abstract implements VariableListener {
+
+    public static final String DRIFTED_TREE_CLUSTER_LOCATIONS_STATISTIC = "driftedTreeClusterLocationsStatistic";
+
+    public DriftedTreeClusterLocationsStatistic(MatrixParameter locationsParameter, Parameter mu1Scale, Parameter mu2Scale) {
+    //public DriftedTreeClusterLocationsStatistic(MatrixParameter locationsParameter, Parameter locationDriftParameter, Parameter mu1Scale, Parameter mu2Scale) {
+        this.locationsParameter = locationsParameter;
+        locationsParameter.addParameterListener(this);
+    //    this.locationDriftParameter = locationDriftParameter;
+     //   locationDriftParameter.addParameterListener(this);
+        
+        this.mu1ScaleParameter = mu1Scale;
+        mu1ScaleParameter.addParameterListener(this);
+        this.mu2ScaleParameter = mu2Scale;
+        mu2ScaleParameter.addParameterListener(this);
+    }
+
+    public int getDimension() {
+        return locationsParameter.getDimension();
+    }
+
+    // strain index
+    public int getColumnIndex(int dim) {
+        return dim / locationsParameter.getRowDimension();
+    }
+
+    // dimension index
+    public int getRowIndex(int dim) {
+        int x = getColumnIndex(dim);
+        return dim - x * locationsParameter.getRowDimension();
+    }
+
+    public double getStatisticValue(int dim) {
+
+        double val;
+
+        // x is location count, y is location dimension
+        int x = getColumnIndex(dim);
+        int y = getRowIndex(dim);
+        Parameter loc = locationsParameter.getParameter(x);
+
+       	if (y == 0) {
+            //val = loc.getParameterValue(y)* locationDriftParameter.getParameterValue(0);
+        		val = loc.getParameterValue(y)* mu1ScaleParameter.getParameterValue(0);
+        	}
+        else {
+            //val = loc.getParameterValue(y);
+        	val = loc.getParameterValue(y)* mu2ScaleParameter.getParameterValue(0);
+        }
+        
+
+        return val;
+
+    }
+
+    public String getDimensionName(int dim) {
+        return locationsParameter.getDimensionName(dim);
+    }
+
+    public void variableChangedEvent(Variable variable, int index, Parameter.ChangeType type) {
+        // do nothing
+    }
+
+    public static XMLObjectParser PARSER = new AbstractXMLObjectParser() {
+
+        public final static String LOCATIONS = "locations";
+        //public final static String LOCATION_DRIFT = "locationDrift";
+
+        public final static String MU1_SCALE_PARAMETER = "mu1Scale";
+        public final static String MU2_SCALE_PARAMETER = "mu2Scale";
+        
+        public String getParserName() {
+            return DRIFTED_TREE_CLUSTER_LOCATIONS_STATISTIC;
+        }
+
+        public Object parseXMLObject(XMLObject xo) throws XMLParseException {
+
+            MatrixParameter locations = (MatrixParameter) xo.getElementFirstChild(LOCATIONS);
+            //Parameter locationDrift = (Parameter) xo.getElementFirstChild(LOCATION_DRIFT);
+            Parameter mu1Scale = null;
+            if (xo.hasChildNamed(MU1_SCALE_PARAMETER)) {
+            	mu1Scale = (Parameter) xo.getElementFirstChild(MU1_SCALE_PARAMETER);
+            }
+            
+            Parameter mu2Scale = null;
+            if (xo.hasChildNamed(MU2_SCALE_PARAMETER)) {
+            	mu2Scale = (Parameter) xo.getElementFirstChild(MU2_SCALE_PARAMETER);
+            }  
+            
+  //          return new DriftedTreeClusterLocationsStatistic(locations, locationDrift, mu1Scale, mu2Scale);
+            return new DriftedTreeClusterLocationsStatistic(locations, mu1Scale, mu2Scale);
+
+        }
+
+        //************************************************************************
+        // AbstractXMLObjectParser implementation
+        //************************************************************************
+
+        public String getParserDescription() {
+            return "This element returns a statistic that shifts a matrix of locations by location drift in the first dimension.";
+        }
+
+        public Class getReturnType() {
+            return DriftedTreeClusterLocationsStatistic.class;
+        }
+
+        public XMLSyntaxRule[] getSyntaxRules() {
+            return rules;
+        }
+
+        private XMLSyntaxRule[] rules = new XMLSyntaxRule[]{
+            new ElementRule(LOCATIONS, MatrixParameter.class),
+          //  new ElementRule(LOCATION_DRIFT, Parameter.class, "TO BECOME OBSOLETE", true),
+            new ElementRule(MU1_SCALE_PARAMETER, Parameter.class, "Optional parameter for scaling the first dimension of mu"),
+            new ElementRule(MU2_SCALE_PARAMETER, Parameter.class, "Optional parameter for scaling the second dimension of mu"),        };
+    };
+
+    private MatrixParameter locationsParameter;
+//    private Parameter locationDriftParameter;
+    private Parameter mu1ScaleParameter;
+    private Parameter mu2ScaleParameter;
+}
diff --git a/src/dr/evomodel/antigenic/phyloClustering/statistics/DriverCountStatistic.java b/src/dr/evomodel/antigenic/phyloClustering/statistics/DriverCountStatistic.java
new file mode 100644
index 0000000..8847b0d
--- /dev/null
+++ b/src/dr/evomodel/antigenic/phyloClustering/statistics/DriverCountStatistic.java
@@ -0,0 +1,122 @@
+
+package dr.evomodel.antigenic.phyloClustering.statistics;
+
+import java.util.LinkedList;
+
+import dr.evolution.tree.NodeRef;
+import dr.evomodel.antigenic.phyloClustering.TreeClusteringVirusesPrior;
+import dr.evomodel.tree.TreeModel;
+import dr.inference.model.*;
+import dr.xml.*;
+
+/**
+ *  @author Charles Cheung
+ * @author Trevor Bedford
+ */
+
+
+public class DriverCountStatistic extends Statistic.Abstract implements VariableListener {
+
+	
+    public static final String DRIVERCOUNT_STATISTIC = "DriverCountStatistic";
+
+    private TreeModel treeModel;
+    private TreeClusteringVirusesPrior clusterPrior;
+
+
+    
+    public DriverCountStatistic(TreeModel tree, TreeClusteringVirusesPrior clusterPrior_in) {
+        this.treeModel = tree;
+		 this.clusterPrior = clusterPrior_in;
+    }
+    
+
+
+    public int getDimension() {
+        int[] causalCount = clusterPrior.getCausalCount();
+		int numdata = causalCount.length *2;
+        return numdata;
+    }
+
+
+
+    //assume print in order... so before printing the first number, 
+    //determine all the nodes that are active.
+    public double getStatisticValue(int dim) {
+       	
+       int[] causalCount = clusterPrior.getCausalCount();
+       int[] nonCausalCount = clusterPrior.getNonCausalCount();
+             
+       int index =  dim/2;
+       double value = -1;
+   		if(dim%2==0){
+   			value = causalCount[index];
+   		}
+   		else{
+			value = nonCausalCount[index];
+		}
+        //System.out.println("dim=" + dim +  " dim%2=" + dim%2 + " and index = " + index + " value = " + value);
+
+       return ( value);
+    }
+
+
+    
+    public String getDimensionName(int dim) {
+    	String name = "";
+    	if(dim%2==0){
+    		name += "C";
+    	}
+    	else{
+    		name += "N";
+    	}
+    	name += ""+  (dim/2 +1);
+    	return name;
+    }
+
+    public void variableChangedEvent(Variable variable, int index, Parameter.ChangeType type) {
+        // do nothing
+    	//System.out.println("hi got printed");
+    }
+
+    public static XMLObjectParser PARSER = new AbstractXMLObjectParser() {
+
+
+
+        public String getParserName() {
+            return DRIVERCOUNT_STATISTIC;
+        }
+
+        public Object parseXMLObject(XMLObject xo) throws XMLParseException {
+            TreeModel treeModel = (TreeModel) xo.getChild(TreeModel.class);
+            TreeClusteringVirusesPrior clusterPrior = (TreeClusteringVirusesPrior) xo.getChild(TreeClusteringVirusesPrior.class);
+ 
+            return new DriverCountStatistic( treeModel, clusterPrior);
+
+        }
+
+        //************************************************************************
+        // AbstractXMLObjectParser implementation
+        //************************************************************************
+
+        public String getParserDescription() {
+            return ".";
+        }
+
+        public Class getReturnType() {
+            return DriverCountStatistic.class;
+        }
+
+        public XMLSyntaxRule[] getSyntaxRules() {
+            return rules;
+        }
+
+        private XMLSyntaxRule[] rules = new XMLSyntaxRule[]{
+	            new ElementRule(TreeModel.class),
+	            new ElementRule(TreeClusteringVirusesPrior.class),
+        };
+    };
+
+    
+
+}
diff --git a/src/dr/evomodel/antigenic/phyloClustering/statistics/KStatistic.java b/src/dr/evomodel/antigenic/phyloClustering/statistics/KStatistic.java
new file mode 100644
index 0000000..a5e98aa
--- /dev/null
+++ b/src/dr/evomodel/antigenic/phyloClustering/statistics/KStatistic.java
@@ -0,0 +1,98 @@
+
+
+
+
+package dr.evomodel.antigenic.phyloClustering.statistics;
+
+import dr.inference.model.*;
+import dr.xml.*;
+
+/**
+ *  @author Charles Cheung
+ * @author Trevor Bedford
+ */
+
+
+
+public class KStatistic extends Statistic.Abstract implements VariableListener {
+
+	
+		
+    public static final String K_STATISTIC = "KStatistic";
+
+    public KStatistic(Parameter indicators) {
+        this.indicatorsParameter = indicators;
+        indicatorsParameter.addParameterListener(this);
+    }
+    
+
+
+    public int getDimension() {
+        return 1;
+    }
+
+
+
+    //assume print in order... so before printing the first number, 
+    //determine all the nodes that are active.
+    public double getStatisticValue(int dim) {
+
+    	double count = 0;
+   		for(int i=0; i < indicatorsParameter.getDimension(); i++){
+   			if( (int) indicatorsParameter.getParameterValue(i) == 1 ){
+   				count++;
+   			}
+   		}
+
+       return count;
+
+    }
+
+    public String getDimensionName(int dim) {
+    	String name = "K";
+        return name;
+    }
+
+    public void variableChangedEvent(Variable variable, int index, Parameter.ChangeType type) {
+        // do nothing
+    	//System.out.println("hi got printed");
+    }
+
+    public static XMLObjectParser PARSER = new AbstractXMLObjectParser() {
+
+        public final static String INDICATORS = "indicators";
+
+        public String getParserName() {
+            return K_STATISTIC;
+        }
+
+        public Object parseXMLObject(XMLObject xo) throws XMLParseException {
+            Parameter indicators = (Parameter) xo.getElementFirstChild(INDICATORS);
+            return new KStatistic(indicators);
+
+        }
+
+        //************************************************************************
+        // AbstractXMLObjectParser implementation
+        //************************************************************************
+
+        public String getParserDescription() {
+            return "This element returns a statistic that shifts a matrix of locations by location drift in the first dimension.";
+        }
+
+        public Class getReturnType() {
+            return KStatistic.class;
+        }
+
+        public XMLSyntaxRule[] getSyntaxRules() {
+            return rules;
+        }
+
+        private XMLSyntaxRule[] rules = new XMLSyntaxRule[]{
+            new ElementRule(INDICATORS, Parameter.class)
+        };
+    };
+
+    private Parameter indicatorsParameter;
+
+}
diff --git a/src/dr/evomodel/antigenic/phyloClustering/statistics/MutationsTreeTrait.java b/src/dr/evomodel/antigenic/phyloClustering/statistics/MutationsTreeTrait.java
new file mode 100644
index 0000000..dad965f
--- /dev/null
+++ b/src/dr/evomodel/antigenic/phyloClustering/statistics/MutationsTreeTrait.java
@@ -0,0 +1,275 @@
+package dr.evomodel.antigenic.phyloClustering.statistics;
+
+import dr.evolution.alignment.ConvertAlignment;
+import dr.evolution.alignment.SimpleAlignment;
+import dr.evolution.datatype.AminoAcids;
+import dr.evolution.datatype.DataType;
+import dr.evolution.datatype.GeneticCode;
+import dr.evolution.datatype.Nucleotides;
+
+import java.util.Iterator;
+import java.util.LinkedList;
+
+import dr.app.beagle.evomodel.treelikelihood.AncestralStateTraitProvider;
+import dr.evolution.sequence.Sequence;
+import dr.evolution.tree.NodeRef;
+import dr.evolution.tree.Tree;
+import dr.evolution.tree.TreeTrait;
+import dr.evolution.tree.TreeTraitProvider;
+import dr.evolution.tree.TreeTrait.Intent;
+import dr.evolution.tree.TreeTraitProvider.Helper;
+import dr.evolution.util.Taxon;
+import dr.evomodel.antigenic.phyloClustering.TreeClusteringVirusesPrior;
+import dr.evomodel.tree.TreeModel;
+import dr.inference.model.Parameter;
+import dr.xml.AbstractXMLObjectParser;
+import dr.xml.AttributeRule;
+import dr.xml.ElementRule;
+import dr.xml.XMLObject;
+import dr.xml.XMLObjectParser;
+import dr.xml.XMLParseException;
+import dr.xml.XMLSyntaxRule;
+
+public class MutationsTreeTrait implements TreeTraitProvider {
+	
+    protected Helper treeTraits = new Helper();
+    private TreeModel treeModel;
+    private TreeClusteringVirusesPrior clusterPrior;
+    public static final String MUTATIONS_TREETRAIT = "MutationsTreeTrait";
+
+    
+    private String[] mutationString;
+    
+    private LinkedList<Integer>[] mutationList;
+    private LinkedList<Integer>[] causalList;
+    
+	
+	 public MutationsTreeTrait(TreeModel treeModel_in, TreeClusteringVirusesPrior clusterPrior_in){
+		 
+		 this.treeModel = treeModel_in;
+		 this.clusterPrior = clusterPrior_in;
+		 
+
+		 		 
+	        //alignment.setDataType(siteModel.getSubstitutionModel().getDataType());
+
+		 	int numNodes = treeModel.getNodeCount();
+	        // Get sequences
+	        String[] sequence = new String[numNodes];
+	        
+	     // Universal
+    		String GENETIC_CODE_TABLES ="KNKNTTTTRSRSIIMIQHQHPPPPRRRRLLLLEDEDAAAAGGGGVVVV*Y*YSSSS*CWCLFLF";
+
+    		int numCodons = clusterPrior.getNumSites();
+    		 //int numCodons = 330;
+	        for(int curIndex = 0; curIndex < numNodes; curIndex ++){
+	    		String ns =  (String) treeModel.getNodeAttribute( treeModel.getNode(curIndex), "states");
+	
+	    		ns = ns.substring(clusterPrior.getStartBase(), clusterPrior.getEndBase() );
+	    		//ns = ns.substring(3+27, ns.length() - 1);
+	    		//System.out.println(ns);
+	    		
+	    		
+	    		//numCodons = ns.length()/3;  // or do I care about only 330?
+
+	    		//System.out.println(numCodons);
+	    		String codonSequence = "";
+	    		for(int codon=0; codon< numCodons; codon++){
+	    			
+	    			int nuc1 =  Nucleotides.NUCLEOTIDE_STATES[ns.charAt(codon*3)];
+	    			int nuc2 =  Nucleotides.NUCLEOTIDE_STATES[ns.charAt(codon*3+1)];
+	    			int nuc3 =  Nucleotides.NUCLEOTIDE_STATES[ns.charAt(codon*3+2)];
+	    			
+	    			int canonicalState = (nuc1 * 16) + (nuc2 * 4) + nuc3;
+	    			
+	    			codonSequence = codonSequence + GENETIC_CODE_TABLES.charAt(canonicalState);
+	    		}
+				//System.out.println(codonSequence);
+	            sequence[curIndex] = codonSequence;
+	    		
+	        }
+
+
+	        mutationString = new String[treeModel.getNodeCount()];
+
+			NodeRef cNode = treeModel.getRoot();
+		    LinkedList<NodeRef> visitlist = new LinkedList<NodeRef>();
+		    
+		    visitlist.add(cNode);
+		    
+		    int countProcessed=0;
+		    while(visitlist.size() > 0){
+		    	countProcessed++;
+		    	//assign value to the current node...
+		    	if(treeModel.getParent(cNode) == null){  //this means it is a root node
+		    		//visiting the root
+		    		//System.out.println(cNode.getNumber() + ":\t" + "root");
+		    	}
+		    	else{
+		    		//visiting
+		    		//System.out.print(cNode.getNumber() + ":\t");
+
+		    		//String listMutations = "\"";
+		    		mutationString[cNode.getNumber()]  = "\"";
+		    		String nodeState =  sequence[cNode.getNumber()];
+		    		String parentState =  sequence[treeModel.getParent(cNode).getNumber()];
+		    		           
+		    		int count = 0;
+		    		for(int i=0; i < numCodons; i++){
+		    			if(nodeState.charAt(i) != parentState.charAt(i)){
+		    				count++;
+		    				if(count>1){
+		    					//System.out.print(",");
+		    					mutationString[cNode.getNumber()] =  mutationString[cNode.getNumber()] + ",";
+		    				}
+		    				//System.out.print(i+1);
+		    				mutationString[cNode.getNumber()] =  mutationString[cNode.getNumber()] + (i+1);  //i+1 so mutation starts from 1 - 330
+		    			}
+		    			
+		    			//store in linked list
+		    		}
+		    		//System.out.println("");
+		    		mutationString[cNode.getNumber()]  = mutationString[cNode.getNumber()]  + "\"";
+		    	}
+		    	
+				//System.out.println(cNode.getNumber() + "\t" +  treeModel.getNodeAttribute(cNode, "states") );
+
+		    	
+		    	//add all the children to the queue
+	  			for(int childNum=0; childNum < treeModel.getChildCount(cNode); childNum++){
+	  				NodeRef node= treeModel.getChild(cNode,childNum);
+	  				visitlist.add(node);
+	  	        }
+	  			
+		  			
+		  		visitlist.pop(); //now that we have finished visiting this node, pops it out of the queue
+
+	  			if(visitlist.size() > 0){
+	  				cNode = visitlist.getFirst(); //set the new first node in the queue to visit
+	  			}
+	  			
+				
+		}
+
+treeTraits.addTrait(new TreeTrait.IA() {
+	
+	        	
+	            public String getTraitName() {          	
+	            	return "mutations";
+	            }
+
+	            public String getTraitString(Tree tree, NodeRef node) {
+	            	if(tree != treeModel){
+	            		System.out.println("Something is wrong. Why is tree not equal to treeModel?");
+	            		System.exit(0);
+	            	}
+
+	            	//String nodeString = mutationString[node.getNumber()];  //to get the node numbering of the tree.
+
+	            	
+	      		  	mutationList = clusterPrior.getMutationsPerNode();
+	      		  	causalList = clusterPrior.getCausativeStatesPerNode();
+	            	
+	            	String nodeString = "\"";
+	            			
+        	    	if(mutationList[node.getNumber()] != null){
+        		    	Iterator itr = mutationList[node.getNumber()].iterator();
+        		    	Iterator itr2 = causalList[node.getNumber()].iterator();
+        		    	int count = 0;
+        		    	while(itr.hasNext()){
+        		    		count++;
+        		    		if(count>1){
+        		    			nodeString += ",";
+        		    		}
+        		    		int curMutation = ((Integer) itr.next()).intValue();
+        		    		nodeString += curMutation;
+        		    		int curCausal = ((Integer) itr2.next()).intValue();
+        		    		if(curCausal ==1){
+        		    			nodeString += "*";
+        		    		}
+        		    	}
+        	    	}
+        	    	nodeString += "\"";
+	            	
+	            	return nodeString;
+	            }
+	            
+	            
+	            public Intent getIntent() {
+	            	//System.out.println("getIntent");
+	                return Intent.NODE;
+	            }
+
+	            public Class getTraitClass() {
+	            	System.out.println("getTraitClass ran. Not expected. Quit now");
+	            	System.exit(0);
+	                return int[].class;
+	            }
+
+	            
+	            public int[] getTrait(Tree tree, NodeRef node) {
+	            	System.out.println("getTrait ran. Not expected. Quit now");
+	            	System.exit(0);
+	            	return null;
+	            }
+
+
+	        });
+
+		 
+	 }
+	 
+	 
+
+	    public TreeTrait[] getTreeTraits() {
+	        return treeTraits.getTreeTraits();
+	    }
+	
+	
+	    public TreeTrait getTreeTrait(String key) {
+	    	System.out.println("not expected to run getTreeTrait. Quit now");
+	    	System.exit(0);
+	        return treeTraits.getTreeTrait(key);
+	    }
+	 
+
+	    
+
+	    public static XMLObjectParser PARSER = new AbstractXMLObjectParser() {
+
+
+	        public String getParserName() {
+	            return MUTATIONS_TREETRAIT;
+	        }
+
+	        public Object parseXMLObject(XMLObject xo) throws XMLParseException {
+	            TreeModel treeModel = (TreeModel) xo.getChild(TreeModel.class);
+	            TreeClusteringVirusesPrior clusterPrior = (TreeClusteringVirusesPrior) xo.getChild(TreeClusteringVirusesPrior.class);
+	        	 return new MutationsTreeTrait( treeModel, clusterPrior);
+
+	        }
+
+	        //************************************************************************
+	        // AbstractXMLObjectParser implementation
+	        //************************************************************************
+
+	        public String getParserDescription() {
+	            return "Display node number in the tree.";
+	        }
+
+	        public Class getReturnType() {
+	            return MutationsTreeTrait.class;
+	        }
+
+	        public XMLSyntaxRule[] getSyntaxRules() {
+	            return rules;
+	        }
+
+	        private XMLSyntaxRule[] rules = new XMLSyntaxRule[]{
+	            new ElementRule(TreeModel.class),
+	            new ElementRule(TreeClusteringVirusesPrior.class),
+	        };
+	    };
+	
+	
+}
diff --git a/src/dr/evomodel/antigenic/phyloClustering/statistics/NodeNumberTreeTrait.java b/src/dr/evomodel/antigenic/phyloClustering/statistics/NodeNumberTreeTrait.java
new file mode 100644
index 0000000..e864500
--- /dev/null
+++ b/src/dr/evomodel/antigenic/phyloClustering/statistics/NodeNumberTreeTrait.java
@@ -0,0 +1,126 @@
+package dr.evomodel.antigenic.phyloClustering.statistics;
+
+
+import dr.app.beagle.evomodel.treelikelihood.AncestralStateTraitProvider;
+import dr.evolution.tree.NodeRef;
+import dr.evolution.tree.Tree;
+import dr.evolution.tree.TreeTrait;
+import dr.evolution.tree.TreeTraitProvider;
+import dr.evolution.tree.TreeTrait.Intent;
+import dr.evolution.tree.TreeTraitProvider.Helper;
+import dr.evomodel.tree.TreeModel;
+import dr.inference.model.Parameter;
+import dr.xml.AbstractXMLObjectParser;
+import dr.xml.AttributeRule;
+import dr.xml.ElementRule;
+import dr.xml.XMLObject;
+import dr.xml.XMLObjectParser;
+import dr.xml.XMLParseException;
+import dr.xml.XMLSyntaxRule;
+
+public class NodeNumberTreeTrait implements TreeTraitProvider {
+	
+    protected Helper treeTraits = new Helper();
+    private TreeModel treeModel;
+    public static final String NODE_NUMBER_TREETRAIT = "NodeNumberTreeTrait";
+
+	
+	 public NodeNumberTreeTrait(TreeModel treeModel_in){
+		 
+		 this.treeModel = treeModel_in;
+		 
+	        treeTraits.addTrait(new TreeTrait.IA() {
+	        	
+	            public String getTraitName() {
+	            	return "node";
+	            }
+
+	            public String getTraitString(Tree tree, NodeRef node) {
+	            	
+	            	if(tree != treeModel){
+	            		System.out.println("Something is wrong. Why is tree not equal to treeModel?");
+	            		System.exit(0);
+	            	}
+
+	            	String nodeString = node.getNumber() + "";  //to get the node numbering of the tree.
+	            	return nodeString;
+	            }
+	            
+	            
+	            public Intent getIntent() {
+	            	//System.out.println("getIntent");
+	                return Intent.NODE;
+	            }
+
+	            public Class getTraitClass() {
+	            	System.out.println("getTraitClass ran. Not expected. Quit now");
+	            	System.exit(0);
+	                return int[].class;
+	            }
+
+	            
+	            public int[] getTrait(Tree tree, NodeRef node) {
+	            	System.out.println("getTrait ran. Not expected. Quit now");
+	            	System.exit(0);
+	            	return null;
+	            }
+
+
+	        });
+
+		 
+	 }
+	 
+	 
+
+	    public TreeTrait[] getTreeTraits() {
+	        return treeTraits.getTreeTraits();
+	    }
+	
+	
+	    public TreeTrait getTreeTrait(String key) {
+	    	System.out.println("not expected to run getTreeTrait. Quit now");
+	    	System.exit(0);
+	        return treeTraits.getTreeTrait(key);
+	    }
+	 
+
+	    
+
+	    public static XMLObjectParser PARSER = new AbstractXMLObjectParser() {
+
+
+	        public String getParserName() {
+	            return NODE_NUMBER_TREETRAIT;
+	        }
+
+	        public Object parseXMLObject(XMLObject xo) throws XMLParseException {
+	            TreeModel treeModel = (TreeModel) xo.getChild(TreeModel.class);
+
+	        	 return new NodeNumberTreeTrait( treeModel);
+
+	        }
+
+	        //************************************************************************
+	        // AbstractXMLObjectParser implementation
+	        //************************************************************************
+
+	        public String getParserDescription() {
+	            return "Display node number in the tree.";
+	        }
+
+	        public Class getReturnType() {
+	            return NodeNumberTreeTrait.class;
+	        }
+
+	        public XMLSyntaxRule[] getSyntaxRules() {
+	            return rules;
+	        }
+
+	        private XMLSyntaxRule[] rules = new XMLSyntaxRule[]{
+	            new ElementRule(TreeModel.class),
+	        };
+	    };
+	
+	
+}
diff --git a/src/dr/evomodel/antigenic/phyloClustering/statistics/PathStatistic.java b/src/dr/evomodel/antigenic/phyloClustering/statistics/PathStatistic.java
new file mode 100644
index 0000000..5ca0fe9
--- /dev/null
+++ b/src/dr/evomodel/antigenic/phyloClustering/statistics/PathStatistic.java
@@ -0,0 +1,288 @@
+
+package dr.evomodel.antigenic.phyloClustering.statistics;
+
+import java.util.LinkedList;
+
+import dr.evolution.tree.NodeRef;
+import dr.evomodel.tree.TreeModel;
+import dr.inference.model.*;
+import dr.xml.*;
+
+/**
+ *  @author Charles Cheung
+ * @author Trevor Bedford
+ */
+
+public class PathStatistic extends Statistic.Abstract implements VariableListener {
+
+	private  int max_dim ;
+	
+	private LinkedList<Double> path  = new LinkedList<Double>();
+	private int[] fromMembership;
+//	private Parameter clusterLabels;
+	private Parameter indicators;
+    private TreeModel treeModel;
+
+    public static final String PATH_STATISTIC = "pathStatistic";
+
+    public PathStatistic(TreeModel tree, Parameter indicators, int max_dim_in) {
+    //public PathStatistic(Parameter clusterLabels, TreeModel tree, Parameter indicators, int max_dim_in) {
+      //  this.clusterLabels = clusterLabels;
+        this.treeModel = tree;
+        this.indicators = indicators;
+        this.max_dim = max_dim_in;
+        
+      //  clusterLabels.addParameterListener(this);
+        indicators.addParameterListener(this);
+    }
+    
+
+
+    public int getDimension() {
+        return max_dim;
+    }
+
+
+
+    //assume print in order... so before printing the first number, 
+    //determine all the nodes that are active.
+    public double getStatisticValue(int dim) {
+
+    	if(dim ==0){
+    		//fromMembership = determine_from_membership_v2(treeModel);
+    		fromMembership = determine_from_membership_v2();
+    	}
+    	
+    	//need to figure out how many K of them.
+      
+       return ((double) fromMembership[dim]);
+
+    }
+
+    
+    
+    int[] determine_from_membership_v3(){
+    	
+    	int[] printFromCluster = new int[treeModel.getNodeCount()];
+    	for(int i=0; i < treeModel.getNodeCount(); i++){
+    		printFromCluster[i ] = -1;
+    	}
+    	
+    	
+ 	    int[] fromMembership = new int[treeModel.getNodeCount()];
+    	for(int i=0; i < treeModel.getNodeCount(); i++){
+    		fromMembership[i ] = -99;
+    	}
+    	
+    	
+    	 NodeRef root = treeModel.getRoot();
+    		
+ 	    int numClusters = 1;
+ 	    LinkedList<NodeRef> list = new LinkedList<NodeRef>();
+ 	    list.addFirst(root);
+ 	
+ 	    int[] membership = new int[treeModel.getNodeCount()];
+ 	    for(int i=0; i < treeModel.getNodeCount(); i++){
+ 	    	membership[i] = -1;
+ 	    }
+ 	    membership[root.getNumber()] = 0; //root always given the first cluster
+ 	    fromMembership[0] = -1;
+ 	          
+ 	    while(!list.isEmpty()){
+ 	    	//do things with the current object
+ 	    	NodeRef curElement = list.pop();
+ 	    	
+ 	    	//cluster assignment:
+ 	    	if(!treeModel.isRoot(curElement)){
+ 	    		if( (int) indicators.getParameterValue(curElement.getNumber() ) == 1) {
+ 	    			numClusters++ ;
+ 	    			membership[ curElement.getNumber() ] = numClusters - 1;
+ 	    			fromMembership[ curElement.getNumber() ] = membership[treeModel.getParent(curElement).getNumber()];
+ 	      	 	}
+ 	    		else{
+ 	    			//inherit from parent's cluster assignment
+ 	    			membership[curElement.getNumber()] = membership[treeModel.getParent(curElement).getNumber()]; 
+ 	    			fromMembership[curElement.getNumber()] = fromMembership[treeModel.getParent(curElement).getNumber()];
+ 	    		}        	
+ 	    	}//is not Root
+ 	    	else{
+ 	    		fromMembership[ curElement.getNumber()] = -1;
+ 	    	}
+ 	
+ 	    	
+ 	        for(int childNum=0; childNum < treeModel.getChildCount(curElement); childNum++){
+ 	        	list.addFirst(treeModel.getChild(curElement,childNum));
+ 	        }
+ 	    }
+ 	
+ 	    
+ 	    for(int i=0; i < treeModel.getNodeCount(); i++){
+    		if( (int) indicators.getParameterValue(i ) == 1) {
+    			printFromCluster[ membership[i] ] = fromMembership[i];
+    		}
+ 	    }
+ 	    
+ 	     return(printFromCluster);
+	    
+    }
+    
+    
+    
+    //traverse down the tree, top down, do calculation
+    int[] determine_from_membership_v2(){
+	    	//note: I set MAX_DIM as the most I would print, but in order to avoid bug, I 
+    	//declare the number of nodes as the most active nodes I can have.
+    	int[] fromMembership = new int[treeModel.getNodeCount()];
+    	for(int i=0; i < treeModel.getNodeCount(); i++){
+    		fromMembership[i ] = -1;
+    	}
+    	
+	    NodeRef root = treeModel.getRoot();
+	
+	    int numClusters = 1;
+	    LinkedList<NodeRef> list = new LinkedList<NodeRef>();
+	    list.addFirst(root);
+	
+	    int[] membership = new int[treeModel.getNodeCount()];
+	    for(int i=0; i < treeModel.getNodeCount(); i++){
+	    	membership[i] = -1;
+	    }
+	    membership[root.getNumber()] = 0; //root always given the first cluster
+	          
+	    while(!list.isEmpty()){
+	    	//do things with the current object
+	    	NodeRef curElement = list.pop();
+	    	//String content = "node #" + curElement.getNumber() +", taxon=" + treeModel.getNodeTaxon(curElement) + " and parent is = " ;
+	    	String content = "node #" + curElement.getNumber() +", taxon= " ;
+	    	if(treeModel.getNodeTaxon(curElement)== null){
+	    		content += "internal node\t";
+	    	}
+	    	else{
+	    		content += treeModel.getNodeTaxon(curElement).getId() + "\t";
+	    		//content += treeModel.getTaxonIndex(treeModel.getNodeTaxon(curElement)) + "\t";
+	    	}
+	    	
+	       	if(treeModel.getParent(curElement)== null){
+	    		//content += "no parent";
+	    	}
+	    	else{
+	    		//content += "parent node#=" + treeModel.getParent(curElement).getNumber();
+	    	}
+	    	
+	    	//cluster assignment:
+	    	if(!treeModel.isRoot(curElement)){
+	    	 if( (int) indicators.getParameterValue(curElement.getNumber() ) == 1) {
+	    //		 System.out.print("indicator # " + curElement.getNumber()  + " ");
+	    		numClusters++ ;
+	    		membership[ curElement.getNumber() ] = numClusters - 1; 
+	    		fromMembership[numClusters -1] = membership[ treeModel.getParent(curElement).getNumber()];
+	    //		System.out.println("    membership " + (numClusters-1) + " assigned from " + membership[ treeModel.getParent(curElement).getNumber()] );
+	      	}
+	    	else{
+	    		//inherit from parent's cluster assignment
+	    		membership[curElement.getNumber()] = membership[treeModel.getParent(curElement).getNumber()]; 
+	    	 }
+	    	        	
+	    	}//is not Root
+	    	content += " cluster = " + membership[curElement.getNumber()] ; 
+	    	
+	    //	System.out.println(content);
+	
+	    	
+	        for(int childNum=0; childNum < treeModel.getChildCount(curElement); childNum++){
+	        	list.addFirst(treeModel.getChild(curElement,childNum));
+	        }
+	    }
+	
+	     return(fromMembership);
+   }
+
+	
+    
+    
+    
+    
+
+	//private LinkedList<Double> setPath() {
+	
+		    
+		 //return(0);
+		
+	//}
+
+
+
+    
+    
+    
+    
+    
+    
+    
+    
+    
+    
+    
+    public String getDimensionName(int dim) {
+    	String name = "path" + (dim);
+        return name;
+    }
+
+    public void variableChangedEvent(Variable variable, int index, Parameter.ChangeType type) {
+        // do nothing
+    	//System.out.println("hi got printed");
+    }
+
+    public static XMLObjectParser PARSER = new AbstractXMLObjectParser() {
+
+       // public final static String CLUSTERLABELS = "clusterLabels";
+        public final static String INDICATORS = "indicators";        
+        public final static String MAXDIMSTR = "maxDim";
+
+
+        public String getParserName() {
+            return PATH_STATISTIC;
+        }
+
+        public Object parseXMLObject(XMLObject xo) throws XMLParseException {
+            //Parameter clusterLabels = (Parameter) xo.getElementFirstChild(CLUSTERLABELS);
+            TreeModel treeModel = (TreeModel) xo.getChild(TreeModel.class);
+            Parameter indicators = (Parameter) xo.getElementFirstChild(INDICATORS);
+
+        	int maxDim = 30;
+        	if(xo.hasAttribute(MAXDIMSTR)){
+        		maxDim = xo.getIntegerAttribute(MAXDIMSTR);
+        	}
+
+           // return new PathStatistic(clusterLabels, treeModel, indicators, maxDim);
+        	 return new PathStatistic( treeModel, indicators, maxDim);
+
+        }
+
+        //************************************************************************
+        // AbstractXMLObjectParser implementation
+        //************************************************************************
+
+        public String getParserDescription() {
+            return "This element returns a statistic that shifts a matrix of locations by location drift in the first dimension.";
+        }
+
+        public Class getReturnType() {
+            return PathStatistic.class;
+        }
+
+        public XMLSyntaxRule[] getSyntaxRules() {
+            return rules;
+        }
+
+        private XMLSyntaxRule[] rules = new XMLSyntaxRule[]{
+            //new ElementRule(CLUSTERLABELS, Parameter.class),
+            new ElementRule(TreeModel.class),
+            new ElementRule(INDICATORS, Parameter.class),
+            AttributeRule.newDoubleRule(MAXDIMSTR, true, "the variance of mu"),
+        };
+    };
+
+    
+
+}
diff --git a/src/dr/evomodel/antigenic/phyloClustering/statistics/indicatorsStatistic.java b/src/dr/evomodel/antigenic/phyloClustering/statistics/indicatorsStatistic.java
new file mode 100644
index 0000000..a6838f1
--- /dev/null
+++ b/src/dr/evomodel/antigenic/phyloClustering/statistics/indicatorsStatistic.java
@@ -0,0 +1,94 @@
+package dr.evomodel.antigenic.phyloClustering.statistics;
+
+import dr.inference.model.Parameter;
+import dr.inference.model.*;
+import dr.xml.*;
+
+
+/**
+ *  @author Charles Cheung
+ * @author Trevor Bedford
+ */
+
+public class indicatorsStatistic  extends Statistic.Abstract implements VariableListener {
+	
+    private Parameter indicators;
+    
+    public static final String INDICATORS_STATISTIC = "indicatorsStatistic";
+
+    public indicatorsStatistic( Parameter indicators) {
+        this.indicators = indicators;
+        indicators.addParameterListener(this);
+    }
+    
+
+
+    public int getDimension() {
+        return indicators.getDimension();
+    }
+
+
+
+    //assume print in order... so before printing the first number, 
+    //determine all the nodes that are active.
+    public double getStatisticValue(int dim) {
+        return (  indicators.getParameterValue(dim)  );
+
+    }
+
+    
+    
+     
+    
+    
+    public String getDimensionName(int dim) {
+    	String name = "indicators_" +  dim ;
+        return name;
+    }
+
+    public void variableChangedEvent(Variable variable, int index, Parameter.ChangeType type) {
+        // do nothing
+    	//System.out.println("hi got printed");
+    }
+
+    public static XMLObjectParser PARSER = new AbstractXMLObjectParser() {
+
+        public final static String INDICATORS_STRING = "indicators";
+
+        public String getParserName() {
+            return INDICATORS_STATISTIC;
+        }
+
+        public Object parseXMLObject(XMLObject xo) throws XMLParseException {
+
+            Parameter indicators = (Parameter) xo.getElementFirstChild(INDICATORS_STRING);
+
+            return new indicatorsStatistic( indicators);
+
+        }
+
+        //************************************************************************
+        // AbstractXMLObjectParser implementation
+        //************************************************************************
+
+        public String getParserDescription() {
+            return "This element returns a statistic that shifts a matrix of locations by location drift in the first dimension.";
+        }
+
+        public Class getReturnType() {
+            return indicatorsStatistic.class;
+        }
+
+        public XMLSyntaxRule[] getSyntaxRules() {
+            return rules;
+        }
+
+        private XMLSyntaxRule[] rules = new XMLSyntaxRule[]{
+            new ElementRule(INDICATORS_STRING, Parameter.class)
+
+        };
+    };
+
+    
+
+}
diff --git a/src/dr/evomodel/antigenic/phyloClustering/statistics/muStatistic.java b/src/dr/evomodel/antigenic/phyloClustering/statistics/muStatistic.java
new file mode 100644
index 0000000..9b27a22
--- /dev/null
+++ b/src/dr/evomodel/antigenic/phyloClustering/statistics/muStatistic.java
@@ -0,0 +1,118 @@
+
+package dr.evomodel.antigenic.phyloClustering.statistics;
+
+import java.util.LinkedList;
+
+import dr.evolution.tree.NodeRef;
+import dr.evomodel.tree.TreeModel;
+import dr.inference.model.*;
+import dr.xml.*;
+
+/**
+ *  @author Charles Cheung
+ * @author Trevor Bedford
+ */
+
+public class muStatistic extends Statistic.Abstract implements VariableListener {
+
+	static  int MAX_DIM = 30;
+	
+
+	private MatrixParameter mu;
+    private TreeModel treeModel;
+    private Parameter indicators;
+    
+    public static final String MU_STATISTIC = "muStatistic";
+
+    public muStatistic( TreeModel tree, MatrixParameter mu, Parameter indicators) {
+        
+        this.treeModel = tree;
+        this.mu = mu;
+        this.indicators = indicators;
+        
+        mu.addParameterListener(this);
+        indicators.addParameterListener(this);
+    }
+    
+
+
+    public int getDimension() {
+        return treeModel.getNodeCount()*2;
+    }
+
+
+
+    //assume print in order... so before printing the first number, 
+    //determine all the nodes that are active.
+    public double getStatisticValue(int dim) {
+
+    	int curNode = dim/2;
+    	double value = mu.getParameter(curNode).getParameterValue(dim % 2);
+    	
+    	//if((int) indicators.getParameterValue(curNode)  == 0){
+    	//	value = 0;
+    	//}
+          return (  value );
+
+    }
+
+    
+    
+     
+    
+    
+    public String getDimensionName(int dim) {
+    	String name = "mu_" +  ((dim/2) )  + "-" + ((dim %2 ) +1 );
+        return name;
+    }
+
+    public void variableChangedEvent(Variable variable, int index, Parameter.ChangeType type) {
+        // do nothing
+    	//System.out.println("hi got printed");
+    }
+
+    public static XMLObjectParser PARSER = new AbstractXMLObjectParser() {
+
+        public final static String MU_STRING = "mu";
+        public final static String INDICATORS_STRING = "indicators";
+
+        public String getParserName() {
+            return MU_STATISTIC;
+        }
+
+        public Object parseXMLObject(XMLObject xo) throws XMLParseException {
+            TreeModel treeModel = (TreeModel) xo.getChild(TreeModel.class);
+            MatrixParameter muParam = (MatrixParameter) xo.getElementFirstChild(MU_STRING);
+            Parameter indicators = (Parameter) xo.getElementFirstChild(INDICATORS_STRING);
+
+            return new muStatistic( treeModel, muParam, indicators);
+
+        }
+
+        //************************************************************************
+        // AbstractXMLObjectParser implementation
+        //************************************************************************
+
+        public String getParserDescription() {
+            return "This element returns a statistic that shifts a matrix of locations by location drift in the first dimension.";
+        }
+
+        public Class getReturnType() {
+            return muStatistic.class;
+        }
+
+        public XMLSyntaxRule[] getSyntaxRules() {
+            return rules;
+        }
+
+        private XMLSyntaxRule[] rules = new XMLSyntaxRule[]{
+            new ElementRule(TreeModel.class),
+            new ElementRule(MU_STRING, Parameter.class),
+            new ElementRule(INDICATORS_STRING, Parameter.class)
+
+        };
+    };
+
+    
+
+}
diff --git a/src/dr/evomodel/arg/ARGRelaxedClock.java b/src/dr/evomodel/arg/ARGRelaxedClock.java
index 3184df7..c964010 100644
--- a/src/dr/evomodel/arg/ARGRelaxedClock.java
+++ b/src/dr/evomodel/arg/ARGRelaxedClock.java
@@ -29,8 +29,6 @@ import dr.evolution.tree.NodeRef;
 import dr.evolution.tree.Tree;
 import dr.evomodel.arg.ARGModel.Node;
 import dr.evomodel.branchratemodel.AbstractBranchRateModel;
-import dr.evomodel.branchratemodel.BranchRateModel;
-import dr.inference.model.AbstractModel;
 import dr.inference.model.Model;
 import dr.inference.model.Parameter;
 import dr.inference.model.Variable;
diff --git a/src/dr/evomodel/branchratemodel/ContinuousBranchRates.java b/src/dr/evomodel/branchratemodel/ContinuousBranchRates.java
index 875d72b..7265a39 100644
--- a/src/dr/evomodel/branchratemodel/ContinuousBranchRates.java
+++ b/src/dr/evomodel/branchratemodel/ContinuousBranchRates.java
@@ -26,13 +26,13 @@
 package dr.evomodel.branchratemodel;
 
 import dr.evolution.tree.NodeRef;
-import dr.evolution.tree.SimpleTree;
 import dr.evolution.tree.Tree;
 import dr.evomodel.tree.TreeModel;
 import dr.evomodel.tree.TreeParameterModel;
 import dr.evomodelxml.branchratemodel.ContinuousBranchRatesParser;
 import dr.inference.distribution.ParametricDistributionModel;
 import dr.inference.model.*;
+import dr.math.MathUtils;
 
 /**
  *
@@ -48,44 +48,35 @@ public class ContinuousBranchRates extends AbstractBranchRateModel {
 
     private final ParametricDistributionModel distributionModel;
 
-    // The rate categories of each branch
-    //d final TreeParameterModel rateCategories;
+    // The rate quantiles of each branch
     final TreeParameterModel rateCategoryQuantiles;
 
-    //private final int categoryCount;
-    //private final double step;
     private final double[] rates;
     private boolean normalize = false;
     private double normalizeBranchRateTo = Double.NaN;
     private double scaleFactor = 1.0;
     private TreeModel treeModel;
 
-    private Tree tree;
-
-    //overSampling control the number of effective categories
+    private boolean updateScaleFactor = false;
+    private boolean updateRates = true;
 
     public ContinuousBranchRates(
             TreeModel tree,
-            /*Parameter rateCategoryParameter, */
             Parameter rateCategoryQuantilesParameter,
-            ParametricDistributionModel model
-            /*int overSampling*/) {
-        this(tree, /* rateCategoryParameter, */rateCategoryQuantilesParameter, model, /*overSampling, */false, Double.NaN);
+            ParametricDistributionModel model) {
+        this(tree, rateCategoryQuantilesParameter, model, false, Double.NaN);
 
     }
 
     public ContinuousBranchRates(
             TreeModel tree,
-            /* Parameter rateCategoryParameter, */
             Parameter rateCategoryQuantilesParameter,
             ParametricDistributionModel model,
-            /*int overSampling,*/
             boolean normalize,
             double normalizeBranchRateTo) {
 
         super(ContinuousBranchRatesParser.CONTINUOUS_BRANCH_RATES);
 
-        //d this.rateCategories = new TreeParameterModel(tree, rateCategoryParameter, false);
         this.rateCategoryQuantiles = new TreeParameterModel(tree, rateCategoryQuantilesParameter, false);
 
         rates = new double[tree.getNodeCount()];
@@ -96,149 +87,110 @@ public class ContinuousBranchRates extends AbstractBranchRateModel {
         this.distributionModel = model;
         this.normalizeBranchRateTo = normalizeBranchRateTo;
 
-        this.tree = new SimpleTree(tree);
-
-        //Force the boundaries of rateCategoryParameter to match the category count
-        //d Parameter.DefaultBounds bound = new Parameter.DefaultBounds(categoryCount - 1, 0, rateCategoryParameter.getDimension());
-        //d rateCategoryParameter.addBounds(bound);
-        //rateCategoryQuantilesParameter.;
-
-
-
         Parameter.DefaultBounds bound = new Parameter.DefaultBounds(1.0, 0.0, rateCategoryQuantilesParameter.getDimension());
         rateCategoryQuantilesParameter.addBounds(bound);
-        /*if(rateCategoryQuantilesParameter.getBounds()==null) {
-            System.out.println("oh NO!!! " + rateCategoryQuantilesParameter.getBounds().getLowerLimit(0) + "\t"
-             + rateCategoryQuantilesParameter.getBounds().getUpperLimit(1));
-        }*/
-
 
-        /* for (int i = 0; i < rateCategoryParameter.getDimension(); i++) {
-            int index = (int) Math.floor((i + 0.5) * overSampling);
-            rateCategoryParameter.setParameterValue(i, index);
-        } */
+        randomizeRates();
 
         addModel(model);
-        // AR - commented out: changes to the tree are handled by model changed events fired by rateCategories
-//        addModel(tree);
-        //d addModel(rateCategories);
-
         addModel(rateCategoryQuantiles);
 
-        //addModel(treeModel); // Maybe
-        // AR - commented out: changes to rateCategoryParameter are handled by model changed events fired by rateCategories
-//        addVariable(rateCategoryParameter);
-
         if (normalize) {
-            tree.addModelListener(new ModelListener() {
-
-                public void modelChangedEvent(Model model, Object object, int index) {
-                    computeFactor();
-                }
-
-                public void modelRestored(Model model) {
-                    computeFactor();
-                }
-            });
+            // if we want to normalize the rates then we need to listen for changes on the tree
+            addModel(treeModel);
+            updateScaleFactor = true;
         }
 
-        setupRates();
-    }
-
-    // compute scale factor
-    private void computeFactor() {
-
-        //scale mean rate to 1.0 or separate parameter
-
-        double treeRate = 0.0;
-        double treeTime = 0.0;
-
-        //normalizeBranchRateTo = 1.0;
-        for (int i = 0; i < treeModel.getNodeCount(); i++) {
-            NodeRef node = treeModel.getNode(i);
-            if (!treeModel.isRoot(node)) {
-//d                int rateCategory = (int) Math.round(rateCategories.getNodeValue(treeModel, node));
-//d                 treeRate += rates[rateCategory] * treeModel.getBranchLength(node);
-                treeTime += treeModel.getBranchLength(node);
-
-// d              System.out.println("rates and time\t" + rates[rateCategory] + "\t" + treeModel.getBranchLength(node));
-            }
-        }
-        //treeRate /= treeTime;
-
-        scaleFactor = normalizeBranchRateTo / (treeRate / treeTime);
-        System.out.println("scaleFactor\t\t\t\t\t" + scaleFactor);
+        updateRates = true;
     }
 
     public void handleModelChangedEvent(Model model, Object object, int index) {
         if (model == distributionModel) {
-            setupRates();
+            updateRates = true;
             fireModelChanged();
-        } //else if (model == rateCategories) {
-        // AR - commented out: if just the rate categories have changed the rates will be the same
-//            setupRates();
-        //  fireModelChanged(null, index);
-        //}
-        else if (model == rateCategoryQuantiles) {
-            setupRates();   // Maybe
-            //rateCategories.fireModelChanged();
+        } else if (model == rateCategoryQuantiles) {
+            updateRates = true;
             fireModelChanged(null, index);
-        } /*else if (model == treeModel) {
-            setupRates(); // Maybe
-        }*/
+        } else if (model == treeModel && normalize) {
+            updateScaleFactor = true;
+        }
     }
 
     protected final void handleVariableChangedEvent(Variable variable, int index, Parameter.ChangeType type) {
-        // AR - commented out: changes to rateCategoryParameter are handled by model changed events
-        //setupRates();   // Maybe
     }
 
     protected void storeState() {
-        //setupRates();   // Maybe
     }
 
     protected void restoreState() {
-        setupRates();
+        updateRates = true;
     }
 
     protected void acceptState() {
-        //setupRates();   // Maybe
     }
 
     public double getBranchRate(final Tree tree, final NodeRef node) {
 
         assert !tree.isRoot(node) : "root node doesn't have a rate!";
 
-        //int rateCategory = (int) Math.round(rateCategories.getNodeValue(tree, node));
-        //System.out.println("dslkjafsdf " + node.getNumber());
+        if (updateRates) {
+            computeRates();
+        }
+
+        if (updateScaleFactor) {
+            computeFactor();
+        }
+
         return rates[node.getNumber()] * scaleFactor;
     }
 
     /**
-     * Calculates the actual rates corresponding to the category indices.
+     * Calculates the actual rates corresponding to the quantiles.
      */
-    protected void setupRates() {
+    private void randomizeRates() {
+
+        for (int i = 0; i < treeModel.getNodeCount(); i++) {
+            if (!treeModel.isRoot(treeModel.getNode(i))) {
+                double r = MathUtils.nextDouble();
+                rateCategoryQuantiles.setNodeValue(treeModel, treeModel.getNode(i), r);
+            }
+        }
+
+        updateRates = false;
+    }
 
-        //rateCategoryQuantiles.
+    /**
+     * Calculates the actual rates corresponding to the quantiles.
+     */
+    private void computeRates() {
 
-        //double z = step / 2.0;
-        for (int i = 0; i < tree.getNodeCount(); i++) {
-            //rates[i] = distributionModel.quantile(rateCategoryQuantiles.getNodeValue(rateCategoryQuantiles.getTreeModel(), rateCategoryQuantiles.getTreeModel().getNode(i) ));
-            if (!tree.isRoot(tree.getNode(i))) {
-                //System.out.println(rateCategoryQuantiles.getNodeValue(tree, tree.getNode(i)));
-                rates[tree.getNode(i).getNumber()] = distributionModel.quantile(rateCategoryQuantiles.getNodeValue(tree, tree.getNode(i)));
+        for (int i = 0; i < treeModel.getNodeCount(); i++) {
+            if (!treeModel.isRoot(treeModel.getNode(i))) {
+                rates[treeModel.getNode(i).getNumber()] = distributionModel.quantile(rateCategoryQuantiles.getNodeValue(treeModel, treeModel.getNode(i)));
             }
+        }
 
-//            System.out.println("road " + i + "\t" + rateCategoryQuantiles.getNodeValue(tree, tree.getNode(i) ) + "\t" + tree.toString() + "\t" + tree.toString());
-            //System.out.println("road " + i + "\t" + rateCategoryQuantiles.getNodeValue(rateCategoryQuantiles.getTreeModel(), rateCategoryQuantiles.getTreeModel().getNode(i) ) + "\t" + rateCategoryQuantiles.getTreeModel().toString() + "\t" + treeModel.toString());
+        updateRates = false;
+    }
 
-            //System.out.print(rates[i]+"\t");
-            //z += step;
-            //rates[i] = distributionModel.quantile(z);
-            //System.out.println("ape " + tree);
+    // compute scale factor
+    private void computeFactor() {
+
+        //scale mean rate to 1.0 or separate parameter
+
+        double treeRate = 0.0;
+        double treeTime = 0.0;
+
+        for (int i = 0; i < treeModel.getNodeCount(); i++) {
+            NodeRef node = treeModel.getNode(i);
+            if (!treeModel.isRoot(node)) {
+                treeTime += treeModel.getBranchLength(node);
+            }
         }
-        //System.out.println("\n");
-        //System.out.println();
-        if (normalize) computeFactor();
+
+        scaleFactor = normalizeBranchRateTo / (treeRate / treeTime);
+
+        updateScaleFactor = false;
     }
+
 }
\ No newline at end of file
diff --git a/src/dr/evomodel/branchratemodel/DecayingRateModel.java b/src/dr/evomodel/branchratemodel/DecayingRateModel.java
index faede21..0af5f96 100644
--- a/src/dr/evomodel/branchratemodel/DecayingRateModel.java
+++ b/src/dr/evomodel/branchratemodel/DecayingRateModel.java
@@ -28,7 +28,6 @@ package dr.evomodel.branchratemodel;
 import dr.evolution.tree.NodeRef;
 import dr.evolution.tree.Tree;
 import dr.evomodel.tree.TreeModel;
-import dr.inference.model.AbstractModel;
 import dr.inference.model.Model;
 import dr.inference.model.Parameter;
 import dr.inference.model.Variable;
diff --git a/src/dr/evomodel/branchratemodel/DiscreteTraitBranchRateModel.java b/src/dr/evomodel/branchratemodel/DiscreteTraitBranchRateModel.java
index 3206ba6..419f1e2 100644
--- a/src/dr/evomodel/branchratemodel/DiscreteTraitBranchRateModel.java
+++ b/src/dr/evomodel/branchratemodel/DiscreteTraitBranchRateModel.java
@@ -37,7 +37,6 @@ import dr.evomodel.tree.TreeModel;
 import dr.inference.model.Model;
 import dr.inference.model.Parameter;
 import dr.inference.model.Variable;
-import dr.math.matrixAlgebra.Vector;
 
 /**
  * This Branch Rate Model takes a ancestral state likelihood and
diff --git a/src/dr/evomodel/branchratemodel/DiscretizedBranchRates.java b/src/dr/evomodel/branchratemodel/DiscretizedBranchRates.java
index 016959f..7fe0987 100644
--- a/src/dr/evomodel/branchratemodel/DiscretizedBranchRates.java
+++ b/src/dr/evomodel/branchratemodel/DiscretizedBranchRates.java
@@ -48,7 +48,7 @@ public class DiscretizedBranchRates extends AbstractBranchRateModel {
     // a restore.
     // Currently turned off as it is not working with multiple partitions for
     // some reason.
-    private static final boolean CACHE_RATES = false;
+    private static final boolean DEFAULT_CACHE_RATES = false;
 
     private final ParametricDistributionModel distributionModel;
 
@@ -71,6 +71,8 @@ public class DiscretizedBranchRates extends AbstractBranchRateModel {
     private int currentRateArrayIndex = 0;
     private int storedRateArrayIndex;
 
+    private boolean cacheRates = DEFAULT_CACHE_RATES;
+
     //overSampling control the number of effective categories
 
     public DiscretizedBranchRates(
@@ -78,7 +80,7 @@ public class DiscretizedBranchRates extends AbstractBranchRateModel {
             Parameter rateCategoryParameter,
             ParametricDistributionModel model,
             int overSampling) {
-        this(tree, rateCategoryParameter, model, overSampling, false, Double.NaN, false, false);
+        this(tree, rateCategoryParameter, model, overSampling, false, Double.NaN, false, false, DEFAULT_CACHE_RATES);
 
     }
 
@@ -90,10 +92,13 @@ public class DiscretizedBranchRates extends AbstractBranchRateModel {
             boolean normalize,
             double normalizeBranchRateTo,
             boolean randomizeRates,
-            boolean keepRates) {
+            boolean keepRates,
+            boolean cacheRates) {
 
         super(DiscretizedBranchRatesParser.DISCRETIZED_BRANCH_RATES);
 
+        this.cacheRates = cacheRates;
+
         this.rateCategories = new TreeParameterModel(tree, rateCategoryParameter, false);
 
         categoryCount = (tree.getNodeCount() - 1) * overSampling;
@@ -143,7 +148,7 @@ public class DiscretizedBranchRates extends AbstractBranchRateModel {
         for (int i = 0; i < treeModel.getNodeCount(); i++) {
             NodeRef node = treeModel.getNode(i);
             if (!treeModel.isRoot(node)) {
-                int rateCategory = (int) Math.round(rateCategories.getNodeValue(treeModel, node));
+                int rateCategory = (int) (rateCategories.getNodeValue(treeModel, node) + 0.5);
                 treeRate += rates[currentRateArrayIndex][rateCategory] * treeModel.getBranchLength(node);
                 treeTime += treeModel.getBranchLength(node);
 
@@ -170,14 +175,14 @@ public class DiscretizedBranchRates extends AbstractBranchRateModel {
    }
 
     protected void storeState() {
-        if (CACHE_RATES) {
+        if (cacheRates) {
             storedRateArrayIndex = currentRateArrayIndex;
             storedScaleFactor = scaleFactor;
         }
     }
 
     protected void restoreState() {
-        if (CACHE_RATES) {
+        if (cacheRates) {
             currentRateArrayIndex = storedRateArrayIndex;
             scaleFactor = storedScaleFactor;
         } else {
@@ -196,7 +201,7 @@ public class DiscretizedBranchRates extends AbstractBranchRateModel {
             setupRates();
         }
 
-        int rateCategory = (int) Math.round(rateCategories.getNodeValue(tree, node));
+        int rateCategory = (int) (rateCategories.getNodeValue(tree, node) + 0.5);
 
         //System.out.println(rates[rateCategory] + "\t"  + rateCategory);
         return rates[currentRateArrayIndex][rateCategory] * scaleFactor;
@@ -207,7 +212,7 @@ public class DiscretizedBranchRates extends AbstractBranchRateModel {
      */
     private void setupRates() {
 
-        if (CACHE_RATES) {
+        if (cacheRates) {
             // flip the current array index
             currentRateArrayIndex = 1 - currentRateArrayIndex;
         }
diff --git a/src/dr/evomodel/branchratemodel/RateEpochBranchRateModel.java b/src/dr/evomodel/branchratemodel/RateEpochBranchRateModel.java
index 4d2d823..493eae4 100644
--- a/src/dr/evomodel/branchratemodel/RateEpochBranchRateModel.java
+++ b/src/dr/evomodel/branchratemodel/RateEpochBranchRateModel.java
@@ -28,7 +28,6 @@ package dr.evomodel.branchratemodel;
 import dr.evolution.tree.NodeRef;
 import dr.evolution.tree.Tree;
 import dr.evomodelxml.branchratemodel.RateEpochBranchRateModelParser;
-import dr.inference.model.AbstractModel;
 import dr.inference.model.Model;
 import dr.inference.model.Parameter;
 import dr.inference.model.Variable;
diff --git a/src/dr/evomodel/branchratemodel/ScaledTreeLengthRateModel.java b/src/dr/evomodel/branchratemodel/ScaledTreeLengthRateModel.java
index 52c5845..54903f1 100644
--- a/src/dr/evomodel/branchratemodel/ScaledTreeLengthRateModel.java
+++ b/src/dr/evomodel/branchratemodel/ScaledTreeLengthRateModel.java
@@ -29,7 +29,6 @@ import dr.evolution.tree.NodeRef;
 import dr.evolution.tree.Tree;
 import dr.evomodel.tree.TreeModel;
 import dr.evomodelxml.branchratemodel.ScaledTreeLengthRateModelParser;
-import dr.inference.model.AbstractModel;
 import dr.inference.model.Model;
 import dr.inference.model.Parameter;
 import dr.inference.model.Variable;
diff --git a/src/dr/evomodel/branchratemodel/StrictClockBranchRates.java b/src/dr/evomodel/branchratemodel/StrictClockBranchRates.java
index 1ca1e02..38e1557 100644
--- a/src/dr/evomodel/branchratemodel/StrictClockBranchRates.java
+++ b/src/dr/evomodel/branchratemodel/StrictClockBranchRates.java
@@ -28,7 +28,6 @@ package dr.evomodel.branchratemodel;
 import dr.evolution.tree.NodeRef;
 import dr.evolution.tree.Tree;
 import dr.evomodelxml.branchratemodel.StrictClockBranchRatesParser;
-import dr.inference.model.AbstractModel;
 import dr.inference.model.Model;
 import dr.inference.model.Parameter;
 import dr.inference.model.Variable;
diff --git a/src/dr/evomodel/branchratemodel/TipBranchRateModel.java b/src/dr/evomodel/branchratemodel/TipBranchRateModel.java
index 8f3c700..ec8328c 100644
--- a/src/dr/evomodel/branchratemodel/TipBranchRateModel.java
+++ b/src/dr/evomodel/branchratemodel/TipBranchRateModel.java
@@ -27,7 +27,6 @@ package dr.evomodel.branchratemodel;
 
 import dr.evolution.tree.NodeRef;
 import dr.evolution.tree.Tree;
-import dr.inference.model.AbstractModel;
 import dr.inference.model.Model;
 import dr.inference.model.Parameter;
 import dr.inference.model.Variable;
diff --git a/src/dr/evomodel/clock/RateEvolutionLikelihood.java b/src/dr/evomodel/clock/RateEvolutionLikelihood.java
index e04e098..a68268f 100644
--- a/src/dr/evomodel/clock/RateEvolutionLikelihood.java
+++ b/src/dr/evomodel/clock/RateEvolutionLikelihood.java
@@ -28,10 +28,8 @@ package dr.evomodel.clock;
 import dr.evolution.tree.NodeRef;
 import dr.evolution.tree.Tree;
 import dr.evomodel.branchratemodel.AbstractBranchRateModel;
-import dr.evomodel.branchratemodel.BranchRateModel;
 import dr.evomodel.tree.TreeModel;
 import dr.evomodel.tree.TreeParameterModel;
-import dr.inference.model.AbstractModelLikelihood;
 import dr.inference.model.Model;
 import dr.inference.model.Parameter;
 import dr.inference.model.Variable;
diff --git a/src/dr/evomodel/clock/UniversalClock.java b/src/dr/evomodel/clock/UniversalClock.java
index a7f6992..968aa00 100644
--- a/src/dr/evomodel/clock/UniversalClock.java
+++ b/src/dr/evomodel/clock/UniversalClock.java
@@ -28,8 +28,6 @@ package dr.evomodel.clock;
 import dr.evolution.tree.NodeRef;
 import dr.evolution.tree.Tree;
 import dr.evomodel.branchratemodel.AbstractBranchRateModel;
-import dr.evomodel.branchratemodel.BranchRateModel;
-import dr.inference.model.AbstractModel;
 import dr.inference.model.Model;
 import dr.inference.model.Parameter;
 import dr.inference.model.Variable;
diff --git a/src/dr/evomodel/coalescent/AbstractCoalescentLikelihood.java b/src/dr/evomodel/coalescent/AbstractCoalescentLikelihood.java
index 5a476f8..29f3a72 100644
--- a/src/dr/evomodel/coalescent/AbstractCoalescentLikelihood.java
+++ b/src/dr/evomodel/coalescent/AbstractCoalescentLikelihood.java
@@ -84,6 +84,8 @@ public abstract class AbstractCoalescentLikelihood extends AbstractModelLikeliho
         storedIntervals = new Intervals(tree.getNodeCount());
         eventsKnown = false;
 
+        this.coalescentEventStatisticValues = new double[getNumberOfCoalescentEvents()];
+
         addStatistic(new DeltaStatistic());
 
         likelihoodKnown = false;
@@ -267,6 +269,10 @@ public abstract class AbstractCoalescentLikelihood extends AbstractModelLikeliho
         return intervals.getIntervalCount();
     }
 
+    public int getNumberOfCoalescentEvents() {
+        return tree.getExternalNodeCount()-1;
+    }
+
     public int getCoalescentIntervalLineageCount(int i) {
         if (!eventsKnown) {
             setupIntervals();
@@ -281,6 +287,24 @@ public abstract class AbstractCoalescentLikelihood extends AbstractModelLikeliho
         return intervals.getIntervalType(i);
     }
 
+    public double getCoalescentEventsStatisticValue(int i) {
+        if (i == 0) {
+            for (int j = 0; j < coalescentEventStatisticValues.length; j++) {
+                coalescentEventStatisticValues[j] = 0.0;
+            }
+            int counter = 0;
+            for (int j = 0; j < getCoalescentIntervalDimension(); j++) {
+                if (getCoalescentIntervalType(j) == IntervalType.COALESCENT) {
+                    this.coalescentEventStatisticValues[counter] += getCoalescentInterval(j) * (getCoalescentIntervalLineageCount(j) * (getCoalescentIntervalLineageCount(j) - 1.0)) / 2.0;
+                    counter++;
+                } else {
+                    this.coalescentEventStatisticValues[counter] += getCoalescentInterval(j) * (getCoalescentIntervalLineageCount(j) * (getCoalescentIntervalLineageCount(j) - 1.0)) / 2.0;
+                }
+            }
+        }
+        return coalescentEventStatisticValues[i];
+    }
+
     public String toString() {
         return Double.toString(logLikelihood);
 
@@ -335,4 +359,6 @@ public abstract class AbstractCoalescentLikelihood extends AbstractModelLikeliho
     private double storedLogLikelihood;
     protected boolean likelihoodKnown = false;
     private boolean storedLikelihoodKnown = false;
+
+    private double[] coalescentEventStatisticValues;
 }
\ No newline at end of file
diff --git a/src/dr/evomodel/coalescent/AsymptoticGrowthModel.java b/src/dr/evomodel/coalescent/AsymptoticGrowthModel.java
index 7522ee4..fb4d51c 100644
--- a/src/dr/evomodel/coalescent/AsymptoticGrowthModel.java
+++ b/src/dr/evomodel/coalescent/AsymptoticGrowthModel.java
@@ -28,7 +28,6 @@ package dr.evomodel.coalescent;
 import dr.evolution.coalescent.DemographicFunction;
 import dr.evolution.coalescent.FlexibleGrowth;
 import dr.evomodelxml.coalescent.AsymptoticGrowthModelParser;
-import dr.evomodelxml.coalescent.LogisticGrowthModelParser;
 import dr.inference.model.Parameter;
 
 /**
diff --git a/src/dr/evomodel/coalescent/CataclysmicDemographicModel.java b/src/dr/evomodel/coalescent/CataclysmicDemographicModel.java
index b7a6c4d..3af2d71 100644
--- a/src/dr/evomodel/coalescent/CataclysmicDemographicModel.java
+++ b/src/dr/evomodel/coalescent/CataclysmicDemographicModel.java
@@ -44,15 +44,17 @@ public class CataclysmicDemographicModel extends DemographicModel {
     /**
      * Construct demographic model with default settings
      */
-    public CataclysmicDemographicModel(Parameter N0Parameter, Parameter N1Parameter, Parameter growthRateParameter, Parameter timeParameter, Type units) {
+    public CataclysmicDemographicModel(Parameter N0Parameter, Parameter N1Parameter, Parameter growthRateParameter, Parameter timeParameter,
+                Type units, boolean useSpike) {
 
-        this(CataclysmicDemographicModelParser.CATACLYSM_MODEL, N0Parameter, N1Parameter, growthRateParameter, timeParameter, units);
+        this(CataclysmicDemographicModelParser.CATACLYSM_MODEL, N0Parameter, N1Parameter, growthRateParameter, timeParameter, units, useSpike);
     }
 
     /**
      * Construct demographic model with default settings
      */
-    public CataclysmicDemographicModel(String name, Parameter N0Parameter, Parameter N1Parameter, Parameter growthRateParameter, Parameter timeParameter, Type units) {
+    public CataclysmicDemographicModel(String name, Parameter N0Parameter, Parameter secondParam, Parameter growthRateParameter,
+                Parameter timeParameter, Type units, boolean useSpike) {
 
         super(name);
 
@@ -62,9 +64,14 @@ public class CataclysmicDemographicModel extends DemographicModel {
         addVariable(N0Parameter);
         N0Parameter.addBounds(new Parameter.DefaultBounds(Double.POSITIVE_INFINITY, 0.0, 1));
 
-        this.N1Parameter = N1Parameter;
-        addVariable(N1Parameter);
-        N1Parameter.addBounds(new Parameter.DefaultBounds(Double.POSITIVE_INFINITY, 0.0, 1));
+        if (useSpike) {
+            this.N1Parameter = secondParam;
+            N1Parameter.addBounds(new Parameter.DefaultBounds(Double.POSITIVE_INFINITY, 0.0, 1));
+            addVariable(N1Parameter);
+        } else {
+            this.declineRateParameter = secondParam;
+            addVariable(declineRateParameter);
+        }
 
         this.growthRateParameter = growthRateParameter;
         addVariable(growthRateParameter);
@@ -95,10 +102,10 @@ public class CataclysmicDemographicModel extends DemographicModel {
           */ // ..collapse to...
 
         double t = timeParameter.getParameterValue(0);
-        double declineRate = Math.log(N1Parameter.getParameterValue(0)) / t;
+        double declineRate = (declineRateParameter == null) ? Math.log(N1Parameter.getParameterValue(0)) / t :
+                declineRateParameter.getParameterValue(0);
         cataclysm.setDeclineRate(declineRate);
 
-
         return cataclysm;
     }
 
@@ -110,5 +117,6 @@ public class CataclysmicDemographicModel extends DemographicModel {
     Parameter N1Parameter = null;
     Parameter growthRateParameter = null;
     Parameter timeParameter = null;
+    Parameter declineRateParameter = null;
     CataclysmicDemographic cataclysm = null;
 }
diff --git a/src/dr/evomodel/coalescent/CoalescentEventsStatistic.java b/src/dr/evomodel/coalescent/CoalescentEventsStatistic.java
index 4376630..95f695e 100644
--- a/src/dr/evomodel/coalescent/CoalescentEventsStatistic.java
+++ b/src/dr/evomodel/coalescent/CoalescentEventsStatistic.java
@@ -25,7 +25,7 @@
 
 package dr.evomodel.coalescent;
 
-import dr.evolution.coalescent.IntervalType;
+//import dr.evolution.coalescent.IntervalType;
 import dr.inference.model.Statistic;
 
 /**
@@ -33,40 +33,54 @@ import dr.inference.model.Statistic;
  */
 public class CoalescentEventsStatistic extends Statistic.Abstract {
 
-    public static final boolean DEBUG = true;
+    //public static final boolean TEST_NEW_CODE = true;
 
-    private static final boolean FULL_FINAL_INTERVAL = true;
-    private static final boolean LOG_COMBINATIONS = true;
-    private static final boolean RETURN_RECIPROCAL = false;
+    public static final boolean DEBUG = false;
+
+    //private static final boolean FULL_FINAL_INTERVAL = true;
+    //private static final boolean LOG_COMBINATIONS = true;
+    //private static final boolean RETURN_RECIPROCAL = false;
 
     private final CoalescentIntervalProvider coalescent;
     //treeModel currently only required for debugging purposes
     //private TreeModel treeModel;
     private int coalescentEvents;
-    private double[] coalescentValues;
+    //private double[] coalescentValues;
 
     //public CoalescentEventsStatistic(CoalescentIntervalProvider coalescent, TreeModel treeModel) {
     public CoalescentEventsStatistic(CoalescentIntervalProvider coalescent) {
+
         this.coalescent = coalescent;
-        //this.treeModel = treeModel;
-        this.coalescentEvents = 0;
-        if (DEBUG) {
-            System.err.println("CoalescentIntervalDimension: " + coalescent.getCoalescentIntervalDimension());
-        }
-        if (coalescent instanceof GMRFSkyrideLikelihood) {
-            this.coalescentEvents = coalescent.getCoalescentIntervalDimension();
-        } else {
-            for (int i = 0; i < coalescent.getCoalescentIntervalDimension(); i++) {
-                //Not yet implemented for the skygrid model
-                if (coalescent.getCoalescentIntervalType(i) == IntervalType.COALESCENT) {
-                    coalescentEvents++;
+
+        //if (TEST_NEW_CODE) {
+
+        this.coalescentEvents = coalescent.getNumberOfCoalescentEvents();
+
+        /*} else {
+
+            this.coalescentEvents = 0;
+            if (coalescent instanceof GMRFSkyrideLikelihood) {
+                this.coalescentEvents = coalescent.getCoalescentIntervalDimension();
+                if (DEBUG) {
+                    System.err.println("CoalescentIntervalDimension: " + coalescent.getCoalescentIntervalDimension());
+                }
+            } else {
+                for (int i = 0; i < coalescent.getCoalescentIntervalDimension(); i++) {
+                    //Not yet implemented for the skygrid model
+                    if (coalescent.getCoalescentIntervalType(i) == IntervalType.COALESCENT) {
+                        coalescentEvents++;
+                    }
                 }
             }
-        }
-        //System.err.println("Number of coalescent events: " + this.coalescentEvents);
-        this.coalescentValues = new double[coalescentEvents];
+            if (DEBUG) {
+                System.err.println("Number of coalescent events: " + this.coalescentEvents);
+            }
+
+        }*/
+
+        //this.coalescentValues = new double[coalescentEvents];
         if (DEBUG) {
-            System.err.println("CoalescentEventsStatistic constructor: " + this.coalescentEvents);
+            System.err.println("CoalescentEventsStatistic dimension: " + this.coalescentEvents);
         }
     }
 
@@ -75,72 +89,122 @@ public class CoalescentEventsStatistic extends Statistic.Abstract {
     }
 
     public double getStatisticValue(int i) {
-        //System.err.println(treeModel);
-        //i will go from 0 to getDimension()
-        if (i == 0) {
-            //reset array of coalescent events
-            for (int j = 0; j < coalescentValues.length; j++) {
-                coalescentValues[j] = 0.0;
+
+        //if (TEST_NEW_CODE) {
+
+            return coalescent.getCoalescentEventsStatisticValue(i);
+
+        /*} else {
+
+            //System.err.println(treeModel);
+            //i will go from 0 to getDimension()
+            //GMRFSkyrideLikelihood
+            if (DEBUG) {
+                System.err.println("getStatisticValue(int i)");
             }
-            //recalculate everything
-            int counter = 0;
-            for (int j = 0; j < coalescent.getCoalescentIntervalDimension(); j++) {
-                if (coalescent instanceof GMRFSkyrideLikelihood) {
-                    this.coalescentValues[counter] = ((GMRFSkyrideLikelihood)coalescent).getSufficientStatistics()[j];
-                } else {
-                    //System.err.println(coalescent.getCoalescentIntervalType(j) + "   " + coalescent.getCoalescentInterval(j));
-                    if (coalescent.getCoalescentIntervalType(j) == IntervalType.COALESCENT) {
-                        if (LOG_COMBINATIONS) {
-                            this.coalescentValues[counter] += coalescent.getCoalescentInterval(j)*(coalescent.getCoalescentIntervalLineageCount(j)*(coalescent.getCoalescentIntervalLineageCount(j)-1.0))/2.0;
-                            //System.err.println("interval length: " + coalescent.getCoalescentInterval(j));
-                            //System.err.println("lineage count: " + coalescent.getCoalescentIntervalLineageCount(j));
-                            //System.err.println("factorial: " + (coalescent.getCoalescentIntervalLineageCount(j)*coalescent.getCoalescentIntervalLineageCount(j)-1.0)/2.0);
-                            //System.err.println("counter " + counter + ": " + this.coalescentValues[counter] + "\n");
-                            //this.coalescentValues[counter] += coalescent.getCoalescentInterval(j);
-                            //this.coalescentValues[counter] = (coalescent.getCoalescentIntervalLineageCount(j)*coalescent.getCoalescentIntervalLineageCount(j)-1.0)/(2.0*this.coalescentValues[counter]);
-                        } else {
-                            this.coalescentValues[counter] += coalescent.getCoalescentInterval(j);
-                        }
-                        counter++;
-                    } else if (!FULL_FINAL_INTERVAL) {
-                        if (coalescent.getCoalescentIntervalType(j) == IntervalType.SAMPLE && counter != 0) {
+            if (i == 0) {
+                if (DEBUG) {
+                    System.err.println("coalescentValues.length = " + coalescentValues.length);
+                }
+                //reset array of coalescent events
+                for (int j = 0; j < coalescentValues.length; j++) {
+                    coalescentValues[j] = 0.0;
+                }
+                //recalculate everything
+                int counter = 0;
+                if (DEBUG) {
+                    System.err.println("coalescent.getCoalescentIntervalDimension() = " + coalescent.getCoalescentIntervalDimension());
+                }
+                for (int j = 0; j < coalescent.getCoalescentIntervalDimension(); j++) {
+                    if (coalescent instanceof GMRFMultilocusSkyrideLikelihood) {
+                        if (coalescent.getCoalescentIntervalType(j) == IntervalType.COALESCENT) {
                             if (LOG_COMBINATIONS) {
                                 this.coalescentValues[counter] += coalescent.getCoalescentInterval(j)*(coalescent.getCoalescentIntervalLineageCount(j)*(coalescent.getCoalescentIntervalLineageCount(j)-1.0))/2.0;
-                                //System.err.println("interval length: " + coalescent.getCoalescentInterval(j));
-                                //System.err.println("lineage count: " + coalescent.getCoalescentIntervalLineageCount(j));
-                                //System.err.println("factorial: " + (coalescent.getCoalescentIntervalLineageCount(j)*coalescent.getCoalescentIntervalLineageCount(j)-1.0)/2.0);
-                                //System.err.println("counter " + counter + ": " + this.coalescentValues[counter] + "\n");
-                                //this.coalescentValues[counter] += coalescent.getCoalescentInterval(j);
                             } else {
                                 this.coalescentValues[counter] += coalescent.getCoalescentInterval(j);
                             }
+                            counter++;
+                        } else if (!FULL_FINAL_INTERVAL) {
+                            if (coalescent.getCoalescentIntervalType(j) == IntervalType.SAMPLE && counter != 0) {
+                                if (LOG_COMBINATIONS) {
+                                    this.coalescentValues[counter] += coalescent.getCoalescentInterval(j)*(coalescent.getCoalescentIntervalLineageCount(j)*(coalescent.getCoalescentIntervalLineageCount(j)-1.0))/2.0;
+                                } else {
+                                    this.coalescentValues[counter] += coalescent.getCoalescentInterval(j);
+                                }
+                            }
+                        } else {
+                            if (coalescent.getCoalescentIntervalType(j) == IntervalType.SAMPLE) {
+                                if (LOG_COMBINATIONS) {
+                                    this.coalescentValues[counter] += coalescent.getCoalescentInterval(j)*(coalescent.getCoalescentIntervalLineageCount(j)*(coalescent.getCoalescentIntervalLineageCount(j)-1.0))/2.0;
+                                } else {
+                                    this.coalescentValues[counter] += coalescent.getCoalescentInterval(j);
+                                }
+                            }
+                        }
+                    } else if (coalescent instanceof GMRFSkyrideLikelihood) {
+                        if (DEBUG) {
+                            System.err.println("counter = " + counter);
+                            System.err.println("((GMRFSkyrideLikelihood)coalescent).getSufficientStatistics()[" + j + "] = " + ((GMRFSkyrideLikelihood)coalescent).getSufficientStatistics()[j]);
                         }
+                        this.coalescentValues[counter] = ((GMRFSkyrideLikelihood)coalescent).getSufficientStatistics()[j];
+                        counter++;
                     } else {
-                        if (coalescent.getCoalescentIntervalType(j) == IntervalType.SAMPLE) {
+                        //System.err.println(coalescent.getCoalescentIntervalType(j) + "   " + coalescent.getCoalescentInterval(j));
+                        if (coalescent.getCoalescentIntervalType(j) == IntervalType.COALESCENT) {
                             if (LOG_COMBINATIONS) {
+                                this.coalescentValues[counter] += coalescent.getCoalescentInterval(j)*(coalescent.getCoalescentIntervalLineageCount(j)*(coalescent.getCoalescentIntervalLineageCount(j)-1.0))/2.0;
                                 //System.err.println("interval length: " + coalescent.getCoalescentInterval(j));
                                 //System.err.println("lineage count: " + coalescent.getCoalescentIntervalLineageCount(j));
                                 //System.err.println("factorial: " + (coalescent.getCoalescentIntervalLineageCount(j)*coalescent.getCoalescentIntervalLineageCount(j)-1.0)/2.0);
                                 //System.err.println("counter " + counter + ": " + this.coalescentValues[counter] + "\n");
-                                this.coalescentValues[counter] += coalescent.getCoalescentInterval(j)*(coalescent.getCoalescentIntervalLineageCount(j)*(coalescent.getCoalescentIntervalLineageCount(j)-1.0))/2.0;
                                 //this.coalescentValues[counter] += coalescent.getCoalescentInterval(j);
+                                //this.coalescentValues[counter] = (coalescent.getCoalescentIntervalLineageCount(j)*coalescent.getCoalescentIntervalLineageCount(j)-1.0)/(2.0*this.coalescentValues[counter]);
                             } else {
                                 this.coalescentValues[counter] += coalescent.getCoalescentInterval(j);
                             }
+                            counter++;
+                        } else if (!FULL_FINAL_INTERVAL) {
+                            if (coalescent.getCoalescentIntervalType(j) == IntervalType.SAMPLE && counter != 0) {
+                                if (LOG_COMBINATIONS) {
+                                    this.coalescentValues[counter] += coalescent.getCoalescentInterval(j)*(coalescent.getCoalescentIntervalLineageCount(j)*(coalescent.getCoalescentIntervalLineageCount(j)-1.0))/2.0;
+                                    //System.err.println("interval length: " + coalescent.getCoalescentInterval(j));
+                                    //System.err.println("lineage count: " + coalescent.getCoalescentIntervalLineageCount(j));
+                                    //System.err.println("factorial: " + (coalescent.getCoalescentIntervalLineageCount(j)*coalescent.getCoalescentIntervalLineageCount(j)-1.0)/2.0);
+                                    //System.err.println("counter " + counter + ": " + this.coalescentValues[counter] + "\n");
+                                    //this.coalescentValues[counter] += coalescent.getCoalescentInterval(j);
+                                } else {
+                                    this.coalescentValues[counter] += coalescent.getCoalescentInterval(j);
+                                }
+                            }
+                        } else {
+                            if (coalescent.getCoalescentIntervalType(j) == IntervalType.SAMPLE) {
+                                if (LOG_COMBINATIONS) {
+                                    //System.err.println("interval length: " + coalescent.getCoalescentInterval(j));
+                                    //System.err.println("lineage count: " + coalescent.getCoalescentIntervalLineageCount(j));
+                                    //System.err.println("factorial: " + (coalescent.getCoalescentIntervalLineageCount(j)*coalescent.getCoalescentIntervalLineageCount(j)-1.0)/2.0);
+                                    //System.err.println("counter " + counter + ": " + this.coalescentValues[counter] + "\n");
+                                    this.coalescentValues[counter] += coalescent.getCoalescentInterval(j)*(coalescent.getCoalescentIntervalLineageCount(j)*(coalescent.getCoalescentIntervalLineageCount(j)-1.0))/2.0;
+                                    //this.coalescentValues[counter] += coalescent.getCoalescentInterval(j);
+                                } else {
+                                    this.coalescentValues[counter] += coalescent.getCoalescentInterval(j);
+                                }
+                            }
                         }
                     }
                 }
             }
-        }
-    	/*for (int j = 0; j < this.coalescentEvents; j++) {
-    		System.err.println(this.coalescentValues[j]);
-    	}
-     	System.exit(0);*/
-        if (RETURN_RECIPROCAL) {
-            return 1.0/this.coalescentValues[i];
-        } else {
-            return this.coalescentValues[i];
-        }
+            //for (int j = 0; j < this.coalescentEvents; j++) {
+            //	System.err.println(this.coalescentValues[j]);
+            //}
+            //System.exit(0);
+            if (RETURN_RECIPROCAL) {
+                return 1.0/this.coalescentValues[i];
+            } else {
+                return this.coalescentValues[i];
+            }
+
+        }*/
+
     }
 
     public String getStatisticName() {
diff --git a/src/dr/evomodel/coalescent/CoalescentIntervalProvider.java b/src/dr/evomodel/coalescent/CoalescentIntervalProvider.java
index 4de2a8e..45c86f6 100644
--- a/src/dr/evomodel/coalescent/CoalescentIntervalProvider.java
+++ b/src/dr/evomodel/coalescent/CoalescentIntervalProvider.java
@@ -29,15 +29,23 @@ import dr.evolution.coalescent.IntervalType;
 
 /**
  * @author Guy Baele
- * @author Marc Suchard
+ * @author Marc A. Suchard
  */
 public interface CoalescentIntervalProvider {
 
-    public int getCoalescentIntervalDimension();
+    //Old interface does not require the coalescent to do the actual calculations
+    /*public int getCoalescentIntervalDimension();
 
     public double getCoalescentInterval(int i);
 
     public int getCoalescentIntervalLineageCount(int i);
 
-    public IntervalType getCoalescentIntervalType(int i);
+    public IntervalType getCoalescentIntervalType(int i);*/
+
+
+    //New interface
+    public int getNumberOfCoalescentEvents();
+
+    public double getCoalescentEventsStatisticValue(int i);
+
 }
diff --git a/src/dr/evomodel/coalescent/CoalescentIntervalStatistic.java b/src/dr/evomodel/coalescent/CoalescentIntervalStatistic.java
index bd9e661..0e12ef7 100644
--- a/src/dr/evomodel/coalescent/CoalescentIntervalStatistic.java
+++ b/src/dr/evomodel/coalescent/CoalescentIntervalStatistic.java
@@ -44,19 +44,27 @@ public class CoalescentIntervalStatistic extends Statistic.Abstract {
     }
 
     public int getDimension() {
-        return coalescent.getCoalescentIntervalDimension();
+
+        throw new RuntimeException("the use of CoalescentIntervalStatistic has been deprecated");
+
+        //return coalescent.getCoalescentIntervalDimension();
+
     }
 
     public double getStatisticValue(int i) {
-        double interval = coalescent.getCoalescentInterval(i);
+
+        throw new RuntimeException("the use of CoalescentIntervalStatistic has been deprecated");
+
+        /*double interval = coalescent.getCoalescentInterval(i);
 
         if (rescaleToNe) {
             int lineages = coalescent.getCoalescentIntervalLineageCount(i);
             interval *= Binomial.choose2(lineages);
             // TODO Double-check; maybe need to return 1/interval or divide by choose2(lineages)
-        }
+        }*/
+
+        //return interval;
 
-        return interval;
     }
     
     public String getStatisticName() {
diff --git a/src/dr/evomodel/coalescent/CoalescentLikelihood.java b/src/dr/evomodel/coalescent/CoalescentLikelihood.java
index ed2b1b6..b620747 100644
--- a/src/dr/evomodel/coalescent/CoalescentLikelihood.java
+++ b/src/dr/evomodel/coalescent/CoalescentLikelihood.java
@@ -77,7 +77,7 @@ public final class CoalescentLikelihood extends AbstractCoalescentLikelihood imp
         double lnL =  Coalescent.calculateLogLikelihood(getIntervals(), demoFunction, demoFunction.getThreshold());
 
 		if (Double.isNaN(lnL) || Double.isInfinite(lnL)) {
-			Logger.getLogger("warning").severe("CoalescentLikelihood is " + Double.toString(lnL));
+			Logger.getLogger("warning").severe("CoalescentLikelihood for " + demoModel.getId() + " is " + Double.toString(lnL));
 		}
 
 		return lnL;
diff --git a/src/dr/evomodel/coalescent/EmergingEpidemicModel.java b/src/dr/evomodel/coalescent/EmergingEpidemicModel.java
index f787736..5fe2415 100644
--- a/src/dr/evomodel/coalescent/EmergingEpidemicModel.java
+++ b/src/dr/evomodel/coalescent/EmergingEpidemicModel.java
@@ -29,7 +29,6 @@ import dr.evolution.coalescent.DemographicFunction;
 import dr.evolution.coalescent.ExponentialGrowth;
 import dr.evomodel.tree.TreeModel;
 import dr.evomodelxml.coalescent.EmergingEpidemicModelParser;
-import dr.evomodelxml.coalescent.ExponentialGrowthModelParser;
 import dr.inference.model.Parameter;
 import dr.inference.model.Statistic;
 
diff --git a/src/dr/evomodel/coalescent/ExponentialConstantModel.java b/src/dr/evomodel/coalescent/ExponentialConstantModel.java
index 4d01bc5..f64b66d 100644
--- a/src/dr/evomodel/coalescent/ExponentialConstantModel.java
+++ b/src/dr/evomodel/coalescent/ExponentialConstantModel.java
@@ -27,9 +27,7 @@ package dr.evomodel.coalescent;
 
 import dr.evolution.coalescent.DemographicFunction;
 import dr.evolution.coalescent.ExpConstant;
-import dr.evolution.coalescent.ExponentialLogistic;
 import dr.evomodelxml.coalescent.ExponentialConstantModelParser;
-import dr.evomodelxml.coalescent.ExponentialLogisticModelParser;
 import dr.inference.model.Parameter;
 
 /**
diff --git a/src/dr/evomodel/coalescent/ExponentialExponentialModel.java b/src/dr/evomodel/coalescent/ExponentialExponentialModel.java
index 707477e..c55dcc0 100644
--- a/src/dr/evomodel/coalescent/ExponentialExponentialModel.java
+++ b/src/dr/evomodel/coalescent/ExponentialExponentialModel.java
@@ -26,10 +26,7 @@
 package dr.evomodel.coalescent;
 
 import dr.evolution.coalescent.DemographicFunction;
-import dr.evolution.coalescent.ExpConstant;
 import dr.evolution.coalescent.ExponentialExponential;
-import dr.evolution.coalescent.PiecewiseExponentialPopulation;
-import dr.evomodelxml.coalescent.ExponentialConstantModelParser;
 import dr.evomodelxml.coalescent.ExponentialExponentialModelParser;
 import dr.inference.model.Parameter;
 
diff --git a/src/dr/evomodel/coalescent/ExponentialProductPosteriorMeansLikelihood.java b/src/dr/evomodel/coalescent/ExponentialProductPosteriorMeansLikelihood.java
index 3b018f5..5dc0b4c 100644
--- a/src/dr/evomodel/coalescent/ExponentialProductPosteriorMeansLikelihood.java
+++ b/src/dr/evomodel/coalescent/ExponentialProductPosteriorMeansLikelihood.java
@@ -38,7 +38,7 @@ public class ExponentialProductPosteriorMeansLikelihood extends Likelihood.Abstr
 	
 	//not used at the moment
 	public final static boolean FIXED_TREE = false;
-    public static final boolean DEBUG = true;
+    public static final boolean DEBUG = false;
 	
 	private TreeModel treeModel;
 	private double[] posteriorMeans;
diff --git a/src/dr/evomodel/coalescent/ExponentialSkythingLikelihood.java b/src/dr/evomodel/coalescent/ExponentialSkythingLikelihood.java
index 780b04d..9466393 100644
--- a/src/dr/evomodel/coalescent/ExponentialSkythingLikelihood.java
+++ b/src/dr/evomodel/coalescent/ExponentialSkythingLikelihood.java
@@ -25,14 +25,12 @@
 
 package dr.evomodel.coalescent;
 
-import dr.evolution.coalescent.ConstantPopulation;
 import dr.evolution.coalescent.ExponentialBSPGrowth;
 import dr.evolution.tree.Tree;
 import dr.evomodel.tree.TreeModel;
 import dr.evomodelxml.coalescent.BayesianSkylineLikelihoodParser;
 import dr.inference.model.Parameter;
 import dr.inference.model.Statistic;
-import dr.inference.model.Variable;
 import dr.math.MathUtils;
 
 import java.util.Date;
diff --git a/src/dr/evomodel/coalescent/GMRFDensityComponent.java b/src/dr/evomodel/coalescent/GMRFDensityComponent.java
index 85e3872..a432ee5 100644
--- a/src/dr/evomodel/coalescent/GMRFDensityComponent.java
+++ b/src/dr/evomodel/coalescent/GMRFDensityComponent.java
@@ -31,6 +31,10 @@ import dr.inference.loggers.LogColumn;
 import dr.inference.loggers.NumberColumn;
 import dr.xml.*;
 
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.Set;
+
 /**
  * @author Marc Suchard
  */
@@ -79,6 +83,11 @@ public class GMRFDensityComponent implements Likelihood {
         return skyride.prettyName() + tag;
     }
 
+    @Override
+    public Set<Likelihood> getLikelihoodSet() {
+        return new HashSet<Likelihood>(Arrays.asList(this));
+    }
+
     public boolean isUsed() {
         return skyride.isUsed();
     }
diff --git a/src/dr/evomodel/coalescent/GMRFMultilocusSkyrideLikelihood.java b/src/dr/evomodel/coalescent/GMRFMultilocusSkyrideLikelihood.java
index 735a9ac..0a9365f 100644
--- a/src/dr/evomodel/coalescent/GMRFMultilocusSkyrideLikelihood.java
+++ b/src/dr/evomodel/coalescent/GMRFMultilocusSkyrideLikelihood.java
@@ -45,8 +45,9 @@ import java.util.List;
  * @author Marc A. Suchard
  */
 
-public class GMRFMultilocusSkyrideLikelihood extends GMRFSkyrideLikelihood implements MultiLociTreeSet {
+public class GMRFMultilocusSkyrideLikelihood extends GMRFSkyrideLikelihood implements MultiLociTreeSet, CoalescentIntervalProvider {
 
+    public static final boolean DEBUG = false;
 
     private double cutOff;
     private int numGridPoints;
@@ -61,7 +62,6 @@ public class GMRFMultilocusSkyrideLikelihood extends GMRFSkyrideLikelihood imple
     // sortedPoints[i][1] is 0 if the i-th point is a grid point, 1 if it's a sampling point, and 2 if it's a coalescent point
     // sortedPoints[i][2] is the number of lineages present in the interval starting at time sortedPoints[i][0]
 
-
     protected Parameter phiParameter;
     protected Parameter ploidyFactors;
     protected double[] ploidySums;
@@ -69,6 +69,8 @@ public class GMRFMultilocusSkyrideLikelihood extends GMRFSkyrideLikelihood imple
     protected SymmTridiagMatrix precMatrix;
     protected SymmTridiagMatrix storedPrecMatrix;
 
+    private double[] coalescentEventStatisticValues;
+
     public GMRFMultilocusSkyrideLikelihood(List<Tree> treeList,
                                            Parameter popParameter,
                                            Parameter groupParameter,
@@ -159,6 +161,8 @@ public class GMRFMultilocusSkyrideLikelihood extends GMRFSkyrideLikelihood imple
                 groupSizeParameter.setParameterValue(i, 1.0);
         }
 
+        this.coalescentEventStatisticValues = new double[getNumberOfCoalescentEvents()];
+
     }
 
 
@@ -346,7 +350,6 @@ public class GMRFMultilocusSkyrideLikelihood extends GMRFSkyrideLikelihood imple
                 nextTime = intervalsList.get(i).getIntervalTime(currentTimeIndex + 1);
             }
 
-
             numLineages = intervalsList.get(i).getLineageCount(currentTimeIndex + 1);
             minGridIndex = 0;
             while (minGridIndex < numGridPoints - 1 && gridPoints[minGridIndex] <= currentTime) {
@@ -520,6 +523,46 @@ public class GMRFMultilocusSkyrideLikelihood extends GMRFSkyrideLikelihood imple
         return numCoalEvents;
     }
 
+    public int getNumberOfCoalescentEvents() {
+        return getCorrectOldFieldLength();
+    }
+
+    public double getCoalescentEventsStatisticValue(int i) {
+        if (i == 0) {
+
+            if (DEBUG) {
+                System.err.println("numTrees: " + numTrees);
+                System.err.println("getCoalescentIntervalDimension(): " + super.getCoalescentIntervalDimension());
+                System.err.println("getNumberOfCoalescentEvents(): " + getNumberOfCoalescentEvents());
+                System.err.println("getIntervalCount(): " + getIntervalCount());
+                System.err.println("intervalsList.size(): " + intervalsList.size());
+                System.err.println("intervalsList.get(0).getIntervalCount(): " + intervalsList.get(0).getIntervalCount());
+            }
+
+            if (numTrees > 1) {
+                throw new RuntimeException("Generalized stepping-stone sampling for the Skygrid not implemented for #trees > 1");
+            }
+            for (int j = 0; j < coalescentEventStatisticValues.length; j++) {
+                coalescentEventStatisticValues[j] = 0.0;
+            }
+            int counter = 0;
+
+            for (int j = 0; j < intervalsList.get(0).getIntervalCount(); j++) {
+                if (intervalsList.get(0).getIntervalType(j) == IntervalType.COALESCENT) {
+                    //this.coalescentEventStatisticValues[counter] += getCoalescentInterval(j) * (getLineageCount(j) * (getLineageCount(j) - 1.0)) / 2.0;
+                    this.coalescentEventStatisticValues[counter] += intervalsList.get(0).getInterval(j) * (intervalsList.get(0).getLineageCount(j) * (intervalsList.get(0).getLineageCount(j) - 1.0)) / 2.0;
+                    counter++;
+                } else {
+                    //this.coalescentEventStatisticValues[counter] += getCoalescentInterval(j) * (getLineageCount(j) * (getLineageCount(j) - 1.0)) / 2.0;
+                    this.coalescentEventStatisticValues[counter] += intervalsList.get(0).getInterval(j) * (intervalsList.get(0).getLineageCount(j) * (intervalsList.get(0).getLineageCount(j) - 1.0)) / 2.0;
+                }
+            }
+        }
+        return coalescentEventStatisticValues[i];
+        //throw new RuntimeException("getCoalescentEventsStatisticValue(int i) not implemented for Bayesian Skygrid");
+        //return sufficientStatistics[i];
+    }
+
     protected double calculateLogCoalescentLikelihood() {
 
         if (!intervalsKnown) {
@@ -668,12 +711,12 @@ public class GMRFMultilocusSkyrideLikelihood extends GMRFSkyrideLikelihood imple
         storedPloidySums = tmp2;
     }
 
-    public int getCoalescentIntervalLineageCount(int i) {
-        return 0;  //To change body of implemented methods use File | Settings | File Templates.
+    /*public int getCoalescentIntervalLineageCount(int i) {
+        return 0;
     }
 
     public IntervalType getCoalescentIntervalType(int i) {
-        return null;  //To change body of implemented methods use File | Settings | File Templates.
-    }
+        return null;
+    }*/
 }
 
diff --git a/src/dr/evomodel/coalescent/GMRFSkyrideLikelihood.java b/src/dr/evomodel/coalescent/GMRFSkyrideLikelihood.java
index 6886eab..7f8f32f 100644
--- a/src/dr/evomodel/coalescent/GMRFSkyrideLikelihood.java
+++ b/src/dr/evomodel/coalescent/GMRFSkyrideLikelihood.java
@@ -358,12 +358,20 @@ public class GMRFSkyrideLikelihood extends OldAbstractCoalescentLikelihood imple
         return coalescentIntervals[i];
     }
 
-    public int getCoalescentIntervalLineageCount(int i) {
+    /*public int getCoalescentIntervalLineageCount(int i) {
         throw new RuntimeException("Not yet implemented");
     }
 
     public IntervalType getCoalescentIntervalType(int i) {
-        throw new RuntimeException("Not yet implemented");
+        throw new RuntimeException("getCoalescentIntervalType(int i) in GMRFSkyrideLikelihood not yet implemented");
+    }*/
+
+    public int getNumberOfCoalescentEvents() {
+        return tree.getExternalNodeCount() - 1;
+    }
+
+    public double getCoalescentEventsStatisticValue(int i) {
+        return sufficientStatistics[i];
     }
 
     public double[] getCoalescentIntervalHeights() {
diff --git a/src/dr/evomodel/coalescent/GaussianProcessMultilocusSkytrackLikelihood.java b/src/dr/evomodel/coalescent/GaussianProcessMultilocusSkytrackLikelihood.java
index 55f8cdf..cbc2bb0 100644
--- a/src/dr/evomodel/coalescent/GaussianProcessMultilocusSkytrackLikelihood.java
+++ b/src/dr/evomodel/coalescent/GaussianProcessMultilocusSkytrackLikelihood.java
@@ -31,7 +31,6 @@ import dr.evolution.tree.Tree;
 import dr.evomodel.tree.TreeModel;
 //import dr.evomodelxml.coalescent.GMRFSkyrideLikelihoodParser;
 import dr.evomodelxml.coalescent.GaussianProcessSkytrackLikelihoodParser;
-import dr.inference.model.MatrixParameter;
 import dr.inference.model.Model;
 import dr.inference.model.Parameter;
 //import no.uib.cipr.matrix.DenseVector;
diff --git a/src/dr/evomodel/coalescent/LinearGrowthModel.java b/src/dr/evomodel/coalescent/LinearGrowthModel.java
index bf3f807..bf5b470 100644
--- a/src/dr/evomodel/coalescent/LinearGrowthModel.java
+++ b/src/dr/evomodel/coalescent/LinearGrowthModel.java
@@ -25,10 +25,8 @@
 
 package dr.evomodel.coalescent;
 
-import dr.evolution.coalescent.ConstantPopulation;
 import dr.evolution.coalescent.DemographicFunction;
 import dr.evolution.coalescent.LinearGrowth;
-import dr.evomodelxml.coalescent.ConstantPopulationModelParser;
 import dr.evomodelxml.coalescent.LinearGrowthModelParser;
 import dr.inference.model.Parameter;
 
diff --git a/src/dr/evomodel/coalescent/ExponentialConstantModel.java b/src/dr/evomodel/coalescent/MultiEpochExponentialModel.java
similarity index 54%
copy from src/dr/evomodel/coalescent/ExponentialConstantModel.java
copy to src/dr/evomodel/coalescent/MultiEpochExponentialModel.java
index 4d01bc5..076e8f7 100644
--- a/src/dr/evomodel/coalescent/ExponentialConstantModel.java
+++ b/src/dr/evomodel/coalescent/MultiEpochExponentialModel.java
@@ -1,5 +1,5 @@
 /*
- * ExponentialConstantModel.java
+ * MultiEpochExponentialModel.java
  *
  * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
  *
@@ -26,49 +26,33 @@
 package dr.evomodel.coalescent;
 
 import dr.evolution.coalescent.DemographicFunction;
-import dr.evolution.coalescent.ExpConstant;
-import dr.evolution.coalescent.ExponentialLogistic;
-import dr.evomodelxml.coalescent.ExponentialConstantModelParser;
-import dr.evomodelxml.coalescent.ExponentialLogisticModelParser;
+import dr.evolution.coalescent.ExponentialExponential;
+import dr.evolution.coalescent.MultiEpochExponential;
 import dr.inference.model.Parameter;
 
 /**
- * Exponential growth followed by constant size.
- *
- * @author Matthew Hall
+ * @author Marc A. Suchard
+ * @author Andrew Rambaut
  */
-public class ExponentialConstantModel extends DemographicModel {
-
-    //
-    // Public stuff
-    //
-
-    /**
-     * Construct demographic model with default settings
-     */
-    public ExponentialConstantModel(Parameter N0Parameter,
-                                    Parameter growthRateParameter,
-                                    Parameter transitionTimeParameter,
-                                    Type units) {
-
-        this(ExponentialConstantModelParser.EXPONENTIAL_CONSTANT_MODEL,
-                N0Parameter,
-                growthRateParameter,
-                transitionTimeParameter,
-                units);
-    }
+public class MultiEpochExponentialModel extends DemographicModel {
 
     /**
      * Construct demographic model with default settings
      */
-    public ExponentialConstantModel(String name, Parameter N0Parameter,
-                                    Parameter growthRateParameter,
-                                    Parameter transitionTimeParameter,
-                                    Type units) {
+    public MultiEpochExponentialModel(String name,
+                                      Parameter N0Parameter,
+                                      Parameter growthRateParameter,
+                                      Parameter transitionTimeParameter,
+                                      Type units) {
 
         super(name);
 
-        exponentialConstant = new ExpConstant(units);
+        int numEpoch = growthRateParameter.getDimension();
+        if (numEpoch != transitionTimeParameter.getDimension() + 1) {
+            throw new IllegalArgumentException("Invalid parameter dimensions");
+        }
+
+        multiEpochExponential = new MultiEpochExponential(units, numEpoch);
 
         this.N0Parameter = N0Parameter;
         addVariable(N0Parameter);
@@ -76,12 +60,13 @@ public class ExponentialConstantModel extends DemographicModel {
 
         this.growthRateParameter = growthRateParameter;
         addVariable(growthRateParameter);
-        growthRateParameter.addBounds(new Parameter.DefaultBounds(Double.POSITIVE_INFINITY, 0.0, 1));
+        growthRateParameter.addBounds(new Parameter.DefaultBounds(Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY,
+                growthRateParameter.getDimension()));
 
         this.transitionTimeParameter = transitionTimeParameter;
         addVariable(transitionTimeParameter);
         transitionTimeParameter.addBounds(new Parameter.DefaultBounds(Double.POSITIVE_INFINITY,
-                Double.NEGATIVE_INFINITY, 1));
+                0.0, transitionTimeParameter.getDimension()));
 
         setUnits(units);
     }
@@ -90,13 +75,19 @@ public class ExponentialConstantModel extends DemographicModel {
     // general functions
 
     public DemographicFunction getDemographicFunction() {
-        exponentialConstant.setN0(N0Parameter.getParameterValue(0));
+        multiEpochExponential.setN0(N0Parameter.getParameterValue(0));
 
-        exponentialConstant.setGrowthRate(growthRateParameter.getParameterValue(0));
+        for (int i = 0; i < growthRateParameter.getDimension(); ++i) {
+            multiEpochExponential.setGrowthRate(i, growthRateParameter.getParameterValue(i));
+        }
 
-        exponentialConstant.setTransitionTime(transitionTimeParameter.getParameterValue(0));
+        double totalTime = 0.0;
+        for (int i = 0; i < transitionTimeParameter.getDimension(); ++i) {
+            totalTime += transitionTimeParameter.getParameterValue(i);
+            multiEpochExponential.setTransitionTime(i, totalTime);
+        }
 
-        return exponentialConstant;
+        return multiEpochExponential;
     }
 
     //
@@ -106,5 +97,5 @@ public class ExponentialConstantModel extends DemographicModel {
     Parameter N0Parameter = null;
     Parameter growthRateParameter = null;
     Parameter transitionTimeParameter = null;
-    ExpConstant exponentialConstant = null;
+    MultiEpochExponential multiEpochExponential = null;
 }
\ No newline at end of file
diff --git a/src/dr/evomodel/coalescent/PeakAndDeclineModel.java b/src/dr/evomodel/coalescent/PeakAndDeclineModel.java
index 7782ee6..c2e75cf 100644
--- a/src/dr/evomodel/coalescent/PeakAndDeclineModel.java
+++ b/src/dr/evomodel/coalescent/PeakAndDeclineModel.java
@@ -27,8 +27,6 @@ package dr.evomodel.coalescent;
 
 import dr.evolution.coalescent.DemographicFunction;
 import dr.evolution.coalescent.FlexibleGrowth;
-import dr.evolution.coalescent.LogisticGrowth;
-import dr.evomodelxml.coalescent.LogisticGrowthModelParser;
 import dr.evomodelxml.coalescent.PeakAndDeclineModelParser;
 import dr.inference.model.Parameter;
 
diff --git a/src/dr/evomodel/coalescent/PowerLawGrowthModel.java b/src/dr/evomodel/coalescent/PowerLawGrowthModel.java
index dd8f1b3..af4bd6f 100644
--- a/src/dr/evomodel/coalescent/PowerLawGrowthModel.java
+++ b/src/dr/evomodel/coalescent/PowerLawGrowthModel.java
@@ -26,9 +26,7 @@
 package dr.evomodel.coalescent;
 
 import dr.evolution.coalescent.DemographicFunction;
-import dr.evolution.coalescent.ExponentialGrowth;
 import dr.evolution.coalescent.PowerLawGrowth;
-import dr.evomodelxml.coalescent.ExponentialGrowthModelParser;
 import dr.evomodelxml.coalescent.PowerLawGrowthModelParser;
 import dr.inference.model.Parameter;
 
diff --git a/src/dr/evomodel/coalescent/TwoEpochDemographicModel.java b/src/dr/evomodel/coalescent/TwoEpochDemographicModel.java
index 417df6e..4ef20a1 100644
--- a/src/dr/evomodel/coalescent/TwoEpochDemographicModel.java
+++ b/src/dr/evomodel/coalescent/TwoEpochDemographicModel.java
@@ -61,13 +61,13 @@ public class TwoEpochDemographicModel extends DemographicModel {
         this.demo1 = demo1;
         addModel(demo1);
         for (int i = 0; i < demo1.getVariableCount(); i++) {
-            addVariable((Parameter)demo1.getVariable(i));
+            addVariable((Parameter) demo1.getVariable(i));
         }
 
         this.demo2 = demo2;
         addModel(demo2);
         for (int i = 0; i < demo2.getVariableCount(); i++) {
-            addVariable((Parameter)demo2.getVariable(i));
+            addVariable((Parameter) demo2.getVariable(i));
         }
 
         this.transitionTimeParameter = transitionTimeParameter;
diff --git a/src/dr/evomodel/continuous/AbstractMultivariateTraitLikelihood.java b/src/dr/evomodel/continuous/AbstractMultivariateTraitLikelihood.java
index 649a657..1bdc387 100644
--- a/src/dr/evomodel/continuous/AbstractMultivariateTraitLikelihood.java
+++ b/src/dr/evomodel/continuous/AbstractMultivariateTraitLikelihood.java
@@ -34,7 +34,6 @@ import dr.inference.distribution.MultivariateDistributionLikelihood;
 import dr.inference.loggers.LogColumn;
 import dr.inference.loggers.NumberColumn;
 import dr.inference.model.*;
-import dr.math.MathUtils;
 import dr.math.distributions.MultivariateDistribution;
 import dr.math.distributions.MultivariateNormalDistribution;
 import dr.stats.DiscreteStatistics;
@@ -43,7 +42,6 @@ import dr.util.Citable;
 import dr.util.Citation;
 import dr.util.CommonCitations;
 import dr.xml.*;
-import org.apache.commons.math.stat.descriptive.DescriptiveStatistics;
 import org.w3c.dom.Document;
 import org.w3c.dom.Element;
 
diff --git a/src/dr/evomodel/continuous/FullyConjugateMultivariateTraitLikelihood.java b/src/dr/evomodel/continuous/FullyConjugateMultivariateTraitLikelihood.java
index 73b1874..c0f72bd 100644
--- a/src/dr/evomodel/continuous/FullyConjugateMultivariateTraitLikelihood.java
+++ b/src/dr/evomodel/continuous/FullyConjugateMultivariateTraitLikelihood.java
@@ -29,9 +29,7 @@ import dr.evolution.tree.MultivariateTraitTree;
 import dr.evolution.tree.NodeRef;
 import dr.evolution.tree.Tree;
 import dr.evomodel.branchratemodel.BranchRateModel;
-import dr.inference.model.CompoundParameter;
-import dr.inference.model.Model;
-import dr.inference.model.Parameter;
+import dr.inference.model.*;
 import dr.math.KroneckerOperation;
 import dr.math.distributions.MultivariateNormalDistribution;
 import dr.math.distributions.NormalDistribution;
@@ -237,9 +235,35 @@ public class FullyConjugateMultivariateTraitLikelihood extends IntegratedMultiva
         super.handleModelChangedEvent(model, object, index);
     }
 
+    @Override
+    protected void handleVariableChangedEvent(Variable variable, int index, Parameter.ChangeType type){
+        if(variable==traitParameter &&(Parameter.ChangeType.ADDED==type || Parameter.ChangeType.REMOVED==type)){
+            dimKnown=false;
+        }
+        PostPreKnown=false;
+        super.handleVariableChangedEvent(variable,index,type);
+    }
+
+    @Override
+    public void storeState() {
+        super.storeState();
+        storedPostPreKnown=PostPreKnown;
+        storedDimKnown=dimKnown;
+        if(preP!=null)
+         System.arraycopy(preP, 0, storedPreP, 0,preP.length);
+        if(preMeans!=null)
+         System.arraycopy(preMeans, 0, storedPreMeans, 0, preMeans.length);
+
+    }
+
+    @Override
     public void restoreState() {
         super.restoreState();
+        PostPreKnown=storedPostPreKnown;
         priorInformationKnown = false;
+        preP=storedPreP;
+        preMeans=storedPreMeans;
+        dimKnown=storedDimKnown;
     }
 
     public void makeDirty() {
@@ -260,6 +284,155 @@ public class FullyConjugateMultivariateTraitLikelihood extends IntegratedMultiva
         return computeWishartStatistics;
     }
 
+    public void doPreOrderTraversal(NodeRef node) {
+
+        if(preP==null){
+            preP=new double[treeModel.getNodeCount()];
+            storedPreP=new double[treeModel.getNodeCount()];
+        }
+        if(!dimKnown){
+            preMeans=new double[treeModel.getNodeCount()][getRootNodeTrait().length];
+            storedPreMeans=new double[treeModel.getNodeCount()][getRootNodeTrait().length];
+            dimKnown=true;
+        }
+
+        final int thisNumber = node.getNumber();
+
+
+        if (treeModel.isRoot(node)) {
+            preP[thisNumber] = rootPriorSampleSize;
+            for (int j = 0; j < dim; j++) {
+                preMeans[thisNumber][j] = rootPriorMean[j];
+            }
+
+
+        } else {
+
+            final NodeRef parentNode = treeModel.getParent(node);
+            final NodeRef sibNode = getSisterNode(node);
+
+            final int parentNumber = parentNode.getNumber();
+            final int sibNumber = sibNode.getNumber();
+
+
+
+	/*
+
+			  if (treeModel.isRoot(parentNode)){
+				  //partial precisions
+				    final double precisionParent = rootPriorSampleSize;
+			        final double precisionSib = postP[sibNumber];
+			        final double thisPrecision=1/treeModel.getBranchLength(node);
+			        double tp= precisionParent + precisionSib;
+			        preP[thisNumber]= tp*thisPrecision/(tp+thisPrecision);
+
+			        //partial means
+
+			        for (int j =0; j<dim;j++){
+			        	preMeans[thisNumber][j] = (precisionParent*preMeans[parentNumber][j] + precisionSib*rootPriorMean[j])/(precisionParent+precisionSib);
+			        }
+
+			  }else{
+	*/
+            //partial precisions
+            final double precisionParent = preP[parentNumber];
+            final double precisionSib = upperPrecisionCache[sibNumber];
+            final double thisPrecision = 1 / getRescaledBranchLengthForPrecision(node);
+            double tp = precisionParent + precisionSib;
+            preP[thisNumber] = tp * thisPrecision / (tp + thisPrecision);
+
+            //partial means
+
+            for (int j = 0; j < dim; j++) {
+                preMeans[thisNumber][j] = (precisionParent * preMeans[parentNumber][j] + precisionSib * cacheHelper.getMeanCache()[sibNumber*dim+j]) / (precisionParent + precisionSib);
+            }
+        }
+
+        if (treeModel.isExternal(node)) {
+            return;
+        } else {
+            doPreOrderTraversal(treeModel.getChild(node, 0));
+            doPreOrderTraversal(treeModel.getChild(node, 1));
+
+        }
+
+    }
+
+    public NodeRef getSisterNode(NodeRef node) {
+        NodeRef sib0 = treeModel.getChild(treeModel.getParent(node), 0);
+        NodeRef sib1 = treeModel.getChild(treeModel.getParent(node), 1);
+
+
+        if (sib0 == node) {
+            return sib1;
+        } else return sib0;
+
+    }
+
+    public double[] getConditionalMean(int taxa){
+        setup();
+
+
+//            double[] answer=new double[getRootNodeTrait().length];
+
+        double[] mean = new double[dim];
+        for (int i = 0; i < dim; i++) {
+            mean[i] = preMeans[taxa][i];
+        }
+
+        return mean;
+    }
+
+    public double getPrecisionFactor(int taxa){
+        setup();
+        return preP[taxa];
+    }
+
+    public double[][] getConditionalPrecision(int taxa){
+         setup();
+
+
+
+
+        double[][] precisionParam =diffusionModel.getPrecisionmatrix();
+//        double[][] answer=new double[getRootNodeTrait().length][ getRootNodeTrait().length];
+        double p = getPrecisionFactor(taxa);
+
+        double[][] thisP = new double[dim][dim];
+
+        for (int i = 0; i < dim; i++) {
+            for (int j = 0; j < dim; j++) {
+//                System.out.println("P: "+p);
+//                System.out.println("I: "+i+", J: "+j+" value:"+precisionParam[i][j]);
+                thisP[i][j] = p * precisionParam[i][ j];
+
+            }
+        }
+
+        return thisP;
+
+    }
+
+    private void setup(){
+        if(!PostPreKnown){
+            double[][] traitPrecision = diffusionModel.getPrecisionmatrix();
+            double logDetTraitPrecision = Math.log(diffusionModel.getDeterminantPrecisionMatrix());
+
+            final boolean computeWishartStatistics = getComputeWishartSufficientStatistics();
+
+            if (computeWishartStatistics) {
+                wishartStatistics = new WishartSufficientStatistics(dimTrait);
+            }
+
+            // Use dynamic programming to compute conditional likelihoods at each internal node
+            postOrderTraverse(treeModel, treeModel.getRoot(), traitPrecision, logDetTraitPrecision, computeWishartStatistics);
+
+            doPreOrderTraversal(treeModel.getRoot());}
+        PostPreKnown=true;
+
+    }
+
+
     protected void checkLogLikelihood(double loglikelihood, double logRemainders,
                                       double[] conditionalRootMean, double conditionalRootPrecision,
                                       double[][] traitPrecision) {
@@ -387,9 +560,21 @@ public class FullyConjugateMultivariateTraitLikelihood extends IntegratedMultiva
     protected double[] rootPriorMean;
     protected double rootPriorSampleSize;
 
+    double[] preP;
+    double[][] preMeans;
+
+    double[] storedPreP;
+    double[][] storedPreMeans;
+
+    Boolean PostPreKnown=false;
+    Boolean storedPostPreKnown=false;
+
     private boolean priorInformationKnown = false;
     private double zBz; // Prior sum-of-squares contribution
 
+    private boolean dimKnown=false;
+    private boolean storedDimKnown=false;
+
     protected boolean computeWishartStatistics = false;
     private double[] ascertainedData = null;
     private static final boolean DEBUG_ASCERTAINMENT = false;
diff --git a/src/dr/evomodel/continuous/GaussianProcessFromTree.java b/src/dr/evomodel/continuous/GaussianProcessFromTree.java
index 9f3667a..53a2357 100644
--- a/src/dr/evomodel/continuous/GaussianProcessFromTree.java
+++ b/src/dr/evomodel/continuous/GaussianProcessFromTree.java
@@ -50,14 +50,16 @@ public class GaussianProcessFromTree implements GaussianProcessRandomGenerator {
         return traitModel;
     }
 
+    public double getLogLikelihood() { return traitModel.getLogLikelihood(); }
+
     //    boolean firstTime=true;
     public double[] nextRandomFast() {
 
-        double[] random = new double[traitModel.getTreeModel().getExternalNodeCount()*traitModel.getDimTrait()];
+        double[] random = new double[traitModel.getTreeModel().getExternalNodeCount() * traitModel.getDimTrait()];
         NodeRef root = traitModel.getTreeModel().getRoot();
-        double[] traitStart=traitModel.getPriorMean();
-        double[][] varianceCholesky=null;
-        double[][] temp= new SymmetricMatrix(traitModel.getDiffusionModel().getPrecisionmatrix()).inverse().toComponents();
+        double[] traitStart = traitModel.getPriorMean();
+        double[][] varianceCholesky = null;
+        double[][] temp = new SymmetricMatrix(traitModel.getDiffusionModel().getPrecisionmatrix()).inverse().toComponents();
         try {
             varianceCholesky = (new CholeskyDecomposition(temp).getL());
         } catch (IllegalDimension illegalDimension) {
@@ -89,7 +91,17 @@ public class GaussianProcessFromTree implements GaussianProcessRandomGenerator {
 //            }
 //            firstTime=false;
 //        }
-        nextRandomFast(traitStart, root, random, varianceCholesky);
+        if (USE_BUFFER) {
+            final int length = traitModel.getDimTrait();
+            final int nodeCount = traitModel.getTreeModel().getNodeCount();
+            double[] currentValue = new double[(nodeCount + 1) * length];
+            double[] epsilon = new double[length];
+            final int priorOffset = nodeCount * length;
+            System.arraycopy(traitStart, 0, currentValue, priorOffset, length);
+            nextRandomFast2(currentValue, priorOffset, root, random, varianceCholesky, epsilon);
+        } else {
+            nextRandomFast(traitStart, root, random, varianceCholesky);
+        }
 //        }
         return random;
     }
@@ -97,7 +109,7 @@ public class GaussianProcessFromTree implements GaussianProcessRandomGenerator {
     private void nextRandomFast(double[] currentValue, NodeRef currentNode, double[] random, double[][] varianceCholesky) {
 
         double rescaledLength = (traitModel.getTreeModel().isRoot(currentNode)) ?
-            1.0 / traitModel.getPriorSampleSize() :
+                1.0 / traitModel.getPriorSampleSize() :
                 traitModel.getRescaledBranchLengthForPrecision(currentNode);
 
         double scale = Math.sqrt(rescaledLength);
@@ -115,6 +127,43 @@ public class GaussianProcessFromTree implements GaussianProcessRandomGenerator {
         }
     }
 
+    private void nextRandomFast2(double[] currentValue, int parentOffset, NodeRef currentNode, double[] random,
+                                 double[][] varianceCholesky, double[] epsilon) {
+
+        final int length = varianceCholesky.length;
+
+        double rescaledLength = (traitModel.getTreeModel().isRoot(currentNode)) ?
+                1.0 / traitModel.getPriorSampleSize() :
+                traitModel.getRescaledBranchLengthForPrecision(currentNode);
+
+        double scale = Math.sqrt(rescaledLength);
+
+        final int currentOffset = currentNode.getNumber() * length;
+
+        // draw ~ MNV(mean = currentValue at parent, variance = scale * scale * L^t L)
+        MultivariateNormalDistribution.nextMultivariateNormalCholesky(
+                currentValue, parentOffset, // mean at parent
+                varianceCholesky, scale,
+                currentValue, currentOffset, // result at current
+                epsilon);
+
+        if (traitModel.getTreeModel().isExternal(currentNode)) {
+            System.arraycopy(
+                    currentValue, currentOffset, // result at tip
+                    random, currentOffset, // into final results buffer
+                    length);
+        } else {
+            int childCount = traitModel.getTreeModel().getChildCount(currentNode);
+            for (int i = 0; i < childCount; i++) {
+                nextRandomFast2(
+                        currentValue, currentOffset,
+                        traitModel.getTreeModel().getChild(currentNode, i),
+                        random, varianceCholesky, epsilon);
+            }
+        }
+    }
+
+
     @Override
     public Object nextRandom() {
         return nextRandomFast();
@@ -132,4 +181,6 @@ public class GaussianProcessFromTree implements GaussianProcessRandomGenerator {
 
         return traitModel.getLogLikelihood();
     }
+
+    private static final boolean USE_BUFFER = true;
 }
diff --git a/src/dr/evomodel/continuous/IntegratedMultivariateTraitLikelihood.java b/src/dr/evomodel/continuous/IntegratedMultivariateTraitLikelihood.java
index 0580e6c..5059d69 100644
--- a/src/dr/evomodel/continuous/IntegratedMultivariateTraitLikelihood.java
+++ b/src/dr/evomodel/continuous/IntegratedMultivariateTraitLikelihood.java
@@ -85,7 +85,7 @@ public abstract class IntegratedMultivariateTraitLikelihood extends AbstractMult
     }
 
 
-    private final CacheHelper cacheHelper;
+    protected final CacheHelper cacheHelper;
 
     public IntegratedMultivariateTraitLikelihood(String traitName,
                                                  MultivariateTraitTree treeModel,
diff --git a/src/dr/evomodel/epidemiology/SIRModel.java b/src/dr/evomodel/epidemiology/SIRModel.java
index fb1d62b..4c3c85b 100644
--- a/src/dr/evomodel/epidemiology/SIRModel.java
+++ b/src/dr/evomodel/epidemiology/SIRModel.java
@@ -32,6 +32,10 @@ import dr.inference.loggers.NumberColumn;
 import dr.inference.model.*;
 import dr.math.distributions.NormalDistribution;
 
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.Set;
+
 
 /**
  * This class gives an SIR trajectory and hands off a rate of coalescence at a given point in time.
@@ -164,6 +168,11 @@ public class SIRModel extends DemographicModel implements Likelihood {
         return false;
     }
 
+    @Override
+    public Set<Likelihood> getLikelihoodSet() {
+        return new HashSet<Likelihood>(Arrays.asList(this));
+    }
+
     public boolean isUsed() {
         return isUsed;
     }
diff --git a/src/dr/evomodel/epidemiology/casetocase/AbstractOutbreak.java b/src/dr/evomodel/epidemiology/casetocase/AbstractOutbreak.java
index 4927f4b..de4cac2 100644
--- a/src/dr/evomodel/epidemiology/casetocase/AbstractOutbreak.java
+++ b/src/dr/evomodel/epidemiology/casetocase/AbstractOutbreak.java
@@ -186,6 +186,11 @@ public abstract class AbstractOutbreak extends AbstractModel implements PatternL
         return out;
     }
 
+    @Override
+    public boolean areUnique() {
+        return false;
+    }
+
     public DataType getDataType(){
         return caseDataType;
     }
diff --git a/src/dr/evomodel/epidemiology/casetocase/CaseToCaseTransmissionLikelihood.java b/src/dr/evomodel/epidemiology/casetocase/CaseToCaseTransmissionLikelihood.java
index 18c3b02..b5aca13 100644
--- a/src/dr/evomodel/epidemiology/casetocase/CaseToCaseTransmissionLikelihood.java
+++ b/src/dr/evomodel/epidemiology/casetocase/CaseToCaseTransmissionLikelihood.java
@@ -26,22 +26,20 @@
 package dr.evomodel.epidemiology.casetocase;
 
 import dr.evomodel.coalescent.DemographicModel;
-import dr.inference.distribution.GammaDistributionModel;
+import dr.evomodel.epidemiology.casetocase.periodpriors.AbstractPeriodPriorDistribution;
+import dr.inference.distribution.ParametricDistributionModel;
 import dr.inference.loggers.LogColumn;
 import dr.inference.loggers.Loggable;
 import dr.inference.model.*;
-import dr.math.*;
-import dr.math.distributions.GammaDistribution;
 import dr.xml.*;
 
-import java.math.BigDecimal;
 import java.util.*;
 
 /**
  * A likelihood function for transmission between identified epidemiological outbreak
  *
  * Timescale must be in days. Python scripts to write XML for it and analyse the posterior set of networks exist;
- * contact MH. @todo make timescale not just in days
+ * contact MH.
  *
  * Latent periods are not implemented currently
  *
@@ -53,45 +51,48 @@ public class CaseToCaseTransmissionLikelihood extends AbstractModelLikelihood im
 
     private static final boolean DEBUG = false;
 
-    private AbstractOutbreak outbreak;
+    private CategoryOutbreak outbreak;
     private CaseToCaseTreeLikelihood treeLikelihood;
     private SpatialKernel spatialKernel;
     private Parameter transmissionRate;
-    private GammaDistributionModel transmissionRatePrior;
+
     private boolean likelihoodKnown;
     private boolean storedLikelihoodKnown;
     private boolean transProbKnown;
     private boolean storedTransProbKnown;
-    private boolean normalisationKnown;
-    private boolean storedNormalisationKnown;
-    private boolean geographyProbKnown;
-    private boolean storedGeographyProbKnown;
+    private boolean periodsProbKnown;
+    private boolean storedPeriodsProbKnown;
     private boolean treeProbKnown;
     private boolean storedTreeProbKnown;
     private double logLikelihood;
     private double storedLogLikelihood;
+
     private double transLogProb;
     private double storedTransLogProb;
-    private double normalisation;
-    private double storedNormalisation;
-    private double geographyLogProb;
-    private double storedGeographyLogProb;
+    private double periodsLogProb;
+    private double storedPeriodsLogProb;
     private double treeLogProb;
     private double storedTreeLogProb;
 
-    private double betaGammaThing;
-    private double storedBetaGammaThing;
+    private ParametricDistributionModel intialInfectionTimePrior;
+    private HashMap<AbstractCase, Double> indexCasePrior;
 
     private final boolean hasGeography;
-    private ArrayList<AbstractCase> sortedCases;
-    private ArrayList<AbstractCase> storedSortedCases;
+    private final boolean hasLatentPeriods;
+    private ArrayList<TreeEvent> sortedTreeEvents;
+    private ArrayList<TreeEvent> storedSortedTreeEvents;
+
+    private AbstractCase indexCase;
+    private AbstractCase storedIndexCase;
+
 //    private F f;
 
     public static final String CASE_TO_CASE_TRANSMISSION_LIKELIHOOD = "caseToCaseTransmissionLikelihood";
 
-    public CaseToCaseTransmissionLikelihood(String name, AbstractOutbreak outbreak,
+    public CaseToCaseTransmissionLikelihood(String name, CategoryOutbreak outbreak,
                                             CaseToCaseTreeLikelihood treeLikelihood, SpatialKernel spatialKernal,
-                                            Parameter transmissionRate, GammaDistributionModel transmissionRatePrior){
+                                            Parameter transmissionRate,
+                                            ParametricDistributionModel intialInfectionTimePrior){
         super(name);
         this.outbreak = outbreak;
         this.treeLikelihood = treeLikelihood;
@@ -100,13 +101,30 @@ public class CaseToCaseTransmissionLikelihood extends AbstractModelLikelihood im
             this.addModel(spatialKernal);
         }
         this.transmissionRate = transmissionRate;
-        this.transmissionRatePrior = transmissionRatePrior;
         this.addModel(treeLikelihood);
         this.addVariable(transmissionRate);
         likelihoodKnown = false;
         hasGeography = spatialKernal!=null;
-//        f = new F(hasGeography);
-        sortCases();
+        this.hasLatentPeriods = treeLikelihood.hasLatentPeriods();
+
+        this.intialInfectionTimePrior = intialInfectionTimePrior;
+
+
+        HashMap<AbstractCase, Double> weightMap = outbreak.getWeightMap();
+
+        double totalWeights = 0;
+
+        for(AbstractCase aCase : weightMap.keySet()){
+            totalWeights += weightMap.get(aCase);
+        }
+
+        indexCasePrior = new HashMap<AbstractCase, Double>();
+
+        for(AbstractCase aCase : outbreak.getCases()){
+            indexCasePrior.put(aCase, weightMap.get(aCase)/totalWeights );
+        }
+
+        sortEvents();
     }
 
     protected void handleModelChangedEvent(Model model, Object object, int index) {
@@ -115,21 +133,23 @@ public class CaseToCaseTransmissionLikelihood extends AbstractModelLikelihood im
             treeProbKnown = false;
             if(!(object instanceof DemographicModel)){
                 transProbKnown = false;
-                normalisationKnown = false;
-                geographyProbKnown = false;
-                sortedCases = null;
+                periodsProbKnown = false;
+                sortedTreeEvents = null;
+                indexCase = null;
             }
+
+
         } else if(model instanceof SpatialKernel){
 
             transProbKnown = false;
-            normalisationKnown = false;
-            geographyProbKnown = false;
 
         } else if(model instanceof AbstractOutbreak){
 
             transProbKnown = false;
-            normalisationKnown = false;
-            geographyProbKnown = false;
+            periodsProbKnown = false;
+            sortedTreeEvents = null;
+            indexCase = null;
+
 
         }
         likelihoodKnown = false;
@@ -140,7 +160,6 @@ public class CaseToCaseTransmissionLikelihood extends AbstractModelLikelihood im
     protected void handleVariableChangedEvent(Variable variable, int index, Parameter.ChangeType type) {
         if(variable==transmissionRate){
             transProbKnown = false;
-            normalisationKnown = false;
         }
         likelihoodKnown = false;
     }
@@ -148,16 +167,14 @@ public class CaseToCaseTransmissionLikelihood extends AbstractModelLikelihood im
     protected void storeState() {
         storedLogLikelihood = logLikelihood;
         storedLikelihoodKnown = likelihoodKnown;
-        storedNormalisation = normalisation;
-        storedNormalisationKnown = normalisationKnown;
+        storedPeriodsLogProb = periodsLogProb;
+        storedPeriodsProbKnown = periodsProbKnown;
         storedTransLogProb = transLogProb;
         storedTransProbKnown = transProbKnown;
         storedTreeLogProb = treeLogProb;
         storedTreeProbKnown = treeProbKnown;
-        storedGeographyLogProb = geographyLogProb;
-        storedGeographyProbKnown = geographyProbKnown;
-        storedBetaGammaThing = betaGammaThing;
-        storedSortedCases = new ArrayList<AbstractCase>(sortedCases);
+        storedSortedTreeEvents = new ArrayList<TreeEvent>(sortedTreeEvents);
+        storedIndexCase = indexCase;
     }
 
     protected void restoreState() {
@@ -167,12 +184,10 @@ public class CaseToCaseTransmissionLikelihood extends AbstractModelLikelihood im
         transProbKnown = storedTransProbKnown;
         treeLogProb = storedTreeLogProb;
         treeProbKnown = storedTreeProbKnown;
-        normalisation = storedNormalisation;
-        normalisationKnown = storedNormalisationKnown;
-        geographyLogProb = storedGeographyLogProb;
-        geographyProbKnown = storedGeographyProbKnown;
-        betaGammaThing = storedBetaGammaThing;
-        sortedCases = storedSortedCases;
+        periodsLogProb = storedPeriodsLogProb;
+        periodsProbKnown = storedPeriodsProbKnown;
+        sortedTreeEvents = storedSortedTreeEvents;
+        indexCase = storedIndexCase;
     }
 
     protected void acceptState() {
@@ -202,86 +217,202 @@ public class CaseToCaseTransmissionLikelihood extends AbstractModelLikelihood im
                 treeLikelihood.prepareTimings();
             }
             if (!transProbKnown) {
-                double rate = transmissionRate.getParameterValue(0);
 
                 try {
-                    double K = getK();
 
-                    // not necessary to actually add it in because it cancels, but need to check the exception
-                    getLogD();
-                    double E = getE();
-                    //double logF = f.logEvaluate(rate);
+                    transLogProb = 0;
 
-                    transLogProb = K * Math.log(rate) - E * rate; // + logF;
+                    if (sortedTreeEvents == null) {
+                        sortEvents();
+                    }
 
-                    transProbKnown = true;
+                    double rate = transmissionRate.getParameterValue(0);
+
+                    ArrayList<AbstractCase> previouslyInfectious = new ArrayList<AbstractCase>();
+
+                    double currentEventTime;
+                    boolean first = true;
+
+                    for (TreeEvent event : sortedTreeEvents) {
+                        currentEventTime = event.getTime();
+
+                        AbstractCase thisCase = event.getCase();
+
+                        if (event.getType() == EventType.INFECTION) {
+                            if (first) {
+                                // index infection
+
+                                if (indexCasePrior != null) {
+                                    transLogProb += Math.log(indexCasePrior.get(thisCase));
+                                }
+                                if (intialInfectionTimePrior != null) {
+                                    transLogProb += intialInfectionTimePrior.logPdf(currentEventTime);
+                                }
+
+                                if (!hasLatentPeriods) {
+                                    previouslyInfectious.add(thisCase);
+                                }
+
+                                first = false;
+
+                            } else {
+
+                                AbstractCase infector = event.getInfector();
+
+                                if(thisCase.wasEverInfected()) {
+
+
+                                    if (previouslyInfectious.contains(thisCase)){
+                                        throw new BadPartitionException(thisCase.caseID +
+                                                " infected after it was infectious");
+                                    }
+
+                                    if (event.getTime() > thisCase.endOfInfectiousTime){
+                                        throw new BadPartitionException(thisCase.caseID +
+                                                " ceased to be infected before it was infected");
+                                    }
+                                    if (infector.endOfInfectiousTime < event.getTime()){
+                                        throw new BadPartitionException(thisCase.caseID + " infected by "
+                                                + infector.caseID + " after the latter ceased to be infectious");
+                                    }
+                                    if (treeLikelihood.getInfectiousTime(infector) > event.getTime()) {
+                                        throw new BadPartitionException(thisCase.caseID + " infected by "
+                                                + infector.caseID + " before the latter became infectious");
+                                    }
+
+                                    if(!previouslyInfectious.contains(infector)){
+                                        throw new RuntimeException("Infector not previously infected");
+                                    }
+                                }
+
+                                // no other previously infectious case has infected this case...
+
+                                for (AbstractCase nonInfector : previouslyInfectious) {
+
+
+
+                                    double timeDuringWhichNoInfection;
+                                    if (nonInfector.endOfInfectiousTime < event.getTime()) {
+                                        timeDuringWhichNoInfection = nonInfector.endOfInfectiousTime
+                                                - treeLikelihood.getInfectiousTime(nonInfector);
+                                    } else {
+                                        timeDuringWhichNoInfection = event.getTime()
+                                                - treeLikelihood.getInfectiousTime(nonInfector);
+                                    }
+
+                                    if(timeDuringWhichNoInfection<0){
+                                        throw new RuntimeException("negative time");
+                                    }
+
+                                    double transRate = rate;
+                                    if (hasGeography) {
+                                        transRate *= outbreak.getKernelValue(thisCase, nonInfector, spatialKernel);
+                                    }
+
+                                    transLogProb += -transRate * timeDuringWhichNoInfection;
+
+
+                                }
+
+                                // ...until the end
+
+                                if(thisCase.wasEverInfected()) {
+                                    double transRate = rate;
+
+
+                                    if (hasGeography) {
+                                        transRate *= outbreak.getKernelValue(thisCase, infector, spatialKernel);
+                                    }
+
+
+                                    transLogProb += Math.log(transRate);
+                                }
+
+                                if (!hasLatentPeriods) {
+                                    previouslyInfectious.add(thisCase);
+                                }
 
-                    if (!normalisationKnown) {
 
-                        normalisation = GammaFunction.lnGamma(K + 1) - (K+1)*Math.log(E);
+                            }
 
-                        betaGammaThing = (K+1)/E;
 
-                        //integrator.setAlphaAndB(K,E);
+                        } else if (event.getType() == EventType.INFECTIOUSNESS) {
+                            if (event.getTime() < Double.POSITIVE_INFINITY) {
 
-                        //normalisation = integrator.logIntegrate(f, transmissionRate.getBounds().getLowerLimit(0));
+                                if(event.getTime() > event.getCase().endOfInfectiousTime){
+                                    throw new BadPartitionException(event.getCase().caseID + " noninfectious before" +
+                                            "infectious");
+                                }
 
-                        // not necessary because it cancels
-                        //normalisation += logD;
+                                if (first) {
+                                    throw new RuntimeException("First event is not an infection");
+                                }
 
-                        normalisationKnown = true;
+                                previouslyInfectious.add(thisCase);
+                            }
+                        }
                     }
+
+                    transProbKnown = true;
                 } catch (BadPartitionException e) {
                     transLogProb = Double.NEGATIVE_INFINITY;
+                    transProbKnown = true;
                     logLikelihood = Double.NEGATIVE_INFINITY;
                     likelihoodKnown = true;
                     return logLikelihood;
+
                 }
 
 
             }
-            if(!geographyProbKnown){
-                geographyLogProb = 0;
 
-                for(AbstractCase aCase : outbreak.getCases()){
-                    if(aCase.wasEverInfected()) {
+            if(!periodsProbKnown){
 
-                        int number = outbreak.getCaseIndex(aCase);
+                periodsLogProb = 0;
 
-                        double infectionTime = treeLikelihood.getInfectionTime(aCase);
-                        AbstractCase parent = treeLikelihood.getInfector(aCase);
-                        if (parent != null) {
-                            if (treeLikelihood.getInfectiousTime(parent) > infectionTime
-                                    || parent.culledYet(infectionTime)) {
-                                geographyLogProb += Double.NEGATIVE_INFINITY;
-                            } else {
-                                double numerator = outbreak.getKernelValue(aCase, parent, spatialKernel);
-                                double denominator = 0;
-
-                                for (int i = 0; i < outbreak.size(); i++) {
-                                    AbstractCase parentCandidate = outbreak.getCase(i);
-                                    if(parentCandidate.wasEverInfected()) {
-
-                                        if (i != number && treeLikelihood.getInfectiousTime(parentCandidate) < infectionTime
-                                                && !parentCandidate.culledYet(infectionTime)) {
-                                            denominator += (outbreak.getKernelValue(aCase, parentCandidate,
-                                                    spatialKernel));
-                                        }
-                                    }
-                                }
+                HashMap<String, ArrayList<Double>> infectiousPeriodsByCategory
+                        = new HashMap<String, ArrayList<Double>>();
 
-                                geographyLogProb += Math.log(numerator / denominator);
-                            }
-                        } else {
-                            // probability of first infection given all the timings is 1
 
-                            geographyLogProb += 0;
+                for (AbstractCase aCase : outbreak.getCases()) {
+                    if(aCase.wasEverInfected()) {
+
+                        String category = (outbreak).getInfectiousCategory(aCase);
+
+                        if (!infectiousPeriodsByCategory.keySet().contains(category)) {
+                            infectiousPeriodsByCategory.put(category, new ArrayList<Double>());
                         }
+
+                        ArrayList<Double> correspondingList
+                                = infectiousPeriodsByCategory.get(category);
+
+                        correspondingList.add(treeLikelihood.getInfectiousPeriod(aCase));
                     }
                 }
-                geographyProbKnown = true;
+
+
+                for (String category : outbreak.getInfectiousCategories()) {
+
+                    Double[] infPeriodsInThisCategory = infectiousPeriodsByCategory.get(category)
+                            .toArray(new Double[infectiousPeriodsByCategory.size()]);
+
+                    AbstractPeriodPriorDistribution hyperprior = outbreak.getInfectiousCategoryPrior(category);
+
+                    double[] values = new double[infPeriodsInThisCategory.length];
+
+                    for (int i = 0; i < infPeriodsInThisCategory.length; i++) {
+                        values[i] = infPeriodsInThisCategory[i];
+                    }
+
+                    periodsLogProb += hyperprior.getLogLikelihood(values);
+
+                }
+
+                periodsProbKnown = true;
+
             }
 
+
             if(!treeProbKnown){
                 treeLogProb = treeLikelihood.getLogLikelihood();
                 treeProbKnown = true;
@@ -293,12 +424,8 @@ public class CaseToCaseTransmissionLikelihood extends AbstractModelLikelihood im
                 System.out.println("TransLogProb +INF");
                 return Double.NEGATIVE_INFINITY;
             }
-            if(geographyLogProb == Double.POSITIVE_INFINITY){
-                System.out.println("GeogLogProb +INF");
-                return Double.NEGATIVE_INFINITY;
-            }
-            if(normalisation == Double.NEGATIVE_INFINITY){
-                System.out.println("Normalisation +INF");
+            if(periodsLogProb == Double.POSITIVE_INFINITY){
+                System.out.println("PeriodsLogProb +INF");
                 return Double.NEGATIVE_INFINITY;
             }
             if(treeLogProb == Double.POSITIVE_INFINITY){
@@ -306,7 +433,7 @@ public class CaseToCaseTransmissionLikelihood extends AbstractModelLikelihood im
                 return Double.NEGATIVE_INFINITY;
             }
 
-            logLikelihood =  treeLogProb + geographyLogProb + transLogProb - normalisation;
+            logLikelihood =  treeLogProb + periodsLogProb + transLogProb;
             likelihoodKnown = true;
         }
 
@@ -334,234 +461,106 @@ public class CaseToCaseTransmissionLikelihood extends AbstractModelLikelihood im
     public void makeDirty() {
         likelihoodKnown = false;
         transProbKnown = false;
-        normalisationKnown = false;
-        geographyProbKnown = false;
+        periodsProbKnown = false;
         treeProbKnown = false;
-        sortedCases = null;
+        sortedTreeEvents = null;
         treeLikelihood.makeDirty();
+        indexCase = null;
     }
 
-    private class CaseInfectionComparator implements Comparator<AbstractCase> {
-        public int compare(AbstractCase abstractCase, AbstractCase abstractCase2) {
-            return Double.compare(treeLikelihood.getInfectionTime(abstractCase),
-                    treeLikelihood.getInfectionTime(abstractCase2));
+    private class EventComparator implements Comparator<TreeEvent> {
+        public int compare(TreeEvent treeEvent1, TreeEvent treeEvent2) {
+            return Double.compare(treeEvent1.getTime(),
+                    treeEvent2.getTime());
         }
     }
 
 
-    private void sortCases(){
-        sortedCases = new ArrayList<AbstractCase>(outbreak.getCases());
-        Collections.sort(sortedCases, new CaseInfectionComparator());
+    private enum EventType{
+        INFECTION,
+        INFECTIOUSNESS,
+        END
     }
 
-    private double getLogD(){
+    private void sortEvents(){
+        ArrayList<TreeEvent> out = new ArrayList<TreeEvent>();
+        for(AbstractCase aCase : outbreak.getCases()){
 
-        if(sortedCases == null){
-            sortCases();
-        }
-        double logD = 0;
 
-        for (AbstractCase infectee : sortedCases) {
-            if(infectee.wasEverInfected()) {
-                double infecteeInfected = treeLikelihood.getInfectionTime(infectee);
-                double infecteeInfectious = treeLikelihood.getInfectiousTime(infectee);
-                double infecteeNoninfectious = infectee.getCullTime();
+            double infectionTime = treeLikelihood.getInfectionTime(aCase);
+            out.add(new TreeEvent(infectionTime, aCase, treeLikelihood.getInfector(outbreak.getCaseIndex(aCase))));
 
-                if (infecteeInfected > infecteeInfectious || infecteeInfectious > infecteeNoninfectious) {
-                    throw new BadPartitionException("Illegal partition given known timings");
-                }
+            if(aCase.wasEverInfected()) {
 
-                AbstractCase infector = treeLikelihood.getInfector(infectee);
-                if (infector != null) {
-                    double infectorInfectious = treeLikelihood.getInfectiousTime(infector);
-                    double infectorNoninfectious = infector.getCullTime();
+                double endTime = aCase.endOfInfectiousTime;
 
-                    if (infecteeInfected < infectorInfectious || infecteeInfected > infectorNoninfectious) {
-                        throw new BadPartitionException("Illegal partition given known timings");
-                    }
+                out.add(new TreeEvent(EventType.END, endTime, aCase));
+
+                if (hasLatentPeriods) {
+                    double infectiousnessTime = treeLikelihood.getInfectiousTime(aCase);
+                    out.add(new TreeEvent(EventType.INFECTIOUSNESS, infectiousnessTime, aCase));
 
-                    if (hasGeography) {
-                        logD += Math.log(outbreak.getKernelValue(infectee, infector, spatialKernel));
-                    }
                 }
             }
         }
 
+        Collections.sort(out, new EventComparator());
 
-        if(transmissionRatePrior!=null) {
-            logD += -transmissionRatePrior.getShape() * Math.log(transmissionRatePrior.getScale());
-            logD += -GammaFunction.lnGamma(transmissionRatePrior.getShape());
-        }
-
-        return logD;
 
-    }
 
-    private double getD(){
-        return Math.exp(getLogD());
-    }
+        indexCase = out.get(0).getCase();
 
-    private double getE(){
-        double E = 0;
-
-        if(sortedCases == null){
-            sortCases();
+        if(indexCase == null){
+            System.out.println();
         }
-        for(AbstractCase infectee : sortedCases){
-
-            if (infectee != treeLikelihood.getRootCase()) {
-
-                double[] kernelValues = outbreak.getKernelValues(infectee, spatialKernel);
-
-                double infecteeInfected = treeLikelihood.getInfectionTime(infectee);
 
-                for (AbstractCase possibleInfector : sortedCases) {
-                    if (possibleInfector.wasEverInfected() && possibleInfector != infectee) {
+        sortedTreeEvents = out;
 
-                        double nonInfectorInfected = treeLikelihood.getInfectionTime(possibleInfector);
-                        double nonInfectorInfectious = treeLikelihood.getInfectiousTime(possibleInfector);
-                        double nonInfectorNoninfectious = possibleInfector.getCullTime();
-
-                        if (nonInfectorInfected > infecteeInfected) {
-                            break;
-                        }
+    }
 
-                        double kernelValue = hasGeography ? kernelValues[outbreak.getCaseIndex(possibleInfector)] : 1;
+    private class TreeEvent{
 
-                        if (nonInfectorInfectious <= infecteeInfected) {
-                            double lastPossibleInfectionTime = Math.min(nonInfectorNoninfectious, infecteeInfected);
-                            E += kernelValue * (lastPossibleInfectionTime - nonInfectorInfectious);
-                        }
+        private EventType type;
+        private double time;
+        private AbstractCase aCase;
+        private AbstractCase infectorCase;
 
-                    }
+        private TreeEvent(EventType type, double time, AbstractCase aCase){
+            this.type = type;
+            this.time = time;
+            this.aCase = aCase;
+            this.infectorCase = null;
+        }
 
-                }
-            }
+        private TreeEvent(double time, AbstractCase aCase, AbstractCase infectorCase){
+            this.type = EventType.INFECTION;
+            this.time = time;
+            this.aCase = aCase;
+            this.infectorCase = infectorCase;
         }
 
-        if(transmissionRatePrior!=null) {
-            E += 1 / (transmissionRatePrior.getScale());
+        public double getTime(){
+            return time;
         }
 
-        return E;
+        public EventType getType(){
+            return type;
+        }
 
-    }
+        public AbstractCase getCase(){
+            return aCase;
+        }
 
-    public double getK(){
-        if(transmissionRatePrior != null) {
-            return (transmissionRatePrior.getShape() - 1) + outbreak.infectedSize()-1;
-        } else {
-            return outbreak.infectedSize()-1;
+        public AbstractCase getInfector(){
+            return infectorCase;
         }
-    }
 
-//    private class F extends UnivariateFunction.AbstractLogEvaluatableUnivariateFunction {
-//
-//        final boolean hasGeography;
-//
-//        private F(boolean hasGeography){
-//            this.hasGeography = hasGeography;
-//        }
-//
-//        // index 0 is lambda, index 1 if present is alpha
-//
-//        public double evaluate(double argument) {
-//            return Math.exp(logEvaluate(argument));
-//        }
-//
-//        public double logEvaluate(double argument) {
-//            if(sortedCases == null){
-//                sortCases();
-//            }
-//            double logF = 0;
-//
-//            for(AbstractCase infectee : sortedCases){
-//
-//                AbstractCase infector = treeLikelihood.getInfector(infectee);
-//
-//                if(infector != null) {
-//                    double sum = 0;
-//
-//                    double[] kernelValues = outbreak.getKernelValues(infectee, spatialKernel);
-//
-//                    double infecteeExamined = infectee.getExamTime();
-//
-//                    for (AbstractCase possibleInfector : sortedCases) {
-//                        if (possibleInfector != infectee) {
-//
-//                            double nonInfectorInfected = treeLikelihood.getInfectionTime(possibleInfector);
-//                            double nonInfectorInfectious = treeLikelihood.getInfectiousTime(possibleInfector);
-//                            double nonInfectorNoninfectious = possibleInfector.getCullTime();
-//
-//                            if (nonInfectorInfected > infecteeExamined) {
-//                                break;
-//                            }
-//
-//                            double rate = hasGeography ? kernelValues[outbreak.getCaseIndex(possibleInfector)] : 1;
-//
-//                            if (nonInfectorInfectious <= infecteeExamined) {
-//                                double lastPossibleInfectionTime = Math.min(nonInfectorNoninfectious, infecteeExamined);
-//
-//                                sum += -rate * (lastPossibleInfectionTime - nonInfectorInfectious);
-//                            }
-//                        }
-//
-//                    }
-//                    sum *= argument;
-//
-//                    double normExp = Math.exp(sum);
-//
-//                    double logTerm;
-//
-//                    if(normExp!=1){
-//                        logTerm = Math.log1p(-normExp);
-//                    } else {
-//                        try {
-//                            logTerm = handleDenominatorUnderflow(sum);
-//                        } catch(IllegalArgumentException e){
-//                            throw new RuntimeException("HandleDenominatorUnderflow failed, input = "+sum);
-//                        }
-//                    }
-//
-//                    logF += logTerm;
-//                }
-//            }
-//            return (outbreak.size()-1)*Math.log(argument)-logF;
-//        }
-//
-//
-//
-//        public int getNumArguments() {
-//            return 1;
-//        }
-//
-//        public double getLowerBound() {
-//            return 0;
-//        }
-//
-//        public double getUpperBound() {
-//            return transmissionRate.getBounds().getUpperLimit(0);
-//        }
-//
-//        public double evaluateIntegral(double a, double b) {
-//            return integrator.integrate(this, a, b);
-//        }
-//    }
-
-    private static double handleDenominatorUnderflow(double input){
-        BigDecimal bigDec = new BigDecimal(input);
-        BigDecimal expBigDec = BigDecimalUtils.exp(bigDec, bigDec.scale());
-        BigDecimal one = new BigDecimal(1.0);
-        BigDecimal oneMinusExpBigDec = one.subtract(expBigDec);
-        BigDecimal logOneMinusExpBigDec = BigDecimalUtils.ln(oneMinusExpBigDec, oneMinusExpBigDec.scale());
-        return logOneMinusExpBigDec.doubleValue();
     }
 
     public static XMLObjectParser PARSER = new AbstractXMLObjectParser() {
 
         public static final String TRANSMISSION_RATE = "transmissionRate";
-        public static final String INTEGRATOR_STEPS = "integratorSteps";
-        public static final String TRANSMISSION_RATE_PRIOR = "transmissionRatePrior";
+        public static final String INITIAL_INFECTION_TIME_PRIOR = "initialInfectionTimePrior";
 
         public Object parseXMLObject(XMLObject xo) throws XMLParseException {
             CaseToCaseTreeLikelihood c2cTL = (CaseToCaseTreeLikelihood)
@@ -569,15 +568,15 @@ public class CaseToCaseTransmissionLikelihood extends AbstractModelLikelihood im
             SpatialKernel kernel = (SpatialKernel) xo.getChild(SpatialKernel.class);
             Parameter transmissionRate = (Parameter) xo.getElementFirstChild(TRANSMISSION_RATE);
 
-            GammaDistributionModel transmissionRatePrior = null;
+            ParametricDistributionModel iitp = null;
 
-            if(xo.hasChildNamed(TRANSMISSION_RATE_PRIOR)) {
-                transmissionRatePrior = (GammaDistributionModel) xo.getElementFirstChild(TRANSMISSION_RATE_PRIOR);
+            if(xo.hasChildNamed(INITIAL_INFECTION_TIME_PRIOR)){
+                iitp = (ParametricDistributionModel)xo.getElementFirstChild(INITIAL_INFECTION_TIME_PRIOR);
             }
 
 
-            return new CaseToCaseTransmissionLikelihood(CASE_TO_CASE_TRANSMISSION_LIKELIHOOD, c2cTL.getOutbreak(),
-                    c2cTL, kernel, transmissionRate, transmissionRatePrior);
+            return new CaseToCaseTransmissionLikelihood(CASE_TO_CASE_TRANSMISSION_LIKELIHOOD,
+                    (CategoryOutbreak)c2cTL.getOutbreak(), c2cTL, kernel, transmissionRate, iitp);
         }
 
         public XMLSyntaxRule[] getSyntaxRules() {
@@ -601,8 +600,7 @@ public class CaseToCaseTransmissionLikelihood extends AbstractModelLikelihood im
                 new ElementRule(CaseToCaseTreeLikelihood.class, "The tree likelihood"),
                 new ElementRule(SpatialKernel.class, "The spatial kernel", 0, 1),
                 new ElementRule(TRANSMISSION_RATE, Parameter.class, "The transmission rate"),
-                new ElementRule(TRANSMISSION_RATE_PRIOR, GammaDistributionModel.class, "A gamma prior on the base" +
-                        "transmission rate", true)
+                new ElementRule(INITIAL_INFECTION_TIME_PRIOR, ParametricDistributionModel.class, "The prior probability distibution of the first infection", true)
         };
 
     };
@@ -616,23 +614,36 @@ public class CaseToCaseTransmissionLikelihood extends AbstractModelLikelihood im
 
         columns.add(new LogColumn.Abstract("trans_LL"){
             protected String getFormattedValue() {
-                return String.valueOf(transLogProb - normalisation);
+                return String.valueOf(transLogProb);
             }
         });
 
-        columns.add(new LogColumn.Abstract("geog_LL"){
+        columns.add(new LogColumn.Abstract("period_LL") {
             protected String getFormattedValue() {
-                return String.valueOf(geographyLogProb);
+                return String.valueOf(periodsLogProb);
             }
         });
 
-        columns.add(new LogColumn.Abstract("betaGammaThing"){
+        columns.addAll(Arrays.asList(treeLikelihood.passColumns()));
+
+        for (AbstractPeriodPriorDistribution hyperprior : (outbreak).getInfectiousMap().values()) {
+            columns.addAll(Arrays.asList(hyperprior.getColumns()));
+        }
+
+        columns.add(new LogColumn.Abstract("FirstInfectionTime") {
             protected String getFormattedValue() {
-                return String.valueOf(betaGammaThing);
+                if(sortedTreeEvents==null){
+                    sortEvents();
+                }
+                return String.valueOf(treeLikelihood.getInfectionTime(indexCase));
             }
         });
 
-        columns.addAll(Arrays.asList(treeLikelihood.passColumns()));
+        columns.add(new LogColumn.Abstract("IndexCaseIndex") {
+            protected String getFormattedValue() {
+                return String.valueOf(treeLikelihood.getOutbreak().getCaseIndex(indexCase));
+            }
+        });
 
 
         return columns.toArray(new LogColumn[columns.size()]);
diff --git a/src/dr/evomodel/epidemiology/casetocase/CaseToCaseTreeLikelihood.java b/src/dr/evomodel/epidemiology/casetocase/CaseToCaseTreeLikelihood.java
index bcd7ed8..0a0eeb5 100644
--- a/src/dr/evomodel/epidemiology/casetocase/CaseToCaseTreeLikelihood.java
+++ b/src/dr/evomodel/epidemiology/casetocase/CaseToCaseTreeLikelihood.java
@@ -36,8 +36,6 @@ import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
 
-import org.apache.commons.math.stat.descriptive.DescriptiveStatistics;
-
 import dr.app.tools.NexusExporter;
 import dr.evolution.tree.FlexibleNode;
 import dr.evolution.tree.FlexibleTree;
@@ -281,11 +279,11 @@ public abstract class CaseToCaseTreeLikelihood extends AbstractTreeLikelihood im
     a descendant of it
      */
 
-    public boolean tipLinked(NodeRef node){
-        return tipLinked(node, branchMap);
+    public boolean isAncestral(NodeRef node){
+        return isAncestral(node, branchMap);
     }
 
-    private boolean tipLinked(NodeRef node, BranchMapModel map){
+    private boolean isAncestral(NodeRef node, BranchMapModel map){
         NodeRef tip = treeModel.getNode(tipMap.get(map.get(node.getNumber())));
         if(tip==node){
             return true;
@@ -301,44 +299,7 @@ public abstract class CaseToCaseTreeLikelihood extends AbstractTreeLikelihood im
     }
 
 
-    //Counts the children of the current node which have the same painting as itself under the current map.
-    //This will always be 1 if extended==false.
-
-
 
-    public int countChildrenInSamePartition(NodeRef node, BranchMapModel map){
-        if(treeModel.isExternal(node)){
-            return -1;
-        } else {
-            int count = 0;
-            AbstractCase parentCase = map.get(node.getNumber());
-            for(int i=0; i< treeModel.getChildCount(node); i++){
-                if(map.get(treeModel.getChild(node,i).getNumber())==parentCase){
-                    count++;
-                }
-            }
-            return count;
-        }
-    }
-
-    public int countChildrenInSamePartition(NodeRef node){
-        return countChildrenInSamePartition(node, branchMap);
-    }
-
-
-    public static NodeRef sibling(TreeModel tree, NodeRef node){
-        if(tree.isRoot(node)){
-            return null;
-        } else {
-            NodeRef parent = tree.getParent(node);
-            for(int i=0; i<tree.getChildCount(parent); i++){
-                if(tree.getChild(parent,i)!=node){
-                    return tree.getChild(parent,i);
-                }
-            }
-        }
-        return null;
-    }
 
     // find all partitions of the descendant tips of the current node. If map is specified then it makes a map of node
     // number to possible partitions; map can be null.
@@ -380,145 +341,6 @@ public abstract class CaseToCaseTreeLikelihood extends AbstractTreeLikelihood im
         flagForDescendantRecalculation(tree, node, updateNode);
     }
 
-    //Return a set of nodes that are not descendants of (or equal to) the current node and are in the same partition as
-    // it. If flagForRecalc is true, then this also sets the flags for likelihood recalculation for all these nodes
-    // to true
-
-    public HashSet<Integer> samePartitionDownTree(NodeRef node, boolean flagForRecalc){
-        return samePartitionDownTree(node, branchMap, flagForRecalc);
-    }
-
-    public HashSet<Integer> samePartitionDownTree(NodeRef node, BranchMapModel map, boolean flagForRecalc){
-        if(flagForRecalc){
-            flagForDescendantRecalculation(treeModel, node);
-        }
-        HashSet<Integer> out = new HashSet<Integer>();
-        AbstractCase painting = map.get(node.getNumber());
-        NodeRef currentNode = node;
-        NodeRef parentNode = treeModel.getParent(node);
-        while(parentNode!=null && map.get(parentNode.getNumber())==painting){
-            out.add(parentNode.getNumber());
-            if(countChildrenInSamePartition(parentNode)==2){
-                NodeRef otherChild = sibling(treeModel, currentNode);
-                out.add(otherChild.getNumber());
-                out.addAll(samePartitionUpTree(otherChild, map, flagForRecalc));
-            }
-            currentNode = parentNode;
-            parentNode = treeModel.getParent(currentNode);
-        }
-        return out;
-    }
-
-    //Return a set of nodes that are descendants (and not equal to) the current node and are in the same partition as
-    // it.
-
-    public HashSet<Integer> samePartitionUpTree(NodeRef node, boolean flagForRecalc){
-        return samePartitionUpTree(node, branchMap, flagForRecalc);
-    }
-
-    public HashSet<Integer> samePartitionUpTree(NodeRef node, BranchMapModel map, boolean flagForRecalc){
-        HashSet<Integer> out = new HashSet<Integer>();
-        AbstractCase painting = map.get(node.getNumber());
-        boolean creepsFurther = false;
-        for(int i=0; i< treeModel.getChildCount(node); i++){
-            if(map.get(treeModel.getChild(node,i).getNumber())==painting){
-                creepsFurther = true;
-                out.add(treeModel.getChild(node,i).getNumber());
-                out.addAll(samePartitionUpTree(treeModel.getChild(node, i), map, flagForRecalc));
-            }
-        }
-        if(flagForRecalc && !creepsFurther){
-            flagForDescendantRecalculation(treeModel, node);
-        }
-        return out;
-    }
-
-    public Integer[] samePartition(NodeRef node, boolean flagForRecalc){
-        return samePartition(node, branchMap, flagForRecalc);
-    }
-
-    private Integer[] samePartition(NodeRef node, BranchMapModel map, boolean flagForRecalc){
-        HashSet<Integer> out = new HashSet<Integer>();
-        out.add(node.getNumber());
-        out.addAll(samePartitionDownTree(node, map, flagForRecalc));
-        out.addAll(samePartitionUpTree(node, map, flagForRecalc));
-        return out.toArray(new Integer[out.size()]);
-    }
-
-    // returns all nodes that are the earliest nodes in the partitions corresponding to this outbreak' children in
-    // the _transmission_ tree.
-
-    private Integer[] getAllChildInfectionNodes(AbstractCase thisCase){
-        HashSet<Integer> out = new HashSet<Integer>();
-        NodeRef tip = treeModel.getNode(tipMap.get(thisCase));
-        Integer[] partition = samePartition(tip, false);
-        for (Integer i : partition) {
-            NodeRef node = treeModel.getNode(i);
-            if (!treeModel.isExternal(node)) {
-                for (int j = 0; j < treeModel.getChildCount(node); j++) {
-                    NodeRef child = treeModel.getChild(node, j);
-                    if(branchMap.get(child.getNumber())!=thisCase){
-                        out.add(child.getNumber());
-                    }
-                }
-            }
-        }
-        return out.toArray(new Integer[out.size()]);
-    }
-
-    public NodeRef getTip(AbstractCase thisCase){
-        return treeModel.getNode(tipMap.get(thisCase));
-    }
-
-    public NodeRef getEarliestNodeInPartition(AbstractCase thisCase, BranchMapModel branchMap){
-        if(thisCase.wasEverInfected()) {
-            NodeRef child = treeModel.getNode(tipMap.get(thisCase));
-            NodeRef parent = treeModel.getParent(child);
-            boolean transmissionFound = false;
-            while (!transmissionFound) {
-                if (branchMap.get(child.getNumber()) != branchMap.get(parent.getNumber())) {
-                    transmissionFound = true;
-                } else {
-                    child = parent;
-                    parent = treeModel.getParent(child);
-                    if (parent == null) {
-                        transmissionFound = true;
-                    }
-                }
-            }
-            return child;
-        }
-        return null;
-    }
-
-    public NodeRef getEarliestNodeInPartition(AbstractCase thisCase){
-        return getEarliestNodeInPartition(thisCase, branchMap);
-    }
-
-    public HashSet<AbstractCase> getDescendants(AbstractCase thisCase){
-        HashSet<AbstractCase> out = new HashSet<AbstractCase>(getInfectees(thisCase));
-
-        if(thisCase.wasEverInfected()) {
-            for (AbstractCase child : out) {
-                out.addAll(getDescendants(child));
-            }
-        }
-        return out;
-    }
-
-
-    public Integer[] getParentsArray(){
-        Integer[] out = new Integer[outbreak.size()];
-        for(AbstractCase thisCase : outbreak.getCases()){
-            if(thisCase.wasEverInfected()) {
-                out[outbreak.getCaseIndex(thisCase)] = outbreak.getCaseIndex(getInfector(thisCase));
-            } else {
-                out[outbreak.getCaseIndex(thisCase)] = null;
-            }
-        }
-        return out;
-    }
-
 
     // **************************************************************
     // ModelListener IMPLEMENTATION
@@ -721,63 +543,6 @@ public abstract class CaseToCaseTreeLikelihood extends AbstractTreeLikelihood im
     }
 
 
-    /* Return the case which infected this case */
-
-    public AbstractCase getInfector(AbstractCase thisCase){
-        return getInfector(thisCase, branchMap);
-    }
-
-    public AbstractCase getInfector(int i){
-        return getInfector(getOutbreak().getCase(i), branchMap);
-    }
-
-    /* Return the case which was the infector in the infection event represented by this node */
-
-    public AbstractCase getInfector(NodeRef node){
-        return getInfector(node, branchMap);
-    }
-
-    public AbstractCase getInfector(AbstractCase thisCase, BranchMapModel branchMap){
-        if(thisCase.wasEverInfected()) {
-            NodeRef tip = treeModel.getNode(tipMap.get(thisCase));
-            return getInfector(tip, branchMap);
-        }
-        return null;
-    }
-
-    public AbstractCase getRootCase(){
-        return branchMap.get(treeModel.getRoot().getNumber());
-    }
-
-    public HashSet<AbstractCase> getInfectees(AbstractCase thisCase){
-        return getInfectees(thisCase, branchMap);
-    }
-
-    public HashSet<AbstractCase> getInfectees(AbstractCase thisCase, BranchMapModel branchMap){
-        if(thisCase.wasEverInfected()) {
-            return getInfecteesInClade(getEarliestNodeInPartition(thisCase), branchMap);
-        }
-        return new HashSet<AbstractCase>();
-    }
-
-    public HashSet<AbstractCase> getInfecteesInClade(NodeRef node, BranchMapModel branchMap){
-        HashSet<AbstractCase> out = new HashSet<AbstractCase>();
-        if(treeModel.isExternal(node)){
-            return out;
-        } else {
-            AbstractCase thisCase = branchMap.get(node.getNumber());
-            for(int i=0; i<treeModel.getChildCount(node); i++){
-                NodeRef child = treeModel.getChild(node, i);
-                AbstractCase childCase = branchMap.get(child.getNumber());
-                if(childCase!=thisCase){
-                    out.add(childCase);
-                } else {
-                    out.addAll(getInfecteesInClade(child, branchMap));
-                }
-            }
-            return out;
-        }
-    }
 
     public double getInfectionTime(AbstractCase thisCase){
 
@@ -785,7 +550,7 @@ public abstract class CaseToCaseTreeLikelihood extends AbstractTreeLikelihood im
             return infectionTimes[outbreak.getCaseIndex(thisCase)];
         } else {
             if(thisCase.wasEverInfected()) {
-                NodeRef child = getEarliestNodeInPartition(thisCase);
+                NodeRef child = ((PartitionedTreeModel)treeModel).getEarliestNodeInPartition(thisCase);
                 NodeRef parent = treeModel.getParent(child);
 
                 if (parent != null) {
@@ -832,7 +597,7 @@ public abstract class CaseToCaseTreeLikelihood extends AbstractTreeLikelihood im
 
     public void setInfectionHeight(AbstractCase thisCase, double height){
         if(thisCase.wasEverInfected()) {
-            NodeRef child = getEarliestNodeInPartition(thisCase);
+            NodeRef child = ((PartitionedTreeModel)treeModel).getEarliestNodeInPartition(thisCase);
             NodeRef parent = treeModel.getParent(child);
 
             double minHeight = treeModel.getNodeHeight(child);
@@ -1043,53 +808,7 @@ public abstract class CaseToCaseTreeLikelihood extends AbstractTreeLikelihood im
         return getInfectionTime(rootCase);
     }
 
-    public AbstractCase getInfector(NodeRef node, BranchMapModel branchMap){
-        if(treeModel.isRoot(node) || node.getNumber() == treeModel.getRoot().getNumber()){
-            return null;
-        } else {
-            AbstractCase nodeCase = branchMap.get(node.getNumber());
-            if(branchMap.get(treeModel.getParent(node).getNumber())!=nodeCase){
-                return branchMap.get(treeModel.getParent(node).getNumber());
-            } else {
-                return getInfector(treeModel.getParent(node), branchMap);
-            }
-        }
-    }
-
-    public boolean checkPartitions(){
-        return checkPartitions(branchMap, true);
-    }
-
-    protected boolean checkPartitions(BranchMapModel map, boolean verbose){
-        boolean foundProblem = false;
-        for(int i=0; i<treeModel.getInternalNodeCount(); i++){
-            boolean foundTip = false;
-            for(Integer nodeNumber : samePartition(treeModel.getInternalNode(i), map, false)){
-                if(treeModel.isExternal(treeModel.getNode(nodeNumber))){
-                    foundTip = true;
-                }
-            }
-            if(!foundProblem && !foundTip){
-                foundProblem = true;
-                if(verbose){
-                    System.out.println("Node "+(i+treeModel.getExternalNodeCount()) + " is not connected to a tip");
-                }
-            }
-
-        }
-        if(foundProblem){
-            debugOutputTree(map, "checkPartitionProblem", false);
-            throw new BadPartitionException("Tree is not partitioned properly");
-        }
-        return !foundProblem;
-    }
-
 
-    /* Return the partition of the parent of this node */
-
-    public AbstractCase getParentCase(NodeRef node){
-        return branchMap.get(treeModel.getParent(node).getNumber());
-    }
 
     /* Populates the branch map for external nodes */
 
@@ -1151,7 +870,7 @@ public abstract class CaseToCaseTreeLikelihood extends AbstractTreeLikelihood im
         }
         NodeRef root = treeModel.getRoot();
         specificallyPartitionUpwards(root, firstCase, map);
-        if(!checkPartitions()){
+        if(!((PartitionedTreeModel)treeModel).checkPartitions()){
             throw new RuntimeException("Given starting network is not compatible with the starting tree");
         }
 
@@ -1163,7 +882,7 @@ public abstract class CaseToCaseTreeLikelihood extends AbstractTreeLikelihood im
             return;
         }
         branchMap.set(node.getNumber(), thisCase, true);
-        if(tipLinked(node)){
+        if(isAncestral(node)){
             for(int i=0; i<treeModel.getChildCount(node); i++){
                 specificallyPartitionUpwards(treeModel.getChild(node, i), thisCase, map);
             }
@@ -1234,7 +953,7 @@ public abstract class CaseToCaseTreeLikelihood extends AbstractTreeLikelihood im
                 String tipName = extNode.taxon.toString();
                 String infector;
                 try{
-                    infector = getInfector(extNode).getName();
+                    infector = ((PartitionedTreeModel)treeModel).getInfector(extNode).getName();
                 } catch(NullPointerException e){
                     infector = "Start";
                 }
@@ -1401,7 +1120,7 @@ public abstract class CaseToCaseTreeLikelihood extends AbstractTreeLikelihood im
 
         for(AbstractCase aCase : outbreak.getCases()){
             if(aCase.wasEverInfected()) {
-                NodeRef originalNode = getEarliestNodeInPartition(aCase);
+                NodeRef originalNode = ((PartitionedTreeModel)treeModel).getEarliestNodeInPartition(aCase);
 
                 int infectionNodeNo = originalNode.getNumber();
                 if (!treeModel.isRoot(originalNode)) {
@@ -1418,6 +1137,7 @@ public abstract class CaseToCaseTreeLikelihood extends AbstractTreeLikelihood im
                     infectionNode.setHeight(heightToBreakBranch);
                     infectionNode.setLength(oldParent.getHeight() - heightToBreakBranch);
                     infectionNode.setAttribute(PARTITIONS_KEY, getNodePartition(treeModel, originalParent));
+                    infectionNode.setAttribute("Time", heightToTime(heightToBreakBranch));
                     newNode.setLength(nodeTime - infectionTime);
 
                     outTree.addChild(oldParent, infectionNode);
@@ -1431,6 +1151,7 @@ public abstract class CaseToCaseTreeLikelihood extends AbstractTreeLikelihood im
                     outTree.beginTreeEdit();
                     FlexibleNode infectionNode = new FlexibleNode();
                     infectionNode.setHeight(heightToInstallRoot);
+                    infectionNode.setAttribute("Time", heightToTime(heightToInstallRoot));
                     infectionNode.setAttribute(PARTITIONS_KEY, "The_Ether");
                     outTree.addChild(infectionNode, newNode);
                     newNode.setLength(heightToInstallRoot - getHeight(originalNode));
@@ -1454,7 +1175,7 @@ public abstract class CaseToCaseTreeLikelihood extends AbstractTreeLikelihood im
                     e.printStackTrace();
                 }
                 try{
-                    checkPartitions();
+                    ((PartitionedTreeModel)treeModel).checkPartitions();
                 } catch(BadPartitionException e){
                     System.out.print("Rewiring messed up because of partition problem.");
                 }
@@ -1482,10 +1203,10 @@ public abstract class CaseToCaseTreeLikelihood extends AbstractTreeLikelihood im
             if(infected.wasEverInfected()) {
                 columns[count] = new LogColumn.Abstract(infected.toString() + "_infector") {
                     protected String getFormattedValue() {
-                        if (getInfector(infected) == null) {
+                        if (((PartitionedTreeModel)treeModel).getInfector(infected) == null) {
                             return "Start";
                         } else {
-                            return getInfector(infected).toString();
+                            return ((PartitionedTreeModel)treeModel).getInfector(infected).toString();
                         }
                     }
                 };
@@ -1691,6 +1412,22 @@ public abstract class CaseToCaseTreeLikelihood extends AbstractTreeLikelihood im
         }
     }
 
+    public Integer[] getParentsArray(){
+        Integer[] out = new Integer[outbreak.size()];
+        for(AbstractCase thisCase : outbreak.getCases()){
+            if(thisCase.wasEverInfected()) {
+                out[outbreak.getCaseIndex(thisCase)] = outbreak.getCaseIndex(((PartitionedTreeModel)treeModel).getInfector(thisCase));
+            } else {
+                out[outbreak.getCaseIndex(thisCase)] = null;
+            }
+        }
+        return out;
+    }
+
+    public AbstractCase getInfector(int i){
+        return ((PartitionedTreeModel)treeModel).getInfector(getOutbreak().getCase(i));
+    }
+
 }
 
 
diff --git a/src/dr/evomodel/epidemiology/casetocase/CategoryOutbreak.java b/src/dr/evomodel/epidemiology/casetocase/CategoryOutbreak.java
index c10139a..1718396 100644
--- a/src/dr/evomodel/epidemiology/casetocase/CategoryOutbreak.java
+++ b/src/dr/evomodel/epidemiology/casetocase/CategoryOutbreak.java
@@ -59,6 +59,7 @@ public class CategoryOutbreak extends AbstractOutbreak {
     private final HashSet<String> infectiousCategories;
     private final HashMap<String, Parameter> latentMap;
     private final HashMap<String, AbstractPeriodPriorDistribution> infectiousMap;
+    private final HashMap<AbstractCase, Double> weightMap;
     private double[][] distances;
 
 
@@ -77,23 +78,26 @@ public class CategoryOutbreak extends AbstractOutbreak {
         for(Parameter hyperprior : latentMap.values()){
             addVariable(hyperprior);
         }
+        weightMap = new HashMap<AbstractCase, Double>();
     }
 
 
     private void addCase(String caseID, double examTime, double cullTime, Parameter coords,
-                         Parameter infectionPosition, Taxa associatedTaxa, String infectiousCategory,
-                         String latentCategory){
+                         Parameter infectionPosition, Taxa associatedTaxa, double indexPriorWeight,
+                         String infectiousCategory, String latentCategory){
         CategoryCase thisCase;
 
         if(latentCategory==null){
             thisCase =  new CategoryCase(caseID, examTime, cullTime, coords, infectionPosition, associatedTaxa,
-                    infectiousCategory);
+                    indexPriorWeight, infectiousCategory);
         } else {
             thisCase =
                     new CategoryCase(caseID, examTime, cullTime, coords, infectionPosition, associatedTaxa,
-                            infectiousCategory, latentCategory);
+                            indexPriorWeight, infectiousCategory, latentCategory);
             latentCategories.add(latentCategory);
         }
+        weightMap.put(thisCase, indexPriorWeight);
+
         infectiousCategories.add(infectiousCategory);
         cases.add(thisCase);
         infectedSize++;
@@ -102,7 +106,7 @@ public class CategoryOutbreak extends AbstractOutbreak {
 
     private void addNoninfectedCase(String caseID, Parameter coords){
         CategoryCase thisCase = new CategoryCase(caseID, Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY, coords,
-                null, null, null);
+                null, null, 0.0, null);
         thisCase.setEverInfected(false);
 
         cases.add(thisCase);
@@ -110,7 +114,9 @@ public class CategoryOutbreak extends AbstractOutbreak {
     }
 
 
-
+    public HashMap<AbstractCase, Double> getWeightMap(){
+        return weightMap;
+    }
 
     public HashSet<String> getLatentCategories(){
         return latentCategories;
@@ -208,10 +214,12 @@ public class CategoryOutbreak extends AbstractOutbreak {
         private String infectiousCategory;
         private String latentCategory;
         private Parameter coords;
+        private double indexPriorWeight;
 
 
         private CategoryCase(String name, String caseID, double examTime, double cullTime, Parameter coords,
-                             Parameter infectionBranchPosition, Taxa associatedTaxa, String infectiousCategory){
+                             Parameter infectionBranchPosition, Taxa associatedTaxa, double indexPriorWeight,
+                             String infectiousCategory){
             super(name);
             wasEverInfected = true;
             this.caseID = caseID;
@@ -224,30 +232,41 @@ public class CategoryOutbreak extends AbstractOutbreak {
             endOfInfectiousTime = cullTime;
             this.associatedTaxa = associatedTaxa;
             this.coords = coords;
+            this.indexPriorWeight = indexPriorWeight;
             latentCategory = null;
         }
 
+        private CategoryCase(String name, String caseID, double examTime, double cullTime, Parameter coords,
+                             Parameter infectionBranchPosition, Taxa associatedTaxa,
+                             String infectiousCategory){
+            this(name, caseID, examTime, cullTime, coords, infectionBranchPosition, associatedTaxa, 1.0,
+                    infectiousCategory);
+
+        }
+
 
         private CategoryCase(String name, String caseID, double examTime, double cullTime, Parameter coords,
-                             Parameter infectionBranchPosition, Taxa associatedTaxa, String infectiousCategory,
-                             String latentCategory){
-            this(name, caseID, examTime, cullTime, coords, infectionBranchPosition, associatedTaxa, infectiousCategory);
+                             Parameter infectionBranchPosition, Taxa associatedTaxa, double indexPriorWeight,
+                             String infectiousCategory, String latentCategory){
+            this(name, caseID, examTime, cullTime, coords, infectionBranchPosition, associatedTaxa, indexPriorWeight,
+                    infectiousCategory);
             this.latentCategory = latentCategory;
         }
 
 
         private CategoryCase(String caseID, double examTime, double cullTime, Parameter coords,
-                             Parameter infectionBranchPosition, Taxa associatedTaxa, String infectiousCategory){
+                             Parameter infectionBranchPosition, Taxa associatedTaxa, double indexPriorWeight,
+                             String infectiousCategory){
             this(CATEGORY_CASE, caseID, examTime, cullTime, coords, infectionBranchPosition, associatedTaxa,
-                    infectiousCategory);
+                    indexPriorWeight, infectiousCategory);
         }
 
 
         private CategoryCase(String caseID, double examTime, double cullTime, Parameter coords,
-                             Parameter infectionBranchPosition, Taxa associatedTaxa,
+                             Parameter infectionBranchPosition, Taxa associatedTaxa, double indexPriorWeight,
                              String infectiousCategory, String latentCategory){
             this(CATEGORY_CASE, caseID, examTime, cullTime, coords, infectionBranchPosition, associatedTaxa,
-                    infectiousCategory, latentCategory);
+                    indexPriorWeight, infectiousCategory, latentCategory);
         }
 
         //noninfected susceptible constructor
@@ -260,7 +279,7 @@ public class CategoryOutbreak extends AbstractOutbreak {
             return infectiousCategory;
         }
 
-
+        public double getIndexPriorWeight() { return indexPriorWeight;}
 
         public boolean culledYet(double time) {
             return time>endOfInfectiousTime;
@@ -315,6 +334,7 @@ public class CategoryOutbreak extends AbstractOutbreak {
         public static final String LATENT_CATEGORY = "latentCategory";
         public static final String INFECTIOUS_CATEGORY = "infectiousCategory";
         public static final String WAS_EVER_INFECTED = "wasEverInfected";
+        public static final String INDEX_PRIOR_WEIGHT = "indexPriorWeight";
 
         //for the normal-gamma priors
 
@@ -430,6 +450,13 @@ public class CategoryOutbreak extends AbstractOutbreak {
                 } else if (expectLatentPeriods) {
                     throw new XMLParseException("Case " + farmID + " not assigned a latent periods distribution");
                 }
+
+                double indexPriorWeight = 1;
+
+                if(xo.hasAttribute(INDEX_PRIOR_WEIGHT)){
+                    indexPriorWeight = Double.parseDouble((String)xo.getAttribute(INDEX_PRIOR_WEIGHT));
+                }
+
                 final Parameter ibp = (Parameter) xo.getElementFirstChild(INFECTION_TIME_BRANCH_POSITION);
 
 
@@ -439,7 +466,7 @@ public class CategoryOutbreak extends AbstractOutbreak {
                         taxa.addTaxon((Taxon) xo.getChild(i));
                     }
                 }
-                outbreak.addCase(farmID, examTime, cullTime, coords, ibp, taxa, infectiousCategory, latentCategory);
+                outbreak.addCase(farmID, examTime, cullTime, coords, ibp, taxa, indexPriorWeight, infectiousCategory, latentCategory);
             } else {
                 outbreak.addNoninfectedCase(farmID, coords);
 
@@ -474,7 +501,9 @@ public class CategoryOutbreak extends AbstractOutbreak {
                         " along which the infection of this case occurs that it actually does occur", true),
                 new ElementRule(COORDINATES, Parameter.class, "The spatial coordinates of this case", true),
                 new StringAttributeRule(LATENT_CATEGORY, "The category of latent period", true),
-                new StringAttributeRule(INFECTIOUS_CATEGORY, "The category of infectious period", true)
+                new StringAttributeRule(INFECTIOUS_CATEGORY, "The category of infectious period", true),
+                new StringAttributeRule(INDEX_PRIOR_WEIGHT, "The weight of this case in the prior probabilty for the" +
+                        "index case", true)
         };
 
 
diff --git a/src/dr/evomodel/epidemiology/casetocase/PartitionedTreeModel.java b/src/dr/evomodel/epidemiology/casetocase/PartitionedTreeModel.java
index 65e36b9..fccd55d 100644
--- a/src/dr/evomodel/epidemiology/casetocase/PartitionedTreeModel.java
+++ b/src/dr/evomodel/epidemiology/casetocase/PartitionedTreeModel.java
@@ -25,6 +25,7 @@
 
 package dr.evomodel.epidemiology.casetocase;
 
+import dr.app.beauti.util.TreeUtils;
 import dr.evolution.tree.NodeRef;
 import dr.evolution.tree.Tree;
 import dr.evomodel.tree.TreeModel;
@@ -33,11 +34,7 @@ import dr.inference.model.*;
 import dr.xml.XMLObject;
 import dr.xml.XMLParseException;
 
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashSet;
-
-import java.util.Set;
+import java.util.*;
 
 /**
  * TreeModel plus partition information
@@ -207,4 +204,260 @@ public class PartitionedTreeModel extends TreeModel {
         flushQueue();
     }
 
+    public boolean checkPartitions(){
+        return checkPartitions(branchMap, true);
+    }
+
+    protected boolean checkPartitions(BranchMapModel map, boolean verbose){
+        boolean foundProblem = false;
+        for(int i=0; i<getInternalNodeCount(); i++){
+            boolean foundTip = false;
+            for(Integer nodeNumber : samePartitionElement(getInternalNode(i))){
+                if(isExternal(getNode(nodeNumber))){
+                    foundTip = true;
+                }
+            }
+            if(!foundProblem && !foundTip){
+                foundProblem = true;
+                if(verbose){
+                    System.out.println("Node "+(i+getExternalNodeCount()) + " is not connected to a tip");
+                }
+            }
+
+        }
+        return !foundProblem;
+    }
+
+    //Return a set of nodes that are not descendants of (or equal to) the current node and are in the same partition as
+    // it. If flagForRecalc is true, then this also sets the flags for likelihood recalculation for all these nodes
+    // to true
+
+
+    public HashSet<Integer> samePartitionDownTree(NodeRef node){
+
+        HashSet<Integer> out = new HashSet<Integer>();
+        AbstractCase painting = branchMap.get(node.getNumber());
+        NodeRef currentNode = node;
+        NodeRef parentNode = getParent(node);
+        while(parentNode!=null && branchMap.get(parentNode.getNumber())==painting){
+            out.add(parentNode.getNumber());
+            if(countChildrenInSamePartition(parentNode)==2){
+                NodeRef otherChild = sibling(this, currentNode);
+                out.add(otherChild.getNumber());
+                out.addAll(samePartitionUpTree(otherChild));
+            }
+            currentNode = parentNode;
+            parentNode = getParent(currentNode);
+        }
+        return out;
+    }
+
+    //Return a set of nodes that are descendants (and not equal to) the current node and are in the same partition as
+    // it.
+
+
+
+    public HashSet<Integer> samePartitionUpTree(NodeRef node){
+        HashSet<Integer> out = new HashSet<Integer>();
+        AbstractCase painting = branchMap.get(node.getNumber());
+        for(int i=0; i< getChildCount(node); i++){
+            if(branchMap.get(getChild(node,i).getNumber())==painting){
+                out.add(getChild(node,i).getNumber());
+                out.addAll(samePartitionUpTree(getChild(node, i)));
+            }
+        }
+        return out;
+    }
+
+
+    public Integer[] samePartitionElement(NodeRef node){
+        HashSet<Integer> out = new HashSet<Integer>();
+        out.add(node.getNumber());
+        out.addAll(samePartitionDownTree(node));
+        out.addAll(samePartitionUpTree(node));
+        return out.toArray(new Integer[out.size()]);
+    }
+
+    private int[] allTipsForThisCase(AbstractCase thisCase){
+        ArrayList<Integer> listOfRefs = new ArrayList<Integer>();
+
+        for(int i=0; i<getExternalNodeCount(); i++){
+            if(branchMap.get(i)==thisCase){
+                listOfRefs.add(i);
+            }
+
+        }
+
+        int[] out = new int[listOfRefs.size()];
+
+        for(int i=0; i<out.length; i++){out[i] = listOfRefs.get(i);}
+
+        return out;
+
+    }
+
+
+    public NodeRef getEarliestNodeInPartition(AbstractCase thisCase){
+        if(thisCase.wasEverInfected()) {
+
+            int[] tips = allTipsForThisCase(thisCase);
+
+            NodeRef tipMRCA = Tree.Utils.getCommonAncestor(this, tips);
+
+            if(branchMap.get(tipMRCA.getNumber())!=thisCase){
+                throw new BadPartitionException("Node partition disconnected");
+            }
+
+            NodeRef child = tipMRCA;
+            NodeRef parent = getParent(child);
+            boolean transmissionFound = false;
+            while (!transmissionFound) {
+                if (branchMap.get(child.getNumber()) != branchMap.get(parent.getNumber())) {
+                    transmissionFound = true;
+                } else {
+                    child = parent;
+                    parent = getParent(child);
+                    if (parent == null) {
+                        transmissionFound = true;
+                    }
+                }
+            }
+            return child;
+        }
+        return null;
+    }
+
+    public HashSet<AbstractCase> getDescendants(AbstractCase thisCase){
+        HashSet<AbstractCase> out = new HashSet<AbstractCase>(getInfectees(thisCase));
+
+        if(thisCase.wasEverInfected()) {
+            for (AbstractCase child : out) {
+                out.addAll(getDescendants(child));
+            }
+        }
+        return out;
+    }
+
+
+
+
+    /* Return the case that infected this case */
+
+    /* Return the case which was the infector in the infection event represented by this node */
+
+    public AbstractCase getInfector(AbstractCase thisCase){
+        if(thisCase.wasEverInfected()) {
+            int[] tips = allTipsForThisCase(thisCase);
+
+            NodeRef tipMRCA = Tree.Utils.getCommonAncestor(this, tips);
+
+            if(branchMap.get(tipMRCA.getNumber())!=thisCase){
+                throw new BadPartitionException("Node partition disconnected");
+            }
+
+            NodeRef currentNode = tipMRCA;
+
+            while(branchMap.get(currentNode.getNumber())==thisCase){
+                currentNode = getParent(currentNode);
+                if(currentNode==null){
+                    return null;
+                }
+            }
+            return branchMap.get(currentNode.getNumber());
+
+
+        }
+        return null;
+    }
+
+    public AbstractCase getRootCase(){
+        return branchMap.get(getRoot().getNumber());
+    }
+
+
+
+    public HashSet<AbstractCase> getInfectees(AbstractCase thisCase){
+        if(thisCase.wasEverInfected()) {
+            return getInfecteesInClade(getEarliestNodeInPartition(thisCase));
+        }
+        return new HashSet<AbstractCase>();
+    }
+
+    public HashSet<AbstractCase> getInfecteesInClade(NodeRef node){
+        HashSet<AbstractCase> out = new HashSet<AbstractCase>();
+        if(isExternal(node)){
+            return out;
+        } else {
+            AbstractCase thisCase = branchMap.get(node.getNumber());
+            for(int i=0; i<getChildCount(node); i++){
+                NodeRef child = getChild(node, i);
+                AbstractCase childCase = branchMap.get(child.getNumber());
+                if(childCase!=thisCase){
+                    out.add(childCase);
+                } else {
+                    out.addAll(getInfecteesInClade(child));
+                }
+            }
+            return out;
+        }
+    }
+
+    //infector of the case assigned to this node
+
+    public AbstractCase getInfector(NodeRef node){
+        if(isRoot(node) || node.getNumber() == getRoot().getNumber()){
+            return null;
+        } else {
+            AbstractCase nodeCase = branchMap.get(node.getNumber());
+            if(branchMap.get(getParent(node).getNumber())!=nodeCase){
+                return branchMap.get(getParent(node).getNumber());
+            } else {
+                return getInfector(getParent(node));
+            }
+        }
+    }
+
+
+    /* Return the partition of the parent of this node */
+
+    public AbstractCase getParentCase(NodeRef node){
+        return branchMap.get(getParent(node).getNumber());
+    }
+
+
+    //Counts the children of the current node which are in the same partition element as itself
+
+
+
+    public int countChildrenInSamePartition(NodeRef node){
+        if(isExternal(node)){
+            return -1;
+        } else {
+            int count = 0;
+            AbstractCase parentCase = branchMap.get(node.getNumber());
+            for(int i=0; i< getChildCount(node); i++){
+                if(branchMap.get(getChild(node,i).getNumber())==parentCase){
+                    count++;
+                }
+            }
+            return count;
+        }
+    }
+
+
+
+    public static NodeRef sibling(TreeModel tree, NodeRef node){
+        if(tree.isRoot(node)){
+            return null;
+        } else {
+            NodeRef parent = tree.getParent(node);
+            for(int i=0; i<tree.getChildCount(parent); i++){
+                if(tree.getChild(parent,i)!=node){
+                    return tree.getChild(parent,i);
+                }
+            }
+        }
+        return null;
+    }
+
 }
diff --git a/src/dr/evomodel/epidemiology/casetocase/WithinCaseCoalescent.java b/src/dr/evomodel/epidemiology/casetocase/WithinCaseCoalescent.java
index 85a4e36..47657e2 100644
--- a/src/dr/evomodel/epidemiology/casetocase/WithinCaseCoalescent.java
+++ b/src/dr/evomodel/epidemiology/casetocase/WithinCaseCoalescent.java
@@ -69,9 +69,6 @@ public class WithinCaseCoalescent extends CaseToCaseTreeLikelihood {
     private DemographicModel demoModel;
     private Mode mode;
 
-
-    private double infectiousPeriodsLogLikelihood;
-    private double storedInfectiousPeriodsLogLikelihood;
     private double coalescencesLogLikelihood;
     private double storedCoalescencesLogLikelihood;
 
@@ -108,60 +105,8 @@ public class WithinCaseCoalescent extends CaseToCaseTreeLikelihood {
 
         //checkPartitions();
 
-        if(DEBUG){
-
-            super.debugOutputTree("bleh.nex", true);
-        }
-
         double logL = 0;
 
-        // you shouldn't need to do this, because C2CTransL will already have done it
-
-        // super.prepareTimings();
-
-        HashMap<String, ArrayList<Double>> infectiousPeriodsByCategory
-                = new HashMap<String, ArrayList<Double>>();
-
-        // todo do this only once? Using indexes?
-
-        for (AbstractCase aCase : outbreak.getCases()) {
-            if(aCase.wasEverInfected()) {
-
-                String category = ((CategoryOutbreak) outbreak).getInfectiousCategory(aCase);
-
-                if (!infectiousPeriodsByCategory.keySet().contains(category)) {
-                    infectiousPeriodsByCategory.put(category, new ArrayList<Double>());
-                }
-
-                ArrayList<Double> correspondingList
-                        = infectiousPeriodsByCategory.get(category);
-
-                correspondingList.add(getInfectiousPeriod(aCase));
-            }
-        }
-
-        infectiousPeriodsLogLikelihood = 0;
-
-        for (String category : ((CategoryOutbreak) outbreak).getInfectiousCategories()) {
-
-            Double[] infPeriodsInThisCategory = infectiousPeriodsByCategory.get(category)
-                    .toArray(new Double[infectiousPeriodsByCategory.size()]);
-
-            AbstractPeriodPriorDistribution hyperprior = ((CategoryOutbreak) outbreak)
-                    .getInfectiousCategoryPrior(category);
-
-            double[] values = new double[infPeriodsInThisCategory.length];
-
-            for (int i = 0; i < infPeriodsInThisCategory.length; i++) {
-                values[i] = infPeriodsInThisCategory[i];
-            }
-
-            infectiousPeriodsLogLikelihood += hyperprior.getLogLikelihood(values);
-
-        }
-
-        logL += infectiousPeriodsLogLikelihood;
-
         explodeTree();
 
         coalescencesLogLikelihood = 0;
@@ -175,13 +120,11 @@ public class WithinCaseCoalescent extends CaseToCaseTreeLikelihood {
 
                 // and then the little tree calculations
 
-                HashSet<AbstractCase> children = getInfectees(aCase);
+                HashSet<AbstractCase> children = ((PartitionedTreeModel)treeModel).getInfectees(aCase);
 
                 if (recalculateCoalescentFlags[number]) {
                     Treelet treelet = partitionsAsTrees.get(aCase);
 
-
-
                     if (children.size() != 0) {
                         SpecifiedZeroCoalescent coalescent = new SpecifiedZeroCoalescent(treelet, demoModel,
                                 treelet.getZeroHeight(), mode == Mode.TRUNCATE);
@@ -223,7 +166,6 @@ public class WithinCaseCoalescent extends CaseToCaseTreeLikelihood {
 
 
         storedCoalescencesLogLikelihood = coalescencesLogLikelihood;
-        storedInfectiousPeriodsLogLikelihood = infectiousPeriodsLogLikelihood;
 
     }
 
@@ -234,7 +176,6 @@ public class WithinCaseCoalescent extends CaseToCaseTreeLikelihood {
 
 
         coalescencesLogLikelihood = storedCoalescencesLogLikelihood;
-        infectiousPeriodsLogLikelihood = storedInfectiousPeriodsLogLikelihood;
 
     }
 
@@ -279,7 +220,7 @@ public class WithinCaseCoalescent extends CaseToCaseTreeLikelihood {
 
                 recalculateCaseWCC(thisCase);
 
-                AbstractCase parent = getInfector(thisCase);
+                AbstractCase parent = ((PartitionedTreeModel)treeModel).getInfector(thisCase);
 
                 if(parent!=null){
                     recalculateCaseWCC(parent);
@@ -320,7 +261,7 @@ public class WithinCaseCoalescent extends CaseToCaseTreeLikelihood {
             AbstractCase aCase = outbreak.getCase(i);
             if(aCase.wasEverInfected() && partitionsAsTrees.get(aCase)==null){
 
-                NodeRef partitionRoot = getEarliestNodeInPartition(aCase);
+                NodeRef partitionRoot = ((PartitionedTreeModel)treeModel).getEarliestNodeInPartition(aCase);
 
                 double extraHeight;
 
@@ -345,40 +286,9 @@ public class WithinCaseCoalescent extends CaseToCaseTreeLikelihood {
 
                 littleTree.resolveTree();
 
-                double sampleTipHeight = 0;
-
-                if(littleTree.getExternalNodeCount()>1) {
-                    for (int j = 0; j < littleTree.getExternalNodeCount(); j++) {
-                        NodeRef node = littleTree.getExternalNode(j);
-                        if (!littleTree.getNodeTaxon(node).getId().startsWith("Transmission_")) {
-                            sampleTipHeight = littleTree.getNodeHeight(node);
-                            break;
-                        }
-
-                    }
-                }
-
-
-
-
-
                 Treelet treelet = new Treelet(littleTree,
                         littleTree.getRootHeight() + extraHeight);
 
-
-
-
-//                if(sampleTipHeight==-1){
-//                    System.out.println();
-//                }
-
-//                double heightPlusRB = treelet.getZeroHeight() - sampleTipHeight;
-//                double infectedTime = aCase.examTime - getInfectionTime(aCase);
-//
-//                if(heightPlusRB!=infectedTime){
-//                    System.out.println();
-//                }
-
                 partitionsAsTrees.put(aCase, treelet);
 
 
@@ -392,7 +302,7 @@ public class WithinCaseCoalescent extends CaseToCaseTreeLikelihood {
 
     private ArrayList<AbstractCase> traverseTransmissionTree(AbstractCase aCase){
         ArrayList<AbstractCase> out = new ArrayList<AbstractCase>();
-        HashSet<AbstractCase> children = getInfectees(aCase);
+        HashSet<AbstractCase> children = ((PartitionedTreeModel)treeModel).getInfectees(aCase);
         for(int i=0; i<getOutbreak().size(); i++){
             AbstractCase possibleChild = getOutbreak().getCase(i);
             // easiest way to maintain the set ordering of the outbreak?
@@ -665,15 +575,6 @@ public class WithinCaseCoalescent extends CaseToCaseTreeLikelihood {
 
         if(outbreak instanceof CategoryOutbreak) {
 
-            for (AbstractPeriodPriorDistribution hyperprior : ((CategoryOutbreak) outbreak).getInfectiousMap().values()) {
-                columns.addAll(Arrays.asList(hyperprior.getColumns()));
-            }
-
-            columns.add(new LogColumn.Abstract("inf_LL") {
-                protected String getFormattedValue() {
-                    return String.valueOf(infectiousPeriodsLogLikelihood);
-                }
-            });
             for (int i = 0; i < outbreak.size(); i++) {
                 if(outbreak.getCase(i).wasEverInfected()) {
                     final int finalI = i;
diff --git a/src/dr/evomodel/epidemiology/casetocase/operators/InfectionBranchGibbsOperator.java b/src/dr/evomodel/epidemiology/casetocase/operators/InfectionBranchGibbsOperator.java
deleted file mode 100644
index 4e48916..0000000
--- a/src/dr/evomodel/epidemiology/casetocase/operators/InfectionBranchGibbsOperator.java
+++ /dev/null
@@ -1,235 +0,0 @@
-/*
- * InfectionBranchGibbsOperator.java
- *
- * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
- *
- * This file is part of BEAST.
- * See the NOTICE file distributed with this work for additional
- * information regarding copyright ownership and licensing.
- *
- * BEAST is free software; you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as
- * published by the Free Software Foundation; either version 2
- * of the License, or (at your option) any later version.
- *
- *  BEAST is distributed in the hope that it will be useful,
- *  but WITHOUT ANY WARRANTY; without even the implied warranty of
- *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
- *  GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public
- * License along with BEAST; if not, write to the
- * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
- * Boston, MA  02110-1301  USA
- */
-
-package dr.evomodel.epidemiology.casetocase.operators;
-
-import dr.evolution.tree.NodeRef;
-import dr.evolution.tree.Tree;
-import dr.evomodel.epidemiology.casetocase.*;
-import dr.inference.operators.GibbsOperator;
-import dr.inference.operators.OperatorFailedException;
-import dr.inference.operators.SimpleMCMCOperator;
-import dr.math.MathUtils;
-import dr.xml.*;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashSet;
-
-/**
- * Goes through every case in turn, except the root case whose infection branch is fixed given all the others, and
- * calculates the probability of each legal branch placement for the infection (if there is more than one). Chooses a
- * new one with proportional probability.
- *
- * @author Matthew Hall
- */
-public class InfectionBranchGibbsOperator extends SimpleMCMCOperator implements GibbsOperator {
-
-    public static final String INFECTION_BRANCH_GIBBS_OPERATOR = "infectionBranchGibbsOperator";
-    private CaseToCaseTransmissionLikelihood c2cTransLikelihood;
-    private CaseToCaseTreeLikelihood c2cTreeLikelihood;
-    private static final boolean DEBUG = false;
-
-    public InfectionBranchGibbsOperator(CaseToCaseTransmissionLikelihood c2cTransLikelihood, double weight){
-        this.c2cTransLikelihood = c2cTransLikelihood;
-        c2cTreeLikelihood = c2cTransLikelihood.getTreeLikelihood();
-        setWeight(weight);
-    }
-
-    public int getStepCount() {
-        return c2cTreeLikelihood.getOutbreak().size()-1;
-    }
-
-    public String getPerformanceSuggestion() {
-        return null;
-    }
-
-    public String getOperatorName() {
-        return INFECTION_BRANCH_GIBBS_OPERATOR;
-    }
-
-    public double doOperation() throws OperatorFailedException {
-
-        ArrayList<AbstractCase> caseList = new ArrayList<AbstractCase>(c2cTreeLikelihood.getOutbreak().getCases());
-
-        int[] shuffledOrder = MathUtils.shuffled(caseList.size());
-
-        for(int i : shuffledOrder){
-
-            if(DEBUG){
-                String caseName = caseList.get(i).getName();
-                c2cTreeLikelihood.debugOutputTree(caseName + "_before.nex", false);
-            }
-
-            pickBranch(caseList.get(i));
-        }
-
-        if(DEBUG){
-            c2cTreeLikelihood.checkPartitions();
-        }
-
-        return 0;
-    }
-
-    private void pickBranch(AbstractCase aCase){
-
-        AbstractCase anInfector = c2cTreeLikelihood.getInfector(aCase);
-
-        if(anInfector==null){
-            // can't move the root case - there must be one and if all the other outbreak' infection branches are known
-            // then this must be it
-            return;
-        }
-
-        PartitionedTreeModel tree = c2cTreeLikelihood.getTreeModel();
-
-        BranchMapModel originalBranchMap = c2cTreeLikelihood.getBranchMap();
-
-        NodeRef tip1 = c2cTreeLikelihood.getTip(aCase);
-        NodeRef tip2 = c2cTreeLikelihood.getTip(anInfector);
-
-        NodeRef mrca = Tree.Utils.getCommonAncestor(tree, tip1, tip2);
-
-        ArrayList<NodeRef> leftBridge = new ArrayList<NodeRef>();
-        NodeRef currentNode = tip1;
-
-        while(currentNode!=mrca){
-            leftBridge.add(currentNode);
-            currentNode = c2cTreeLikelihood.getTreeModel().getParent(currentNode);
-        }
-
-        ArrayList<NodeRef> rightBridge = new ArrayList<NodeRef>();
-        currentNode = tip2;
-
-        while(currentNode!=mrca){
-            rightBridge.add(currentNode);
-            currentNode = c2cTreeLikelihood.getTreeModel().getParent(currentNode);
-        }
-
-        double[] logProbabilities = new double[leftBridge.size()+rightBridge.size()];
-
-        AbstractCase[][] branchMaps = new AbstractCase[leftBridge.size()+rightBridge.size()][];
-
-        // left bridge
-
-        HashSet<Integer> nodesToChange = c2cTreeLikelihood.samePartitionDownTree(tip1, false);
-
-        AbstractCase[] tempBranchMap = Arrays.copyOf(originalBranchMap.getArrayCopy(), tree.getNodeCount());
-
-        for(Integer number : nodesToChange){
-            if(!tree.isExternal(tree.getNode(number))){
-                tempBranchMap[number] = anInfector;
-            }
-        }
-
-        // at this point only the tip is in its partition. Step-by-step, add the left bridge.
-
-        for(int i=0; i<leftBridge.size(); i++){
-            NodeRef node = leftBridge.get(i);
-
-            if(i>0){
-                tempBranchMap[node.getNumber()]=aCase;
-                HashSet<Integer> nodesToChangeUp = c2cTreeLikelihood.samePartitionUpTree(node, false);
-                for(Integer number : nodesToChangeUp){
-                    tempBranchMap[number]=aCase;
-                }
-            }
-
-            branchMaps[i] = Arrays.copyOf(tempBranchMap, tempBranchMap.length);
-
-            logProbabilities[i] = c2cTransLikelihood.calculateTempLogLikelihood(tempBranchMap);
-
-        }
-
-        // right bridge
-
-        nodesToChange = c2cTreeLikelihood.samePartitionDownTree(tip2, false);
-
-        tempBranchMap = Arrays.copyOf(originalBranchMap.getArrayCopy(), tree.getNodeCount());
-
-        for(Integer number : nodesToChange){
-            if(!tree.isExternal(tree.getNode(number))){
-                tempBranchMap[number] = aCase;
-            }
-        }
-
-        for(int i=0; i<rightBridge.size(); i++){
-            NodeRef node = rightBridge.get(i);
-
-            if(i>0){
-                tempBranchMap[node.getNumber()]=anInfector;
-                HashSet<Integer> nodesToChangeUp = c2cTreeLikelihood.samePartitionUpTree(node, false);
-                for(Integer number : nodesToChangeUp){
-                    tempBranchMap[number]=anInfector;
-                }
-            }
-
-            branchMaps[branchMaps.length-1-i] = Arrays.copyOf(tempBranchMap, tempBranchMap.length);
-
-            logProbabilities[branchMaps.length-1-i] = c2cTransLikelihood.calculateTempLogLikelihood(tempBranchMap);
-
-        }
-
-
-        // this prevents underflow
-
-        int choice = MathUtils.randomChoiceLogPDF(logProbabilities);
-
-        originalBranchMap.setAll(branchMaps[choice], false);
-    }
-
-    public static XMLObjectParser PARSER = new AbstractXMLObjectParser(){
-
-        public String getParserName(){
-            return INFECTION_BRANCH_GIBBS_OPERATOR;
-        }
-
-        public Object parseXMLObject(XMLObject xo) throws XMLParseException {
-
-            CaseToCaseTransmissionLikelihood ftLikelihood =
-                    (CaseToCaseTransmissionLikelihood) xo.getChild(CaseToCaseTransmissionLikelihood.class);
-            final double weight = xo.getDoubleAttribute("weight");
-            return new InfectionBranchGibbsOperator(ftLikelihood, weight);
-        }
-
-        public String getParserDescription(){
-            return "A Gibbs sampler on the branches that correspond to the infection of each case";
-        }
-
-        public Class getReturnType() {
-            return InfectionBranchGibbsOperator.class;
-        }
-
-        public XMLSyntaxRule[] getSyntaxRules() {
-            return rules;
-        }
-
-        private final XMLSyntaxRule[] rules = {
-                AttributeRule.newDoubleRule("weight"),
-                new ElementRule(CaseToCaseTransmissionLikelihood.class),
-        };
-    };
-
-}
diff --git a/src/dr/evomodel/epidemiology/casetocase/operators/InfectionBranchMovementOperator.java b/src/dr/evomodel/epidemiology/casetocase/operators/InfectionBranchMovementOperator.java
index 0794196..f2a8315 100644
--- a/src/dr/evomodel/epidemiology/casetocase/operators/InfectionBranchMovementOperator.java
+++ b/src/dr/evomodel/epidemiology/casetocase/operators/InfectionBranchMovementOperator.java
@@ -27,15 +27,11 @@ package dr.evomodel.epidemiology.casetocase.operators;
 
 import dr.evolution.tree.NodeRef;
 import dr.evomodel.epidemiology.casetocase.*;
-import dr.inference.model.Parameter;
 import dr.inference.operators.MCMCOperator;
 import dr.inference.operators.SimpleMCMCOperator;
 import dr.math.MathUtils;
 import dr.xml.*;
 
-import java.util.HashMap;
-import java.util.HashSet;
-
 /**
  * This operator finds a branch that corresponds to a transmission event, and moves that event up one branch or down
  * one branch
@@ -86,7 +82,7 @@ public class InfectionBranchMovementOperator extends SimpleMCMCOperator{
         while(branchMap.get(node.getNumber())==branchMap.get(tree.getParent(node).getNumber())){
             node = tree.getParent(node);
         }
-        double hr = adjustTree(tree, node, branchMap, true);
+        double hr = adjustTree(tree, node, branchMap);
 
         if(DEBUG){
             c2cLikelihood.debugOutputTree("after.nex", false);
@@ -96,21 +92,24 @@ public class InfectionBranchMovementOperator extends SimpleMCMCOperator{
     }
 
 
-    private double adjustTree(PartitionedTreeModel tree, NodeRef node, BranchMapModel map, boolean extended){
+    private double adjustTree(PartitionedTreeModel tree, NodeRef node, BranchMapModel map){
         // are we going up or down? If we're not extended then all moves are down. External nodes have to move down.
         double out;
-        if(!extended || tree.isExternal(node) || MathUtils.nextBoolean()){
-            out = moveDown(tree, node, map, extended);
+
+
+
+        if(tree.isExternal(node) || MathUtils.nextBoolean()){
+            out = moveDown(tree, node, map);
         } else {
             out = moveUp(tree, node, map);
         }
         if(DEBUG){
-            c2cLikelihood.checkPartitions();
+            c2cLikelihood.getTreeModel().checkPartitions();
         }
         return out;
     }
 
-    private double moveDown(PartitionedTreeModel tree, NodeRef node, BranchMapModel map, boolean extended){
+    private double moveDown(PartitionedTreeModel tree, NodeRef node, BranchMapModel map){
 
         AbstractCase infectedCase = map.get(node.getNumber());
 
@@ -120,7 +119,7 @@ public class InfectionBranchMovementOperator extends SimpleMCMCOperator{
 
         double hr = 0;
 
-        assert map.get(parent.getNumber())==map.get(node.getNumber()) : "Partition problem";
+        assert map.get(parent.getNumber()) == map.get(node.getNumber()) : "Partition problem";
 
         NodeRef sibling = node;
         for(int i=0; i<tree.getChildCount(parent); i++){
@@ -131,19 +130,16 @@ public class InfectionBranchMovementOperator extends SimpleMCMCOperator{
 
         AbstractCase infectorCase = map.get(parent.getNumber());
 
-        if(!extended || c2cLikelihood.tipLinked(parent)){
+        if(c2cLikelihood.isAncestral(parent)){
 
             if(resampleInfectionTimes){
-
                 infectorCase.setInfectionBranchPosition(MathUtils.nextDouble());
-
             }
 
             NodeRef grandparent = tree.getParent(parent);
             if(grandparent!=null && map.get(grandparent.getNumber())==map.get(parent.getNumber())){
-
-                for(Integer ancestor: c2cLikelihood.samePartitionDownTree(parent, true)){
-                    newMap[ancestor]=map.get(node.getNumber());
+                for(Integer ancestor: c2cLikelihood.getTreeModel().samePartitionDownTree(parent)){
+                    newMap[ancestor] = map.get(node.getNumber());
                 }
                 newMap[grandparent.getNumber()]=map.get(node.getNumber());
             }
@@ -153,7 +149,7 @@ public class InfectionBranchMovementOperator extends SimpleMCMCOperator{
 
         } else {
             if(map.get(sibling.getNumber())==map.get(parent.getNumber())){
-                for(Integer descendant: c2cLikelihood.samePartitionUpTree(sibling, true)){
+                for(Integer descendant: c2cLikelihood.getTreeModel().samePartitionUpTree(sibling)){
                     newMap[descendant]=map.get(node.getNumber());
                 }
                 newMap[sibling.getNumber()]=map.get(node.getNumber());
@@ -181,16 +177,14 @@ public class InfectionBranchMovementOperator extends SimpleMCMCOperator{
 
         NodeRef parent = tree.getParent(node);
 
-        AbstractCase infectorCase = map.get(parent.getNumber());
-
-        assert map.get(parent.getNumber())==map.get(node.getNumber()) : "Partition problem";
-        // check if either child is not tip-linked (at most one is not, and if so it must have been in the same
+        assert map.get(parent.getNumber()) == map.get(node.getNumber()) : "Partition problem";
+        // check if either child is not ancestral (at most one is not, and if so it must have been in the same
         // partition as both the other child and 'node')
         for(int i=0; i<tree.getChildCount(node); i++){
             NodeRef child = tree.getChild(node, i);
-            if(!c2cLikelihood.tipLinked(child)){
-                assert map.get(child.getNumber())==map.get(node.getNumber()) : "Partition problem";
-                for(Integer descendant: c2cLikelihood.samePartitionUpTree(child, true)){
+            if(!c2cLikelihood.isAncestral(child)){
+                assert map.get(child.getNumber()) == map.get(node.getNumber()) : "Partition problem";
+                for(Integer descendant: c2cLikelihood.getTreeModel().samePartitionUpTree(child)){
                     newMap[descendant]=map.get(parent.getNumber());
                 }
                 newMap[child.getNumber()]=map.get(parent.getNumber());
diff --git a/src/dr/evomodel/epidemiology/casetocase/operators/TransmissionExchangeOperatorA.java b/src/dr/evomodel/epidemiology/casetocase/operators/TransmissionExchangeOperatorA.java
index 666dcfe..1fe0087 100644
--- a/src/dr/evomodel/epidemiology/casetocase/operators/TransmissionExchangeOperatorA.java
+++ b/src/dr/evomodel/epidemiology/casetocase/operators/TransmissionExchangeOperatorA.java
@@ -27,19 +27,16 @@ package dr.evomodel.epidemiology.casetocase.operators;
 
 import dr.evolution.tree.NodeRef;
 import dr.evomodel.epidemiology.casetocase.AbstractCase;
-import dr.evomodel.epidemiology.casetocase.AbstractOutbreak;
 import dr.evomodel.epidemiology.casetocase.BranchMapModel;
 import dr.evomodel.epidemiology.casetocase.CaseToCaseTreeLikelihood;
 import dr.evomodel.operators.AbstractTreeOperator;
 import dr.evomodel.tree.TreeModel;
-import dr.inference.model.Parameter;
 import dr.inference.operators.MCMCOperator;
 import dr.inference.operators.OperatorFailedException;
 import dr.math.MathUtils;
 import dr.xml.*;
 
 import java.util.ArrayList;
-import java.util.HashMap;
 
 /**
  * Implements branch exchange operations that leave the transmission tree unchanged. As this already severely
@@ -114,8 +111,6 @@ public class TransmissionExchangeOperatorA extends AbstractTreeOperator {
         if(resampleInfectionTimes){
             BranchMapModel branchMap = c2cLikelihood.getBranchMap();
 
-
-
             AbstractCase iCase = branchMap.get(i.getNumber());
             AbstractCase jCase = branchMap.get(j.getNumber());
             AbstractCase parentCase = branchMap.get(iP.getNumber());
@@ -130,13 +125,6 @@ public class TransmissionExchangeOperatorA extends AbstractTreeOperator {
 
         }
 
-/*
-        I tend to think that this may fail quite a lot of the time due to lack of candidates... a version that does
-        actually adjust heights might be necessary in the long run. Narrow exchange might be much more likely to
-        actually succeed in changing the tree if the paintings allow the tree to be changed in that way; might
-        have to investigate which problem is more serious.
-*/
-
         exchangeNodes(tree, i, j, iP, jP);
 
         ArrayList<NodeRef> reverseCandidatesIfirst = getPossibleExchanges(tree, i);
@@ -157,7 +145,7 @@ public class TransmissionExchangeOperatorA extends AbstractTreeOperator {
         if(parent==null){
             throw new RuntimeException("Can't exchange the root node");
         }
-        Integer[] possibleParentSwaps = c2cLikelihood.samePartition(parent, false);
+        Integer[] possibleParentSwaps = c2cLikelihood.getTreeModel().samePartitionElement(parent);
         for(Integer index: possibleParentSwaps){
             NodeRef newParent = tree.getNode(index);
             if(!tree.isExternal(newParent) && newParent!=parent){
diff --git a/src/dr/evomodel/epidemiology/casetocase/operators/TransmissionExchangeOperatorB.java b/src/dr/evomodel/epidemiology/casetocase/operators/TransmissionExchangeOperatorB.java
index 826c405..f918e59 100644
--- a/src/dr/evomodel/epidemiology/casetocase/operators/TransmissionExchangeOperatorB.java
+++ b/src/dr/evomodel/epidemiology/casetocase/operators/TransmissionExchangeOperatorB.java
@@ -121,10 +121,6 @@ public class TransmissionExchangeOperatorB extends AbstractTreeOperator {
             jCase.setInfectionBranchPosition(MathUtils.nextDouble());
         }
 
-/*
-        Intuitively it would seem this is a lot more likely to succeed than operator A.
-*/
-
         exchangeNodes(tree, i, j, iP, jP);
 
         ArrayList<NodeRef> reverseCandidatesIfirst = getPossibleExchanges(tree, i);
diff --git a/src/dr/evomodel/epidemiology/casetocase/operators/TransmissionSubtreeSlideA.java b/src/dr/evomodel/epidemiology/casetocase/operators/TransmissionSubtreeSlideA.java
index b6203cf..9ad00a0 100644
--- a/src/dr/evomodel/epidemiology/casetocase/operators/TransmissionSubtreeSlideA.java
+++ b/src/dr/evomodel/epidemiology/casetocase/operators/TransmissionSubtreeSlideA.java
@@ -105,9 +105,6 @@ public class TransmissionSubtreeSlideA extends AbstractTreeOperator implements C
 
         double logq = 0;
 
-        final NodeRef root = tree.getRoot();
-        final double oldTreeHeight = tree.getNodeHeight(root);
-
         NodeRef i;
 
         // 1. choose a random eligible node
@@ -350,7 +347,7 @@ public class TransmissionSubtreeSlideA extends AbstractTreeOperator implements C
         if (logq == Double.NEGATIVE_INFINITY) throw new OperatorFailedException("invalid slide");
 
         if (DEBUG){
-            c2cLikelihood.checkPartitions();
+            c2cLikelihood.getTreeModel().checkPartitions();
             c2cLikelihood.debugOutputTree("afterTSSA.nex", false);
         }
 
diff --git a/src/dr/evomodel/epidemiology/casetocase/operators/TransmissionSubtreeSlideB.java b/src/dr/evomodel/epidemiology/casetocase/operators/TransmissionSubtreeSlideB.java
index 718219e..17f07b3 100644
--- a/src/dr/evomodel/epidemiology/casetocase/operators/TransmissionSubtreeSlideB.java
+++ b/src/dr/evomodel/epidemiology/casetocase/operators/TransmissionSubtreeSlideB.java
@@ -106,9 +106,6 @@ public class TransmissionSubtreeSlideB extends AbstractTreeOperator implements C
 
         double logq;
 
-        final NodeRef root = tree.getRoot();
-        final double oldTreeHeight = tree.getNodeHeight(root);
-
         NodeRef i;
 
         // 1. choose a random eligible node avoiding root
@@ -352,13 +349,11 @@ public class TransmissionSubtreeSlideB extends AbstractTreeOperator implements C
                         newiPCase = newChildCase;
                     }
 
-
                     if(resampleInfectionTimes) {
                         //whichever we picked for iP, it's the new child's case whose infection branch is modified
                         // (even if this infection branch is iP's branch)
 
                         newChildCase.setInfectionBranchPosition(MathUtils.nextDouble());
-
                     }
 
                     logq += Math.log(2);
@@ -404,7 +399,7 @@ public class TransmissionSubtreeSlideB extends AbstractTreeOperator implements C
         if (logq == Double.NEGATIVE_INFINITY) throw new OperatorFailedException("invalid slide");
 
         if (DEBUG) {
-            c2cLikelihood.checkPartitions();
+            c2cLikelihood.getTreeModel().checkPartitions();
             c2cLikelihood.debugOutputTree("afterTSSB.nex", false);
         }
 
diff --git a/src/dr/evomodel/epidemiology/casetocase/operators/TransmissionWilsonBaldingA.java b/src/dr/evomodel/epidemiology/casetocase/operators/TransmissionWilsonBaldingA.java
index 3c00bdf..d2ce726 100644
--- a/src/dr/evomodel/epidemiology/casetocase/operators/TransmissionWilsonBaldingA.java
+++ b/src/dr/evomodel/epidemiology/casetocase/operators/TransmissionWilsonBaldingA.java
@@ -27,19 +27,16 @@ package dr.evomodel.epidemiology.casetocase.operators;
 
 import dr.evolution.tree.NodeRef;
 import dr.evomodel.epidemiology.casetocase.AbstractCase;
-import dr.evomodel.epidemiology.casetocase.AbstractOutbreak;
 import dr.evomodel.epidemiology.casetocase.BranchMapModel;
 import dr.evomodel.epidemiology.casetocase.CaseToCaseTreeLikelihood;
 import dr.evomodel.operators.AbstractTreeOperator;
 import dr.evomodel.tree.TreeModel;
-import dr.inference.model.Parameter;
 import dr.inference.operators.MCMCOperator;
 import dr.inference.operators.OperatorFailedException;
 import dr.math.MathUtils;
 import dr.xml.*;
 
 import java.util.ArrayList;
-import java.util.HashMap;
 import java.util.HashSet;
 
 /**
@@ -93,7 +90,7 @@ public class TransmissionWilsonBaldingA extends AbstractTreeOperator {
         int eligibleNodeCount = eligibleNodes.size();
 
         final NodeRef iP = tree.getParent(i);
-        Integer[] samePaintings = c2cLikelihood.samePartition(iP, false);
+        Integer[] samePaintings = c2cLikelihood.getTreeModel().samePartitionElement(iP);
         HashSet<Integer> possibleDestinations = new HashSet<Integer>();
         // we can insert the node above OR BELOW any node in the same partition
         for (Integer samePainting : samePaintings) {
@@ -132,7 +129,6 @@ public class TransmissionWilsonBaldingA extends AbstractTreeOperator {
                 PiPCase = branchMap.get(PiP.getNumber());
             }
 
-
             // what happens on i's branch
 
             if (iCase != iPCase) {
@@ -210,7 +206,7 @@ public class TransmissionWilsonBaldingA extends AbstractTreeOperator {
         tree.endTreeEdit();
 
         if(DEBUG){
-            c2cLikelihood.checkPartitions();
+            c2cLikelihood.getTreeModel().checkPartitions();
         }
         logq = Math.log(q);
 
@@ -227,8 +223,8 @@ public class TransmissionWilsonBaldingA extends AbstractTreeOperator {
     private boolean eligibleForMove(NodeRef node, TreeModel tree, BranchMapModel branchMap){
         // to be eligible for this move, the node's parent and grandparent, or parent and other child, must be in the
         // same partition (so removing the parent has no effect on the transmission tree)
-
-        return  (!tree.isRoot(node) && ((tree.getParent(tree.getParent(node))!=null
+        return  (!tree.isRoot(node)
+                && ((tree.getParent(tree.getParent(node))!=null
                 && branchMap.get(tree.getParent(node).getNumber())
                 ==branchMap.get(tree.getParent(tree.getParent(node)).getNumber()))
                 || branchMap.get(tree.getParent(node).getNumber())==branchMap.get(getOtherChild(tree,
@@ -296,7 +292,4 @@ public class TransmissionWilsonBaldingA extends AbstractTreeOperator {
             };
         }
     };
-
-
-
 }
diff --git a/src/dr/evomodel/epidemiology/casetocase/operators/TransmissionWilsonBaldingB.java b/src/dr/evomodel/epidemiology/casetocase/operators/TransmissionWilsonBaldingB.java
index 3b4488f..2d7b165 100644
--- a/src/dr/evomodel/epidemiology/casetocase/operators/TransmissionWilsonBaldingB.java
+++ b/src/dr/evomodel/epidemiology/casetocase/operators/TransmissionWilsonBaldingB.java
@@ -1,5 +1,5 @@
 /*
- * TransmissionWilsonBaldingB.java
+ * TransmissionSubtreeSlideB.java
  *
  * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
  *
@@ -220,7 +220,7 @@ public class TransmissionWilsonBaldingB extends AbstractTreeOperator {
         }
 
         if(DEBUG){
-            c2cLikelihood.checkPartitions();
+            c2cLikelihood.getTreeModel().checkPartitions();
         }
 
     }
diff --git a/src/dr/evomodel/epidemiology/casetocase/periodpriors/KnownVarianceNormalPeriodPriorDistribution.java b/src/dr/evomodel/epidemiology/casetocase/periodpriors/KnownVarianceNormalPeriodPriorDistribution.java
index 66946a7..41cee17 100644
--- a/src/dr/evomodel/epidemiology/casetocase/periodpriors/KnownVarianceNormalPeriodPriorDistribution.java
+++ b/src/dr/evomodel/epidemiology/casetocase/periodpriors/KnownVarianceNormalPeriodPriorDistribution.java
@@ -28,7 +28,6 @@ package dr.evomodel.epidemiology.casetocase.periodpriors;
 import dr.inference.loggers.LogColumn;
 import dr.inference.model.Parameter;
 import dr.math.distributions.NormalDistribution;
-import dr.math.functionEval.GammaFunction;
 import dr.xml.*;
 
 import java.util.ArrayList;
diff --git a/src/dr/evomodel/epidemiology/casetocase/periodpriors/NormalPeriodPriorDistribution.java b/src/dr/evomodel/epidemiology/casetocase/periodpriors/NormalPeriodPriorDistribution.java
index 874f4d7..a0026c1 100644
--- a/src/dr/evomodel/epidemiology/casetocase/periodpriors/NormalPeriodPriorDistribution.java
+++ b/src/dr/evomodel/epidemiology/casetocase/periodpriors/NormalPeriodPriorDistribution.java
@@ -32,7 +32,6 @@ import dr.math.distributions.NormalGammaDistribution;
 import dr.math.functionEval.GammaFunction;
 import dr.xml.*;
 import org.apache.commons.math.MathException;
-import org.apache.commons.math.distribution.TDistribution;
 import org.apache.commons.math.distribution.TDistributionImpl;
 
 import java.util.ArrayList;
diff --git a/src/dr/evomodel/lineage/LineageSitePatterns.java b/src/dr/evomodel/lineage/LineageSitePatterns.java
index 178459f..e4f5a0a 100644
--- a/src/dr/evomodel/lineage/LineageSitePatterns.java
+++ b/src/dr/evomodel/lineage/LineageSitePatterns.java
@@ -480,6 +480,11 @@ public class LineageSitePatterns extends AbstractModel implements SiteList, dr.u
         return PatternList.Utils.empiricalStateFrequencies(this);
     }
 
+    @Override
+    public boolean areUnique() {
+        return unique;
+    }
+
     // **************************************************************
     // TaxonList IMPLEMENTATION
     // **************************************************************
diff --git a/src/dr/evomodel/operators/LatentFactorHamiltonianMC.java b/src/dr/evomodel/operators/LatentFactorHamiltonianMC.java
new file mode 100644
index 0000000..448a385
--- /dev/null
+++ b/src/dr/evomodel/operators/LatentFactorHamiltonianMC.java
@@ -0,0 +1,161 @@
+package dr.evomodel.operators;
+
+import dr.evomodel.continuous.FullyConjugateMultivariateTraitLikelihood;
+import dr.inference.model.LatentFactorModel;
+import dr.inference.model.MatrixParameter;
+import dr.inference.model.Parameter;
+import dr.inference.operators.AbstractHamiltonianMCOperator;
+import dr.inference.operators.CoercionMode;
+import dr.inference.operators.OperatorFailedException;
+import dr.math.MathUtils;
+
+/**
+ * Created by max on 12/2/15.
+ */
+public class LatentFactorHamiltonianMC extends AbstractHamiltonianMCOperator{
+    private LatentFactorModel lfm;
+    private FullyConjugateMultivariateTraitLikelihood tree;
+    private MatrixParameter factors;
+    private MatrixParameter loadings;
+    private MatrixParameter Precision;
+    private int nfac;
+    private int ntaxa;
+    private int ntraits;
+    private double stepSize;
+    private int nSteps;
+    private boolean diffusionSN=true;
+
+
+    public LatentFactorHamiltonianMC(LatentFactorModel lfm, FullyConjugateMultivariateTraitLikelihood tree, double weight, CoercionMode mode, double stepSize, int nSteps, double momentumSd){
+        super(mode, momentumSd);
+        setWeight(weight);
+        this.lfm=lfm;
+        this.tree=tree;
+        this.factors=lfm.getFactors();
+        this.loadings=lfm.getLoadings();
+        this.Precision=lfm.getColumnPrecision();
+        nfac=lfm.getFactorDimension();
+        ntaxa=lfm.getFactors().getColumnDimension();
+        ntraits=Precision.getRowDimension();
+        this.stepSize=stepSize;
+        this.nSteps=nSteps;
+    }
+
+
+
+    @Override
+    public double getCoercableParameter() {
+        return 0;
+    }
+
+    @Override
+    public void setCoercableParameter(double value) {
+
+    }
+
+    @Override
+    public double getRawParameter() {
+        return 0;
+    }
+
+    @Override
+    public String getPerformanceSuggestion() {
+        return null;
+    }
+
+    @Override
+    public String getOperatorName() {
+        return "Latent Factor Hamiltonian Monte Carlo";
+    }
+
+    private double[] getMatrix(int element, double[] residual){
+        double answer[]=new double[this.nfac];
+        for (int i = 0; i <this.nfac ; i++) {
+            for (int j = 0; j < ntraits; j++) {
+                answer[i] +=loadings.getParameterValue(i,j)*Precision.getParameterValue(j,j)*
+                        residual[j*ntaxa+element];
+            }
+        }
+        return answer;
+    }
+
+    private double[] getGradient(int randel, double[] mean, double[][] prec, double precfactor){
+        double[] residual=lfm.getResidual();
+        double[] derivative=getMatrix(randel, residual);
+
+        if(diffusionSN){
+            for (int i = 0; i <mean.length ; i++) {
+                derivative[i]-=(factors.getParameterValue(i, randel)-mean[i])*precfactor;
+            }
+        }
+        else{
+            for (int i = 0; i <mean.length ; i++) {
+                double sumi=0;
+                for (int j = 0; j <mean.length ; j++) {
+                    sumi+=prec[i][j]*(factors.getParameterValue(j, randel)-mean[j]);
+                }
+                derivative[i]-=sumi;
+            }
+        }
+        return derivative;
+    }
+
+    @Override
+    public double doOperation() throws OperatorFailedException {
+        int randel = MathUtils.nextInt(ntaxa);
+
+
+
+        double[] mean=tree.getConditionalMean(randel);
+        double precfactor=0;
+        double[][] prec=null;
+        if(diffusionSN){
+            precfactor=tree.getPrecisionFactor(randel);
+        }
+        else {
+            prec = tree.getConditionalPrecision(randel);
+        }
+
+        double[] derivative=getGradient(randel, mean, prec, precfactor);
+        drawMomentum(lfm.getFactorDimension());
+
+        double prop=0;
+        for (int i = 0; i <momentum.length ; i++) {
+            prop+=momentum[i]*momentum[i]/(2*getMomentumSd()*getMomentumSd());
+        }
+
+        for (int i = 0; i <lfm.getFactorDimension() ; i++) {
+            momentum[i] = momentum[i] - stepSize / 2 * derivative[i];
+        }
+
+        for (int i = 0; i <nSteps ; i++) {
+            for (int j = 0; j <lfm.getFactorDimension() ; j++) {
+                factors.setParameterValueQuietly(j, randel, factors.getParameterValue(j,randel)+stepSize*momentum[j]);
+            }
+//            System.out.println("randel");
+//            System.out.println(randel);
+            ((Parameter.Default) factors.getParameter(randel)).fireParameterChangedEvent(0, null);
+
+
+            if(i!=nSteps){
+                derivative=getGradient(randel,mean,prec, precfactor);
+
+                for (int j = 0; j <lfm.getFactorDimension() ; j++) {
+                    momentum[j] = momentum[j] - stepSize * derivative[j];
+                }
+            }
+        }
+
+        derivative=getGradient(randel,mean,prec, precfactor);
+        for (int i = 0; i <lfm.getFactorDimension() ; i++) {
+
+            momentum[i] = momentum[i] - stepSize / 2 * derivative[i];
+        }
+
+        double res=0;
+        for (int i = 0; i <momentum.length ; i++) {
+            res+=momentum[i]*momentum[i]/(2*getMomentumSd()*getMomentumSd());
+        }
+        return prop-res;
+    }
+}
diff --git a/src/dr/evomodel/operators/LatentLiabilityGibbs.java b/src/dr/evomodel/operators/LatentLiabilityGibbs.java
index e480a78..d861524 100644
--- a/src/dr/evomodel/operators/LatentLiabilityGibbs.java
+++ b/src/dr/evomodel/operators/LatentLiabilityGibbs.java
@@ -172,8 +172,8 @@ public class LatentLiabilityGibbs extends SimpleMCMCOperator {
     public double doOperation() throws OperatorFailedException {
 
 
-        doPostOrderTraversal(treeModel.getRoot());
-        doPreOrderTraversal(treeModel.getRoot());
+//        doPostOrderTraversal(treeModel.getRoot());
+//        doPreOrderTraversal(treeModel.getRoot());
 //printInformation(postP);
 //printInformation(preP);
 //printInformation(postMeans);
@@ -466,27 +466,28 @@ public class LatentLiabilityGibbs extends SimpleMCMCOperator {
     public double sampleNode2(NodeRef node) {
 
         final int thisNumber = node.getNumber();
-        double[] traitValue = getNodeTrait(node);
-
-
-        double[] mean = new double[dim];
-        for (int i = 0; i < dim; i++) {
-            mean[i] = preMeans[thisNumber][i];
-        }
-
-        double p = preP[thisNumber];
-
-        double[][] thisP = new double[dim][dim];
-
-        for (int i = 0; i < dim; i++) {
-            for (int j = 0; j < dim; j++) {
-
-                thisP[i][j] = p * precisionParam.getParameterValue(i, j);
-
-            }
-        }
-
-
+//        double[] traitValue = getNodeTrait(node);
+
+
+//        double[] mean = new double[dim];
+//        for (int i = 0; i < dim; i++) {
+//            mean[i] = preMeans[thisNumber][i];
+//        }
+//
+//        double p = preP[thisNumber];
+//
+//        double[][] thisP = new double[dim][dim];
+//
+//        for (int i = 0; i < dim; i++) {
+//            for (int j = 0; j < dim; j++) {
+//
+//                thisP[i][j] = p * precisionParam.getParameterValue(i, j);
+//
+//            }
+//        }
+
+        double[] mean=traitModel.getConditionalMean(thisNumber);
+        double[][] thisP=traitModel.getConditionalPrecision(thisNumber);
 
 
 
diff --git a/src/dr/inference/operators/MicrosatUpDownOperator.java b/src/dr/evomodel/operators/MicrosatUpDownOperator.java
similarity index 96%
rename from src/dr/inference/operators/MicrosatUpDownOperator.java
rename to src/dr/evomodel/operators/MicrosatUpDownOperator.java
index cdfa4d3..935bf26 100644
--- a/src/dr/inference/operators/MicrosatUpDownOperator.java
+++ b/src/dr/evomodel/operators/MicrosatUpDownOperator.java
@@ -1,163 +1,164 @@
-/*
- * MicrosatUpDownOperator.java
- *
- * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
- *
- * This file is part of BEAST.
- * See the NOTICE file distributed with this work for additional
- * information regarding copyright ownership and licensing.
- *
- * BEAST is free software; you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as
- * published by the Free Software Foundation; either version 2
- * of the License, or (at your option) any later version.
- *
- *  BEAST is distributed in the hope that it will be useful,
- *  but WITHOUT ANY WARRANTY; without even the implied warranty of
- *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
- *  GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public
- * License along with BEAST; if not, write to the
- * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
- * Boston, MA  02110-1301  USA
- */
-
-package dr.inference.operators;
-
-import dr.math.MathUtils;
-
-/**
- *
- * @author Chieh-Hsi
- *
- * Implements MicrosatUpDownOperator
- *
- * This is almost the same as UpDownOperator, except it uses scaleAllAndNotify method instead of scale.
- *
- */
-public class MicrosatUpDownOperator extends AbstractCoercableOperator {
-
-    private Scalable.Default[] upParameter = null;
-    private Scalable.Default[] downParameter = null;
-    private double scaleFactor;
-
-    public MicrosatUpDownOperator(Scalable.Default[] upParameter,
-                                  Scalable.Default[] downParameter,
-                                  double scale,
-                                  double weight,
-                                  CoercionMode mode) {
-
-        super(mode);
-        setWeight(weight);
-
-        this.upParameter = upParameter;
-        this.downParameter = downParameter;
-        this.scaleFactor = scale;
-    }
-
-    public final double getScaleFactor() {
-        return scaleFactor;
-    }
-
-    public final void setScaleFactor(double sf) {
-        if( (sf > 0.0) && (sf < 1.0) ) {
-            scaleFactor = sf;
-        } else {
-            throw new IllegalArgumentException("scale must be between 0 and 1");
-        }
-    }
-
-    /**
-     * change the parameter and return the hastings ratio.
-     */
-    public final double doOperation() throws OperatorFailedException {
-
-
-        final double scale = (scaleFactor + (MathUtils.nextDouble() * ((1.0 / scaleFactor) - scaleFactor)));
-        int goingUp = 0, goingDown = 0;
-
-        if( upParameter != null ) {
-            for( Scalable.Default up : upParameter ) {
-                goingUp += up.scaleAllAndNotify(scale, -1);
-            }
-        }
-
-        if( downParameter != null ) {
-            for(Scalable.Default dn : downParameter ) {
-                goingDown += dn.scaleAllAndNotify(1.0 / scale, -1);
-            }
-        }
-
-        return (goingUp - goingDown - 2) * Math.log(scale);
-    }
-
-    public final String getPerformanceSuggestion() {
-
-        double prob = MCMCOperator.Utils.getAcceptanceProbability(this);
-        double targetProb = getTargetAcceptanceProbability();
-        double sf = OperatorUtils.optimizeScaleFactor(scaleFactor, prob, targetProb);
-        dr.util.NumberFormatter formatter = new dr.util.NumberFormatter(5);
-        if (prob < getMinimumGoodAcceptanceLevel()) {
-            return "Try setting scaleFactor to about " + formatter.format(sf);
-        } else if (prob > getMaximumGoodAcceptanceLevel()) {
-            return "Try setting scaleFactor to about " + formatter.format(sf);
-        } else return "";
-    }
-
-    public final String getOperatorName() {
-        String name = "";
-        if( upParameter != null ) {
-            name = "up:";
-            for( Scalable up : upParameter ) {
-                name = name + up.getName() + " ";
-            }
-        }
-
-        if( downParameter != null ) {
-            name += "down:";
-            for( Scalable dn : downParameter ) {
-                name = name + dn.getName() + " ";
-            }
-        }
-        return name;
-    }
-
-    public double getCoercableParameter() {
-        return Math.log(1.0 / scaleFactor - 1.0) / Math.log(10);
-    }
-
-    public void setCoercableParameter(double value) {
-        scaleFactor = 1.0 / (Math.pow(10.0, value) + 1.0);
-    }
-
-    public double getRawParameter() {
-        return scaleFactor;
-    }
-
-    public double getTargetAcceptanceProbability() {
-        return 0.234;
-    }
-
-    // Since this operator invariably modifies at least 2 parameters it
-    // should allow lower acceptance probabilities
-    // as it is known that optimal acceptance levels are inversely
-    // proportional to the number of dimensions operated on
-    // AD 16/3/2004
-    public double getMinimumAcceptanceLevel() {
-        return 0.05;
-    }
-
-    public double getMaximumAcceptanceLevel() {
-        return 0.3;
-    }
-
-    public double getMinimumGoodAcceptanceLevel() {
-        return 0.10;
-    }
-
-    public double getMaximumGoodAcceptanceLevel() {
-        return 0.20;
-    }
-
+/*
+ * MicrosatUpDownOperator.java
+ *
+ * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
+ *
+ * This file is part of BEAST.
+ * See the NOTICE file distributed with this work for additional
+ * information regarding copyright ownership and licensing.
+ *
+ * BEAST is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+ *
+ *  BEAST is distributed in the hope that it will be useful,
+ *  but WITHOUT ANY WARRANTY; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ *  GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with BEAST; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA  02110-1301  USA
+ */
+
+package dr.evomodel.operators;
+
+import dr.inference.operators.*;
+import dr.math.MathUtils;
+
+/**
+ *
+ * @author Chieh-Hsi
+ *
+ * Implements MicrosatUpDownOperator
+ *
+ * This is almost the same as UpDownOperator, except it uses scaleAllAndNotify method instead of scale.
+ *
+ */
+public class MicrosatUpDownOperator extends AbstractCoercableOperator {
+
+    private Scalable.Default[] upParameter = null;
+    private Scalable.Default[] downParameter = null;
+    private double scaleFactor;
+
+    public MicrosatUpDownOperator(Scalable.Default[] upParameter,
+                                  Scalable.Default[] downParameter,
+                                  double scale,
+                                  double weight,
+                                  CoercionMode mode) {
+
+        super(mode);
+        setWeight(weight);
+
+        this.upParameter = upParameter;
+        this.downParameter = downParameter;
+        this.scaleFactor = scale;
+    }
+
+    public final double getScaleFactor() {
+        return scaleFactor;
+    }
+
+    public final void setScaleFactor(double sf) {
+        if( (sf > 0.0) && (sf < 1.0) ) {
+            scaleFactor = sf;
+        } else {
+            throw new IllegalArgumentException("scale must be between 0 and 1");
+        }
+    }
+
+    /**
+     * change the parameter and return the hastings ratio.
+     */
+    public final double doOperation() throws OperatorFailedException {
+
+
+        final double scale = (scaleFactor + (MathUtils.nextDouble() * ((1.0 / scaleFactor) - scaleFactor)));
+        int goingUp = 0, goingDown = 0;
+
+        if( upParameter != null ) {
+            for( Scalable.Default up : upParameter ) {
+                goingUp += up.scaleAllAndNotify(scale, -1);
+            }
+        }
+
+        if( downParameter != null ) {
+            for(Scalable.Default dn : downParameter ) {
+                goingDown += dn.scaleAllAndNotify(1.0 / scale, -1);
+            }
+        }
+
+        return (goingUp - goingDown - 2) * Math.log(scale);
+    }
+
+    public final String getPerformanceSuggestion() {
+
+        double prob = MCMCOperator.Utils.getAcceptanceProbability(this);
+        double targetProb = getTargetAcceptanceProbability();
+        double sf = OperatorUtils.optimizeScaleFactor(scaleFactor, prob, targetProb);
+        dr.util.NumberFormatter formatter = new dr.util.NumberFormatter(5);
+        if (prob < getMinimumGoodAcceptanceLevel()) {
+            return "Try setting scaleFactor to about " + formatter.format(sf);
+        } else if (prob > getMaximumGoodAcceptanceLevel()) {
+            return "Try setting scaleFactor to about " + formatter.format(sf);
+        } else return "";
+    }
+
+    public final String getOperatorName() {
+        String name = "";
+        if( upParameter != null ) {
+            name = "up:";
+            for( Scalable up : upParameter ) {
+                name = name + up.getName() + " ";
+            }
+        }
+
+        if( downParameter != null ) {
+            name += "down:";
+            for( Scalable dn : downParameter ) {
+                name = name + dn.getName() + " ";
+            }
+        }
+        return name;
+    }
+
+    public double getCoercableParameter() {
+        return Math.log(1.0 / scaleFactor - 1.0) / Math.log(10);
+    }
+
+    public void setCoercableParameter(double value) {
+        scaleFactor = 1.0 / (Math.pow(10.0, value) + 1.0);
+    }
+
+    public double getRawParameter() {
+        return scaleFactor;
+    }
+
+    public double getTargetAcceptanceProbability() {
+        return 0.234;
+    }
+
+    // Since this operator invariably modifies at least 2 parameters it
+    // should allow lower acceptance probabilities
+    // as it is known that optimal acceptance levels are inversely
+    // proportional to the number of dimensions operated on
+    // AD 16/3/2004
+    public double getMinimumAcceptanceLevel() {
+        return 0.05;
+    }
+
+    public double getMaximumAcceptanceLevel() {
+        return 0.3;
+    }
+
+    public double getMinimumGoodAcceptanceLevel() {
+        return 0.10;
+    }
+
+    public double getMaximumGoodAcceptanceLevel() {
+        return 0.20;
+    }
+
 }
\ No newline at end of file
diff --git a/src/dr/inference/operators/MicrosatelliteModelSelectOperator.java b/src/dr/evomodel/operators/MicrosatelliteModelSelectOperator.java
similarity index 95%
rename from src/dr/inference/operators/MicrosatelliteModelSelectOperator.java
rename to src/dr/evomodel/operators/MicrosatelliteModelSelectOperator.java
index 9ba2e8a..71f0438 100644
--- a/src/dr/inference/operators/MicrosatelliteModelSelectOperator.java
+++ b/src/dr/evomodel/operators/MicrosatelliteModelSelectOperator.java
@@ -1,70 +1,71 @@
-/*
- * MicrosatelliteModelSelectOperator.java
- *
- * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
- *
- * This file is part of BEAST.
- * See the NOTICE file distributed with this work for additional
- * information regarding copyright ownership and licensing.
- *
- * BEAST is free software; you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as
- * published by the Free Software Foundation; either version 2
- * of the License, or (at your option) any later version.
- *
- *  BEAST is distributed in the hope that it will be useful,
- *  but WITHOUT ANY WARRANTY; without even the implied warranty of
- *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
- *  GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public
- * License along with BEAST; if not, write to the
- * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
- * Boston, MA  02110-1301  USA
- */
-
-package dr.inference.operators;
-
-import dr.inference.model.Parameter;
-import dr.math.MathUtils;
-
-/**
- * @author Chieh-Hsi Wu
- *
- * Operator that selects a microsatellite model from a group provided by the user.
- */
-public class MicrosatelliteModelSelectOperator extends SimpleMCMCOperator{
-    private Parameter parameter;
-    private Parameter[] indicators;
-    public MicrosatelliteModelSelectOperator(Parameter parameter, Parameter[] indicators, double weight){
-        this.parameter = parameter;
-        this.indicators = indicators;
-        setWeight(weight);
-    }
-
-    public String getOperatorName(){
-        return "msatModelSelectOperator("+parameter.getParameterName()+")";
-    }
-
-    public final String getPerformanceSuggestion() {
-        return "no suggestions available";
-    }
-
-    public double doOperation(){
-        int index = MathUtils.nextInt(indicators.length);
-        //System.out.println(index);
-        Parameter newModel = indicators[index];
-        for(int i = 0; i < parameter.getDimension() -1 ; i++){
-            parameter.setParameterValueQuietly(i,newModel.getParameterValue(i));
-        }
-        parameter.setParameterValueNotifyChangedAll(
-                parameter.getDimension()-1,
-                newModel.getParameterValue(parameter.getDimension()-1)
-        );
-        //System.out.println(parameter+"; "+indicators[index]);
-        return 0.0;
-    }
-
-
-
-}
+/*
+ * MicrosatelliteModelSelectOperator.java
+ *
+ * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
+ *
+ * This file is part of BEAST.
+ * See the NOTICE file distributed with this work for additional
+ * information regarding copyright ownership and licensing.
+ *
+ * BEAST is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+ *
+ *  BEAST is distributed in the hope that it will be useful,
+ *  but WITHOUT ANY WARRANTY; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ *  GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with BEAST; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA  02110-1301  USA
+ */
+
+package dr.evomodel.operators;
+
+import dr.inference.model.Parameter;
+import dr.inference.operators.SimpleMCMCOperator;
+import dr.math.MathUtils;
+
+/**
+ * @author Chieh-Hsi Wu
+ *
+ * Operator that selects a microsatellite model from a group provided by the user.
+ */
+public class MicrosatelliteModelSelectOperator extends SimpleMCMCOperator {
+    private Parameter parameter;
+    private Parameter[] indicators;
+    public MicrosatelliteModelSelectOperator(Parameter parameter, Parameter[] indicators, double weight){
+        this.parameter = parameter;
+        this.indicators = indicators;
+        setWeight(weight);
+    }
+
+    public String getOperatorName(){
+        return "msatModelSelectOperator("+parameter.getParameterName()+")";
+    }
+
+    public final String getPerformanceSuggestion() {
+        return "no suggestions available";
+    }
+
+    public double doOperation(){
+        int index = MathUtils.nextInt(indicators.length);
+        //System.out.println(index);
+        Parameter newModel = indicators[index];
+        for(int i = 0; i < parameter.getDimension() -1 ; i++){
+            parameter.setParameterValueQuietly(i,newModel.getParameterValue(i));
+        }
+        parameter.setParameterValueNotifyChangedAll(
+                parameter.getDimension()-1,
+                newModel.getParameterValue(parameter.getDimension()-1)
+        );
+        //System.out.println(parameter+"; "+indicators[index]);
+        return 0.0;
+    }
+
+
+
+}
diff --git a/src/dr/inference/operators/MsatBitFlipOperator.java b/src/dr/evomodel/operators/MsatBitFlipOperator.java
similarity index 94%
rename from src/dr/inference/operators/MsatBitFlipOperator.java
rename to src/dr/evomodel/operators/MsatBitFlipOperator.java
index 7f174da..a95b1ea 100644
--- a/src/dr/inference/operators/MsatBitFlipOperator.java
+++ b/src/dr/evomodel/operators/MsatBitFlipOperator.java
@@ -1,156 +1,159 @@
-/*
- * MsatBitFlipOperator.java
- *
- * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
- *
- * This file is part of BEAST.
- * See the NOTICE file distributed with this work for additional
- * information regarding copyright ownership and licensing.
- *
- * BEAST is free software; you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as
- * published by the Free Software Foundation; either version 2
- * of the License, or (at your option) any later version.
- *
- *  BEAST is distributed in the hope that it will be useful,
- *  but WITHOUT ANY WARRANTY; without even the implied warranty of
- *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
- *  GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public
- * License along with BEAST; if not, write to the
- * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
- * Boston, MA  02110-1301  USA
- */
-
-package dr.inference.operators;
-
-import dr.inference.model.Parameter;
-import dr.xml.*;
-import dr.math.MathUtils;
-
-/**
- * @author Chieh-Hsi Wu
- *
- *  This operator performs bitflip operation on the bit vector representing the model.
- * 
- */
-public class MsatBitFlipOperator extends SimpleMCMCOperator{
-    private Parameter parameter;
-    private Parameter dependencies;
-    private int[] variableIndices;
-    public static final int PRESENT = 1;
-    public static final int ABSENT = 0;
-    public static final int NO_DEPENDENCY = -1;
-    public static final String MODEL_CHOOSE = "modelChoose";
-    public static final String DEPENDENCIES = "dependencies";
-    public static final String VARIABLE_INDICES = "variableIndices";
-
-
-    public MsatBitFlipOperator(Parameter parameter, Parameter dependencies, double weight, int[] variableIndices){
-        this.parameter = parameter;
-        this.dependencies = dependencies;
-        this.variableIndices = variableIndices;
-        if(parameter.getDimension() != dependencies.getDimension())
-            throw new RuntimeException("Dimenension of the parameter ("+parameter.getDimension()+
-                    ") does not equal to the dimension of the dependencies parameter("+dependencies.getDimension()+").");
-        setWeight(weight);
-    }
-
-    public String getOperatorName(){
-        return "msatModelSwitch(" + parameter.getParameterName() + ")";
-    }
-
-    public double doOperation() throws OperatorFailedException{
-
-        double logq = 0.0;
-        double[] bitVec = new double[parameter.getDimension()];
-        for(int i = 0; i < bitVec.length; i++){
-            bitVec[i] = parameter.getParameterValue(i);
-        }
-        //int index = (int)Math.random()*parameter.getDimension();
-        int index = variableIndices[MathUtils.nextInt(variableIndices.length)];
-        //System.out.println(index);
-        int oldVal  = (int)parameter.getParameterValue(index);
-        int newVal = -1;
-        if(oldVal == ABSENT){
-            newVal = PRESENT;
-        }else if(oldVal == PRESENT){
-           newVal = ABSENT;
-        }else{
-            throw new RuntimeException("The parameter can only take values 0 or 1.");
-        }
-        bitVec[index] = newVal;
-        for(int i = 0; i < bitVec.length; i++){
-            int dependentInd = (int)dependencies.getParameterValue(i);
-            if(dependentInd > NO_DEPENDENCY){
-                if(bitVec[dependentInd] == ABSENT && bitVec[i]==PRESENT){
-                    throw new OperatorFailedException("");
-                    //newVal = oldVal;
-                }
-            }
-
-        }
-        parameter.setParameterValue(index, newVal);
-
-        return logq;
-    }
-
-    public final String getPerformanceSuggestion() {
-        return "no suggestions available";
-    }
-
-    public static dr.xml.XMLObjectParser PARSER = new AbstractXMLObjectParser() {
-
-        public String getParserName() {
-            return "msatModelSwitchOperator";
-        }
-
-        public Object parseXMLObject(XMLObject xo) throws XMLParseException {
-
-            double weight = xo.getDoubleAttribute(WEIGHT);
-            Parameter modelChoose = (Parameter) xo.getElementFirstChild(MODEL_CHOOSE);
-            Parameter dependencies = (Parameter)xo.getElementFirstChild(DEPENDENCIES);
-            int[] variableIndices;
-            if(xo.hasChildNamed(VARIABLE_INDICES)){
-
-                double[] temp = ((Parameter)xo.getElementFirstChild(VARIABLE_INDICES)).getParameterValues();
-                variableIndices = new int[temp.length];
-                for(int i = 0; i < temp.length;i++){
-                    variableIndices[i] = (int)temp[i];
-                }
-
-            }else{
-                variableIndices = new int[]{0, 1, 2, 3, 4, 5};
-            }
-
-            return new MsatBitFlipOperator(modelChoose, dependencies, weight, variableIndices);
-        }
-
-        //************************************************************************
-        // AbstractXMLObjectParser implementation
-        //************************************************************************
-
-        public String getParserDescription() {
-            return "This element returns a microsatellite averaging operator on a given parameter.";
-        }
-
-        public Class getReturnType() {
-            return MCMCOperator.class;
-        }
-
-        public XMLSyntaxRule[] getSyntaxRules() {
-            return rules;
-        }
-
-        private XMLSyntaxRule[] rules = new XMLSyntaxRule[]{
-                AttributeRule.newDoubleRule(WEIGHT),
-                new ElementRule(MODEL_CHOOSE, new XMLSyntaxRule[]{new ElementRule(Parameter.class)}),
-                new ElementRule(DEPENDENCIES, new XMLSyntaxRule[]{new ElementRule(Parameter.class)}),
-                new ElementRule(VARIABLE_INDICES, new XMLSyntaxRule[]{new ElementRule(Parameter.class)},true)
-
-        };
-
-    };
-
-}
+/*
+ * MsatBitFlipOperator.java
+ *
+ * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
+ *
+ * This file is part of BEAST.
+ * See the NOTICE file distributed with this work for additional
+ * information regarding copyright ownership and licensing.
+ *
+ * BEAST is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+ *
+ *  BEAST is distributed in the hope that it will be useful,
+ *  but WITHOUT ANY WARRANTY; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ *  GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with BEAST; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA  02110-1301  USA
+ */
+
+package dr.evomodel.operators;
+
+import dr.inference.model.Parameter;
+import dr.inference.operators.MCMCOperator;
+import dr.inference.operators.OperatorFailedException;
+import dr.inference.operators.SimpleMCMCOperator;
+import dr.xml.*;
+import dr.math.MathUtils;
+
+/**
+ * @author Chieh-Hsi Wu
+ *
+ *  This operator performs bitflip operation on the bit vector representing the model.
+ * 
+ */
+public class MsatBitFlipOperator extends SimpleMCMCOperator {
+    private Parameter parameter;
+    private Parameter dependencies;
+    private int[] variableIndices;
+    public static final int PRESENT = 1;
+    public static final int ABSENT = 0;
+    public static final int NO_DEPENDENCY = -1;
+    public static final String MODEL_CHOOSE = "modelChoose";
+    public static final String DEPENDENCIES = "dependencies";
+    public static final String VARIABLE_INDICES = "variableIndices";
+
+
+    public MsatBitFlipOperator(Parameter parameter, Parameter dependencies, double weight, int[] variableIndices){
+        this.parameter = parameter;
+        this.dependencies = dependencies;
+        this.variableIndices = variableIndices;
+        if(parameter.getDimension() != dependencies.getDimension())
+            throw new RuntimeException("Dimenension of the parameter ("+parameter.getDimension()+
+                    ") does not equal to the dimension of the dependencies parameter("+dependencies.getDimension()+").");
+        setWeight(weight);
+    }
+
+    public String getOperatorName(){
+        return "msatModelSwitch(" + parameter.getParameterName() + ")";
+    }
+
+    public double doOperation() throws OperatorFailedException {
+
+        double logq = 0.0;
+        double[] bitVec = new double[parameter.getDimension()];
+        for(int i = 0; i < bitVec.length; i++){
+            bitVec[i] = parameter.getParameterValue(i);
+        }
+        //int index = (int)Math.random()*parameter.getDimension();
+        int index = variableIndices[MathUtils.nextInt(variableIndices.length)];
+        //System.out.println(index);
+        int oldVal  = (int)parameter.getParameterValue(index);
+        int newVal = -1;
+        if(oldVal == ABSENT){
+            newVal = PRESENT;
+        }else if(oldVal == PRESENT){
+           newVal = ABSENT;
+        }else{
+            throw new RuntimeException("The parameter can only take values 0 or 1.");
+        }
+        bitVec[index] = newVal;
+        for(int i = 0; i < bitVec.length; i++){
+            int dependentInd = (int)dependencies.getParameterValue(i);
+            if(dependentInd > NO_DEPENDENCY){
+                if(bitVec[dependentInd] == ABSENT && bitVec[i]==PRESENT){
+                    throw new OperatorFailedException("");
+                    //newVal = oldVal;
+                }
+            }
+
+        }
+        parameter.setParameterValue(index, newVal);
+
+        return logq;
+    }
+
+    public final String getPerformanceSuggestion() {
+        return "no suggestions available";
+    }
+
+    public static dr.xml.XMLObjectParser PARSER = new AbstractXMLObjectParser() {
+
+        public String getParserName() {
+            return "msatModelSwitchOperator";
+        }
+
+        public Object parseXMLObject(XMLObject xo) throws XMLParseException {
+
+            double weight = xo.getDoubleAttribute(WEIGHT);
+            Parameter modelChoose = (Parameter) xo.getElementFirstChild(MODEL_CHOOSE);
+            Parameter dependencies = (Parameter)xo.getElementFirstChild(DEPENDENCIES);
+            int[] variableIndices;
+            if(xo.hasChildNamed(VARIABLE_INDICES)){
+
+                double[] temp = ((Parameter)xo.getElementFirstChild(VARIABLE_INDICES)).getParameterValues();
+                variableIndices = new int[temp.length];
+                for(int i = 0; i < temp.length;i++){
+                    variableIndices[i] = (int)temp[i];
+                }
+
+            }else{
+                variableIndices = new int[]{0, 1, 2, 3, 4, 5};
+            }
+
+            return new MsatBitFlipOperator(modelChoose, dependencies, weight, variableIndices);
+        }
+
+        //************************************************************************
+        // AbstractXMLObjectParser implementation
+        //************************************************************************
+
+        public String getParserDescription() {
+            return "This element returns a microsatellite averaging operator on a given parameter.";
+        }
+
+        public Class getReturnType() {
+            return MCMCOperator.class;
+        }
+
+        public XMLSyntaxRule[] getSyntaxRules() {
+            return rules;
+        }
+
+        private XMLSyntaxRule[] rules = new XMLSyntaxRule[]{
+                AttributeRule.newDoubleRule(WEIGHT),
+                new ElementRule(MODEL_CHOOSE, new XMLSyntaxRule[]{new ElementRule(Parameter.class)}),
+                new ElementRule(DEPENDENCIES, new XMLSyntaxRule[]{new ElementRule(Parameter.class)}),
+                new ElementRule(VARIABLE_INDICES, new XMLSyntaxRule[]{new ElementRule(Parameter.class)},true)
+
+        };
+
+    };
+
+}
diff --git a/src/dr/inference/operators/MsatFullAncestryImportanceSamplingOperator.java b/src/dr/evomodel/operators/MsatFullAncestryImportanceSamplingOperator.java
similarity index 96%
rename from src/dr/inference/operators/MsatFullAncestryImportanceSamplingOperator.java
rename to src/dr/evomodel/operators/MsatFullAncestryImportanceSamplingOperator.java
index 8e791b9..03127a8 100644
--- a/src/dr/inference/operators/MsatFullAncestryImportanceSamplingOperator.java
+++ b/src/dr/evomodel/operators/MsatFullAncestryImportanceSamplingOperator.java
@@ -1,126 +1,127 @@
-/*
- * MsatFullAncestryImportanceSamplingOperator.java
- *
- * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
- *
- * This file is part of BEAST.
- * See the NOTICE file distributed with this work for additional
- * information regarding copyright ownership and licensing.
- *
- * BEAST is free software; you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as
- * published by the Free Software Foundation; either version 2
- * of the License, or (at your option) any later version.
- *
- *  BEAST is distributed in the hope that it will be useful,
- *  but WITHOUT ANY WARRANTY; without even the implied warranty of
- *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
- *  GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public
- * License along with BEAST; if not, write to the
- * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
- * Boston, MA  02110-1301  USA
- */
-
-package dr.inference.operators;
-
-import dr.evomodel.tree.MicrosatelliteSamplerTreeModel;
-import dr.evomodel.tree.TreeModel;
-import dr.evomodel.substmodel.MicrosatelliteModel;
-import dr.evomodel.branchratemodel.BranchRateModel;
-import dr.inference.model.Parameter;
-import dr.evolution.tree.Tree;
-import dr.evolution.tree.NodeRef;
-import dr.math.MathUtils;
-
-/**
- * @author Chieh-Hsi Wu
- *
- * Produce an importance sample of the ancestry given a msat pattern and a tree.
- */
-public class MsatFullAncestryImportanceSamplingOperator extends SimpleMCMCOperator{
-
-    public static final String MSAT_FULL_ANCESTRY_IMPORTANCE_SAMPLING_OPERATOR = "MsatFullAncestryImportanceSamplingOperator";
-    private Parameter parameter;
-    private MicrosatelliteSamplerTreeModel msatSamplerTreeModel;
-    private MicrosatelliteModel msatModel;
-    private BranchRateModel branchRateModel;
-
-
-    public MsatFullAncestryImportanceSamplingOperator(
-            Parameter parameter,
-            MicrosatelliteSamplerTreeModel msatSamplerTreeModel,
-            MicrosatelliteModel msatModel,
-            BranchRateModel branchRateModel,
-            double weight){
-
-        super();
-        this.parameter = parameter;
-        this.msatSamplerTreeModel = msatSamplerTreeModel;
-        this.msatModel = msatModel;
-        this.branchRateModel = branchRateModel;
-        setWeight(weight);
-    }
-
-    public double doOperation(){
-        TreeModel tree = msatSamplerTreeModel.getTreeModel();
-
-        //get postOrder
-        int[] postOrder = new int[tree.getNodeCount()];
-        Tree.Utils.postOrderTraversalList(tree,postOrder);
-
-        int extNodeCount = tree.getExternalNodeCount();
-        double logq=0.0;
-        for(int i = 0; i < postOrder.length; i ++){
-
-            //if it's an internal node
-            if(postOrder[i] >= extNodeCount){
-
-                //getLikelihoodGiven the children
-                NodeRef node = tree.getNode(postOrder[i]);
-                NodeRef lc = tree.getChild(node,0);
-                NodeRef rc = tree.getChild(node,1);
-                int lcState = msatSamplerTreeModel.getNodeValue(lc);
-                int rcState = msatSamplerTreeModel.getNodeValue(rc);
-                double branchLeftLength = tree.getBranchLength(lc)*branchRateModel.getBranchRate(tree,lc);
-                double branchRightLength = tree.getBranchLength(rc)*branchRateModel.getBranchRate(tree,rc);
-                double[] probLbranch = msatModel.getColTransitionProbabilities(branchLeftLength, lcState);
-                double[] probRbranch = msatModel.getColTransitionProbabilities(branchRightLength, rcState);
-                double[] lik = new double[msatModel.getDataType().getStateCount()];
-                int currState = (int)parameter.getParameterValue(msatSamplerTreeModel.getParameterIndexFromNodeNumber(postOrder[i]));
-                //if node = root node
-                if(i == postOrder.length -1){
-                    //likelihood of root state also depends on the stationary distribution
-                    double[] statDist = msatModel.getStationaryDistribution();
-                    for(int j = 0; j < lik.length; j++){
-                        lik[j] = probLbranch[j]*probRbranch[j]*statDist[j];
-                    }
-
-                }else{
-
-                    for(int j = 0; j < lik.length; j++){
-                        lik[j] = probLbranch[j]*probRbranch[j];
-                    }
-
-                }
-
-                int sampledState = MathUtils.randomChoicePDF(lik);
-                logq = logq + Math.log(lik[currState]) - Math.log(lik[sampledState]);
-                parameter.setParameterValue(msatSamplerTreeModel.getParameterIndexFromNodeNumber(postOrder[i]),sampledState);
-            }
-        }
-        
-        return logq;
-    }
-
-    public String getPerformanceSuggestion(){
-        return "None";
-    }
-    public String getOperatorName(){
-        return MSAT_FULL_ANCESTRY_IMPORTANCE_SAMPLING_OPERATOR;
-    }
-    public int getStepCount(){
-        return 1;
-    }
-}
+/*
+ * MsatFullAncestryImportanceSamplingOperator.java
+ *
+ * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
+ *
+ * This file is part of BEAST.
+ * See the NOTICE file distributed with this work for additional
+ * information regarding copyright ownership and licensing.
+ *
+ * BEAST is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+ *
+ *  BEAST is distributed in the hope that it will be useful,
+ *  but WITHOUT ANY WARRANTY; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ *  GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with BEAST; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA  02110-1301  USA
+ */
+
+package dr.evomodel.operators;
+
+import dr.evomodel.tree.MicrosatelliteSamplerTreeModel;
+import dr.evomodel.tree.TreeModel;
+import dr.evomodel.substmodel.MicrosatelliteModel;
+import dr.evomodel.branchratemodel.BranchRateModel;
+import dr.inference.model.Parameter;
+import dr.evolution.tree.Tree;
+import dr.evolution.tree.NodeRef;
+import dr.inference.operators.SimpleMCMCOperator;
+import dr.math.MathUtils;
+
+/**
+ * @author Chieh-Hsi Wu
+ *
+ * Produce an importance sample of the ancestry given a msat pattern and a tree.
+ */
+public class MsatFullAncestryImportanceSamplingOperator extends SimpleMCMCOperator {
+
+    public static final String MSAT_FULL_ANCESTRY_IMPORTANCE_SAMPLING_OPERATOR = "MsatFullAncestryImportanceSamplingOperator";
+    private Parameter parameter;
+    private MicrosatelliteSamplerTreeModel msatSamplerTreeModel;
+    private MicrosatelliteModel msatModel;
+    private BranchRateModel branchRateModel;
+
+
+    public MsatFullAncestryImportanceSamplingOperator(
+            Parameter parameter,
+            MicrosatelliteSamplerTreeModel msatSamplerTreeModel,
+            MicrosatelliteModel msatModel,
+            BranchRateModel branchRateModel,
+            double weight){
+
+        super();
+        this.parameter = parameter;
+        this.msatSamplerTreeModel = msatSamplerTreeModel;
+        this.msatModel = msatModel;
+        this.branchRateModel = branchRateModel;
+        setWeight(weight);
+    }
+
+    public double doOperation(){
+        TreeModel tree = msatSamplerTreeModel.getTreeModel();
+
+        //get postOrder
+        int[] postOrder = new int[tree.getNodeCount()];
+        Tree.Utils.postOrderTraversalList(tree,postOrder);
+
+        int extNodeCount = tree.getExternalNodeCount();
+        double logq=0.0;
+        for(int i = 0; i < postOrder.length; i ++){
+
+            //if it's an internal node
+            if(postOrder[i] >= extNodeCount){
+
+                //getLikelihoodGiven the children
+                NodeRef node = tree.getNode(postOrder[i]);
+                NodeRef lc = tree.getChild(node,0);
+                NodeRef rc = tree.getChild(node,1);
+                int lcState = msatSamplerTreeModel.getNodeValue(lc);
+                int rcState = msatSamplerTreeModel.getNodeValue(rc);
+                double branchLeftLength = tree.getBranchLength(lc)*branchRateModel.getBranchRate(tree,lc);
+                double branchRightLength = tree.getBranchLength(rc)*branchRateModel.getBranchRate(tree,rc);
+                double[] probLbranch = msatModel.getColTransitionProbabilities(branchLeftLength, lcState);
+                double[] probRbranch = msatModel.getColTransitionProbabilities(branchRightLength, rcState);
+                double[] lik = new double[msatModel.getDataType().getStateCount()];
+                int currState = (int)parameter.getParameterValue(msatSamplerTreeModel.getParameterIndexFromNodeNumber(postOrder[i]));
+                //if node = root node
+                if(i == postOrder.length -1){
+                    //likelihood of root state also depends on the stationary distribution
+                    double[] statDist = msatModel.getStationaryDistribution();
+                    for(int j = 0; j < lik.length; j++){
+                        lik[j] = probLbranch[j]*probRbranch[j]*statDist[j];
+                    }
+
+                }else{
+
+                    for(int j = 0; j < lik.length; j++){
+                        lik[j] = probLbranch[j]*probRbranch[j];
+                    }
+
+                }
+
+                int sampledState = MathUtils.randomChoicePDF(lik);
+                logq = logq + Math.log(lik[currState]) - Math.log(lik[sampledState]);
+                parameter.setParameterValue(msatSamplerTreeModel.getParameterIndexFromNodeNumber(postOrder[i]),sampledState);
+            }
+        }
+        
+        return logq;
+    }
+
+    public String getPerformanceSuggestion(){
+        return "None";
+    }
+    public String getOperatorName(){
+        return MSAT_FULL_ANCESTRY_IMPORTANCE_SAMPLING_OPERATOR;
+    }
+    public int getStepCount(){
+        return 1;
+    }
+}
diff --git a/src/dr/inference/operators/MsatSingleAncestralStateGibbsOperator.java b/src/dr/evomodel/operators/MsatSingleAncestralStateGibbsOperator.java
similarity index 95%
rename from src/dr/inference/operators/MsatSingleAncestralStateGibbsOperator.java
rename to src/dr/evomodel/operators/MsatSingleAncestralStateGibbsOperator.java
index 81a6746..53613c1 100644
--- a/src/dr/inference/operators/MsatSingleAncestralStateGibbsOperator.java
+++ b/src/dr/evomodel/operators/MsatSingleAncestralStateGibbsOperator.java
@@ -1,125 +1,127 @@
-/*
- * MsatSingleAncestralStateGibbsOperator.java
- *
- * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
- *
- * This file is part of BEAST.
- * See the NOTICE file distributed with this work for additional
- * information regarding copyright ownership and licensing.
- *
- * BEAST is free software; you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as
- * published by the Free Software Foundation; either version 2
- * of the License, or (at your option) any later version.
- *
- *  BEAST is distributed in the hope that it will be useful,
- *  but WITHOUT ANY WARRANTY; without even the implied warranty of
- *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
- *  GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public
- * License along with BEAST; if not, write to the
- * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
- * Boston, MA  02110-1301  USA
- */
-
-package dr.inference.operators;
-
-import dr.inference.model.Parameter;
-import dr.evomodel.tree.MicrosatelliteSamplerTreeModel;
-import dr.evomodel.tree.TreeModel;
-import dr.evomodel.substmodel.MicrosatelliteModel;
-import dr.evomodel.branchratemodel.BranchRateModel;
-import dr.evolution.tree.NodeRef;
-import dr.math.MathUtils;
-
-/**
- * @author Chieh-Hsi Wu
- *
- *
- */
-public class MsatSingleAncestralStateGibbsOperator extends SimpleMCMCOperator implements GibbsOperator{
-    public static final String MSAT_SINGLE_ANCESTAL_STATE_GIBBS_OPERATOR = "MsatSingleAncestralStateGibbsOperator";
-    private Parameter parameter;
-    private MicrosatelliteSamplerTreeModel msatSamplerTreeModel;
-    private MicrosatelliteModel msatModel;
-    private BranchRateModel branchRateModel;
-
-    public MsatSingleAncestralStateGibbsOperator (
-            Parameter parameter,
-            MicrosatelliteSamplerTreeModel msatSamplerTreeModel,
-            MicrosatelliteModel msatModel,
-            BranchRateModel branchRateModel,
-            double weight){
-
-        super();
-        this.parameter = parameter;
-        this.msatSamplerTreeModel = msatSamplerTreeModel;
-        this.msatModel = msatModel;
-        this.branchRateModel = branchRateModel;
-        setWeight(weight);
-    }
-
-    public double doOperation(){
-        TreeModel tree = msatSamplerTreeModel.getTreeModel();
-
-        
-        int index = MathUtils.nextInt(parameter.getDimension());
-
-        //double logq=0.0;
-
-
-        //getLikelihoodGiven the children
-        NodeRef node = tree.getNode(msatSamplerTreeModel.getNodeNumberFromParameterIndex(index));
-        NodeRef lc = tree.getChild(node,0);
-        NodeRef rc = tree.getChild(node,1);
-
-
-        int lcState = msatSamplerTreeModel.getNodeValue(lc);
-        int rcState = msatSamplerTreeModel.getNodeValue(rc);
-
-
-        double branchLeftLength = tree.getBranchLength(lc)*branchRateModel.getBranchRate(tree,lc);
-        double branchRightLength = tree.getBranchLength(rc)*branchRateModel.getBranchRate(tree,rc);
-
-
-        double[] probLbranch = msatModel.getColTransitionProbabilities(branchLeftLength, lcState);
-        double[] probRbranch = msatModel.getColTransitionProbabilities(branchRightLength, rcState);
-
-        double[] lik = new double[msatModel.getDataType().getStateCount()];
-        //int currState = (int)parameter.getParameterValue(index);
-        //if node = root node
-
-        if(tree.isRoot(node)){
-            //likelihood of root state also depends on the stationary distribution
-            double[] statDist = msatModel.getStationaryDistribution();
-            for(int j = 0; j < lik.length; j++){
-                lik[j] = probLbranch[j]*probRbranch[j]*statDist[j];
-            }
-        }else{
-            NodeRef parent = tree.getParent(node);
-            int pState = msatSamplerTreeModel.getNodeValue(parent);
-            double branchParentLength = tree.getBranchLength(node)*branchRateModel.getBranchRate(tree,node);
-            double[] probPbranch = msatModel.getRowTransitionProbabilities(branchParentLength,pState);
-            for(int j = 0; j < lik.length; j++){
-                lik[j] = probLbranch[j]*probRbranch[j]*probPbranch[j];
-            }
-        }
-
-        int sampledState = MathUtils.randomChoicePDF(lik);
-        //logq = logq + Math.log(lik[currState]) - Math.log(lik[sampledState]);
-        parameter.setParameterValue(index,sampledState);
-        return 0.0;
-    }
-
-    public String getPerformanceSuggestion(){
-        return "None";
-    }
-    public String getOperatorName(){
-        return MSAT_SINGLE_ANCESTAL_STATE_GIBBS_OPERATOR;
-    }
-    public int getStepCount(){
-        return 1;
-    }
-
+/*
+ * MsatSingleAncestralStateGibbsOperator.java
+ *
+ * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
+ *
+ * This file is part of BEAST.
+ * See the NOTICE file distributed with this work for additional
+ * information regarding copyright ownership and licensing.
+ *
+ * BEAST is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+ *
+ *  BEAST is distributed in the hope that it will be useful,
+ *  but WITHOUT ANY WARRANTY; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ *  GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with BEAST; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA  02110-1301  USA
+ */
+
+package dr.evomodel.operators;
+
+import dr.inference.model.Parameter;
+import dr.evomodel.tree.MicrosatelliteSamplerTreeModel;
+import dr.evomodel.tree.TreeModel;
+import dr.evomodel.substmodel.MicrosatelliteModel;
+import dr.evomodel.branchratemodel.BranchRateModel;
+import dr.evolution.tree.NodeRef;
+import dr.inference.operators.GibbsOperator;
+import dr.inference.operators.SimpleMCMCOperator;
+import dr.math.MathUtils;
+
+/**
+ * @author Chieh-Hsi Wu
+ *
+ *
+ */
+public class MsatSingleAncestralStateGibbsOperator extends SimpleMCMCOperator implements GibbsOperator {
+    public static final String MSAT_SINGLE_ANCESTAL_STATE_GIBBS_OPERATOR = "MsatSingleAncestralStateGibbsOperator";
+    private Parameter parameter;
+    private MicrosatelliteSamplerTreeModel msatSamplerTreeModel;
+    private MicrosatelliteModel msatModel;
+    private BranchRateModel branchRateModel;
+
+    public MsatSingleAncestralStateGibbsOperator (
+            Parameter parameter,
+            MicrosatelliteSamplerTreeModel msatSamplerTreeModel,
+            MicrosatelliteModel msatModel,
+            BranchRateModel branchRateModel,
+            double weight){
+
+        super();
+        this.parameter = parameter;
+        this.msatSamplerTreeModel = msatSamplerTreeModel;
+        this.msatModel = msatModel;
+        this.branchRateModel = branchRateModel;
+        setWeight(weight);
+    }
+
+    public double doOperation(){
+        TreeModel tree = msatSamplerTreeModel.getTreeModel();
+
+        
+        int index = MathUtils.nextInt(parameter.getDimension());
+
+        //double logq=0.0;
+
+
+        //getLikelihoodGiven the children
+        NodeRef node = tree.getNode(msatSamplerTreeModel.getNodeNumberFromParameterIndex(index));
+        NodeRef lc = tree.getChild(node,0);
+        NodeRef rc = tree.getChild(node,1);
+
+
+        int lcState = msatSamplerTreeModel.getNodeValue(lc);
+        int rcState = msatSamplerTreeModel.getNodeValue(rc);
+
+
+        double branchLeftLength = tree.getBranchLength(lc)*branchRateModel.getBranchRate(tree,lc);
+        double branchRightLength = tree.getBranchLength(rc)*branchRateModel.getBranchRate(tree,rc);
+
+
+        double[] probLbranch = msatModel.getColTransitionProbabilities(branchLeftLength, lcState);
+        double[] probRbranch = msatModel.getColTransitionProbabilities(branchRightLength, rcState);
+
+        double[] lik = new double[msatModel.getDataType().getStateCount()];
+        //int currState = (int)parameter.getParameterValue(index);
+        //if node = root node
+
+        if(tree.isRoot(node)){
+            //likelihood of root state also depends on the stationary distribution
+            double[] statDist = msatModel.getStationaryDistribution();
+            for(int j = 0; j < lik.length; j++){
+                lik[j] = probLbranch[j]*probRbranch[j]*statDist[j];
+            }
+        }else{
+            NodeRef parent = tree.getParent(node);
+            int pState = msatSamplerTreeModel.getNodeValue(parent);
+            double branchParentLength = tree.getBranchLength(node)*branchRateModel.getBranchRate(tree,node);
+            double[] probPbranch = msatModel.getRowTransitionProbabilities(branchParentLength,pState);
+            for(int j = 0; j < lik.length; j++){
+                lik[j] = probLbranch[j]*probRbranch[j]*probPbranch[j];
+            }
+        }
+
+        int sampledState = MathUtils.randomChoicePDF(lik);
+        //logq = logq + Math.log(lik[currState]) - Math.log(lik[sampledState]);
+        parameter.setParameterValue(index,sampledState);
+        return 0.0;
+    }
+
+    public String getPerformanceSuggestion(){
+        return "None";
+    }
+    public String getOperatorName(){
+        return MSAT_SINGLE_ANCESTAL_STATE_GIBBS_OPERATOR;
+    }
+    public int getStepCount(){
+        return 1;
+    }
+
 }
\ No newline at end of file
diff --git a/src/dr/inference/operators/RandomWalkIntegerNodeHeightWeightedOperator.java b/src/dr/evomodel/operators/RandomWalkIntegerNodeHeightWeightedOperator.java
similarity index 91%
rename from src/dr/inference/operators/RandomWalkIntegerNodeHeightWeightedOperator.java
rename to src/dr/evomodel/operators/RandomWalkIntegerNodeHeightWeightedOperator.java
index c58b2e4..b75265f 100644
--- a/src/dr/inference/operators/RandomWalkIntegerNodeHeightWeightedOperator.java
+++ b/src/dr/evomodel/operators/RandomWalkIntegerNodeHeightWeightedOperator.java
@@ -1,88 +1,89 @@
-/*
- * RandomWalkIntegerNodeHeightWeightedOperator.java
- *
- * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
- *
- * This file is part of BEAST.
- * See the NOTICE file distributed with this work for additional
- * information regarding copyright ownership and licensing.
- *
- * BEAST is free software; you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as
- * published by the Free Software Foundation; either version 2
- * of the License, or (at your option) any later version.
- *
- *  BEAST is distributed in the hope that it will be useful,
- *  but WITHOUT ANY WARRANTY; without even the implied warranty of
- *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
- *  GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public
- * License along with BEAST; if not, write to the
- * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
- * Boston, MA  02110-1301  USA
- */
-
-package dr.inference.operators;
-
-import dr.inference.model.Parameter;
-import dr.inferencexml.operators.RandomWalkIntegerNodeHeightWeightedOperatorParser;
-import dr.math.MathUtils;
-
-/**
- * @author Chieh-Hsi Wu
- *
- * The probability an internal node is picked to have its state changed depends on the node height.
- */
-public class RandomWalkIntegerNodeHeightWeightedOperator extends RandomWalkIntegerOperator{
-
-    private Parameter internalNodeHeights;
-
-    public RandomWalkIntegerNodeHeightWeightedOperator(
-            Parameter parameter, int windowSize, double weight, Parameter internalNodeHeights){
-        super(parameter, windowSize, weight);
-        this.internalNodeHeights = internalNodeHeights;
-    }
-
-    public double doOperation() {
-
-        // a random dimension to perturb
-        int index = MathUtils.randomChoicePDF(internalNodeHeights.getParameterValues());
-      
-        int newValue = calculateNewValue(index);
-        parameter.setValue(index, newValue);
-
-        return 0.0;
-    }
-
-    //MCMCOperator INTERFACE
-    public String getOperatorName() {
-        return "randomWalkIntegerNodeHeightWeighted(" + parameter.getId() + ")";
-    }
-
-
-    public double getTargetAcceptanceProbability() {
-        return 0.234;
-    }
-
-    public double getMinimumAcceptanceLevel() {
-        return 0.1;
-    }
-
-    public double getMaximumAcceptanceLevel() {
-        return 0.4;
-    }
-
-    public double getMinimumGoodAcceptanceLevel() {
-        return 0.20;
-    }
-
-    public double getMaximumGoodAcceptanceLevel() {
-        return 0.30;
-    }
-   
-    public String toString() {
-        return RandomWalkIntegerNodeHeightWeightedOperatorParser.RANDOM_WALK_INT_NODE_HEIGHT_WGT_OP +
-                "(" + parameter.getId() + ", " + windowSize + ", " + getWeight() + ")";
-    }
-}
+/*
+ * RandomWalkIntegerNodeHeightWeightedOperator.java
+ *
+ * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
+ *
+ * This file is part of BEAST.
+ * See the NOTICE file distributed with this work for additional
+ * information regarding copyright ownership and licensing.
+ *
+ * BEAST is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+ *
+ *  BEAST is distributed in the hope that it will be useful,
+ *  but WITHOUT ANY WARRANTY; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ *  GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with BEAST; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA  02110-1301  USA
+ */
+
+package dr.evomodel.operators;
+
+import dr.inference.model.Parameter;
+import dr.evomodelxml.operators.RandomWalkIntegerNodeHeightWeightedOperatorParser;
+import dr.inference.operators.RandomWalkIntegerOperator;
+import dr.math.MathUtils;
+
+/**
+ * @author Chieh-Hsi Wu
+ *
+ * The probability an internal node is picked to have its state changed depends on the node height.
+ */
+public class RandomWalkIntegerNodeHeightWeightedOperator extends RandomWalkIntegerOperator {
+
+    private Parameter internalNodeHeights;
+
+    public RandomWalkIntegerNodeHeightWeightedOperator(
+            Parameter parameter, int windowSize, double weight, Parameter internalNodeHeights){
+        super(parameter, windowSize, weight);
+        this.internalNodeHeights = internalNodeHeights;
+    }
+
+    public double doOperation() {
+
+        // a random dimension to perturb
+        int index = MathUtils.randomChoicePDF(internalNodeHeights.getParameterValues());
+      
+        int newValue = calculateNewValue(index);
+        parameter.setValue(index, newValue);
+
+        return 0.0;
+    }
+
+    //MCMCOperator INTERFACE
+    public String getOperatorName() {
+        return "randomWalkIntegerNodeHeightWeighted(" + parameter.getId() + ")";
+    }
+
+
+    public double getTargetAcceptanceProbability() {
+        return 0.234;
+    }
+
+    public double getMinimumAcceptanceLevel() {
+        return 0.1;
+    }
+
+    public double getMaximumAcceptanceLevel() {
+        return 0.4;
+    }
+
+    public double getMinimumGoodAcceptanceLevel() {
+        return 0.20;
+    }
+
+    public double getMaximumGoodAcceptanceLevel() {
+        return 0.30;
+    }
+   
+    public String toString() {
+        return RandomWalkIntegerNodeHeightWeightedOperatorParser.RANDOM_WALK_INT_NODE_HEIGHT_WGT_OP +
+                "(" + parameter.getId() + ", " + windowSize + ", " + getWeight() + ")";
+    }
+}
diff --git a/src/dr/inference/operators/RandomWalkIntegerSetSizeWeightedOperator.java b/src/dr/evomodel/operators/RandomWalkIntegerSetSizeWeightedOperator.java
similarity index 93%
rename from src/dr/inference/operators/RandomWalkIntegerSetSizeWeightedOperator.java
rename to src/dr/evomodel/operators/RandomWalkIntegerSetSizeWeightedOperator.java
index c2c8218..95a5798 100644
--- a/src/dr/inference/operators/RandomWalkIntegerSetSizeWeightedOperator.java
+++ b/src/dr/evomodel/operators/RandomWalkIntegerSetSizeWeightedOperator.java
@@ -1,114 +1,115 @@
-/*
- * RandomWalkIntegerSetSizeWeightedOperator.java
- *
- * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
- *
- * This file is part of BEAST.
- * See the NOTICE file distributed with this work for additional
- * information regarding copyright ownership and licensing.
- *
- * BEAST is free software; you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as
- * published by the Free Software Foundation; either version 2
- * of the License, or (at your option) any later version.
- *
- *  BEAST is distributed in the hope that it will be useful,
- *  but WITHOUT ANY WARRANTY; without even the implied warranty of
- *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
- *  GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public
- * License along with BEAST; if not, write to the
- * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
- * Boston, MA  02110-1301  USA
- */
-
-package dr.inference.operators;
-
-import dr.evolution.tree.NodeRef;
-import dr.evomodel.tree.MicrosatelliteSamplerTreeModel;
-import dr.evomodel.tree.TreeModel;
-import dr.inference.model.Parameter;
-import dr.inferencexml.operators.RandomWalkIntegerNodeHeightWeightedOperatorParser;
-import dr.math.MathUtils;
-
-/**
- * @author Chieh-Hsi Wu
- *
- * The probability that an index of the internal states parameter picked
- * is weighted by the number of plausible states of a node given the states of its children.
- */
-public class RandomWalkIntegerSetSizeWeightedOperator extends RandomWalkIntegerOperator{
-
-    private MicrosatelliteSamplerTreeModel msatSampleTreeModel;
-    private double[] weights;
-    private double baseSetSize;
-    public RandomWalkIntegerSetSizeWeightedOperator(
-        Parameter parameter,
-        int windowSize,
-        double weight,
-        MicrosatelliteSamplerTreeModel msatSampleTreeModel,
-        double baseIntervalSize){
-        super(parameter, windowSize, weight);
-        this.msatSampleTreeModel = msatSampleTreeModel;
-        this.baseSetSize = baseIntervalSize;
-
-
-    }
-
-    private void computeSampleWeights(){
-        TreeModel tree = msatSampleTreeModel.getTreeModel();
-        int intNodeCount = tree.getInternalNodeCount();
-        int extNodeCount = tree.getExternalNodeCount();
-        weights = new double[intNodeCount];
-        for(int i = 0 ; i < intNodeCount; i++){
-            NodeRef node = tree.getNode(i+extNodeCount);
-            int lcState = msatSampleTreeModel.getNodeValue(tree.getChild(node, 0));
-            int rcState = msatSampleTreeModel.getNodeValue(tree.getChild(node, 1));
-            weights[i] = Math.abs(lcState-rcState)+baseSetSize;
-
-        }
-    }
-
-    public double doOperation() {
-        computeSampleWeights();
-        // a random dimension to perturb
-        int index = MathUtils.randomChoicePDF(weights);
-
-        int newValue = calculateNewValue(index);
-        parameter.setValue(index, newValue);
-
-        return 0.0;
-    }
-
-    //MCMCOperator INTERFACE
-    public String getOperatorName() {
-        return "randomWalkIntegerSetSizeWeighted(" + parameter.getId() + ")";
-    }
-
-
-    public double getTargetAcceptanceProbability() {
-        return 0.234;
-    }
-
-    public double getMinimumAcceptanceLevel() {
-        return 0.1;
-    }
-
-    public double getMaximumAcceptanceLevel() {
-        return 0.4;
-    }
-
-    public double getMinimumGoodAcceptanceLevel() {
-        return 0.20;
-    }
-
-    public double getMaximumGoodAcceptanceLevel() {
-        return 0.30;
-    }
-
-    public String toString() {
-        return RandomWalkIntegerNodeHeightWeightedOperatorParser.RANDOM_WALK_INT_NODE_HEIGHT_WGT_OP + "(" + parameter.getId() + ", " + windowSize + ", " + getWeight() + ")";
-    }
-
-}
+/*
+ * RandomWalkIntegerSetSizeWeightedOperator.java
+ *
+ * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
+ *
+ * This file is part of BEAST.
+ * See the NOTICE file distributed with this work for additional
+ * information regarding copyright ownership and licensing.
+ *
+ * BEAST is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+ *
+ *  BEAST is distributed in the hope that it will be useful,
+ *  but WITHOUT ANY WARRANTY; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ *  GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with BEAST; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA  02110-1301  USA
+ */
+
+package dr.evomodel.operators;
+
+import dr.evolution.tree.NodeRef;
+import dr.evomodel.tree.MicrosatelliteSamplerTreeModel;
+import dr.evomodel.tree.TreeModel;
+import dr.inference.model.Parameter;
+import dr.evomodelxml.operators.RandomWalkIntegerNodeHeightWeightedOperatorParser;
+import dr.inference.operators.RandomWalkIntegerOperator;
+import dr.math.MathUtils;
+
+/**
+ * @author Chieh-Hsi Wu
+ *
+ * The probability that an index of the internal states parameter picked
+ * is weighted by the number of plausible states of a node given the states of its children.
+ */
+public class RandomWalkIntegerSetSizeWeightedOperator extends RandomWalkIntegerOperator {
+
+    private MicrosatelliteSamplerTreeModel msatSampleTreeModel;
+    private double[] weights;
+    private double baseSetSize;
+    public RandomWalkIntegerSetSizeWeightedOperator(
+        Parameter parameter,
+        int windowSize,
+        double weight,
+        MicrosatelliteSamplerTreeModel msatSampleTreeModel,
+        double baseIntervalSize){
+        super(parameter, windowSize, weight);
+        this.msatSampleTreeModel = msatSampleTreeModel;
+        this.baseSetSize = baseIntervalSize;
+
+
+    }
+
+    private void computeSampleWeights(){
+        TreeModel tree = msatSampleTreeModel.getTreeModel();
+        int intNodeCount = tree.getInternalNodeCount();
+        int extNodeCount = tree.getExternalNodeCount();
+        weights = new double[intNodeCount];
+        for(int i = 0 ; i < intNodeCount; i++){
+            NodeRef node = tree.getNode(i+extNodeCount);
+            int lcState = msatSampleTreeModel.getNodeValue(tree.getChild(node, 0));
+            int rcState = msatSampleTreeModel.getNodeValue(tree.getChild(node, 1));
+            weights[i] = Math.abs(lcState-rcState)+baseSetSize;
+
+        }
+    }
+
+    public double doOperation() {
+        computeSampleWeights();
+        // a random dimension to perturb
+        int index = MathUtils.randomChoicePDF(weights);
+
+        int newValue = calculateNewValue(index);
+        parameter.setValue(index, newValue);
+
+        return 0.0;
+    }
+
+    //MCMCOperator INTERFACE
+    public String getOperatorName() {
+        return "randomWalkIntegerSetSizeWeighted(" + parameter.getId() + ")";
+    }
+
+
+    public double getTargetAcceptanceProbability() {
+        return 0.234;
+    }
+
+    public double getMinimumAcceptanceLevel() {
+        return 0.1;
+    }
+
+    public double getMaximumAcceptanceLevel() {
+        return 0.4;
+    }
+
+    public double getMinimumGoodAcceptanceLevel() {
+        return 0.20;
+    }
+
+    public double getMaximumGoodAcceptanceLevel() {
+        return 0.30;
+    }
+
+    public String toString() {
+        return RandomWalkIntegerNodeHeightWeightedOperatorParser.RANDOM_WALK_INT_NODE_HEIGHT_WGT_OP + "(" + parameter.getId() + ", " + windowSize + ", " + getWeight() + ")";
+    }
+
+}
diff --git a/src/dr/evomodel/operators/SubtreeJumpOperator.java b/src/dr/evomodel/operators/SubtreeJumpOperator.java
index bb41b36..62be1b9 100644
--- a/src/dr/evomodel/operators/SubtreeJumpOperator.java
+++ b/src/dr/evomodel/operators/SubtreeJumpOperator.java
@@ -76,7 +76,7 @@ public class SubtreeJumpOperator extends AbstractTreeOperator implements Coercab
 
         final NodeRef root = tree.getRoot();
 
-		double  maxHeight = tree.getNodeHeight(root);
+        double  maxHeight = tree.getNodeHeight(root);
 
         NodeRef i;
         NodeRef iP = null;
@@ -85,32 +85,27 @@ public class SubtreeJumpOperator extends AbstractTreeOperator implements Coercab
         double height = Double.NaN;
         List<NodeRef> destinations = null;
 
-        boolean destinationFound = false;
         do {
             // 1. choose a random node avoiding root or child of root
             i = tree.getNode(MathUtils.nextInt(tree.getNodeCount()));
 
-            if (root == i || tree.getParent(i) == root) {
-                continue;
-            }
-
-            iP = tree.getParent(i);
-            CiP = getOtherChild(tree, iP, i);
-            PiP = tree.getParent(iP);
+        } while (root == i || tree.getParent(i) == root);
 
-            // get the height of the parent
-            height = tree.getNodeHeight(iP);
+        iP = tree.getParent(i);
+        CiP = getOtherChild(tree, iP, i);
+        PiP = tree.getParent(iP);
 
-            // get a list of all edges that intersect this height
-            destinations = getIntersectingEdges(tree, height);
+        // get the height of the parent
+        height = tree.getNodeHeight(iP);
 
-            if (destinations.size() > 0) {
-                destinationFound = true;
-            }
+        // get a list of all edges that intersect this height
+        destinations = getIntersectingEdges(tree, height);
 
-        } while (!destinationFound);
+        if (destinations.size() == 0) {
+            throw new OperatorFailedException("No destinations found");
+        }
 
-		double[] pdf = getDestinationProbabilities(tree, i, height, maxHeight, destinations, alpha);
+        double[] pdf = getDestinationProbabilities(tree, i, height, maxHeight, destinations, alpha);
 
         // remove the target node and its sibling (shouldn't be there because their parent's height is exactly equal to the target height).
         destinations.remove(i);
@@ -142,7 +137,7 @@ public class SubtreeJumpOperator extends AbstractTreeOperator implements Coercab
         tree.endTreeEdit();
 
         final List<NodeRef> reverseDestinations = getIntersectingEdges(tree, height);
-		double reverseProbability = getReverseProbability(tree, CiP, j, height, maxHeight, reverseDestinations, alpha);
+        double reverseProbability = getReverseProbability(tree, CiP, j, height, maxHeight, reverseDestinations, alpha);
 
         // hastings ratio = reverse Prob / forward Prob
         logq = Math.log(reverseProbability) - Math.log(forwardProbability);
@@ -171,7 +166,7 @@ public class SubtreeJumpOperator extends AbstractTreeOperator implements Coercab
         return intersectingEdges;
     }
 
-	private double[] getDestinationProbabilities(Tree tree, NodeRef node0, double height, double maxAge, List<NodeRef> intersectingEdges, double alpha) {
+    private double[] getDestinationProbabilities(Tree tree, NodeRef node0, double height, double maxAge, List<NodeRef> intersectingEdges, double alpha) {
         double[] weights = new double[intersectingEdges.size()];
         double sum = 0.0;
         int i = 0;
@@ -179,7 +174,7 @@ public class SubtreeJumpOperator extends AbstractTreeOperator implements Coercab
             assert(node1 != node0);
 
             double age = tree.getNodeHeight(Tree.Utils.getCommonAncestor(tree, node0, node1)) - height;
-			age = age/maxAge;
+            age = age/maxAge;
             weights[i] = getJumpWeight(age, alpha);
             sum += weights[i];
             i++;
@@ -191,7 +186,7 @@ public class SubtreeJumpOperator extends AbstractTreeOperator implements Coercab
         return weights;
     }
 
-	private double getReverseProbability(Tree tree, NodeRef originalNode, NodeRef targetNode, double height, double maxAge, List<NodeRef> intersectingEdges, double alpha) {
+    private double getReverseProbability(Tree tree, NodeRef originalNode, NodeRef targetNode, double height, double maxAge, List<NodeRef> intersectingEdges, double alpha) {
         double[] weights = new double[intersectingEdges.size()];
         double sum = 0.0;
 
@@ -201,7 +196,7 @@ public class SubtreeJumpOperator extends AbstractTreeOperator implements Coercab
             assert(node1 != targetNode);
 
             double age = tree.getNodeHeight(Tree.Utils.getCommonAncestor(tree, targetNode, node1)) - height;
-			age = age/maxAge;
+            age = age/maxAge;
             weights[i] = getJumpWeight(age, alpha);
             sum += weights[i];
 
diff --git a/src/dr/evomodel/operators/SubtreeLeapOperator.java b/src/dr/evomodel/operators/SubtreeLeapOperator.java
new file mode 100644
index 0000000..0c5b311
--- /dev/null
+++ b/src/dr/evomodel/operators/SubtreeLeapOperator.java
@@ -0,0 +1,321 @@
+/*
+ * SubtreeLeapOperator.java
+ *
+ * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
+ *
+ * This file is part of BEAST.
+ * See the NOTICE file distributed with this work for additional
+ * information regarding copyright ownership and licensing.
+ *
+ * BEAST is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+ *
+ *  BEAST is distributed in the hope that it will be useful,
+ *  but WITHOUT ANY WARRANTY; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ *  GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with BEAST; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA  02110-1301  USA
+ */
+
+package dr.evomodel.operators;
+
+import dr.evolution.tree.NodeRef;
+import dr.evolution.tree.Tree;
+import dr.evomodel.tree.TreeModel;
+import dr.evomodelxml.operators.SubtreeLeapOperatorParser;
+import dr.inference.operators.*;
+import dr.math.MathUtils;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Implements the Subtree Leap move.
+ *
+ * This move picks a node at random (except for the root) and then moves the parent to any location
+ * that is a certain patristic distance from its starting point (the distance is drawn from a Gaussian).
+ *
+ * It is always possible for the node to move up (potentially becoming the root) but the destination can't
+ * be younger than the original node. All possible destinations are collected and then picked amongst
+ * uniformly.
+ *
+ * @author Andrew Rambaut
+ * @version $Id$
+ */
+public class SubtreeLeapOperator extends AbstractTreeOperator implements CoercableMCMCOperator {
+
+    private double size = 1.0;
+
+    private final TreeModel tree;
+    private final CoercionMode mode;
+
+    /**
+     * Constructor
+     *
+     * @param tree   the tree
+     * @param weight the weight
+     * @param size   scaling on a unit Gaussian to draw the patristic distance from
+     * @param mode   coercion mode
+     */
+    public SubtreeLeapOperator(TreeModel tree, double weight, double size, CoercionMode mode) {
+        this.tree = tree;
+        setWeight(weight);
+        this.size = size;
+        this.mode = mode;
+    }
+
+
+
+    /**
+     * Do a subtree leap move.
+     *
+     * @return the log-transformed hastings ratio
+     */
+    public double doOperation() throws OperatorFailedException {
+        double logq;
+
+        final double delta = getDelta();
+
+        final NodeRef root = tree.getRoot();
+
+        NodeRef node;
+
+        do {
+            // choose a random node avoiding root
+            node = tree.getNode(MathUtils.nextInt(tree.getNodeCount()));
+
+        } while (node == root);
+
+        // get its parent - this is the node we will prune/graft
+        final NodeRef parent = tree.getParent(node);
+
+        // get the node's sibling
+        final NodeRef sibling = getOtherChild(tree, parent, node);
+
+        // and its grand parent
+        final NodeRef grandParent = tree.getParent(parent);
+
+        final Map<NodeRef, Double> destinations = getDestinations(node, parent, sibling, delta);
+        final List<NodeRef> destinationNodes = new ArrayList<NodeRef>(destinations.keySet());
+
+        // pick uniformly from this list
+        int r = MathUtils.nextInt(destinations.size());
+
+        double forwardProbability = 1.0 / destinations.size();
+
+        final NodeRef j = destinationNodes.get(r);
+        final double newHeight = destinations.get(j);
+
+        final NodeRef jParent = tree.getParent(j);
+
+        if (jParent != null && newHeight > tree.getNodeHeight(jParent)) {
+            throw new IllegalArgumentException("height error");
+        }
+
+        if (newHeight < tree.getNodeHeight(j)) {
+            throw new IllegalArgumentException("height error");
+        }
+
+        tree.beginTreeEdit();
+
+        if (j == parent || jParent == parent) {
+            // the subtree is not actually moving but the height will change
+        } else {
+            if (grandParent == null) {
+                // if the parent of the original node is the root then the sibling becomes
+                // the root.
+                tree.removeChild(parent, sibling);
+                tree.setRoot(sibling);
+
+            } else {
+                // remove the parent of node by connecting its sibling to its grandparent.
+                tree.removeChild(parent, sibling);
+                tree.removeChild(grandParent, parent);
+                tree.addChild(grandParent, sibling);
+            }
+
+            if (jParent == null) {
+                // adding the node to the root of the tree
+                tree.addChild(parent, j);
+                tree.setRoot(parent);
+            } else {
+                // remove destination edge j from its parent
+                tree.removeChild(jParent, j);
+
+                // add destination edge to the parent of node
+                tree.addChild(parent, j);
+
+                // and add the parent of i as a child of the former parent of j
+                tree.addChild(jParent, parent);
+            }
+        }
+        tree.endTreeEdit();
+
+        tree.setNodeHeight(parent, newHeight);
+
+        if (tree.getParent(parent) != null && newHeight > tree.getNodeHeight(tree.getParent(parent))) {
+            throw new IllegalArgumentException("height error");
+        }
+
+        if (newHeight < tree.getNodeHeight(node)) {
+            throw new IllegalArgumentException("height error");
+        }
+
+        if (newHeight < tree.getNodeHeight(getOtherChild(tree, parent, node))) {
+            throw new IllegalArgumentException("height error");
+        }
+
+        final Map<NodeRef, Double> reverseDestinations = getDestinations(node, parent, getOtherChild(tree, parent, node), delta);
+        double reverseProbability = 1.0 / reverseDestinations.size();
+
+        // hastings ratio = reverse Prob / forward Prob
+        logq = Math.log(reverseProbability) - Math.log(forwardProbability);
+        return logq;
+    }
+
+    private Map<NodeRef, Double> getDestinations(NodeRef node, NodeRef parent, NodeRef sibling, double delta) {
+
+        final Map<NodeRef, Double> destinations = new HashMap<NodeRef, Double>();
+
+        // get the parent's height
+        final double height = tree.getNodeHeight(parent);
+
+        final double heightBelow = height - delta;
+
+        if (heightBelow > tree.getNodeHeight(node)) {
+            // the destination height below the parent is compatible with the node
+            // see if there are any destinations on the sibling's branch
+            final List<NodeRef> edges = new ArrayList<NodeRef>();
+
+            getIntersectingEdges(tree, sibling, heightBelow, edges);
+
+            // add the intersecting edges and the height
+            for (NodeRef n : edges) {
+                destinations.put(n, heightBelow);
+            }
+        }
+
+        final double heightAbove = height + delta;
+
+        NodeRef node1 = parent;
+
+        // walk up to root
+        boolean done = false;
+        while (!done) {
+            NodeRef parent1 = tree.getParent(node1);
+
+            if (parent1 != null) {
+                final double height1 = tree.getNodeHeight(parent1);
+                if (height1 < heightAbove) {
+                    // haven't reached the height above the original height so go down
+                    // the sibling subtree
+                    NodeRef sibling1 = getOtherChild(tree, parent1, node1);
+
+                    double heightBelow1 = height1 - (heightAbove - height1);
+
+                    if (heightBelow1 > tree.getNodeHeight(node)) {
+
+                        final List<NodeRef> edges = new ArrayList<NodeRef>();
+
+                        getIntersectingEdges(tree, sibling1, heightBelow1, edges);
+
+                        // add the intersecting edges and the height
+                        for (NodeRef n : edges) {
+                            destinations.put(n, heightBelow1);
+                        }
+                    }
+                } else {
+                    // add the current node as a destination
+                    destinations.put(node1, heightAbove);
+                    done = true;
+                }
+
+                node1 = parent1;
+            } else {
+                // node1 is the root - add it as a destination and stop loop
+                destinations.put(node1, heightAbove);
+                done = true;
+            }
+        }
+
+        return destinations;
+    }
+
+    private double getDelta() {
+        return Math.abs(MathUtils.nextGaussian() * size);
+    }
+
+    private int getIntersectingEdges(Tree tree, NodeRef node, double height, List<NodeRef> edges) {
+
+        final NodeRef parent = tree.getParent(node);
+
+        if (tree.getNodeHeight(parent) < height) return 0;
+
+        if (tree.getNodeHeight(node) < height) {
+            edges.add(node);
+            return 1;
+        }
+
+        int count = 0;
+        for (int i = 0; i < tree.getChildCount(node); i++) {
+            count += getIntersectingEdges(tree, tree.getChild(node, i), height, edges);
+        }
+        return count;
+    }
+
+    public double getSize() {
+        return size;
+    }
+
+    public void setSize(double size) {
+        this.size = size;
+    }
+
+    public double getCoercableParameter() {
+        return Math.log(getSize());
+    }
+
+    public void setCoercableParameter(double value) {
+        setSize(Math.exp(value));
+    }
+
+    public double getRawParameter() {
+        return getSize();
+    }
+
+    public CoercionMode getMode() {
+        return mode;
+    }
+
+    public double getTargetAcceptanceProbability() {
+        return 0.234;
+    }
+
+
+    public String getPerformanceSuggestion() {
+        double prob = MCMCOperator.Utils.getAcceptanceProbability(this);
+        double targetProb = getTargetAcceptanceProbability();
+
+        double ws = OperatorUtils.optimizeWindowSize(getSize(), Double.MAX_VALUE, prob, targetProb);
+
+        if (prob < getMinimumGoodAcceptanceLevel()) {
+            return "Try decreasing size to about " + ws;
+        } else if (prob > getMaximumGoodAcceptanceLevel()) {
+            return "Try increasing size to about " + ws;
+        } else return "";
+    }
+
+    public String getOperatorName() {
+        return SubtreeLeapOperatorParser.SUBTREE_LEAP + "(" + tree.getId() + ")";
+    }
+
+
+}
\ No newline at end of file
diff --git a/src/dr/evomodel/substmodel/ComplexSubstitutionModel.java b/src/dr/evomodel/substmodel/ComplexSubstitutionModel.java
index 5c402ab..60dde65 100644
--- a/src/dr/evomodel/substmodel/ComplexSubstitutionModel.java
+++ b/src/dr/evomodel/substmodel/ComplexSubstitutionModel.java
@@ -40,6 +40,8 @@ import dr.math.matrixAlgebra.RobustEigenDecomposition;
 import dr.math.matrixAlgebra.RobustSingularValueDecomposition;
 
 import java.util.Arrays;
+import java.util.HashSet;
+import java.util.Set;
 
 /**
  * <b>A general irreversible class for any
@@ -563,6 +565,11 @@ public class ComplexSubstitutionModel extends AbstractSubstitutionModel implemen
     }
 
     @Override
+    public Set<Likelihood> getLikelihoodSet() {
+        return new HashSet<Likelihood>(Arrays.asList(this));
+    }
+
+    @Override
     public boolean isUsed() {
         return super.isUsed() && isUsed;
     }
diff --git a/src/dr/evomodel/substmodel/EmpiricalCodonModel.java b/src/dr/evomodel/substmodel/EmpiricalCodonModel.java
index 9e13941..7af1daa 100644
--- a/src/dr/evomodel/substmodel/EmpiricalCodonModel.java
+++ b/src/dr/evomodel/substmodel/EmpiricalCodonModel.java
@@ -28,8 +28,6 @@ package dr.evomodel.substmodel;
 import dr.evolution.datatype.AminoAcids;
 import dr.evolution.datatype.Codons;
 import dr.evolution.datatype.Nucleotides;
-import dr.evomodel.substmodel.FrequencyModel;
-import dr.evomodel.substmodel.AbstractCodonModel;
 import dr.evomodelxml.substmodel.EmpiricalCodonModelParser;
 import dr.inference.model.Parameter;
 import java.util.logging.Logger;
diff --git a/src/dr/evomodel/substmodel/HKY.java b/src/dr/evomodel/substmodel/HKY.java
index a2e4691..0f40232 100644
--- a/src/dr/evomodel/substmodel/HKY.java
+++ b/src/dr/evomodel/substmodel/HKY.java
@@ -25,7 +25,6 @@
 
 package dr.evomodel.substmodel;
 
-import dr.evomodel.substmodel.NucModelType;
 import dr.inference.model.Parameter;
 import dr.inference.model.Statistic;
 import dr.inference.model.Variable;
diff --git a/src/dr/evomodel/substmodel/PCACodonModel.java b/src/dr/evomodel/substmodel/PCACodonModel.java
index 8d486da..ee76fcb 100644
--- a/src/dr/evomodel/substmodel/PCACodonModel.java
+++ b/src/dr/evomodel/substmodel/PCACodonModel.java
@@ -26,10 +26,7 @@
 package dr.evomodel.substmodel;
 
 import dr.evolution.datatype.Codons;
-import dr.evomodel.substmodel.FrequencyModel;
-import dr.evomodel.substmodel.AbstractCodonModel;
 import dr.evomodelxml.substmodel.PCACodonModelParser;
-import dr.evomodel.substmodel.AbstractPCARateMatrix;
 import dr.inference.model.Parameter;
 
 /**
diff --git a/src/dr/evomodel/substmodel/SVSGeneralSubstitutionModel.java b/src/dr/evomodel/substmodel/SVSGeneralSubstitutionModel.java
index a8f8c4d..895d6ae 100644
--- a/src/dr/evomodel/substmodel/SVSGeneralSubstitutionModel.java
+++ b/src/dr/evomodel/substmodel/SVSGeneralSubstitutionModel.java
@@ -26,11 +26,13 @@
 package dr.evomodel.substmodel;
 
 import dr.evolution.datatype.*;
-import dr.evomodelxml.substmodel.GeneralSubstitutionModelParser;
 import dr.inference.loggers.LogColumn;
 import dr.inference.loggers.NumberColumn;
 import dr.inference.model.*;
-import dr.xml.*;
+
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.Set;
 
 /**
  * <b>A general model of sequence substitution with stochastic variable selection</b>. A general reversible class for any
@@ -167,7 +169,12 @@ public class SVSGeneralSubstitutionModel extends GeneralSubstitutionModel implem
             }
         }
     }
-    
+
+    @Override
+    public Set<Likelihood> getLikelihoodSet() {
+        return new HashSet<Likelihood>(Arrays.asList(this));
+    }
+
     @Override
     public boolean isUsed() {
         return super.isUsed() && isUsed;
diff --git a/src/dr/evomodel/substmodel/SubstitutionEpochModel.java b/src/dr/evomodel/substmodel/SubstitutionEpochModel.java
index 7c7067e..3fe0382 100644
--- a/src/dr/evomodel/substmodel/SubstitutionEpochModel.java
+++ b/src/dr/evomodel/substmodel/SubstitutionEpochModel.java
@@ -25,15 +25,12 @@
 
 package dr.evomodel.substmodel;
 
-import dr.app.beagle.evomodel.sitemodel.EpochBranchSubstitutionModel;
 import dr.evolution.datatype.DataType;
 import dr.inference.model.Model;
 import dr.inference.model.Parameter;
 import dr.inference.model.Variable;
 import dr.math.matrixAlgebra.Vector;
-import dr.xml.*;
 
-import java.util.ArrayList;
 import java.util.List;
 
 /**
diff --git a/src/dr/evomodel/tree/HiddenLinkageModel.java b/src/dr/evomodel/tree/HiddenLinkageModel.java
index 0e5527e..dd46f5e 100644
--- a/src/dr/evomodel/tree/HiddenLinkageModel.java
+++ b/src/dr/evomodel/tree/HiddenLinkageModel.java
@@ -107,10 +107,15 @@ public class HiddenLinkageModel extends TipStatesModel implements PatternList
 	double[] tipMatrix;
 	double[] internalMatrix;
 
+	@Override
+	public boolean areUnique() {
+		return false;
+	}
+
 	/*
-	 * Initializes a likelihoodCore to calculate likelihoods for 
-	 * the tips
-	 */
+         * Initializes a likelihoodCore to calculate likelihoods for
+         * the tips
+         */
 	private void initCore(){
 		if(data.getAlignment().getDataType() instanceof dr.evolution.datatype.Nucleotides)
 			core = new NativeNucleotideLikelihoodCore();
diff --git a/src/dr/evomodel/tree/TreeModel.java b/src/dr/evomodel/tree/TreeModel.java
index 0195742..ba9d756 100644
--- a/src/dr/evomodel/tree/TreeModel.java
+++ b/src/dr/evomodel/tree/TreeModel.java
@@ -67,7 +67,7 @@ public class TreeModel extends AbstractModel implements MultivariateTraitTree {
 
     public TreeModel(String id, Tree tree, boolean fixHeights) {
 
-        this(TREE_MODEL, tree, false, fixHeights);
+        this(id, tree, false, fixHeights);
         setId(id);
     }
 
diff --git a/src/dr/evomodel/tree/TreeParameterModel.java b/src/dr/evomodel/tree/TreeParameterModel.java
index bff535d..c366c7c 100644
--- a/src/dr/evomodel/tree/TreeParameterModel.java
+++ b/src/dr/evomodel/tree/TreeParameterModel.java
@@ -51,8 +51,7 @@ public class TreeParameterModel extends AbstractModel implements TreeTrait<Doubl
     private final Parameter parameter;
 
     // the index of the root node.
-    private int rootNodeNumber;
-    private int storedRootNodeNumber;
+    private final Parameter rootNodeNumber;
 
     private boolean includeRoot = false;
 
@@ -100,8 +99,9 @@ public class TreeParameterModel extends AbstractModel implements TreeTrait<Doubl
         addModel(tree);
         addVariable(parameter);
 
-        rootNodeNumber = tree.getRoot().getNumber();
-        storedRootNodeNumber = rootNodeNumber;
+        rootNodeNumber = new Parameter.Default(parameter.getId() + ".rootNodeNumber");
+        rootNodeNumber.setParameterValue(0, tree.getRoot().getNumber());
+        addVariable(rootNodeNumber);
     }
 
     public int getParameterSize() {
@@ -127,11 +127,11 @@ public class TreeParameterModel extends AbstractModel implements TreeTrait<Doubl
     }
 
     protected void storeState() {
-        storedRootNodeNumber = rootNodeNumber;
+        //rootNodeNumber.storeParameterValues();
     }
 
     protected void restoreState() {
-        rootNodeNumber = storedRootNodeNumber;
+        //rootNodeNumber.restoreParameterValues();
     }
 
     protected void acceptState() {
@@ -145,7 +145,7 @@ public class TreeParameterModel extends AbstractModel implements TreeTrait<Doubl
 
         assert (!tree.isRoot(node) || includeRoot) : "root node doesn't have a parameter value!";
 
-        assert tree.getRoot().getNumber() == rootNodeNumber :
+        assert !includeRoot || tree.getRoot().getNumber() == rootNodeNumber.getValue(0).intValue() :
                 "INTERNAL ERROR! node with number " + rootNodeNumber + " should be the root node.";
 
         int nodeNumber = node.getNumber();
@@ -157,7 +157,7 @@ public class TreeParameterModel extends AbstractModel implements TreeTrait<Doubl
 
         assert (!tree.isRoot(node) && !includeRoot) : "root node doesn't have a parameter value!";
 
-        assert tree.getRoot().getNumber() == rootNodeNumber :
+        assert tree.getRoot().getNumber() == rootNodeNumber.getValue(0).intValue() :
                 "INTERNAL ERROR! node with number " + rootNodeNumber + " should be the root node.";
 
         int nodeNumber = node.getNumber();
@@ -179,32 +179,33 @@ public class TreeParameterModel extends AbstractModel implements TreeTrait<Doubl
 
         if (!includeRoot) {
 
+            final int oldRootNodeNumber = rootNodeNumber.getValue(0).intValue();
             final int newRootNodeNumber = tree.getRoot().getNumber();
 
-            if (rootNodeNumber > newRootNodeNumber) {
+            if (oldRootNodeNumber > newRootNodeNumber) {
 
                 final double oldValue = parameter.getParameterValue(newRootNodeNumber);
 
-                final int end = Math.min(parameter.getDimension() - 1, rootNodeNumber);
+                final int end = Math.min(parameter.getDimension() - 1, oldRootNodeNumber);
                 for (int i = newRootNodeNumber; i < end; i++) {
                     parameter.setParameterValue(i, parameter.getParameterValue(i + 1));
                 }
 
                 parameter.setParameterValue(end, oldValue);
 
-            } else if (rootNodeNumber < newRootNodeNumber) {
+            } else if (oldRootNodeNumber < newRootNodeNumber) {
 
                 final int end = Math.min(parameter.getDimension() - 1, newRootNodeNumber);
 
                 final double oldValue = parameter.getParameterValue(end);
 
-                for (int i = end; i > rootNodeNumber; i--) {
+                for (int i = end; i > oldRootNodeNumber; i--) {
                     parameter.setParameterValue(i, parameter.getParameterValue(i - 1));
                 }
 
-                parameter.setParameterValue(rootNodeNumber, oldValue);
+                parameter.setParameterValue(oldRootNodeNumber, oldValue);
             }
-            rootNodeNumber = newRootNodeNumber;
+            rootNodeNumber.setParameterValue(0, newRootNodeNumber);
         }
     }
 
diff --git a/src/dr/evomodelxml/EmpiricalTreeDistributionModelParser.java b/src/dr/evomodelxml/EmpiricalTreeDistributionModelParser.java
index 13d1c44..3270e2e 100644
--- a/src/dr/evomodelxml/EmpiricalTreeDistributionModelParser.java
+++ b/src/dr/evomodelxml/EmpiricalTreeDistributionModelParser.java
@@ -71,7 +71,7 @@ public class EmpiricalTreeDistributionModelParser extends AbstractXMLObjectParse
         try {
             FileReader reader = new FileReader(file);
             NexusImporter importer = new NexusImporter(reader);
-            trees = importer.importTrees(taxa);
+            trees = importer.importTrees(taxa, true); // Re-order taxon numbers to original TaxonList order
 
         } catch (FileNotFoundException e) {
             throw new XMLParseException(e.getMessage());
diff --git a/src/dr/evomodelxml/TreeWorkingPriorParsers.java b/src/dr/evomodelxml/TreeWorkingPriorParsers.java
index f96b706..35ddee8 100644
--- a/src/dr/evomodelxml/TreeWorkingPriorParsers.java
+++ b/src/dr/evomodelxml/TreeWorkingPriorParsers.java
@@ -68,7 +68,7 @@ import dr.xml.XMLSyntaxRule;
  */
 public class TreeWorkingPriorParsers {
 
-    public final static boolean DEBUG = true;
+    public final static boolean DEBUG = false;
 
     public static final String CONSTANT_TREE_TOPOLOGY_PRIOR = "constantTreeTopologyPrior";
     public static final String CONTEMPORANEOUS_COALESCENT_CONSTANT = "contemporaneousCoalescentConstantLikelihood";
@@ -255,7 +255,9 @@ public class TreeWorkingPriorParsers {
 
                 int traceIndexParameter = -1;
 
-                System.out.println("Looking for the following column:" + parameterName);
+                if (DEBUG) {
+                    System.err.println("Looking for the following column:" + parameterName);
+                }
                 for (int i = 0; i < traces.getTraceCount(); i++) {
                     String traceName = traces.getTraceName(i);
                     if (traceName.trim().equals(parameterName)) {
@@ -263,10 +265,12 @@ public class TreeWorkingPriorParsers {
                         break;
                     }
                 }
-                System.out.println("Overview of traceIndexParameter:");
+                if (DEBUG) {
+                    System.err.println("Overview of traceIndexParameter:");
+                }
                 if (traceIndexParameter == -1) {
                     throw new XMLParseException("Not all traces could be linked to the required columns.");
-                } else {
+                } else if (DEBUG) {
                     System.out.println("  traceIndexParameter: " + traceIndexParameter);
                 }
 
@@ -279,7 +283,9 @@ public class TreeWorkingPriorParsers {
                     posteriorMean += parameterSamples[i];
                 }
                 posteriorMean /= ((double)parameterSamples.length);
-                System.err.println("Variable column -> " + posteriorMean);
+                if (DEBUG) {
+                    System.err.println("Variable column -> " + posteriorMean);
+                }
                 //posteriorMean = Math.log(posteriorMean);
                 //System.err.println("Log transformed: " + posteriorMean);
 
@@ -366,9 +372,11 @@ public class TreeWorkingPriorParsers {
                 for (int i = 1; i <= columnNames.length; i++) {
                     columnNames[i-1] = parameterName + i;
                 }
-                System.out.println("Looking for the following columns:");
-                for (int i = 0; i < columnNames.length; i++) {
-                    System.out.println("  " + columnNames[i]);
+                if (DEBUG) {
+                    System.err.println("Looking for the following columns:");
+                    for (int i = 0; i < columnNames.length; i++) {
+                        System.err.println("  " + columnNames[i]);
+                    }
                 }
                 for (int i = 0; i < traces.getTraceCount(); i++) {
                     String traceName = traces.getTraceName(i);
@@ -379,12 +387,15 @@ public class TreeWorkingPriorParsers {
                         }
                     }
                 }
-                System.out.println("Overview of traceIndexParameter:");
+                if (DEBUG) {
+                    System.err.println("Overview of traceIndexParameter:");
+                }
                 for (int i = 0; i < traceIndexParameter.length; i++) {
                     if (traceIndexParameter[i] == -1) {
                         throw new XMLParseException("Not all traces could be linked to the required columns, problem with trace index: " + i + "; traceIndexParameter.length = " + traceIndexParameter.length);
+                    } else if (DEBUG) {
+                        System.err.println("  traceIndexParameter[" + i + "]: " + traceIndexParameter[i]);
                     }
-                    System.out.println("  traceIndexParameter[" + i + "]: " + traceIndexParameter[i] );
                 }
 
                 Double[][] parameterSamples = new Double[dimension][traces.getStateCount()];
@@ -405,21 +416,12 @@ public class TreeWorkingPriorParsers {
 
                     //mean = 1.0/mean;
 
-                    System.err.println("Variable column: " + i + " -> " + mean);
+                    if (DEBUG) {
+                        System.err.println("Variable column: " + i + " -> " + mean);
+                    }
                     posteriorMeans[i] = Math.log(mean);
                 }
 
-                //Test: hard coding posterior means
-                /*posteriorMeans[0] = 3.667;
-				posteriorMeans[1] = 3.245;
-				posteriorMeans[2] = 3.196;
-				posteriorMeans[3] = 3.167;
-				posteriorMeans[4] = 3.17;
-				posteriorMeans[5] = 3.126;
-				posteriorMeans[6] = 3.422;
-				posteriorMeans[7] = 3.412;
-				posteriorMeans[8] = 3.058;*/
-
                 return new ExponentialProductPosteriorMeansLikelihood(treeModel, posteriorMeans);
 
             } catch (FileNotFoundException fnfe) {
@@ -503,9 +505,11 @@ public class TreeWorkingPriorParsers {
                 for (int i = 1; i <= columnNames.length; i++) {
                     columnNames[i-1] = parameterName + i;
                 }
-                System.out.println("Looking for the following columns:");
-                for (int i = 0; i < columnNames.length; i++) {
-                    System.out.println("  " + columnNames[i]);
+                if (DEBUG) {
+                    System.err.println("Looking for the following columns:");
+                    for (int i = 0; i < columnNames.length; i++) {
+                        System.err.println("  " + columnNames[i]);
+                    }
                 }
                 for (int i = 0; i < traces.getTraceCount(); i++) {
                     String traceName = traces.getTraceName(i);
@@ -516,12 +520,16 @@ public class TreeWorkingPriorParsers {
                         }
                     }
                 }
-                System.out.println("Overview of traceIndexParameter:");
+                if (DEBUG) {
+                    System.err.println("Overview of traceIndexParameter:");
+                }
                 for (int i = 0; i < traceIndexParameter.length; i++) {
                     if (traceIndexParameter[i] == -1) {
                         throw new XMLParseException("Not all traces could be linked to the required columns, problem with trace index: " + i + "; traceIndexParameter.length = " + traceIndexParameter.length);
                     }
-                    System.out.println("  traceIndexParameter[" + i + "]: " + traceIndexParameter[i] );
+                    if (DEBUG) {
+                        System.err.println("  traceIndexParameter[" + i + "]: " + traceIndexParameter[i]);
+                    }
                 }
 
                 Double[][] parameterSamples = new Double[dimension][traces.getStateCount()];
@@ -542,15 +550,19 @@ public class TreeWorkingPriorParsers {
 
                     //mean = 1.0/mean;
 
-                    System.err.println("Variable column: " + i + " -> " + mean);
+                    if (DEBUG) {
+                        System.err.println("Variable column: " + i + " -> " + mean);
+                    }
                     posteriorMeans[i] = Math.log(mean);
                 }
 
                 try {
                     //Print log posterior means
-                    System.err.println("Log Posterior Means:");
-                    for (int i = 0; i < posteriorMeans.length; i++) {
-                        System.err.println(posteriorMeans[i]);
+                    if (DEBUG) {
+                        System.err.println("Log Posterior Means:");
+                        for (int i = 0; i < posteriorMeans.length; i++) {
+                            System.err.println(posteriorMeans[i]);
+                        }
                     }
                     //Call Loess interpolator here
                     LoessInterpolator loess = new LoessInterpolator(1.0, 2);
@@ -560,9 +572,12 @@ public class TreeWorkingPriorParsers {
                     }
 
                     double[] loessOutput = loess.smooth(xvalues, posteriorMeans);
-                    System.err.println("Loess output:");
-                    for (int i = 0; i < loessOutput.length; i++) {
-                        System.err.println(loessOutput[i]);
+
+                    if (DEBUG) {
+                        System.err.println("Loess output:");
+                        for (int i = 0; i < loessOutput.length; i++) {
+                            System.err.println(loessOutput[i]);
+                        }
                     }
 
                     posteriorMeans = loessOutput;
@@ -659,9 +674,11 @@ public class TreeWorkingPriorParsers {
                 for (int i = 1; i <= columnNames.length; i++) {
                     columnNames[i-1] = parameterName + i;
                 }
-                System.out.println("Looking for the following columns:");
-                for (int i = 0; i < columnNames.length; i++) {
-                    System.out.println("  " + columnNames[i]);
+                if (DEBUG) {
+                    System.err.println("Looking for the following columns:");
+                    for (int i = 0; i < columnNames.length; i++) {
+                        System.err.println("  " + columnNames[i]);
+                    }
                 }
                 for (int i = 0; i < traces.getTraceCount(); i++) {
                     String traceName = traces.getTraceName(i);
@@ -672,12 +689,16 @@ public class TreeWorkingPriorParsers {
                         }
                     }
                 }
-                System.out.println("Overview of traceIndexParameter:");
+                if (DEBUG) {
+                    System.err.println("Overview of traceIndexParameter:");
+                }
                 for (int i = 0; i < traceIndexParameter.length; i++) {
                     if (traceIndexParameter[i] == -1) {
                         throw new XMLParseException("Not all traces could be linked to the required columns.");
                     }
-                    System.out.println("  traceIndexParameter[" + i + "]: " + traceIndexParameter[i] );
+                    if (DEBUG) {
+                        System.err.println("  traceIndexParameter[" + i + "]: " + traceIndexParameter[i]);
+                    }
                 }
 
                 Double[][] parameterSamples = new Double[dimension][traces.getStateCount()];
@@ -695,7 +716,9 @@ public class TreeWorkingPriorParsers {
                         mean += parameterSamples[i][j];
                     }
                     mean /= ((double)parameterSamples[i].length);
-                    System.err.println("Variable column: " + i + " -> " + mean);
+                    if (DEBUG) {
+                        System.err.println("Variable column: " + i + " -> " + mean);
+                    }
                     posteriorMeans[i] = Math.log(mean);
                 }
 
@@ -782,9 +805,11 @@ public class TreeWorkingPriorParsers {
                 for (int i = 1; i <= columnNames.length; i++) {
                     columnNames[i-1] = parameterName + i;
                 }
-                System.out.println("Looking for the following columns:");
-                for (int i = 0; i < columnNames.length; i++) {
-                    System.out.println("  " + columnNames[i]);
+                if (DEBUG) {
+                    System.err.println("Looking for the following columns:");
+                    for (int i = 0; i < columnNames.length; i++) {
+                        System.err.println("  " + columnNames[i]);
+                    }
                 }
                 for (int i = 0; i < traces.getTraceCount(); i++) {
                     String traceName = traces.getTraceName(i);
@@ -795,12 +820,16 @@ public class TreeWorkingPriorParsers {
                         }
                     }
                 }
-                System.out.println("Overview of traceIndexParameter:");
+                if (DEBUG) {
+                    System.err.println("Overview of traceIndexParameter:");
+                }
                 for (int i = 0; i < traceIndexParameter.length; i++) {
                     if (traceIndexParameter[i] == -1) {
                         throw new XMLParseException("Not all traces could be linked to the required columns.");
                     }
-                    System.out.println("  traceIndexParameter[" + i + "]: " + traceIndexParameter[i] );
+                    if (DEBUG) {
+                        System.err.println("  traceIndexParameter[" + i + "]: " + traceIndexParameter[i]);
+                    }
                 }
 
                 boolean[] flags = new boolean[dimension];
@@ -846,7 +875,9 @@ public class TreeWorkingPriorParsers {
                         //scales[i] = variance/mean;
                         //shapes[i] = mean/scales[i];
                         //System.err.println("Variable column: " + i + " -> " + shapes[i] + "   " + scales[i]);
-                        System.err.println("Variable column: " + i + " -> " + means[i] + "   " + variances[i]);
+                        if (DEBUG) {
+                            System.err.println("Variable column: " + i + " -> " + means[i] + "   " + variances[i]);
+                        }
                     } else {
                         //constant column
                         double mean = 0.0;
@@ -861,7 +892,9 @@ public class TreeWorkingPriorParsers {
                         //System.err.println("mean = " + mean + "   variance = " + variance);
                         //scales[i] = variance/mean;
                         //shapes[i] = mean/scales[i];
-                        System.err.println("Constant column: " + i + " -> " + means[i] + "   " + variances[i]);
+                        if (DEBUG) {
+                            System.err.println("Constant column: " + i + " -> " + means[i] + "   " + variances[i]);
+                        }
                     } 
                 }
 
@@ -979,9 +1012,11 @@ public class TreeWorkingPriorParsers {
                 for (int i = 1; i <= columnNames.length; i++) {
                     columnNames[i-1] = parameterName + i;
                 }
-                System.out.println("Looking for the following columns:");
-                for (int i = 0; i < columnNames.length; i++) {
-                    System.out.println("  " + columnNames[i]);
+                if (DEBUG) {
+                    System.err.println("Looking for the following columns:");
+                    for (int i = 0; i < columnNames.length; i++) {
+                        System.err.println("  " + columnNames[i]);
+                    }
                 }
                 for (int i = 0; i < traces.getTraceCount(); i++) {
                     String traceName = traces.getTraceName(i);
@@ -992,12 +1027,16 @@ public class TreeWorkingPriorParsers {
                         }
                     }
                 }
-                System.out.println("Overview of traceIndexParameter:");
+                if (DEBUG) {
+                    System.err.println("Overview of traceIndexParameter:");
+                }
                 for (int i = 0; i < traceIndexParameter.length; i++) {
                     if (traceIndexParameter[i] == -1) {
                         throw new XMLParseException("Not all traces could be linked to the required columns.");
                     }
-                    System.out.println("  traceIndexParameter[" + i + "]: " + traceIndexParameter[i] );
+                    if (DEBUG) {
+                        System.err.println("  traceIndexParameter[" + i + "]: " + traceIndexParameter[i]);
+                    }
                 }
 
                 boolean[] flags = new boolean[dimension];
@@ -1036,7 +1075,9 @@ public class TreeWorkingPriorParsers {
                         variance /= ((double)(parameterSamples[i].length-1));
                         scales[i] = variance/mean;
                         shapes[i] = mean/scales[i];
-                        System.err.println("Variable column: " + i + " -> " + shapes[i] + "   " + scales[i]);
+                        if (DEBUG) {
+                            System.err.println("Variable column: " + i + " -> " + shapes[i] + "   " + scales[i]);
+                        }
                     } /*else {
                     	   double mean = 0.0;
                            for (int j = 0; j < parameterSamples[i].length; j++) {
@@ -1052,10 +1093,12 @@ public class TreeWorkingPriorParsers {
                        } */
                 }
 
-                System.err.println("Columns to be evaluated:");
-                for (int i = 0; i < flags.length; i++) {
-                    if (flags[i]) {
-                        System.err.println("Column " + i);
+                if (DEBUG) {
+                    System.err.println("Columns to be evaluated:");
+                    for (int i = 0; i < flags.length; i++) {
+                        if (flags[i]) {
+                            System.err.println("Column " + i);
+                        }
                     }
                 }
 
diff --git a/src/dr/evomodelxml/branchratemodel/ContinuousBranchRatesParser.java b/src/dr/evomodelxml/branchratemodel/ContinuousBranchRatesParser.java
index 74211ef..768c645 100644
--- a/src/dr/evomodelxml/branchratemodel/ContinuousBranchRatesParser.java
+++ b/src/dr/evomodelxml/branchratemodel/ContinuousBranchRatesParser.java
@@ -40,13 +40,11 @@ public class ContinuousBranchRatesParser extends AbstractXMLObjectParser {
 
     public static final String CONTINUOUS_BRANCH_RATES = "continuousBranchRates";
     public static final String DISTRIBUTION = "distribution";
-    //public static final String RATE_CATEGORIES = "rateCategories";
     public static final String RATE_CATEGORY_QUANTILES = "rateCategoryQuantiles";
+    public static final String RATE_QUANTILES = "rateQuantiles";
     public static final String SINGLE_ROOT_RATE = "singleRootRate";
-    //public static final String OVERSAMPLING = "overSampling";
     public static final String NORMALIZE = "normalize";
     public static final String NORMALIZE_BRANCH_RATE_TO = "normalizeBranchRateTo";
-    //public static final String NORMALIZED_MEAN = "normalizedMean";
 
 
     public String getParserName() {
@@ -55,36 +53,31 @@ public class ContinuousBranchRatesParser extends AbstractXMLObjectParser {
 
     public Object parseXMLObject(XMLObject xo) throws XMLParseException {
 
-        //final int overSampling = xo.getAttribute(OVERSAMPLING, 1);
         final boolean normalize = xo.getAttribute(NORMALIZE, false);
         final double normalizeBranchRateTo = xo.getAttribute(NORMALIZE_BRANCH_RATE_TO, Double.NaN);
 
         TreeModel tree = (TreeModel) xo.getChild(TreeModel.class);
         ParametricDistributionModel distributionModel = (ParametricDistributionModel) xo.getElementFirstChild(DISTRIBUTION);
 
-        //Parameter rateCategoryParameter = (Parameter) xo.getElementFirstChild(RATE_CATEGORIES);
-
-        Parameter rateCategoryQuantilesParameter = (Parameter) xo.getElementFirstChild(RATE_CATEGORY_QUANTILES);
+        Parameter rateQuantilesParameter;
+        if (xo.hasChildNamed(RATE_QUANTILES)) {
+            rateQuantilesParameter = (Parameter) xo.getElementFirstChild(RATE_QUANTILES);
+        } else {
+            rateQuantilesParameter = (Parameter) xo.getElementFirstChild(RATE_CATEGORY_QUANTILES);
+        }
 
         Logger.getLogger("dr.evomodel").info("Using continuous relaxed clock model.");
-        //Logger.getLogger("dr.evomodel").info("  over sampling = " + overSampling);
         Logger.getLogger("dr.evomodel").info("  parametric model = " + distributionModel.getModelName());
-        //Logger.getLogger("dr.evomodel").info("   rate categories = " + rateCategoryParameter.getDimension());
-        Logger.getLogger("dr.evomodel").info("   rate categories = " + rateCategoryQuantilesParameter.getDimension());
+        Logger.getLogger("dr.evomodel").info("   rate categories = " + rateQuantilesParameter.getDimension());
         if(normalize) {
             Logger.getLogger("dr.evomodel").info("   mean rate is normalized to " + normalizeBranchRateTo);
         }
 
         if (xo.hasAttribute(SINGLE_ROOT_RATE)) {
-            //singleRootRate = xo.getBooleanAttribute(SINGLE_ROOT_RATE);
             Logger.getLogger("dr.evomodel").warning("   WARNING: single root rate is not implemented!");
         }
 
-        /* if (xo.hasAttribute(NORMALIZED_MEAN)) {
-            dbr.setNormalizedMean(xo.getDoubleAttribute(NORMALIZED_MEAN));
-        }*/
-
-        return new ContinuousBranchRates(tree, /*rateCategoryParameter, */rateCategoryQuantilesParameter, distributionModel, /*overSampling,*/ normalize, normalizeBranchRateTo);
+        return new ContinuousBranchRates(tree, rateQuantilesParameter, distributionModel, normalize, normalizeBranchRateTo);
     }
 
     //************************************************************************
@@ -93,8 +86,7 @@ public class ContinuousBranchRatesParser extends AbstractXMLObjectParser {
 
     public String getParserDescription() {
         return
-                "This element returns a continuous relaxed clock model." +
-                        "The branch rates are drawn from a continuous parametric distribution.";
+                "This element returns a continuous quantile uncorrelated relaxed clock model.";
     }
 
     public Class getReturnType() {
@@ -107,13 +99,13 @@ public class ContinuousBranchRatesParser extends AbstractXMLObjectParser {
 
     private XMLSyntaxRule[] rules = new XMLSyntaxRule[]{
             AttributeRule.newBooleanRule(SINGLE_ROOT_RATE, true, "Whether only a single rate should be used for the two children branches of the root"),
-            //AttributeRule.newDoubleRule(NORMALIZED_MEAN, true, "The mean rate to constrain branch rates to once branch lengths are taken into account"),
-            //AttributeRule.newIntegerRule(OVERSAMPLING, true, "The integer factor for oversampling the distribution model (1 means no oversampling)"),
             AttributeRule.newBooleanRule(NORMALIZE, true, "Whether the mean rate has to be normalized to a particular value"),
             AttributeRule.newDoubleRule(NORMALIZE_BRANCH_RATE_TO, true, "The mean rate to normalize to, if normalizing"),
             new ElementRule(TreeModel.class),
             new ElementRule(DISTRIBUTION, ParametricDistributionModel.class, "The distribution model for rates among branches", false),
-            /*new ElementRule(RATE_CATEGORIES, Parameter.class, "The rate categories parameter", false),      */
-            new ElementRule(RATE_CATEGORY_QUANTILES, Parameter.class, "The quantiles for", false),
+            new XORRule(
+                    new ElementRule(RATE_QUANTILES, Parameter.class, "The quantiles for each branch", false),
+                    new ElementRule(RATE_CATEGORY_QUANTILES, Parameter.class, "The quantiles for each branch", false)
+            )
     };
 }
\ No newline at end of file
diff --git a/src/dr/evomodelxml/branchratemodel/DiscretizedBranchRatesParser.java b/src/dr/evomodelxml/branchratemodel/DiscretizedBranchRatesParser.java
index 17da315..4d341a7 100644
--- a/src/dr/evomodelxml/branchratemodel/DiscretizedBranchRatesParser.java
+++ b/src/dr/evomodelxml/branchratemodel/DiscretizedBranchRatesParser.java
@@ -47,6 +47,8 @@ public class DiscretizedBranchRatesParser extends AbstractXMLObjectParser {
     public static final String NORMALIZE_BRANCH_RATE_TO = "normalizeBranchRateTo";
     public static final String RANDOMIZE_RATES = "randomizeRates";
     public static final String KEEP_RATES = "keepRates";
+    public static final String CACHED_RATES = "cachedRates";
+
     //public static final String NORMALIZED_MEAN = "normalizedMean";
 
 
@@ -84,9 +86,11 @@ public class DiscretizedBranchRatesParser extends AbstractXMLObjectParser {
             Logger.getLogger("dr.evomodel").warning("   WARNING: single root rate is not implemented!");
         }
 
-        final boolean randomizeRates = xo.getAttribute(RANDOMIZE_RATES, false);
+        final boolean randomizeRates = xo.getAttribute(RANDOMIZE_RATES, true);
         final boolean keepRates = xo.getAttribute(KEEP_RATES, false);
 
+        final boolean cachedRates = xo.getAttribute(CACHED_RATES, false);
+
         if (randomizeRates && keepRates) {
             throw new XMLParseException("Unable to both randomize and keep current rate categories");
         }
@@ -96,7 +100,7 @@ public class DiscretizedBranchRatesParser extends AbstractXMLObjectParser {
         }*/
 
         return new DiscretizedBranchRates(tree, rateCategoryParameter, distributionModel, overSampling, normalize,
-                normalizeBranchRateTo, randomizeRates, keepRates);
+                normalizeBranchRateTo, randomizeRates, keepRates, cachedRates);
     }
 
     //************************************************************************
@@ -125,6 +129,7 @@ public class DiscretizedBranchRatesParser extends AbstractXMLObjectParser {
             AttributeRule.newDoubleRule(NORMALIZE_BRANCH_RATE_TO, true, "The mean rate to normalize to, if normalizing"),
             AttributeRule.newBooleanRule(RANDOMIZE_RATES, true, "Randomize initial categories"),
             AttributeRule.newBooleanRule(KEEP_RATES, true, "Keep current rate category specification"),
+            AttributeRule.newBooleanRule(CACHED_RATES, true, "Cache rates between steps (default off)"),
             new ElementRule(TreeModel.class),
             new ElementRule(DISTRIBUTION, ParametricDistributionModel.class, "The distribution model for rates among branches", false),
             new ElementRule(RATE_CATEGORIES, Parameter.class, "The rate categories parameter", false),
diff --git a/src/dr/evomodelxml/coalescent/CataclysmicDemographicModelParser.java b/src/dr/evomodelxml/coalescent/CataclysmicDemographicModelParser.java
index c95ad85..fe16c2c 100644
--- a/src/dr/evomodelxml/coalescent/CataclysmicDemographicModelParser.java
+++ b/src/dr/evomodelxml/coalescent/CataclysmicDemographicModelParser.java
@@ -40,6 +40,7 @@ public class CataclysmicDemographicModelParser extends AbstractXMLObjectParser {
     public static final String GROWTH_RATE = "growthRate";
     public static final String SPIKE_SIZE = "spikeFactor";
     public static final String TIME_OF_CATACLYSM = "timeOfCataclysm";
+    public static final String DECLINE_RATE = "declineRate";
 
     public static final String CATACLYSM_MODEL = "cataclysm";
 
@@ -57,13 +58,24 @@ public class CataclysmicDemographicModelParser extends AbstractXMLObjectParser {
         cxo = xo.getChild(GROWTH_RATE);
         Parameter rParam = (Parameter) cxo.getChild(Parameter.class);
 
-        cxo = xo.getChild(SPIKE_SIZE);
-        Parameter N1Param = (Parameter) cxo.getChild(Parameter.class);
+        Parameter secondParam = null;
+        boolean useSpike = true;
+
+        if (xo.hasChildNamed(SPIKE_SIZE)) {
+            cxo = xo.getChild(SPIKE_SIZE);
+            secondParam = (Parameter) cxo.getChild(Parameter.class);
+        } else if (xo.hasChildNamed(DECLINE_RATE)) {
+            cxo = xo.getChild(DECLINE_RATE);
+            secondParam = (Parameter) cxo.getChild(Parameter.class);
+            useSpike = false;
+        } else {
+            throw new XMLParseException("Must provide either a spike factor or decline rate");
+        }
 
         cxo = xo.getChild(TIME_OF_CATACLYSM);
         Parameter tParam = (Parameter) cxo.getChild(Parameter.class);
 
-        return new CataclysmicDemographicModel(N0Param, N1Param, rParam, tParam, units);
+        return new CataclysmicDemographicModel(N0Param, secondParam, rParam, tParam, units, useSpike);
     }
 
     //************************************************************************
@@ -88,9 +100,12 @@ public class CataclysmicDemographicModelParser extends AbstractXMLObjectParser {
             new ElementRule(GROWTH_RATE,
                     new XMLSyntaxRule[]{new ElementRule(Parameter.class)},
                     "The rate of exponential growth before the cataclysmic event."),
-            new ElementRule(SPIKE_SIZE,
-                    new XMLSyntaxRule[]{new ElementRule(Parameter.class)},
-                    "The factor larger the population size was at its height."),
+            new XORRule(
+                    new ElementRule(SPIKE_SIZE,
+                        new XMLSyntaxRule[]{new ElementRule(Parameter.class)},
+                        "The factor larger the population size was at its height."),
+                    new ElementRule(DECLINE_RATE,
+                            new XMLSyntaxRule[] { new ElementRule(Parameter.class)})),
             new ElementRule(TIME_OF_CATACLYSM,
                     new XMLSyntaxRule[]{new ElementRule(Parameter.class)},
                     "The time of the cataclysmic event that lead to exponential decline."),
diff --git a/src/dr/evomodelxml/coalescent/CataclysmicDemographicModelParser.java b/src/dr/evomodelxml/coalescent/MultiEpochExponentialModelParser.java
similarity index 58%
copy from src/dr/evomodelxml/coalescent/CataclysmicDemographicModelParser.java
copy to src/dr/evomodelxml/coalescent/MultiEpochExponentialModelParser.java
index c95ad85..bdc159e 100644
--- a/src/dr/evomodelxml/coalescent/CataclysmicDemographicModelParser.java
+++ b/src/dr/evomodelxml/coalescent/MultiEpochExponentialModelParser.java
@@ -1,99 +1,91 @@
-/*
- * CataclysmicDemographicModelParser.java
- *
- * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
- *
- * This file is part of BEAST.
- * See the NOTICE file distributed with this work for additional
- * information regarding copyright ownership and licensing.
- *
- * BEAST is free software; you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as
- * published by the Free Software Foundation; either version 2
- * of the License, or (at your option) any later version.
- *
- *  BEAST is distributed in the hope that it will be useful,
- *  but WITHOUT ANY WARRANTY; without even the implied warranty of
- *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
- *  GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public
- * License along with BEAST; if not, write to the
- * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
- * Boston, MA  02110-1301  USA
- */
-
-package dr.evomodelxml.coalescent;
-
-import dr.evolution.util.Units;
-import dr.evomodel.coalescent.CataclysmicDemographicModel;
-import dr.evoxml.util.XMLUnits;
-import dr.inference.model.Parameter;
-import dr.xml.*;
-
-/**
- * Parses an element from an DOM document into a ExponentialGrowth.
- */
-public class CataclysmicDemographicModelParser extends AbstractXMLObjectParser {
-
-    public static final String POPULATION_SIZE = "populationSize";
-    public static final String GROWTH_RATE = "growthRate";
-    public static final String SPIKE_SIZE = "spikeFactor";
-    public static final String TIME_OF_CATACLYSM = "timeOfCataclysm";
-
-    public static final String CATACLYSM_MODEL = "cataclysm";
-
-    public String getParserName() {
-        return CATACLYSM_MODEL;
-    }
-
-    public Object parseXMLObject(XMLObject xo) throws XMLParseException {
-
-        Units.Type units = XMLUnits.Utils.getUnitsAttr(xo);
-
-        XMLObject cxo = xo.getChild(POPULATION_SIZE);
-        Parameter N0Param = (Parameter) cxo.getChild(Parameter.class);
-
-        cxo = xo.getChild(GROWTH_RATE);
-        Parameter rParam = (Parameter) cxo.getChild(Parameter.class);
-
-        cxo = xo.getChild(SPIKE_SIZE);
-        Parameter N1Param = (Parameter) cxo.getChild(Parameter.class);
-
-        cxo = xo.getChild(TIME_OF_CATACLYSM);
-        Parameter tParam = (Parameter) cxo.getChild(Parameter.class);
-
-        return new CataclysmicDemographicModel(N0Param, N1Param, rParam, tParam, units);
-    }
-
-    //************************************************************************
-    // AbstractXMLObjectParser implementation
-    //************************************************************************
-
-    public String getParserDescription() {
-        return "A demographic model of exponential growth.";
-    }
-
-    public Class getReturnType() {
-        return CataclysmicDemographicModel.class;
-    }
-
-    public XMLSyntaxRule[] getSyntaxRules() {
-        return rules;
-    }
-
-    private final XMLSyntaxRule[] rules = {
-            new ElementRule(POPULATION_SIZE,
-                    new XMLSyntaxRule[]{new ElementRule(Parameter.class)}),
-            new ElementRule(GROWTH_RATE,
-                    new XMLSyntaxRule[]{new ElementRule(Parameter.class)},
-                    "The rate of exponential growth before the cataclysmic event."),
-            new ElementRule(SPIKE_SIZE,
-                    new XMLSyntaxRule[]{new ElementRule(Parameter.class)},
-                    "The factor larger the population size was at its height."),
-            new ElementRule(TIME_OF_CATACLYSM,
-                    new XMLSyntaxRule[]{new ElementRule(Parameter.class)},
-                    "The time of the cataclysmic event that lead to exponential decline."),
-            XMLUnits.SYNTAX_RULES[0]
-    };
-}
+/*
+ * MultiEpochExponentialModelParser.java
+ *
+ * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
+ *
+ * This file is part of BEAST.
+ * See the NOTICE file distributed with this work for additional
+ * information regarding copyright ownership and licensing.
+ *
+ * BEAST is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+ *
+ *  BEAST is distributed in the hope that it will be useful,
+ *  but WITHOUT ANY WARRANTY; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ *  GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with BEAST; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA  02110-1301  USA
+ */
+
+package dr.evomodelxml.coalescent;
+
+import dr.evolution.util.Units;
+import dr.evomodel.coalescent.ExponentialExponentialModel;
+import dr.evomodel.coalescent.MultiEpochExponentialModel;
+import dr.evoxml.util.XMLUnits;
+import dr.inference.model.Parameter;
+import dr.xml.*;
+
+/**
+ * @author Marc A. Suchard
+ */
+
+public class MultiEpochExponentialModelParser extends AbstractXMLObjectParser {
+
+    public static final String MULTI_EPOCH_EXPONENTIAL_MODEL = "multiEpochExponential";
+    public static final String POPULATION_SIZE = "populationSize";
+    public static final String TRANSITION_TIME = "transitionTime";
+    public static final String GROWTH_RATE = "growthRate";
+
+    public String getParserName() {
+        return MULTI_EPOCH_EXPONENTIAL_MODEL;
+    }
+
+    public Object parseXMLObject(XMLObject xo) throws XMLParseException {
+
+        Units.Type units = XMLUnits.Utils.getUnitsAttr(xo);
+
+        XMLObject cxo = xo.getChild(POPULATION_SIZE);
+        Parameter N0Param = (Parameter) cxo.getChild(Parameter.class);
+
+        cxo = xo.getChild(GROWTH_RATE);
+        Parameter growthParam = (Parameter) cxo.getChild(Parameter.class);
+
+        cxo = xo.getChild(TRANSITION_TIME);
+        Parameter timeParam = (Parameter) cxo.getChild(Parameter.class);
+
+        return new MultiEpochExponentialModel(xo.getId(), N0Param, growthParam, timeParam, units);
+    }
+
+    //************************************************************************
+    // AbstractXMLObjectParser implementation
+    //************************************************************************
+
+    public String getParserDescription() {
+        return "A demographic model of multi-phae exponential growth.";
+    }
+
+    public Class getReturnType() {
+        return ExponentialExponentialModel.class;
+    }
+
+    public XMLSyntaxRule[] getSyntaxRules() {
+        return rules;
+    }
+
+    private XMLSyntaxRule[] rules = new XMLSyntaxRule[]{
+            XMLUnits.SYNTAX_RULES[0],
+            new ElementRule(POPULATION_SIZE,
+                    new XMLSyntaxRule[]{new ElementRule(Parameter.class)}),
+            new ElementRule(GROWTH_RATE,
+                    new XMLSyntaxRule[]{new ElementRule(Parameter.class)}),
+            new ElementRule(TRANSITION_TIME,
+                    new XMLSyntaxRule[]{new ElementRule(Parameter.class)}),
+    };
+}
diff --git a/src/dr/evomodelxml/operators/LatentFactorHamiltonianMCParser.java b/src/dr/evomodelxml/operators/LatentFactorHamiltonianMCParser.java
new file mode 100644
index 0000000..a272929
--- /dev/null
+++ b/src/dr/evomodelxml/operators/LatentFactorHamiltonianMCParser.java
@@ -0,0 +1,65 @@
+package dr.evomodelxml.operators;
+
+import dr.evolution.tree.MultivariateTraitTree;
+import dr.evomodel.continuous.AbstractMultivariateTraitLikelihood;
+import dr.evomodel.continuous.FullyConjugateMultivariateTraitLikelihood;
+import dr.evomodel.operators.LatentFactorHamiltonianMC;
+import dr.inference.model.CompoundParameter;
+import dr.inference.model.LatentFactorModel;
+import dr.inference.operators.CoercionMode;
+import dr.xml.*;
+
+/**
+ * Created by max on 12/2/15.
+ */
+public class LatentFactorHamiltonianMCParser extends AbstractXMLObjectParser {
+    public static final String LATENT_FACTOR_MODEL_HAMILTONIAN_MC="LatentFactorHamiltonianMC";
+    public static final String WEIGHT="weight";
+    public static final String N_STEPS="nSteps";
+    public static final String STEP_SIZE="stepSize";
+    public static final String MOMENTUM_SD="momentumSd";
+    @Override
+    public Object parseXMLObject(XMLObject xo) throws XMLParseException {
+        LatentFactorModel lfm=(LatentFactorModel) xo.getChild(LatentFactorModel.class);
+        FullyConjugateMultivariateTraitLikelihood tree=(FullyConjugateMultivariateTraitLikelihood) xo.getChild(FullyConjugateMultivariateTraitLikelihood.class);
+        double weight=xo.getDoubleAttribute(WEIGHT);
+        CoercionMode mode=CoercionMode.parseMode(xo);
+        int nSteps=xo.getIntegerAttribute(N_STEPS);
+        double stepSize=xo.getDoubleAttribute(STEP_SIZE);
+        double momentumSd= xo.getDoubleAttribute(MOMENTUM_SD);
+
+
+        return new LatentFactorHamiltonianMC(lfm, tree, weight, mode, stepSize, nSteps, momentumSd);
+
+
+    }
+
+    @Override
+    public XMLSyntaxRule[] getSyntaxRules() {
+        return rules;
+    }
+
+    private static final XMLSyntaxRule[] rules = {
+            AttributeRule.newDoubleRule(WEIGHT),
+            AttributeRule.newDoubleRule(STEP_SIZE),
+            AttributeRule.newIntegerRule(N_STEPS),
+            AttributeRule.newDoubleRule(MOMENTUM_SD),
+            new ElementRule(LatentFactorModel.class),
+            new ElementRule(FullyConjugateMultivariateTraitLikelihood.class),
+    };
+
+    @Override
+    public String getParserDescription() {
+        return "Hamiltonian Monte Carlo For factors";
+    }
+
+    @Override
+    public Class getReturnType() {
+        return LatentFactorHamiltonianMC.class;
+    }
+
+    @Override
+    public String getParserName() {
+        return LATENT_FACTOR_MODEL_HAMILTONIAN_MC;
+    }
+}
diff --git a/src/dr/inferencexml/operators/MicrosatUpDownOperatorParser.java b/src/dr/evomodelxml/operators/MicrosatUpDownOperatorParser.java
similarity index 94%
rename from src/dr/inferencexml/operators/MicrosatUpDownOperatorParser.java
rename to src/dr/evomodelxml/operators/MicrosatUpDownOperatorParser.java
index 5a18098..885a795 100644
--- a/src/dr/inferencexml/operators/MicrosatUpDownOperatorParser.java
+++ b/src/dr/evomodelxml/operators/MicrosatUpDownOperatorParser.java
@@ -1,99 +1,102 @@
-/*
- * MicrosatUpDownOperatorParser.java
- *
- * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
- *
- * This file is part of BEAST.
- * See the NOTICE file distributed with this work for additional
- * information regarding copyright ownership and licensing.
- *
- * BEAST is free software; you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as
- * published by the Free Software Foundation; either version 2
- * of the License, or (at your option) any later version.
- *
- *  BEAST is distributed in the hope that it will be useful,
- *  but WITHOUT ANY WARRANTY; without even the implied warranty of
- *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
- *  GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public
- * License along with BEAST; if not, write to the
- * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
- * Boston, MA  02110-1301  USA
- */
-
-package dr.inferencexml.operators;
-
-import dr.inference.model.Parameter;
-import dr.inference.operators.*;
-import dr.xml.*;
-
-/**
- */
-public class MicrosatUpDownOperatorParser extends AbstractXMLObjectParser {
-
-    public static final String MICROSAT_UP_DOWN_OPERATOR = "microsatUpDownOperator";
-    public static final String UP = UpDownOperatorParser.UP;
-    public static final String DOWN = UpDownOperatorParser.DOWN;
-
-    public static final String SCALE_FACTOR = ScaleOperatorParser.SCALE_FACTOR;
-
-    public String getParserName() {
-        return MICROSAT_UP_DOWN_OPERATOR;
-    }
-
-    private Scalable.Default[] getArgs(final XMLObject list) throws XMLParseException {
-        Scalable.Default[] args = new Scalable.Default[list.getChildCount()];
-        for (int k = 0; k < list.getChildCount(); ++k) {
-            final Object child = list.getChild(k);
-            if (child instanceof Parameter) {
-                args[k] = new Scalable.Default((Parameter) child);
-            }
-
-        }
-        return args;
-    }
-
-    public Object parseXMLObject(XMLObject xo) throws XMLParseException {
-
-        final double scaleFactor = xo.getDoubleAttribute(SCALE_FACTOR);
-
-        final double weight = xo.getDoubleAttribute(MCMCOperator.WEIGHT);
-
-        final CoercionMode mode = CoercionMode.parseMode(xo);
-
-        final Scalable.Default[] upArgs = getArgs(xo.getChild(UP));
-        final Scalable.Default[] dnArgs = getArgs(xo.getChild(DOWN));
-
-        return new MicrosatUpDownOperator(upArgs, dnArgs, scaleFactor, weight, mode);
-    }
-
-    public String getParserDescription() {
-        return "This element represents an operator that scales two parameters in different directions. " +
-                "Each operation involves selecting a scale uniformly at random between scaleFactor and 1/scaleFactor. " +
-                "The up parameter is multipled by this scale and the down parameter is divided by this scale.";
-    }
-
-    public Class getReturnType() {
-        return MicrosatUpDownOperator.class;
-    }
-
-    public XMLSyntaxRule[] getSyntaxRules() {
-        return rules;
-    }
-
-    private final XMLSyntaxRule[] ee = {
-            new ElementRule(Parameter.class, true)
-    };
-
-    private final XMLSyntaxRule[] rules = {
-            AttributeRule.newDoubleRule(SCALE_FACTOR),
-            AttributeRule.newDoubleRule(MCMCOperator.WEIGHT),
-            AttributeRule.newBooleanRule(CoercableMCMCOperator.AUTO_OPTIMIZE, true),
-
-            // Allow an arbitrary number of Parameters or Scalables in up or down
-            new ElementRule(UP, ee, 1, Integer.MAX_VALUE),
-            new ElementRule(DOWN, ee, 1, Integer.MAX_VALUE),
-    };
-}
+/*
+ * MicrosatUpDownOperatorParser.java
+ *
+ * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
+ *
+ * This file is part of BEAST.
+ * See the NOTICE file distributed with this work for additional
+ * information regarding copyright ownership and licensing.
+ *
+ * BEAST is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+ *
+ *  BEAST is distributed in the hope that it will be useful,
+ *  but WITHOUT ANY WARRANTY; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ *  GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with BEAST; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA  02110-1301  USA
+ */
+
+package dr.evomodelxml.operators;
+
+import dr.evomodel.operators.MicrosatUpDownOperator;
+import dr.inference.model.Parameter;
+import dr.inference.operators.*;
+import dr.inferencexml.operators.ScaleOperatorParser;
+import dr.inferencexml.operators.UpDownOperatorParser;
+import dr.xml.*;
+
+/**
+ */
+public class MicrosatUpDownOperatorParser extends AbstractXMLObjectParser {
+
+    public static final String MICROSAT_UP_DOWN_OPERATOR = "microsatUpDownOperator";
+    public static final String UP = UpDownOperatorParser.UP;
+    public static final String DOWN = UpDownOperatorParser.DOWN;
+
+    public static final String SCALE_FACTOR = ScaleOperatorParser.SCALE_FACTOR;
+
+    public String getParserName() {
+        return MICROSAT_UP_DOWN_OPERATOR;
+    }
+
+    private Scalable.Default[] getArgs(final XMLObject list) throws XMLParseException {
+        Scalable.Default[] args = new Scalable.Default[list.getChildCount()];
+        for (int k = 0; k < list.getChildCount(); ++k) {
+            final Object child = list.getChild(k);
+            if (child instanceof Parameter) {
+                args[k] = new Scalable.Default((Parameter) child);
+            }
+
+        }
+        return args;
+    }
+
+    public Object parseXMLObject(XMLObject xo) throws XMLParseException {
+
+        final double scaleFactor = xo.getDoubleAttribute(SCALE_FACTOR);
+
+        final double weight = xo.getDoubleAttribute(MCMCOperator.WEIGHT);
+
+        final CoercionMode mode = CoercionMode.parseMode(xo);
+
+        final Scalable.Default[] upArgs = getArgs(xo.getChild(UP));
+        final Scalable.Default[] dnArgs = getArgs(xo.getChild(DOWN));
+
+        return new MicrosatUpDownOperator(upArgs, dnArgs, scaleFactor, weight, mode);
+    }
+
+    public String getParserDescription() {
+        return "This element represents an operator that scales two parameters in different directions. " +
+                "Each operation involves selecting a scale uniformly at random between scaleFactor and 1/scaleFactor. " +
+                "The up parameter is multipled by this scale and the down parameter is divided by this scale.";
+    }
+
+    public Class getReturnType() {
+        return MicrosatUpDownOperator.class;
+    }
+
+    public XMLSyntaxRule[] getSyntaxRules() {
+        return rules;
+    }
+
+    private final XMLSyntaxRule[] ee = {
+            new ElementRule(Parameter.class, true)
+    };
+
+    private final XMLSyntaxRule[] rules = {
+            AttributeRule.newDoubleRule(SCALE_FACTOR),
+            AttributeRule.newDoubleRule(MCMCOperator.WEIGHT),
+            AttributeRule.newBooleanRule(CoercableMCMCOperator.AUTO_OPTIMIZE, true),
+
+            // Allow an arbitrary number of Parameters or Scalables in up or down
+            new ElementRule(UP, ee, 1, Integer.MAX_VALUE),
+            new ElementRule(DOWN, ee, 1, Integer.MAX_VALUE),
+    };
+}
diff --git a/src/dr/inferencexml/operators/MicrosatelliteModelSelectOperatorParser.java b/src/dr/evomodelxml/operators/MicrosatelliteModelSelectOperatorParser.java
similarity index 94%
rename from src/dr/inferencexml/operators/MicrosatelliteModelSelectOperatorParser.java
rename to src/dr/evomodelxml/operators/MicrosatelliteModelSelectOperatorParser.java
index dee6a14..6edfbae 100644
--- a/src/dr/inferencexml/operators/MicrosatelliteModelSelectOperatorParser.java
+++ b/src/dr/evomodelxml/operators/MicrosatelliteModelSelectOperatorParser.java
@@ -1,73 +1,73 @@
-/*
- * MicrosatelliteModelSelectOperatorParser.java
- *
- * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
- *
- * This file is part of BEAST.
- * See the NOTICE file distributed with this work for additional
- * information regarding copyright ownership and licensing.
- *
- * BEAST is free software; you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as
- * published by the Free Software Foundation; either version 2
- * of the License, or (at your option) any later version.
- *
- *  BEAST is distributed in the hope that it will be useful,
- *  but WITHOUT ANY WARRANTY; without even the implied warranty of
- *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
- *  GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public
- * License along with BEAST; if not, write to the
- * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
- * Boston, MA  02110-1301  USA
- */
-
-package dr.inferencexml.operators;
-
-import dr.inference.model.Parameter;
-import dr.inference.operators.MCMCOperator;
-import dr.inference.operators.MicrosatelliteModelSelectOperator;
-import dr.xml.*;
-
-/**
- * Parser for MicrosatelliteModelSelectOperatorParser
- */
-public class MicrosatelliteModelSelectOperatorParser extends AbstractXMLObjectParser {
-
-    public static final String MODEL_INDICATORS = "modelIndicators";
-    public static final String MODEL_CHOOSE = "modelChoose";
-
-    public String getParserName() {
-        return "msatModelSelectOperator";
-    }
-         public Object parseXMLObject(XMLObject xo) throws XMLParseException {
-             double weight = xo.getDoubleAttribute(MCMCOperator.WEIGHT);
-        Parameter modelChoose = (Parameter)xo.getElementFirstChild(MODEL_CHOOSE);
-        XMLObject xoInd = xo.getChild(MODEL_INDICATORS);
-             int childNum = xoInd.getChildCount();
-        System.out.println("There are 12 potential models");
-        Parameter[] modelIndicators = new Parameter[childNum];
-        for(int i = 0; i < modelIndicators.length; i++){
-            modelIndicators[i] = (Parameter)xoInd.getChild(i);
-        }
-             return new MicrosatelliteModelSelectOperator(modelChoose, modelIndicators, weight);
-    }
-         //************************************************************************
-    // AbstractXMLObjectParser implementation
-    //************************************************************************
-         public String getParserDescription() {
-        return "This element returns a microsatellite averaging operator on a given parameter.";
-    }
-         public Class getReturnType() {
-        return MCMCOperator.class;
-    }
-         public XMLSyntaxRule[] getSyntaxRules() {
-        return rules;
-    }
-         private XMLSyntaxRule[] rules = new XMLSyntaxRule[]{
-            AttributeRule.newDoubleRule(MCMCOperator.WEIGHT),
-            new ElementRule(MODEL_CHOOSE, new XMLSyntaxRule[]{new ElementRule(Parameter.class)}),
-            new ElementRule(MODEL_INDICATORS, new XMLSyntaxRule[]{new ElementRule(Parameter.class,1,Integer.MAX_VALUE)}),
-    };
+/*
+ * MicrosatelliteModelSelectOperatorParser.java
+ *
+ * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
+ *
+ * This file is part of BEAST.
+ * See the NOTICE file distributed with this work for additional
+ * information regarding copyright ownership and licensing.
+ *
+ * BEAST is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+ *
+ *  BEAST is distributed in the hope that it will be useful,
+ *  but WITHOUT ANY WARRANTY; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ *  GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with BEAST; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA  02110-1301  USA
+ */
+
+package dr.evomodelxml.operators;
+
+import dr.inference.model.Parameter;
+import dr.inference.operators.MCMCOperator;
+import dr.evomodel.operators.MicrosatelliteModelSelectOperator;
+import dr.xml.*;
+
+/**
+ * Parser for MicrosatelliteModelSelectOperatorParser
+ */
+public class MicrosatelliteModelSelectOperatorParser extends AbstractXMLObjectParser {
+
+    public static final String MODEL_INDICATORS = "modelIndicators";
+    public static final String MODEL_CHOOSE = "modelChoose";
+
+    public String getParserName() {
+        return "msatModelSelectOperator";
+    }
+         public Object parseXMLObject(XMLObject xo) throws XMLParseException {
+             double weight = xo.getDoubleAttribute(MCMCOperator.WEIGHT);
+        Parameter modelChoose = (Parameter)xo.getElementFirstChild(MODEL_CHOOSE);
+        XMLObject xoInd = xo.getChild(MODEL_INDICATORS);
+             int childNum = xoInd.getChildCount();
+        System.out.println("There are 12 potential models");
+        Parameter[] modelIndicators = new Parameter[childNum];
+        for(int i = 0; i < modelIndicators.length; i++){
+            modelIndicators[i] = (Parameter)xoInd.getChild(i);
+        }
+             return new MicrosatelliteModelSelectOperator(modelChoose, modelIndicators, weight);
+    }
+         //************************************************************************
+    // AbstractXMLObjectParser implementation
+    //************************************************************************
+         public String getParserDescription() {
+        return "This element returns a microsatellite averaging operator on a given parameter.";
+    }
+         public Class getReturnType() {
+        return MCMCOperator.class;
+    }
+         public XMLSyntaxRule[] getSyntaxRules() {
+        return rules;
+    }
+         private XMLSyntaxRule[] rules = new XMLSyntaxRule[]{
+            AttributeRule.newDoubleRule(MCMCOperator.WEIGHT),
+            new ElementRule(MODEL_CHOOSE, new XMLSyntaxRule[]{new ElementRule(Parameter.class)}),
+            new ElementRule(MODEL_INDICATORS, new XMLSyntaxRule[]{new ElementRule(Parameter.class,1,Integer.MAX_VALUE)}),
+    };
 }
\ No newline at end of file
diff --git a/src/dr/inferencexml/operators/MsatBitFlipOperatorParser.java b/src/dr/evomodelxml/operators/MsatBitFlipOperatorParser.java
similarity index 95%
rename from src/dr/inferencexml/operators/MsatBitFlipOperatorParser.java
rename to src/dr/evomodelxml/operators/MsatBitFlipOperatorParser.java
index 3162b57..2ffb611 100644
--- a/src/dr/inferencexml/operators/MsatBitFlipOperatorParser.java
+++ b/src/dr/evomodelxml/operators/MsatBitFlipOperatorParser.java
@@ -1,85 +1,85 @@
-/*
- * MsatBitFlipOperatorParser.java
- *
- * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
- *
- * This file is part of BEAST.
- * See the NOTICE file distributed with this work for additional
- * information regarding copyright ownership and licensing.
- *
- * BEAST is free software; you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as
- * published by the Free Software Foundation; either version 2
- * of the License, or (at your option) any later version.
- *
- *  BEAST is distributed in the hope that it will be useful,
- *  but WITHOUT ANY WARRANTY; without even the implied warranty of
- *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
- *  GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public
- * License along with BEAST; if not, write to the
- * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
- * Boston, MA  02110-1301  USA
- */
-
-package dr.inferencexml.operators;
-
-import dr.xml.*;
-import dr.inference.model.Parameter;
-import dr.inference.operators.MCMCOperator;
-import dr.inference.operators.MsatBitFlipOperator;
-
-/**
- * @author Chieh-Hsi Wu
- *
- * Parser for MicrosatelliteAveragingOperatorParser
- */
-public class MsatBitFlipOperatorParser extends AbstractXMLObjectParser{
-    public static final String MODEL_CHOOSE = "modelChoose";
-    public static final String DEPENDENCIES = "dependencies";
-    public static final String VARIABLE_INDICES = "variableIndices";
-
-    public String getParserName() {
-        return "msatModelSwitchOperator";
-    }
-         public Object parseXMLObject(XMLObject xo) throws XMLParseException {
-             double weight = xo.getDoubleAttribute(MCMCOperator.WEIGHT);
-        Parameter modelChoose = (Parameter) xo.getElementFirstChild(MODEL_CHOOSE);
-        Parameter dependencies = (Parameter)xo.getElementFirstChild(DEPENDENCIES);
-        int[] variableIndices;
-            if(xo.hasChildNamed(VARIABLE_INDICES)){
-
-                double[] temp = ((Parameter)xo.getElementFirstChild(VARIABLE_INDICES)).getParameterValues();
-                variableIndices = new int[temp.length];
-                for(int i = 0; i < temp.length;i++){
-                    variableIndices[i] = (int)temp[i];
-                }
-
-            }else{
-                variableIndices = new int[]{0, 1, 2, 3, 4, 5};
-            }
-
-            return new MsatBitFlipOperator(modelChoose, dependencies, weight, variableIndices);
-    }
-         //************************************************************************
-    // AbstractXMLObjectParser implementation
-    //************************************************************************
-         public String getParserDescription() {
-        return "This element returns a microsatellite averaging operator on a given parameter.";
-    }
-         public Class getReturnType() {
-        return MCMCOperator.class;
-    }
-
-    public XMLSyntaxRule[] getSyntaxRules() {
-        return rules;
-    }
-
-    private XMLSyntaxRule[] rules = new XMLSyntaxRule[]{
-            AttributeRule.newDoubleRule(MCMCOperator.WEIGHT),
-            new ElementRule(MODEL_CHOOSE, new XMLSyntaxRule[]{new ElementRule(Parameter.class)}),
-            new ElementRule(DEPENDENCIES, new XMLSyntaxRule[]{new ElementRule(Parameter.class)}),
-            new ElementRule(VARIABLE_INDICES, new XMLSyntaxRule[]{new ElementRule(Parameter.class)},true)
-    };
-}
+/*
+ * MsatBitFlipOperatorParser.java
+ *
+ * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
+ *
+ * This file is part of BEAST.
+ * See the NOTICE file distributed with this work for additional
+ * information regarding copyright ownership and licensing.
+ *
+ * BEAST is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+ *
+ *  BEAST is distributed in the hope that it will be useful,
+ *  but WITHOUT ANY WARRANTY; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ *  GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with BEAST; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA  02110-1301  USA
+ */
+
+package dr.evomodelxml.operators;
+
+import dr.xml.*;
+import dr.inference.model.Parameter;
+import dr.inference.operators.MCMCOperator;
+import dr.evomodel.operators.MsatBitFlipOperator;
+
+/**
+ * @author Chieh-Hsi Wu
+ *
+ * Parser for MicrosatelliteAveragingOperatorParser
+ */
+public class MsatBitFlipOperatorParser extends AbstractXMLObjectParser{
+    public static final String MODEL_CHOOSE = "modelChoose";
+    public static final String DEPENDENCIES = "dependencies";
+    public static final String VARIABLE_INDICES = "variableIndices";
+
+    public String getParserName() {
+        return "msatModelSwitchOperator";
+    }
+         public Object parseXMLObject(XMLObject xo) throws XMLParseException {
+             double weight = xo.getDoubleAttribute(MCMCOperator.WEIGHT);
+        Parameter modelChoose = (Parameter) xo.getElementFirstChild(MODEL_CHOOSE);
+        Parameter dependencies = (Parameter)xo.getElementFirstChild(DEPENDENCIES);
+        int[] variableIndices;
+            if(xo.hasChildNamed(VARIABLE_INDICES)){
+
+                double[] temp = ((Parameter)xo.getElementFirstChild(VARIABLE_INDICES)).getParameterValues();
+                variableIndices = new int[temp.length];
+                for(int i = 0; i < temp.length;i++){
+                    variableIndices[i] = (int)temp[i];
+                }
+
+            }else{
+                variableIndices = new int[]{0, 1, 2, 3, 4, 5};
+            }
+
+            return new MsatBitFlipOperator(modelChoose, dependencies, weight, variableIndices);
+    }
+         //************************************************************************
+    // AbstractXMLObjectParser implementation
+    //************************************************************************
+         public String getParserDescription() {
+        return "This element returns a microsatellite averaging operator on a given parameter.";
+    }
+         public Class getReturnType() {
+        return MCMCOperator.class;
+    }
+
+    public XMLSyntaxRule[] getSyntaxRules() {
+        return rules;
+    }
+
+    private XMLSyntaxRule[] rules = new XMLSyntaxRule[]{
+            AttributeRule.newDoubleRule(MCMCOperator.WEIGHT),
+            new ElementRule(MODEL_CHOOSE, new XMLSyntaxRule[]{new ElementRule(Parameter.class)}),
+            new ElementRule(DEPENDENCIES, new XMLSyntaxRule[]{new ElementRule(Parameter.class)}),
+            new ElementRule(VARIABLE_INDICES, new XMLSyntaxRule[]{new ElementRule(Parameter.class)},true)
+    };
+}
diff --git a/src/dr/inferencexml/operators/MsatFullAncestryImportanceSamplingOperatorParser.java b/src/dr/evomodelxml/operators/MsatFullAncestryImportanceSamplingOperatorParser.java
similarity index 94%
rename from src/dr/inferencexml/operators/MsatFullAncestryImportanceSamplingOperatorParser.java
rename to src/dr/evomodelxml/operators/MsatFullAncestryImportanceSamplingOperatorParser.java
index ab02a48..d382cdb 100644
--- a/src/dr/inferencexml/operators/MsatFullAncestryImportanceSamplingOperatorParser.java
+++ b/src/dr/evomodelxml/operators/MsatFullAncestryImportanceSamplingOperatorParser.java
@@ -1,74 +1,74 @@
-/*
- * MsatFullAncestryImportanceSamplingOperatorParser.java
- *
- * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
- *
- * This file is part of BEAST.
- * See the NOTICE file distributed with this work for additional
- * information regarding copyright ownership and licensing.
- *
- * BEAST is free software; you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as
- * published by the Free Software Foundation; either version 2
- * of the License, or (at your option) any later version.
- *
- *  BEAST is distributed in the hope that it will be useful,
- *  but WITHOUT ANY WARRANTY; without even the implied warranty of
- *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
- *  GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public
- * License along with BEAST; if not, write to the
- * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
- * Boston, MA  02110-1301  USA
- */
-
-package dr.inferencexml.operators;
-
-import dr.xml.*;
-import dr.inference.operators.MsatFullAncestryImportanceSamplingOperator;
-import dr.inference.operators.MCMCOperator;
-import dr.inference.model.Parameter;
-import dr.evomodel.tree.MicrosatelliteSamplerTreeModel;
-import dr.evomodel.substmodel.MicrosatelliteModel;
-import dr.evomodel.branchratemodel.BranchRateModel;
-
-/**
- * @author Chieh-Hsi Wu
- *
- * Parser for MsatFullAncestryGibbsOperator
- */
-public class MsatFullAncestryImportanceSamplingOperatorParser extends AbstractXMLObjectParser {
-    public String getParserName(){
-        return MsatFullAncestryImportanceSamplingOperator.MSAT_FULL_ANCESTRY_IMPORTANCE_SAMPLING_OPERATOR;
-    }
-
-    public Object parseXMLObject(XMLObject xo) throws XMLParseException {
-        final double weight = xo.getDoubleAttribute(MCMCOperator.WEIGHT);
-        final Parameter parameter = (Parameter)xo.getChild(Parameter.class);
-        final MicrosatelliteSamplerTreeModel msatSamplerTreeModel = (MicrosatelliteSamplerTreeModel)xo.getChild(MicrosatelliteSamplerTreeModel.class);
-        final MicrosatelliteModel msatModel = (MicrosatelliteModel)xo.getChild(MicrosatelliteModel.class);
-        final BranchRateModel branchRateModel = (BranchRateModel)xo.getChild(BranchRateModel.class);
-
-        return new MsatFullAncestryImportanceSamplingOperator(parameter, msatSamplerTreeModel, msatModel, branchRateModel,weight);
-    }
-
-    public String getParserDescription() {
-        return "This element represents an operator that samples the full ancestry given a microsatellite pattern and a tree";
-    }
-
-    public Class getReturnType(){
-        return MsatFullAncestryImportanceSamplingOperator.class;
-    }
-
-    public XMLSyntaxRule[] getSyntaxRules() {
-        return rules;
-    }
-    private XMLSyntaxRule[] rules = new XMLSyntaxRule[]{
-            AttributeRule.newDoubleRule(MCMCOperator.WEIGHT),
-            new ElementRule(Parameter.class),
-            new ElementRule(MicrosatelliteSamplerTreeModel.class),
-            new ElementRule(MicrosatelliteModel.class),
-            new ElementRule(BranchRateModel.class)
-    };
-}
+/*
+ * MsatFullAncestryImportanceSamplingOperatorParser.java
+ *
+ * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
+ *
+ * This file is part of BEAST.
+ * See the NOTICE file distributed with this work for additional
+ * information regarding copyright ownership and licensing.
+ *
+ * BEAST is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+ *
+ *  BEAST is distributed in the hope that it will be useful,
+ *  but WITHOUT ANY WARRANTY; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ *  GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with BEAST; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA  02110-1301  USA
+ */
+
+package dr.evomodelxml.operators;
+
+import dr.xml.*;
+import dr.evomodel.operators.MsatFullAncestryImportanceSamplingOperator;
+import dr.inference.operators.MCMCOperator;
+import dr.inference.model.Parameter;
+import dr.evomodel.tree.MicrosatelliteSamplerTreeModel;
+import dr.evomodel.substmodel.MicrosatelliteModel;
+import dr.evomodel.branchratemodel.BranchRateModel;
+
+/**
+ * @author Chieh-Hsi Wu
+ *
+ * Parser for MsatFullAncestryGibbsOperator
+ */
+public class MsatFullAncestryImportanceSamplingOperatorParser extends AbstractXMLObjectParser {
+    public String getParserName(){
+        return MsatFullAncestryImportanceSamplingOperator.MSAT_FULL_ANCESTRY_IMPORTANCE_SAMPLING_OPERATOR;
+    }
+
+    public Object parseXMLObject(XMLObject xo) throws XMLParseException {
+        final double weight = xo.getDoubleAttribute(MCMCOperator.WEIGHT);
+        final Parameter parameter = (Parameter)xo.getChild(Parameter.class);
+        final MicrosatelliteSamplerTreeModel msatSamplerTreeModel = (MicrosatelliteSamplerTreeModel)xo.getChild(MicrosatelliteSamplerTreeModel.class);
+        final MicrosatelliteModel msatModel = (MicrosatelliteModel)xo.getChild(MicrosatelliteModel.class);
+        final BranchRateModel branchRateModel = (BranchRateModel)xo.getChild(BranchRateModel.class);
+
+        return new MsatFullAncestryImportanceSamplingOperator(parameter, msatSamplerTreeModel, msatModel, branchRateModel,weight);
+    }
+
+    public String getParserDescription() {
+        return "This element represents an operator that samples the full ancestry given a microsatellite pattern and a tree";
+    }
+
+    public Class getReturnType(){
+        return MsatFullAncestryImportanceSamplingOperator.class;
+    }
+
+    public XMLSyntaxRule[] getSyntaxRules() {
+        return rules;
+    }
+    private XMLSyntaxRule[] rules = new XMLSyntaxRule[]{
+            AttributeRule.newDoubleRule(MCMCOperator.WEIGHT),
+            new ElementRule(Parameter.class),
+            new ElementRule(MicrosatelliteSamplerTreeModel.class),
+            new ElementRule(MicrosatelliteModel.class),
+            new ElementRule(BranchRateModel.class)
+    };
+}
diff --git a/src/dr/inferencexml/operators/MsatSingleAncestralStateGibbsOperatorParser.java b/src/dr/evomodelxml/operators/MsatSingleAncestralStateGibbsOperatorParser.java
similarity index 94%
rename from src/dr/inferencexml/operators/MsatSingleAncestralStateGibbsOperatorParser.java
rename to src/dr/evomodelxml/operators/MsatSingleAncestralStateGibbsOperatorParser.java
index fb64019..26635e6 100644
--- a/src/dr/inferencexml/operators/MsatSingleAncestralStateGibbsOperatorParser.java
+++ b/src/dr/evomodelxml/operators/MsatSingleAncestralStateGibbsOperatorParser.java
@@ -1,72 +1,72 @@
-/*
- * MsatSingleAncestralStateGibbsOperatorParser.java
- *
- * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
- *
- * This file is part of BEAST.
- * See the NOTICE file distributed with this work for additional
- * information regarding copyright ownership and licensing.
- *
- * BEAST is free software; you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as
- * published by the Free Software Foundation; either version 2
- * of the License, or (at your option) any later version.
- *
- *  BEAST is distributed in the hope that it will be useful,
- *  but WITHOUT ANY WARRANTY; without even the implied warranty of
- *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
- *  GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public
- * License along with BEAST; if not, write to the
- * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
- * Boston, MA  02110-1301  USA
- */
-
-package dr.inferencexml.operators;
-
-import dr.inference.operators.MCMCOperator;
-import dr.inference.operators.MsatSingleAncestralStateGibbsOperator;
-import dr.inference.model.Parameter;
-import dr.xml.*;
-import dr.evomodel.tree.MicrosatelliteSamplerTreeModel;
-import dr.evomodel.substmodel.MicrosatelliteModel;
-import dr.evomodel.branchratemodel.BranchRateModel;
-
-/**
- * @author Chieh-Hsi Wu
- */
-public class MsatSingleAncestralStateGibbsOperatorParser extends AbstractXMLObjectParser {
-    public String getParserName(){
-        return MsatSingleAncestralStateGibbsOperator.MSAT_SINGLE_ANCESTAL_STATE_GIBBS_OPERATOR;
-    }
-
-    public Object parseXMLObject(XMLObject xo) throws XMLParseException {
-        final double weight = xo.getDoubleAttribute(MCMCOperator.WEIGHT);
-        final Parameter parameter = (Parameter)xo.getChild(Parameter.class);
-        final MicrosatelliteSamplerTreeModel msatSamplerTreeModel = (MicrosatelliteSamplerTreeModel)xo.getChild(MicrosatelliteSamplerTreeModel.class);
-        final MicrosatelliteModel msatModel = (MicrosatelliteModel)xo.getChild(MicrosatelliteModel.class);
-        final BranchRateModel branchRateModel = (BranchRateModel)xo.getChild(BranchRateModel.class);
-
-        return new MsatSingleAncestralStateGibbsOperator(parameter, msatSamplerTreeModel, msatModel, branchRateModel,weight);
-    }
-
-    public String getParserDescription() {
-        return "This element represents an operator that samples the state of a single ancestor given a microsatellite pattern and a tree";
-    }
-
-    public Class getReturnType(){
-        return MsatSingleAncestralStateGibbsOperator.class;
-    }
-
-    public XMLSyntaxRule[] getSyntaxRules() {
-        return rules;
-    }
-    private XMLSyntaxRule[] rules = new XMLSyntaxRule[]{
-            AttributeRule.newDoubleRule(MCMCOperator.WEIGHT),
-            new ElementRule(Parameter.class),
-            new ElementRule(MicrosatelliteSamplerTreeModel.class),
-            new ElementRule(MicrosatelliteModel.class),
-            new ElementRule(BranchRateModel.class)
-    };
-}
+/*
+ * MsatSingleAncestralStateGibbsOperatorParser.java
+ *
+ * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
+ *
+ * This file is part of BEAST.
+ * See the NOTICE file distributed with this work for additional
+ * information regarding copyright ownership and licensing.
+ *
+ * BEAST is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+ *
+ *  BEAST is distributed in the hope that it will be useful,
+ *  but WITHOUT ANY WARRANTY; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ *  GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with BEAST; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA  02110-1301  USA
+ */
+
+package dr.evomodelxml.operators;
+
+import dr.inference.operators.MCMCOperator;
+import dr.evomodel.operators.MsatSingleAncestralStateGibbsOperator;
+import dr.inference.model.Parameter;
+import dr.xml.*;
+import dr.evomodel.tree.MicrosatelliteSamplerTreeModel;
+import dr.evomodel.substmodel.MicrosatelliteModel;
+import dr.evomodel.branchratemodel.BranchRateModel;
+
+/**
+ * @author Chieh-Hsi Wu
+ */
+public class MsatSingleAncestralStateGibbsOperatorParser extends AbstractXMLObjectParser {
+    public String getParserName(){
+        return MsatSingleAncestralStateGibbsOperator.MSAT_SINGLE_ANCESTAL_STATE_GIBBS_OPERATOR;
+    }
+
+    public Object parseXMLObject(XMLObject xo) throws XMLParseException {
+        final double weight = xo.getDoubleAttribute(MCMCOperator.WEIGHT);
+        final Parameter parameter = (Parameter)xo.getChild(Parameter.class);
+        final MicrosatelliteSamplerTreeModel msatSamplerTreeModel = (MicrosatelliteSamplerTreeModel)xo.getChild(MicrosatelliteSamplerTreeModel.class);
+        final MicrosatelliteModel msatModel = (MicrosatelliteModel)xo.getChild(MicrosatelliteModel.class);
+        final BranchRateModel branchRateModel = (BranchRateModel)xo.getChild(BranchRateModel.class);
+
+        return new MsatSingleAncestralStateGibbsOperator(parameter, msatSamplerTreeModel, msatModel, branchRateModel,weight);
+    }
+
+    public String getParserDescription() {
+        return "This element represents an operator that samples the state of a single ancestor given a microsatellite pattern and a tree";
+    }
+
+    public Class getReturnType(){
+        return MsatSingleAncestralStateGibbsOperator.class;
+    }
+
+    public XMLSyntaxRule[] getSyntaxRules() {
+        return rules;
+    }
+    private XMLSyntaxRule[] rules = new XMLSyntaxRule[]{
+            AttributeRule.newDoubleRule(MCMCOperator.WEIGHT),
+            new ElementRule(Parameter.class),
+            new ElementRule(MicrosatelliteSamplerTreeModel.class),
+            new ElementRule(MicrosatelliteModel.class),
+            new ElementRule(BranchRateModel.class)
+    };
+}
diff --git a/src/dr/inferencexml/operators/RandomWalkIntegerNodeHeightWeightedOperatorParser.java b/src/dr/evomodelxml/operators/RandomWalkIntegerNodeHeightWeightedOperatorParser.java
similarity index 92%
rename from src/dr/inferencexml/operators/RandomWalkIntegerNodeHeightWeightedOperatorParser.java
rename to src/dr/evomodelxml/operators/RandomWalkIntegerNodeHeightWeightedOperatorParser.java
index 8aa09ca..c507e29 100644
--- a/src/dr/inferencexml/operators/RandomWalkIntegerNodeHeightWeightedOperatorParser.java
+++ b/src/dr/evomodelxml/operators/RandomWalkIntegerNodeHeightWeightedOperatorParser.java
@@ -1,84 +1,83 @@
-/*
- * RandomWalkIntegerNodeHeightWeightedOperatorParser.java
- *
- * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
- *
- * This file is part of BEAST.
- * See the NOTICE file distributed with this work for additional
- * information regarding copyright ownership and licensing.
- *
- * BEAST is free software; you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as
- * published by the Free Software Foundation; either version 2
- * of the License, or (at your option) any later version.
- *
- *  BEAST is distributed in the hope that it will be useful,
- *  but WITHOUT ANY WARRANTY; without even the implied warranty of
- *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
- *  GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public
- * License along with BEAST; if not, write to the
- * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
- * Boston, MA  02110-1301  USA
- */
-
-package dr.inferencexml.operators;
-
-import dr.xml.*;
-import dr.inference.operators.MCMCOperator;
-import dr.inference.operators.RandomWalkIntegerOperator;
-import dr.inference.operators.RandomWalkIntegerNodeHeightWeightedOperator;
-import dr.inference.model.Parameter;
-
-/**
- * @author Chieh-Hsi Wu
- *
- * The parser for random walk integer node height weighted operator.
- */
-public class RandomWalkIntegerNodeHeightWeightedOperatorParser extends AbstractXMLObjectParser {
-
-    public static final String RANDOM_WALK_INT_NODE_HEIGHT_WGT_OP = "randomWalkIntegerNodeHeightWeightedOperator";
-
-    public static final String WINDOW_SIZE = "windowSize";
-    public static final String INTERNAL_NODE_HEIGHTS = "internalNodeHeights";
-
-    public String getParserName() {
-        return RANDOM_WALK_INT_NODE_HEIGHT_WGT_OP;
-    }
-
-    public Object parseXMLObject(XMLObject xo) throws XMLParseException {
-
-        double weight = xo.getDoubleAttribute(MCMCOperator.WEIGHT);
-
-        double d = xo.getDoubleAttribute(WINDOW_SIZE);
-        if (d != Math.floor(d)) {
-            throw new XMLParseException("The window size of a " + RANDOM_WALK_INT_NODE_HEIGHT_WGT_OP + " should be an integer");
-        }
-
-        int windowSize = (int)d;
-        Parameter parameter = (Parameter) xo.getChild(Parameter.class);
-        Parameter internalNodeHeights = (Parameter)xo.getElementFirstChild(INTERNAL_NODE_HEIGHTS);
-        
-        return new RandomWalkIntegerNodeHeightWeightedOperator(parameter, windowSize, weight, internalNodeHeights);
-    }
-
-    public String getParserDescription() {
-        return "This element returns a random walk node height weighted operator on a given parameter.";
-    }
-
-    public Class getReturnType() {
-        return RandomWalkIntegerNodeHeightWeightedOperator.class;
-    }
-
-    public XMLSyntaxRule[] getSyntaxRules() {
-        return rules;
-    }
-
-    private XMLSyntaxRule[] rules = new XMLSyntaxRule[]{
-            AttributeRule.newDoubleRule(WINDOW_SIZE),
-            AttributeRule.newDoubleRule(MCMCOperator.WEIGHT),
-            new ElementRule(Parameter.class),
-            new ElementRule(INTERNAL_NODE_HEIGHTS, new XMLSyntaxRule[]{new ElementRule(Parameter.class)})
-    };
-}
+/*
+ * RandomWalkIntegerNodeHeightWeightedOperatorParser.java
+ *
+ * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
+ *
+ * This file is part of BEAST.
+ * See the NOTICE file distributed with this work for additional
+ * information regarding copyright ownership and licensing.
+ *
+ * BEAST is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+ *
+ *  BEAST is distributed in the hope that it will be useful,
+ *  but WITHOUT ANY WARRANTY; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ *  GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with BEAST; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA  02110-1301  USA
+ */
+
+package dr.evomodelxml.operators;
+
+import dr.xml.*;
+import dr.inference.operators.MCMCOperator;
+import dr.evomodel.operators.RandomWalkIntegerNodeHeightWeightedOperator;
+import dr.inference.model.Parameter;
+
+/**
+ * @author Chieh-Hsi Wu
+ *
+ * The parser for random walk integer node height weighted operator.
+ */
+public class RandomWalkIntegerNodeHeightWeightedOperatorParser extends AbstractXMLObjectParser {
+
+    public static final String RANDOM_WALK_INT_NODE_HEIGHT_WGT_OP = "randomWalkIntegerNodeHeightWeightedOperator";
+
+    public static final String WINDOW_SIZE = "windowSize";
+    public static final String INTERNAL_NODE_HEIGHTS = "internalNodeHeights";
+
+    public String getParserName() {
+        return RANDOM_WALK_INT_NODE_HEIGHT_WGT_OP;
+    }
+
+    public Object parseXMLObject(XMLObject xo) throws XMLParseException {
+
+        double weight = xo.getDoubleAttribute(MCMCOperator.WEIGHT);
+
+        double d = xo.getDoubleAttribute(WINDOW_SIZE);
+        if (d != Math.floor(d)) {
+            throw new XMLParseException("The window size of a " + RANDOM_WALK_INT_NODE_HEIGHT_WGT_OP + " should be an integer");
+        }
+
+        int windowSize = (int)d;
+        Parameter parameter = (Parameter) xo.getChild(Parameter.class);
+        Parameter internalNodeHeights = (Parameter)xo.getElementFirstChild(INTERNAL_NODE_HEIGHTS);
+        
+        return new RandomWalkIntegerNodeHeightWeightedOperator(parameter, windowSize, weight, internalNodeHeights);
+    }
+
+    public String getParserDescription() {
+        return "This element returns a random walk node height weighted operator on a given parameter.";
+    }
+
+    public Class getReturnType() {
+        return RandomWalkIntegerNodeHeightWeightedOperator.class;
+    }
+
+    public XMLSyntaxRule[] getSyntaxRules() {
+        return rules;
+    }
+
+    private XMLSyntaxRule[] rules = new XMLSyntaxRule[]{
+            AttributeRule.newDoubleRule(WINDOW_SIZE),
+            AttributeRule.newDoubleRule(MCMCOperator.WEIGHT),
+            new ElementRule(Parameter.class),
+            new ElementRule(INTERNAL_NODE_HEIGHTS, new XMLSyntaxRule[]{new ElementRule(Parameter.class)})
+    };
+}
diff --git a/src/dr/inferencexml/operators/RandomWalkIntegerSetSizeWeightedOperatorParser.java b/src/dr/evomodelxml/operators/RandomWalkIntegerSetSizeWeightedOperatorParser.java
similarity index 94%
rename from src/dr/inferencexml/operators/RandomWalkIntegerSetSizeWeightedOperatorParser.java
rename to src/dr/evomodelxml/operators/RandomWalkIntegerSetSizeWeightedOperatorParser.java
index 27860af..6ac3dee 100644
--- a/src/dr/inferencexml/operators/RandomWalkIntegerSetSizeWeightedOperatorParser.java
+++ b/src/dr/evomodelxml/operators/RandomWalkIntegerSetSizeWeightedOperatorParser.java
@@ -1,87 +1,87 @@
-/*
- * RandomWalkIntegerSetSizeWeightedOperatorParser.java
- *
- * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
- *
- * This file is part of BEAST.
- * See the NOTICE file distributed with this work for additional
- * information regarding copyright ownership and licensing.
- *
- * BEAST is free software; you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as
- * published by the Free Software Foundation; either version 2
- * of the License, or (at your option) any later version.
- *
- *  BEAST is distributed in the hope that it will be useful,
- *  but WITHOUT ANY WARRANTY; without even the implied warranty of
- *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
- *  GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public
- * License along with BEAST; if not, write to the
- * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
- * Boston, MA  02110-1301  USA
- */
-
-package dr.inferencexml.operators;
-
-import dr.xml.*;
-import dr.inference.operators.MCMCOperator;
-import dr.inference.operators.RandomWalkIntegerSetSizeWeightedOperator;
-import dr.inference.model.Parameter;
-import dr.evomodel.tree.MicrosatelliteSamplerTreeModel;
-
-/**
- * @author Chieh-Hsi Wu
- *
- * This is the parser for the random walk integer set size weighted operator.
- */
-public class RandomWalkIntegerSetSizeWeightedOperatorParser extends AbstractXMLObjectParser {
-
-    public static final String RANDOM_WALK_INT_SET_SIZE_WGT_OP = "randomWalkIntegerSetSizeWeightedOperator";
-
-    public static final String WINDOW_SIZE = "windowSize";
-    public static final String BASE_SET_SIZE = "baseSetSize";
-
-    public String getParserName() {
-        return RANDOM_WALK_INT_SET_SIZE_WGT_OP;
-    }
-
-    public Object parseXMLObject(XMLObject xo) throws XMLParseException {
-
-        double weight = xo.getDoubleAttribute(MCMCOperator.WEIGHT);
-
-        double d = xo.getDoubleAttribute(WINDOW_SIZE);
-        if (d != Math.floor(d)) {
-            throw new XMLParseException("The window size of a " + RANDOM_WALK_INT_SET_SIZE_WGT_OP + " should be an integer");
-        }
-
-        double baseSetSize = xo.getDoubleAttribute(BASE_SET_SIZE);
-
-        int windowSize = (int)d;
-        Parameter parameter = (Parameter) xo.getChild(Parameter.class);
-        MicrosatelliteSamplerTreeModel msatSampleTreeModel = (MicrosatelliteSamplerTreeModel)xo.getChild(MicrosatelliteSamplerTreeModel.class);
-
-        return new RandomWalkIntegerSetSizeWeightedOperator(parameter, windowSize, weight, msatSampleTreeModel, baseSetSize);
-    }
-
-    public String getParserDescription() {
-        return "This element returns a random walk set size weighted operator on a given parameter.";
-    }
-
-    public Class getReturnType() {
-        return RandomWalkIntegerSetSizeWeightedOperator.class;
-    }
-
-    public XMLSyntaxRule[] getSyntaxRules() {
-        return rules;
-    }
-
-    private XMLSyntaxRule[] rules = new XMLSyntaxRule[]{
-            AttributeRule.newDoubleRule(WINDOW_SIZE),
-            AttributeRule.newDoubleRule(MCMCOperator.WEIGHT),
-            AttributeRule.newDoubleRule(BASE_SET_SIZE),
-            new ElementRule(Parameter.class),
-            new ElementRule(MicrosatelliteSamplerTreeModel.class)
-    };
+/*
+ * RandomWalkIntegerSetSizeWeightedOperatorParser.java
+ *
+ * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
+ *
+ * This file is part of BEAST.
+ * See the NOTICE file distributed with this work for additional
+ * information regarding copyright ownership and licensing.
+ *
+ * BEAST is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+ *
+ *  BEAST is distributed in the hope that it will be useful,
+ *  but WITHOUT ANY WARRANTY; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ *  GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with BEAST; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA  02110-1301  USA
+ */
+
+package dr.evomodelxml.operators;
+
+import dr.xml.*;
+import dr.inference.operators.MCMCOperator;
+import dr.evomodel.operators.RandomWalkIntegerSetSizeWeightedOperator;
+import dr.inference.model.Parameter;
+import dr.evomodel.tree.MicrosatelliteSamplerTreeModel;
+
+/**
+ * @author Chieh-Hsi Wu
+ *
+ * This is the parser for the random walk integer set size weighted operator.
+ */
+public class RandomWalkIntegerSetSizeWeightedOperatorParser extends AbstractXMLObjectParser {
+
+    public static final String RANDOM_WALK_INT_SET_SIZE_WGT_OP = "randomWalkIntegerSetSizeWeightedOperator";
+
+    public static final String WINDOW_SIZE = "windowSize";
+    public static final String BASE_SET_SIZE = "baseSetSize";
+
+    public String getParserName() {
+        return RANDOM_WALK_INT_SET_SIZE_WGT_OP;
+    }
+
+    public Object parseXMLObject(XMLObject xo) throws XMLParseException {
+
+        double weight = xo.getDoubleAttribute(MCMCOperator.WEIGHT);
+
+        double d = xo.getDoubleAttribute(WINDOW_SIZE);
+        if (d != Math.floor(d)) {
+            throw new XMLParseException("The window size of a " + RANDOM_WALK_INT_SET_SIZE_WGT_OP + " should be an integer");
+        }
+
+        double baseSetSize = xo.getDoubleAttribute(BASE_SET_SIZE);
+
+        int windowSize = (int)d;
+        Parameter parameter = (Parameter) xo.getChild(Parameter.class);
+        MicrosatelliteSamplerTreeModel msatSampleTreeModel = (MicrosatelliteSamplerTreeModel)xo.getChild(MicrosatelliteSamplerTreeModel.class);
+
+        return new RandomWalkIntegerSetSizeWeightedOperator(parameter, windowSize, weight, msatSampleTreeModel, baseSetSize);
+    }
+
+    public String getParserDescription() {
+        return "This element returns a random walk set size weighted operator on a given parameter.";
+    }
+
+    public Class getReturnType() {
+        return RandomWalkIntegerSetSizeWeightedOperator.class;
+    }
+
+    public XMLSyntaxRule[] getSyntaxRules() {
+        return rules;
+    }
+
+    private XMLSyntaxRule[] rules = new XMLSyntaxRule[]{
+            AttributeRule.newDoubleRule(WINDOW_SIZE),
+            AttributeRule.newDoubleRule(MCMCOperator.WEIGHT),
+            AttributeRule.newDoubleRule(BASE_SET_SIZE),
+            new ElementRule(Parameter.class),
+            new ElementRule(MicrosatelliteSamplerTreeModel.class)
+    };
 }
\ No newline at end of file
diff --git a/src/dr/evomodelxml/operators/SubtreeLeapOperatorParser.java b/src/dr/evomodelxml/operators/SubtreeLeapOperatorParser.java
new file mode 100644
index 0000000..2a19270
--- /dev/null
+++ b/src/dr/evomodelxml/operators/SubtreeLeapOperatorParser.java
@@ -0,0 +1,83 @@
+/*
+ * SubtreeLeapOperatorParser.java
+ *
+ * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
+ *
+ * This file is part of BEAST.
+ * See the NOTICE file distributed with this work for additional
+ * information regarding copyright ownership and licensing.
+ *
+ * BEAST is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+ *
+ *  BEAST is distributed in the hope that it will be useful,
+ *  but WITHOUT ANY WARRANTY; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ *  GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with BEAST; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA  02110-1301  USA
+ */
+
+package dr.evomodelxml.operators;
+
+import dr.evomodel.operators.SubtreeLeapOperator;
+import dr.evomodel.tree.TreeModel;
+import dr.inference.operators.CoercableMCMCOperator;
+import dr.inference.operators.CoercionMode;
+import dr.inference.operators.MCMCOperator;
+import dr.xml.*;
+
+/**
+ */
+public class SubtreeLeapOperatorParser extends AbstractXMLObjectParser {
+
+    public static final String SUBTREE_LEAP = "subtreeLeap";
+
+    public String getParserName() {
+        return SUBTREE_LEAP;
+    }
+
+    public Object parseXMLObject(XMLObject xo) throws XMLParseException {
+
+        CoercionMode mode = CoercionMode.parseMode(xo);
+
+        TreeModel treeModel = (TreeModel) xo.getChild(TreeModel.class);
+        final double weight = xo.getDoubleAttribute(MCMCOperator.WEIGHT);
+
+        final double size = xo.getAttribute("size", 1.0);
+
+        if (Double.isInfinite(size) || size <= 0.0) {
+            throw new XMLParseException("size attribute must be positive and not infinite. was " + size +
+           " for tree " + treeModel.getId() );
+        }
+
+        SubtreeLeapOperator operator = new SubtreeLeapOperator(treeModel, weight, size, mode);
+
+        return operator;
+    }
+
+    public String getParserDescription() {
+        return "An operator that moves subtree a certain distance.";
+    }
+
+    public Class getReturnType() {
+        return SubtreeLeapOperator.class;
+    }
+
+    public XMLSyntaxRule[] getSyntaxRules() {
+        return rules;
+    }
+
+    private final XMLSyntaxRule[] rules = {
+            AttributeRule.newDoubleRule(MCMCOperator.WEIGHT),
+            AttributeRule.newDoubleRule("size", true),
+            AttributeRule.newBooleanRule(CoercableMCMCOperator.AUTO_OPTIMIZE, true),
+            new ElementRule(TreeModel.class)
+    };
+
+}
diff --git a/src/dr/evomodelxml/treelikelihood/TreeTraitParserUtilities.java b/src/dr/evomodelxml/treelikelihood/TreeTraitParserUtilities.java
index 8fabd51..9804bff 100644
--- a/src/dr/evomodelxml/treelikelihood/TreeTraitParserUtilities.java
+++ b/src/dr/evomodelxml/treelikelihood/TreeTraitParserUtilities.java
@@ -26,10 +26,7 @@
 package dr.evomodelxml.treelikelihood;
 
 import dr.evolution.tree.MultivariateTraitTree;
-import dr.inference.model.CompoundParameter;
-import dr.inference.model.MatrixParameter;
-import dr.inference.model.Parameter;
-import dr.inference.model.ParameterParser;
+import dr.inference.model.*;
 import dr.math.MathUtils;
 import dr.xml.*;
 
@@ -258,7 +255,7 @@ public class TreeTraitParserUtilities {
         List<Integer> missingIndices = null;
 
         boolean isMatrixParameter = false;
-        if (parameter instanceof MatrixParameter) {
+        if (parameter instanceof MatrixParameter || parameter instanceof FastMatrixParameter) {
             traitParameter = (CompoundParameter) parameter;
             isMatrixParameter = true;
         } else
diff --git a/src/dr/evoxml/AttributePatternsParser.java b/src/dr/evoxml/AttributePatternsParser.java
index 8a38c46..0d548d5 100644
--- a/src/dr/evoxml/AttributePatternsParser.java
+++ b/src/dr/evoxml/AttributePatternsParser.java
@@ -27,6 +27,8 @@ package dr.evoxml;
 
 import dr.evolution.alignment.PatternList;
 import dr.evolution.alignment.Patterns;
+import dr.evolution.alignment.SimpleSiteList;
+import dr.evolution.alignment.SitePatterns;
 import dr.evolution.datatype.DataType;
 import dr.evolution.util.Taxon;
 import dr.evolution.util.TaxonList;
@@ -62,7 +64,8 @@ public class AttributePatternsParser extends AbstractXMLObjectParser {
             throw new XMLParseException("dataType expected for attributePatterns element");
         }
 
-        Patterns patterns = new Patterns(dataType, taxa);
+        // using a SimpleSiteList rather than Patterns to allow ancestral reconstruction
+        SimpleSiteList patterns = new SimpleSiteList(dataType, taxa);
 
         int[] pattern = new int[taxa.getTaxonCount()];
 
diff --git a/src/dr/evoxml/MicrosatellitePatternParser.java b/src/dr/evoxml/MicrosatellitePatternParser.java
index bdf89a7..6e2df2a 100644
--- a/src/dr/evoxml/MicrosatellitePatternParser.java
+++ b/src/dr/evoxml/MicrosatellitePatternParser.java
@@ -58,11 +58,18 @@ public class MicrosatellitePatternParser extends AbstractXMLObjectParser {
 
         Microsatellite microsatellite = (Microsatellite)xo.getChild(Microsatellite.class);
 
-        String[] strLengths = ((String)xo.getElementFirstChild(MICROSAT_SEQ)).split(",");
+        String[] strLengths = ((String) xo.getElementFirstChild(MICROSAT_SEQ)).split(",");
         int[] pattern = new int[strLengths.length];
-        for(int i = 0; i < strLengths.length; i++){
-            pattern[i] = microsatellite.getState(strLengths[i]);
+        try {
+            for (int i = 0; i < strLengths.length; i++) {
+                pattern[i] = microsatellite.getState(strLengths[i]);
+            }
+        } catch (NumberFormatException nfe) {
+            throw new XMLParseException("Unable to parse microsatellite data: " + nfe.getMessage());
+        } catch (IllegalArgumentException iae) {
+            throw new XMLParseException("Unable to parse microsatellite data: " + iae.getMessage());
         }
+
         Patterns microsatPat = new Patterns(microsatellite, taxonList);
         microsatPat.addPattern(pattern);
         microsatPat.setId((String)xo.getAttribute(ID));
@@ -81,21 +88,21 @@ public class MicrosatellitePatternParser extends AbstractXMLObjectParser {
 
     public static void printDetails(Patterns microsatPat){
         Logger.getLogger("dr.evoxml").info(
-            "    Locus name: "+microsatPat.getId()+
-            "\n    Number of Taxa: "+microsatPat.getPattern(0).length+
-            "\n    min: "+((Microsatellite)microsatPat.getDataType()).getMin()+" "+
-            "max: "+((Microsatellite)microsatPat.getDataType()).getMax()+
-            "\n    state count: "+microsatPat.getDataType().getStateCount()+"\n");
+                "    Locus name: "+microsatPat.getId()+
+                        "\n    Number of Taxa: "+microsatPat.getPattern(0).length+
+                        "\n    min: "+((Microsatellite)microsatPat.getDataType()).getMin()+" "+
+                        "max: "+((Microsatellite)microsatPat.getDataType()).getMax()+
+                        "\n    state count: "+microsatPat.getDataType().getStateCount()+"\n");
     }
 
     public static void printMicrosatContent(Patterns microsatPat){
         Logger.getLogger("dr.evoxml").info(
-            "    Locus name: "+ microsatPat.getId());
-            int[] pat = microsatPat.getPattern(0);
-            for(int i = 0; i < pat.length; i++){
-                Logger.getLogger("dr.evoxml").info("    Taxon: "+microsatPat.getTaxon(i)+" "+"state: "+pat[i]);
-            }
-            Logger.getLogger("dr.evoxml").info("\n");
+                "    Locus name: "+ microsatPat.getId());
+        int[] pat = microsatPat.getPattern(0);
+        for(int i = 0; i < pat.length; i++){
+            Logger.getLogger("dr.evoxml").info("    Taxon: "+microsatPat.getTaxon(i)+" "+"state: "+pat[i]);
+        }
+        Logger.getLogger("dr.evoxml").info("\n");
     }
 
 
@@ -117,7 +124,7 @@ public class MicrosatellitePatternParser extends AbstractXMLObjectParser {
     };
 
     public String getParserDescription() {
-       return "This element represents a microsatellite pattern.";
+        return "This element represents a microsatellite pattern.";
     }
 
     public Class getReturnType() {
diff --git a/src/dr/inference/distribution/BinomialLikelihood.java b/src/dr/inference/distribution/BinomialLikelihood.java
index c7ea16e..79ce92d 100644
--- a/src/dr/inference/distribution/BinomialLikelihood.java
+++ b/src/dr/inference/distribution/BinomialLikelihood.java
@@ -30,8 +30,6 @@ import dr.inference.model.Model;
 import dr.inference.model.Parameter;
 import dr.inference.model.Variable;
 import dr.math.Binomial;
-import dr.math.Polynomial;
-import dr.math.matrixAlgebra.Vector;
 import org.w3c.dom.Document;
 import org.w3c.dom.Element;
 
diff --git a/src/dr/inference/distribution/GammaDistributionModel.java b/src/dr/inference/distribution/GammaDistributionModel.java
index b3b6876..04bb97e 100644
--- a/src/dr/inference/distribution/GammaDistributionModel.java
+++ b/src/dr/inference/distribution/GammaDistributionModel.java
@@ -39,42 +39,82 @@ import org.w3c.dom.Element;
 /**
  * A class that acts as a model for gamma distributed data.
  *
+ * @author Andrew Rambaut
  * @author Alexei Drummond
  * @version $Id: GammaDistributionModel.java,v 1.6 2005/05/24 20:25:59 rambaut Exp $
  */
 
 public class GammaDistributionModel extends AbstractModel implements ParametricDistributionModel {
 
+    public enum GammaParameterizationType {
+        ShapeScale,
+        ShapeRate,
+        ShapeMean,
+        OneParameter
+    }
+
     public static final String GAMMA_DISTRIBUTION_MODEL = "gammaDistributionModel";
     public static final String ONE_P_GAMMA_DISTRIBUTION_MODEL = "onePGammaDistributionModel";
 
     /**
-     * Construct a constant mutation rate model.
+     * Construct a gamma distribution model with a default shape scale parameterization.
      */
     public GammaDistributionModel(Variable<Double> shape, Variable<Double> scale) {
-
-        super(GAMMA_DISTRIBUTION_MODEL);
-
-        this.shape = shape;
-        this.scale = scale;
-        addVariable(shape);
-        shape.addBounds(new Parameter.DefaultBounds(Double.POSITIVE_INFINITY, 0.0, 1));
-        if (scale != null) {
-            addVariable(scale);
-            scale.addBounds(new Parameter.DefaultBounds(Double.POSITIVE_INFINITY, 0.0, 1));
-        }
+        this(GammaParameterizationType.ShapeScale, shape, scale, 0.0);
     }
 
     /**
-     * Construct a constant mutation rate model.
+     * Construct a one parameter gamma distribution model.
      */
     public GammaDistributionModel(Variable<Double> shape) {
+        this(GammaParameterizationType.OneParameter, shape, null, 0.0);
+    }
+
+
+    /**
+     * Construct a gamma distribution model.
+     */
+    public GammaDistributionModel(GammaParameterizationType parameterization, Variable<Double> shape, Variable<Double> parameter2, double offset) {
 
         super(GAMMA_DISTRIBUTION_MODEL);
 
+        this.offset = offset;
+
+        this.parameterization = parameterization;
         this.shape = shape;
         addVariable(shape);
         shape.addBounds(new Parameter.DefaultBounds(Double.POSITIVE_INFINITY, 0.0, 1));
+
+        switch (parameterization) {
+            case ShapeScale:
+                this.scale = parameter2;
+                addVariable(scale);
+                scale.addBounds(new Parameter.DefaultBounds(Double.POSITIVE_INFINITY, 0.0, 1));
+                rate = null;
+                mean = null;
+                break;
+            case ShapeRate:
+                this.rate = parameter2;
+                addVariable(rate);
+                rate.addBounds(new Parameter.DefaultBounds(Double.POSITIVE_INFINITY, 0.0, 1));
+                scale = null;
+                mean = null;
+                break;
+            case ShapeMean:
+                this.mean = parameter2;
+                addVariable(mean);
+                mean.addBounds(new Parameter.DefaultBounds(Double.POSITIVE_INFINITY, 0.0, 1));
+                scale = null;
+                rate = null;
+                break;
+            case OneParameter:
+                scale = null;
+                rate = null;
+                mean = null;
+                break;
+            default:
+                throw new IllegalArgumentException("Unknown parameterization type");
+        }
     }
 
     // *****************************************************************
@@ -82,27 +122,30 @@ public class GammaDistributionModel extends AbstractModel implements ParametricD
     // *****************************************************************
 
     public double pdf(double x) {
-        return GammaDistribution.pdf(x, getShape(), getScale());
+        if (x < offset) return 0.0;
+        return GammaDistribution.pdf(x - offset, getShape(), getScale());
     }
 
     public double logPdf(double x) {
-        return GammaDistribution.logPdf(x, getShape(), getScale());
+        if (x < offset) return Double.NEGATIVE_INFINITY;
+        return GammaDistribution.logPdf(x - offset, getShape(), getScale());
     }
 
     public double cdf(double x) {
-        return GammaDistribution.cdf(x, getShape(), getScale());
+        if (x < offset) return 0.0;
+        return GammaDistribution.cdf(x - offset, getShape(), getScale());
     }
 
     public double quantile(double y) {
         try {
-            return (new GammaDistributionImpl(getShape(), getScale())).inverseCumulativeProbability(y);
+            return (new GammaDistributionImpl(getShape(), getScale())).inverseCumulativeProbability(y) + offset;
         } catch (MathException e) {
             return Double.NaN;
         }
     }
 
     public double mean() {
-        return GammaDistribution.mean(getShape(), getScale());
+        return GammaDistribution.mean(getShape(), getScale()) + offset;
     }
 
     public double variance() {
@@ -119,7 +162,7 @@ public class GammaDistributionModel extends AbstractModel implements ParametricD
         }
 
         public final double getLowerBound() {
-            return 0.0;
+            return offset;
         }
 
         public final double getUpperBound() {
@@ -161,15 +204,32 @@ public class GammaDistributionModel extends AbstractModel implements ParametricD
     }
 
     public double getScale() {
-        if (scale == null) return (1.0 / getShape());
-        return scale.getValue(0);
+        switch (parameterization) {
+            case ShapeScale:
+                return scale.getValue(0);
+            case ShapeRate:
+                return (1.0 / rate.getValue(0));
+            case ShapeMean:
+                return (mean.getValue(0) / getShape());
+            case OneParameter:
+                return (1.0 / getShape());
+            default:
+                throw new IllegalArgumentException("Unknown parameterization type");
+        }
     }
 
     // **************************************************************
     // Private instance variables
     // **************************************************************
 
-    private Variable<Double> shape = null;
-    private Variable<Double> scale = null;
+    private final GammaParameterizationType parameterization;
+
+    private final Variable<Double> shape;
+    private final Variable<Double> scale;
+    private final Variable<Double> rate;
+    private final Variable<Double> mean;
+
+    private final double offset;
+
 }
 
diff --git a/src/dr/inference/distribution/InverseGammaDistributionModel.java b/src/dr/inference/distribution/InverseGammaDistributionModel.java
index b28a2cb..15c6860 100644
--- a/src/dr/inference/distribution/InverseGammaDistributionModel.java
+++ b/src/dr/inference/distribution/InverseGammaDistributionModel.java
@@ -30,10 +30,7 @@ import dr.inference.model.Model;
 import dr.inference.model.Parameter;
 import dr.inference.model.Variable;
 import dr.math.UnivariateFunction;
-import dr.math.distributions.GammaDistribution;
 import dr.math.distributions.InverseGammaDistribution;
-import org.apache.commons.math.MathException;
-import org.apache.commons.math.distribution.GammaDistributionImpl;
 import org.w3c.dom.Document;
 import org.w3c.dom.Element;
 
diff --git a/src/dr/inference/distribution/LogNormalDistributionModel.java b/src/dr/inference/distribution/LogNormalDistributionModel.java
index 18fed9a..d14320e 100644
--- a/src/dr/inference/distribution/LogNormalDistributionModel.java
+++ b/src/dr/inference/distribution/LogNormalDistributionModel.java
@@ -29,7 +29,6 @@ import dr.inference.model.AbstractModel;
 import dr.inference.model.Model;
 import dr.inference.model.Parameter;
 import dr.inference.model.Variable;
-import dr.inference.loggers.Logger;
 import dr.inferencexml.distribution.LogNormalDistributionModelParser;
 import dr.math.UnivariateFunction;
 import dr.math.distributions.NormalDistribution;
diff --git a/src/dr/inference/distribution/MultivariateDistributionLikelihood.java b/src/dr/inference/distribution/MultivariateDistributionLikelihood.java
index 569a7d0..1c8c0e6 100644
--- a/src/dr/inference/distribution/MultivariateDistributionLikelihood.java
+++ b/src/dr/inference/distribution/MultivariateDistributionLikelihood.java
@@ -1,7 +1,7 @@
 /*
  * MultivariateDistributionLikelihood.java
  *
- * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
+ * Copyright (c) 2002-2016 Alexei Drummond, Andrew Rambaut and Marc Suchard
  *
  * This file is part of BEAST.
  * See the NOTICE file distributed with this work for additional
@@ -40,6 +40,7 @@ import java.util.logging.Logger;
 
 /**
  * @author Marc Suchard
+ * @author Guy Baele
  */
 public class MultivariateDistributionLikelihood extends AbstractDistributionLikelihood {
 
@@ -49,7 +50,8 @@ public class MultivariateDistributionLikelihood extends AbstractDistributionLike
     public static final String MVN_CV = "coefficientOfVariation";
     public static final String WISHART_PRIOR = "multivariateWishartPrior";
     public static final String INV_WISHART_PRIOR = "multivariateInverseWishartPrior";
-    public static final String DIRICHLET_PRIOR = "dirichletPrior";
+    public static final String DIRICHLET_PRIOR = "dirichletParameterPrior";
+    public static final String SUM_TO_NUMBER_OF_ELEMENTS = "sumToNumberOfElements";
     public static final String DF = "df";
     public static final String SCALE_MATRIX = "scaleMatrix";
     public static final String MVGAMMA_PRIOR = "multivariateGammaPrior";
@@ -179,11 +181,15 @@ public class MultivariateDistributionLikelihood extends AbstractDistributionLike
 
         public Object parseXMLObject(XMLObject xo) throws XMLParseException {
 
+            boolean sumConstraint = false;
+            if (xo.hasAttribute(SUM_TO_NUMBER_OF_ELEMENTS)) {
+                sumConstraint = xo.getBooleanAttribute(SUM_TO_NUMBER_OF_ELEMENTS);
+            }
 
             XMLObject cxo = xo.getChild(COUNTS);
             Parameter counts = (Parameter) cxo.getChild(Parameter.class);
 
-            DirichletDistribution dirichlet = new DirichletDistribution(counts.getParameterValues());
+            DirichletDistribution dirichlet = new DirichletDistribution(counts.getParameterValues(), sumConstraint);
 
             MultivariateDistributionLikelihood likelihood = new MultivariateDistributionLikelihood(
                     dirichlet);
@@ -205,6 +211,7 @@ public class MultivariateDistributionLikelihood extends AbstractDistributionLike
         }
 
         private final XMLSyntaxRule[] rules = {
+                AttributeRule.newBooleanRule(SUM_TO_NUMBER_OF_ELEMENTS, true),
                 new ElementRule(COUNTS,
                         new XMLSyntaxRule[]{new ElementRule(Parameter.class)}),
                 new ElementRule(DATA,
diff --git a/src/dr/inference/distribution/MultivariateNormalDistributionModel.java b/src/dr/inference/distribution/MultivariateNormalDistributionModel.java
index a68a6a9..4b1824f 100644
--- a/src/dr/inference/distribution/MultivariateNormalDistributionModel.java
+++ b/src/dr/inference/distribution/MultivariateNormalDistributionModel.java
@@ -51,6 +51,16 @@ public class MultivariateNormalDistributionModel extends AbstractModel implement
         this.precision = precParameter;
         addVariable(precParameter);
 
+        Parameter single = null;
+        if (precParameter instanceof DiagonalMatrix) {
+            DiagonalMatrix dm = (DiagonalMatrix) precParameter;
+            if (dm.getDiagonalParameter() instanceof DuplicatedParameter) {
+                single = dm.getDiagonalParameter();
+            }
+        }
+        hasSinglePrecision = (single != null);
+        singlePrecision = single;
+
         distribution = createNewDistribution();
         distributionKnown = true;
     }
@@ -126,11 +136,17 @@ public class MultivariateNormalDistributionModel extends AbstractModel implement
     // **************************************************************
 
     private MultivariateNormalDistribution createNewDistribution() {
-        return new MultivariateNormalDistribution(getMean(), getScaleMatrix());
+        if (hasSinglePrecision) {
+            return new MultivariateNormalDistribution(getMean(), singlePrecision.getParameterValue(0));
+        } else {
+            return new MultivariateNormalDistribution(getMean(), getScaleMatrix());
+        }
     }
 
     private final Parameter mean;
     private final MatrixParameter precision;
+    private final boolean hasSinglePrecision;
+    private final Parameter singlePrecision;
     private MultivariateNormalDistribution distribution;
     private MultivariateNormalDistribution storedDistribution;
 
diff --git a/src/dr/inference/distribution/NormalDistributionModel.java b/src/dr/inference/distribution/NormalDistributionModel.java
index 7a4d434..efc91c9 100644
--- a/src/dr/inference/distribution/NormalDistributionModel.java
+++ b/src/dr/inference/distribution/NormalDistributionModel.java
@@ -25,13 +25,11 @@
 
 package dr.inference.distribution;
 
-import dr.inference.model.AbstractModel;
-import dr.inference.model.Model;
-import dr.inference.model.Parameter;
-import dr.inference.model.Variable;
+import dr.inference.model.*;
 import dr.inferencexml.distribution.NormalDistributionModelParser;
 import dr.math.MathUtils;
 import dr.math.UnivariateFunction;
+import dr.math.distributions.GaussianProcessRandomGenerator;
 import dr.math.distributions.NormalDistribution;
 import dr.math.distributions.RandomGenerator;
 import org.w3c.dom.Document;
@@ -44,7 +42,7 @@ import org.w3c.dom.Element;
  * @version $Id: NormalDistributionModel.java,v 1.6 2005/05/24 20:25:59 rambaut Exp $
  */
 
-public class NormalDistributionModel extends AbstractModel implements ParametricDistributionModel, RandomGenerator {
+public class NormalDistributionModel extends AbstractModel implements ParametricDistributionModel, GaussianProcessRandomGenerator {
     /**
      * Constructor.
      */
@@ -186,4 +184,9 @@ public class NormalDistributionModel extends AbstractModel implements Parametric
         double v = (Double) x;
         return logPdf(v);
     }
+
+    @Override
+    public Likelihood getLikelihood() {
+        return null;
+    }
 }
diff --git a/src/dr/inference/distribution/TruncatedNormalDistributionModel.java b/src/dr/inference/distribution/TruncatedNormalDistributionModel.java
index 9ffcd95..2330b85 100644
--- a/src/dr/inference/distribution/TruncatedNormalDistributionModel.java
+++ b/src/dr/inference/distribution/TruncatedNormalDistributionModel.java
@@ -78,7 +78,7 @@ public class TruncatedNormalDistributionModel extends AbstractModel implements P
         this.minimum = minimum;
         this.maximum = maximum;
         this.
-        addVariable(mean);
+                addVariable(mean);
         mean.addBounds(new Parameter.DefaultBounds(Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY, 1));
         addVariable(stdev);
         stdev.addBounds(new Parameter.DefaultBounds(Double.POSITIVE_INFINITY, 0.0, 1));
diff --git a/src/dr/inference/markovchain/MarkovChain.java b/src/dr/inference/markovchain/MarkovChain.java
index d45ba61..622d1ae 100644
--- a/src/dr/inference/markovchain/MarkovChain.java
+++ b/src/dr/inference/markovchain/MarkovChain.java
@@ -53,7 +53,7 @@ public final class MarkovChain implements Serializable {
     private final static boolean DEBUG = false;
     private final static boolean PROFILE = true;
 
-    public static final double EVALUATION_TEST_THRESHOLD = 1e-6;
+    public static final double EVALUATION_TEST_THRESHOLD = 1e-1;
 
     private final OperatorSchedule schedule;
     private final Acceptor acceptor;
@@ -89,6 +89,15 @@ public final class MarkovChain implements Serializable {
         this.minOperatorCountForFullEvaluation = minOperatorCountForFullEvaluation;
         this.evaluationTestThreshold = evaluationTestThreshold;
 
+        Likelihood.CONNECTED_LIKELIHOOD_SET.add(likelihood);
+        Likelihood.CONNECTED_LIKELIHOOD_SET.addAll(likelihood.getLikelihoodSet());
+
+        for (Likelihood l : Likelihood.FULL_LIKELIHOOD_SET) {
+            if (!Likelihood.CONNECTED_LIKELIHOOD_SET.contains(l)) {
+                System.err.println("WARNING: Likelihood component, " + l.getId() + ", created but not used in the MCMC");
+            }
+        }
+
         currentScore = evaluate(likelihood, prior);
     }
 
@@ -225,8 +234,8 @@ public final class MarkovChain implements Serializable {
                 operatorSucceeded = false;
             }
 
-            double score = 0.0;
-            double deviation = 0.0;
+            double score = Double.NaN;
+            double deviation = Double.NaN;
 
             //    System.err.print("" + currentState + ": ");
             if (operatorSucceeded) {
@@ -247,7 +256,11 @@ public final class MarkovChain implements Serializable {
                 score = evaluate(likelihood, prior);
 
                 if (PROFILE) {
-                    mcmcOperator.addEvaluationTime(System.currentTimeMillis() - elapsedTime);
+                    long duration = System.currentTimeMillis() - elapsedTime;
+                    if (DEBUG) {
+                        System.out.println("Time: " + duration);
+                    }
+                    mcmcOperator.addEvaluationTime(duration);
                 }
 
                 String diagnosticOperator = "";
diff --git a/src/dr/inference/markovjumps/TwoStateOccupancyMarkovReward.java b/src/dr/inference/markovjumps/TwoStateOccupancyMarkovReward.java
index ca8151b..67e001d 100644
--- a/src/dr/inference/markovjumps/TwoStateOccupancyMarkovReward.java
+++ b/src/dr/inference/markovjumps/TwoStateOccupancyMarkovReward.java
@@ -31,6 +31,7 @@ import dr.app.beagle.evomodel.substmodel.EigenSystem;
 import dr.math.Binomial;
 import dr.math.GammaFunction;
 import dr.math.distributions.GammaDistribution;
+import dr.math.distributions.GeneralizedIntegerGammaDistribution;
 import dr.math.matrixAlgebra.Vector;
 
 /**
@@ -205,154 +206,44 @@ public class TwoStateOccupancyMarkovReward implements MarkovReward {
 
         if (jumpProbabilities == null) {
             jumpProbabilities = new double[maxK + 1];
-            computeJumpProbabilities(lambda0, lambda1, time, C, D, jumpProbabilities);
+//            computeJumpProbabilities(lambda0, lambda1, time, C, D, jumpProbabilities);   // Error: probs are function of time
         }
+        computeJumpProbabilities(lambda0, lambda1, time, C, D, jumpProbabilities); // are function of time.
+        // TODO Could cache computeJumpProbabilities(key = time) in HashMap
 
         if (symmetric) {
             // Single rate (symmetric)
-//            final double lambda = -Q[idx(0, 0)];
             final double scale = 1.0 / lambda0;
-//            final double logLambdaTime = Math.log(lambda) + Math.log(time);
-
-//            final double multiplier = Math.exp(-lambda * time);
-
             double sum = 0.0;
+
             // if time - x > 0, then there must have been at least k = 2 jumps
             for (int m = 1; m <= maxK / 2; ++m) {
                 final int k = 2 * m;
                 sum +=  jumpProbabilities[k] *
                         Math.exp(
-//                        k * logLambdaTime - GammaFunction.lnGamma(k + 1)
                                 + GammaDistribution.logPdf(x, m, scale)
                                 + GammaDistribution.logPdf(time - x, m + 1, scale)
                                 - GammaDistribution.logPdf(time, k + 1, scale)
                 );
             }
-            return //multiplier *
-                    sum;
+            return sum;
         } else {
-
-            // Test partial fractions
-            GeneralizedIntegerGammaDistribution gigd = new GeneralizedIntegerGammaDistribution(4, 10, 0.2, 2.0);
-
-            System.err.println(gigd.generatingFunction(0.5));
-            System.err.println(gigd.generatingFunctionPartialFraction(0.5));
-            System.exit(-1);
-
             // Two rate model
             double sum = 0.0;
             for (int m = 1; m <= maxK / 2; ++m) {
                 final int k = 2 * m;
-                sum += jumpProbabilities[k] +
-                        Math.exp(GammaDistribution.logPdf(x, m, lambda1) // TODO check which rate
-                                        * GammaDistribution.logPdf(time - x, m + 1, lambda0)) // TODO check which rate
-                                / GeneralizedIntegerGammaDistribution.pdf(time, m, m + 1, lambda1, lambda0);
-            }
-
-
-
-            return sum;
-        }
-    }
-
-    static class GeneralizedIntegerGammaDistribution {
-
-        private int shape1, shape2;
-        private double rate1, rate2;
-
-        private double[] A = null;
-        private double[] B = null;
-
-        GeneralizedIntegerGammaDistribution(int shape1, int shape2, double rate1, double rate2) {
-            this.shape1 = shape1; this.shape2 = shape2;
-            this.rate1 = rate1; this.rate2 = rate2;
-        }
-
-        public double generatingFunction(double s) {
-            return Math.pow(rate1 / (rate1 + s), shape1) * Math.pow(rate2 / (rate2 + s), shape2);
-        }
-
-
-//        http://www.ism.ac.jp/editsec/aism/pdf/034_3_0591.pdf
-        public double generatingFunctionPartialFraction(double s) {
-            if (A == null) {
-                computeCoefficients();
+                sum += jumpProbabilities[k] *
+                        GammaDistribution.pdf(x, m, 1.0 / lambda1) *
+                        GammaDistribution.pdf(time - x, m + 1, 1.0 / lambda0) /
+                        GeneralizedIntegerGammaDistribution.pdf(time, m, m + 1, lambda1, lambda0); // TODO Cache
             }
-            double sum = 0.0;
-
-            for (int i = 1; i <= shape1; ++i) {
-                sum += A[i] / Math.pow(rate1 + s, i);
-            }
-
-            for (int i = 1; i <= shape2; ++i) {
-                sum += B[i] / Math.pow(rate2 + s, i);
-            }
-
-//            double B1 = -rate1 * rate2 * rate2 * (Math.pow(rate1 - rate2, -1) + Math.pow(rate1 - rate2, -2));
-
-//            double A1 = rate1 * rate2 * rate2 * Math.pow(rate2 - rate1, -2);
-//            double B1 = -rate1 * rate2 * rate2 * Math.pow(rate1 - rate2, -2);
-//            double B2 =  rate1 * rate2 * rate2 * Math.pow(rate1 - rate2, -1);
-
-//            System.err.println("A1: " + A1 + " " + A[1]);
-//            System.err.println("B1: " + B1 + " " + B[1]);
-//            System.err.println("B2: " + B2 + " " + B[2]);
-
-//            double sum2 = A1 / (rate1 + s) + B1 / (rate2 + s) + B2 / ((rate2 + s) * (rate2 + s));
-
-//            return sum2;
+            // TODO Remove code duplication in if (symmetric) { } else { }
             return sum;
         }
-
-        private void computeCoefficients() {
-            A = new double[shape1 + 1];
-            B = new double[shape2 + 1];
-
-            final double lambdaFactor = Math.pow(rate1, shape1) * Math.pow(rate2, shape2);
-
-            int sign = 1;
-            double factorial = 1;
-            for (int i = 1; i <= shape1; ++i) {
-                if (i > 1 && (shape2 + i - 2) > 1) {
-                    factorial *= shape2 + i - 2;
-                    factorial /= i - 1;
-                }
-//                System.err.println("A: " + Binomial.choose(shape2 + i - 1, i - 1));
-
-                System.err.println("A[" + (shape1 - i + 1) + "]: " + factorial);
-                A[shape1 - i + 1] =
-//                        Binomial.choose(shape2 + i - 1, i - 1) *
-                        factorial *
-                        sign * lambdaFactor / Math.pow(rate2 - rate1, shape2 + i - 1); // shape1 - i + 1
-                sign *= -1;
-            }
-
-            sign = 1;
-            factorial = 1;
-            for (int i = 1; i <= shape2; ++i) {
-                if (i > 1 && (shape1 + i - 2) > 1) {
-//                    System.err.println((shape1 + i - 2) + " " + i);
-                    factorial *= shape1 + i - 2;
-                    factorial /= i - 1;
-                }
-
-                System.err.println("B[" + (shape2 - i + 1) + "]: " + factorial);
-                B[shape2 - i + 1] =
-//                        Binomial.choose(shape1 + i - 1, i - 1) *
-                        factorial *
-                        sign * lambdaFactor / Math.pow(rate1 - rate2, shape1 + i - 1); // shape2 - i + 1
-                sign *= -1;
-            }
-        }
-
-        public static double pdf(double x, int shape1, int shape2, double rate1, double rate2) {
-            return 1.0;
-        }
     }
 
     public double[] computePdf(double x, double time) {
-//        return computePdf(new double[]{x}, time)[0];
-        return null;
+        throw new RuntimeException("Not yet implemented");
     }
 
     private double[][] squareMatrix(final double[] mat) {
diff --git a/src/dr/inference/mcmc/DebugUtils.java b/src/dr/inference/mcmc/DebugUtils.java
index 927f7d6..b7aa8eb 100644
--- a/src/dr/inference/mcmc/DebugUtils.java
+++ b/src/dr/inference/mcmc/DebugUtils.java
@@ -45,8 +45,11 @@ import dr.evomodel.tree.TreeModel;
 import dr.inference.model.Likelihood;
 import dr.inference.model.Model;
 import dr.inference.model.Parameter;
+import dr.math.MathUtils;
 
 import java.io.*;
+import java.util.HashSet;
+import java.util.Set;
 
 public class DebugUtils {
 
@@ -63,13 +66,21 @@ public class DebugUtils {
             fileOut = new FileOutputStream(file);
             PrintStream out = new PrintStream(fileOut);
 
-            out.print("state\t");
+            int[] rngState = MathUtils.getRandomState();
+            out.print("rng");
+            for (int i = 0; i < rngState.length; i++) {
+                out.print("\t");
+                out.print(rngState[i]);
+            }
+            out.println();
+
+            out.print("\nstate\t");
             out.println(state);
 
             out.print("lnL\t");
             out.println(lnL);
 
-            for (Parameter parameter : Parameter.FULL_PARAMETER_SET) {
+            for (Parameter parameter : Parameter.CONNECTED_PARAMETER_SET) {
                 out.print(parameter.getParameterName());
                 out.print("\t");
                 out.print(parameter.getDimension());
@@ -80,7 +91,7 @@ public class DebugUtils {
                 out.println();
             }
 
-            for (Model model : Model.FULL_MODEL_SET) {
+            for (Model model : Model.CONNECTED_MODEL_SET) {
                 if (model instanceof TreeModel) {
                     out.print(model.getModelName());
                     out.print("\t");
@@ -94,6 +105,11 @@ public class DebugUtils {
             System.err.println("Unable to write file: " + ioe.getMessage());
             return false;
         }
+
+//        for (Likelihood likelihood : Likelihood.CONNECTED_LIKELIHOOD_SET) {
+//            System.err.println(likelihood.getId() + ": " + likelihood.getLogLikelihood());
+//        }
+
         return true;
     }
 
@@ -112,8 +128,26 @@ public class DebugUtils {
             FileReader fileIn = new FileReader(file);
             BufferedReader in = new BufferedReader(fileIn);
 
+            int[] rngState = null;
+
             String line = in.readLine();
             String[] fields = line.split("\t");
+            if (fields[0].equals("rng")) {
+                // if there is a random number generator state present then load it...
+                try {
+                    rngState = new int[fields.length - 1];
+                    for (int i = 0; i < rngState.length; i++) {
+                        rngState[i] = Integer.parseInt(fields[i + 1]);
+                    }
+
+                } catch (NumberFormatException nfe) {
+                    throw new RuntimeException("Unable to read state number from state file");
+                }
+
+                line = in.readLine();
+                fields = line.split("\t");
+            }
+
             try {
                 if (!fields[0].equals("state")) {
                     throw new RuntimeException("Unable to read state number from state file");
@@ -136,7 +170,7 @@ public class DebugUtils {
                 throw new RuntimeException("Unable to read lnL from state file");
             }
 
-            for (Parameter parameter : Parameter.FULL_PARAMETER_SET) {
+            for (Parameter parameter : Parameter.CONNECTED_PARAMETER_SET) {
                 line = in.readLine();
                 fields = line.split("\t");
 //                if (!fields[0].equals(parameter.getParameterName())) {
@@ -148,31 +182,65 @@ public class DebugUtils {
                     System.err.println("Unable to match state parameter dimension: " + dimension + ", expecting " + parameter.getDimension());
                 }
 
-                for (int dim = 0; dim < parameter.getDimension(); dim++) {
-                    parameter.setParameterValue(dim, Double.parseDouble(fields[dim + 2]));
+                if (fields[0].equals("branchRates.categories.rootNodeNumber")) {
+                    System.out.println("eek");
+                    double value = Double.parseDouble(fields[2]);
+                    parameter.setParameterValue(0, 160.0);
+                } else {
+                    for (int dim = 0; dim < parameter.getDimension(); dim++) {
+                        parameter.setParameterValue(dim, Double.parseDouble(fields[dim + 2]));
+                    }
                 }
+
             }
 
             // load the tree models last as we get the node heights from the tree (not the parameters which
             // which may not be associated with the right node
-            for (Model model : Model.FULL_MODEL_SET) {
+            Set<String> expectedTreeModelNames = new HashSet<String>();
+            for (Model model : Model.CONNECTED_MODEL_SET) {
                 if (model instanceof TreeModel) {
-                    line = in.readLine();
-                    fields = line.split("\t");
-                    if (!fields[0].equals(model.getModelName())) {
-                        throw new RuntimeException("Unable to match state parameter: " + fields[0] + ", expecting " + model.getModelName());
+                    expectedTreeModelNames.add(model.getModelName());
+                }
+            }
+
+            // Read in all (possibly more than one) tree
+            while((line = in.readLine()) != null) {
+                fields = line.split("\t");
+                boolean treeFound = false;
+
+                for (Model model : Model.CONNECTED_MODEL_SET) {
+                    if (model instanceof TreeModel && fields[0].equals(model.getModelName())) {
+                        treeFound = true;
+                        NewickImporter importer = new NewickImporter(fields[1]);
+                        Tree tree = importer.importNextTree();
+                        ((TreeModel) model).beginTreeEdit();
+                        ((TreeModel) model).adoptTreeStructure(tree);
+                        ((TreeModel) model).endTreeEdit();
+
+                        expectedTreeModelNames.remove(model.getModelName());
                     }
-                    NewickImporter importer = new NewickImporter(fields[1]);
-                    Tree tree = importer.importNextTree();
-                    ((TreeModel) model).beginTreeEdit();
-                    ((TreeModel) model).adoptTreeStructure(tree);
-                    ((TreeModel) model).endTreeEdit();
                 }
+
+                if (!treeFound) {
+                    throw new RuntimeException("Unable to match state parameter: " + fields[0]);
+                }
+            }
+
+            if (expectedTreeModelNames.size() > 0) {
+                StringBuilder sb = new StringBuilder();
+                for (String notFoundName : expectedTreeModelNames) {
+                    sb.append("Expecting, but unable to match state parameter:" + notFoundName + "\n");
+                }
+                throw new RuntimeException(sb.toString());
+            }
+
+            if (rngState != null) {
+                MathUtils.setRandomState(rngState);
             }
 
             in.close();
             fileIn.close();
-            for (Likelihood likelihood : Likelihood.FULL_LIKELIHOOD_SET) {
+            for (Likelihood likelihood : Likelihood.CONNECTED_LIKELIHOOD_SET) {
                 likelihood.makeDirty();
             }
         } catch (IOException ioe) {
diff --git a/src/dr/inference/mcmc/MCMC.java b/src/dr/inference/mcmc/MCMC.java
index e01dc87..7c803c4 100644
--- a/src/dr/inference/mcmc/MCMC.java
+++ b/src/dr/inference/mcmc/MCMC.java
@@ -167,6 +167,7 @@ public class MCMC implements Identifiable, Spawnable, Loggable {
             long debugWriteEvery = Long.parseLong(System.getProperty(DUMP_EVERY));
             mc.addMarkovChainListener(new DebugChainListener(this, debugWriteEvery, true));
         }
+
     }
 
     /**
@@ -237,9 +238,16 @@ public class MCMC implements Identifiable, Spawnable, Loggable {
 
                 double lnL = mc.evaluate();
 
+                DebugUtils.writeStateToFile(new File("tmp.dump"), loadedState, lnL);
+
                 if (lnL != savedLnL[0]) {
-                   throw new RuntimeException("Dumped lnL does not match loaded state");
+                        throw new RuntimeException("Dumped lnL does not match loaded state: stored lnL: " + savedLnL[0] +
+                                ", recomputed lnL: " + lnL + " (difference " + (savedLnL[0] - lnL) + ")");
                 }
+
+//                for (Likelihood likelihood : Likelihood.CONNECTED_LIKELIHOOD_SET) {
+//                    System.err.println(likelihood.getId() + ": " + likelihood.getLogLikelihood());
+//                }
             }
 
             mc.addMarkovChainListener(chainListener);
@@ -275,7 +283,8 @@ public class MCMC implements Identifiable, Spawnable, Loggable {
                 double lnL2 = mc.evaluate();
 
                 if (lnL1 != lnL2) {
-                    throw new RuntimeException("Likelihood different after state load");
+                    throw new RuntimeException("Dumped lnL does not match loaded state: stored lnL: " + lnL1 +
+                            ", recomputed lnL: " + lnL2 + " (difference " + (lnL2 - lnL1) + ")");
                 }
                 // TEST Code end
             }
@@ -633,4 +642,3 @@ public class MCMC implements Identifiable, Spawnable, Loggable {
 
     private String id = null;
 }
-
diff --git a/src/dr/inference/model/AbstractModel.java b/src/dr/inference/model/AbstractModel.java
index c09cd7c..eb89210 100644
--- a/src/dr/inference/model/AbstractModel.java
+++ b/src/dr/inference/model/AbstractModel.java
@@ -53,6 +53,7 @@ public abstract class AbstractModel implements Model, ModelListener, VariableLis
      * list then it does nothing.
      */
     public void addModel(Model model) {
+        Model.CONNECTED_MODEL_SET.add(model);
 
         if (!models.contains(model)) {
             models.add(model);
@@ -74,6 +75,10 @@ public abstract class AbstractModel implements Model, ModelListener, VariableLis
     }
 
     public final void addVariable(Variable variable) {
+        if (variable instanceof Parameter) {
+            Parameter.CONNECTED_PARAMETER_SET.add((Parameter)variable);
+        }
+
         if (!variables.contains(variable)) {
             variables.add(variable);
             variable.addVariableListener(this);
@@ -121,7 +126,7 @@ public abstract class AbstractModel implements Model, ModelListener, VariableLis
         return listenerHelper.getListenerCount() > 0;
     }
 
-    
+
     /**
      * Fires a model changed event.
      */
diff --git a/src/dr/inference/model/AbstractModelLikelihood.java b/src/dr/inference/model/AbstractModelLikelihood.java
index cc0fcae..39928ea 100644
--- a/src/dr/inference/model/AbstractModelLikelihood.java
+++ b/src/dr/inference/model/AbstractModelLikelihood.java
@@ -28,6 +28,10 @@ package dr.inference.model;
 import dr.inference.loggers.LogColumn;
 import dr.inference.loggers.NumberColumn;
 
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.Set;
+
 /**
  * @author Joseph Heled
  *         Date: 16/04/2009
@@ -45,6 +49,11 @@ public abstract class AbstractModelLikelihood extends AbstractModel implements L
     }
 
     @Override
+    public Set<Likelihood> getLikelihoodSet() {
+        return new HashSet<Likelihood>(Arrays.asList(this));
+    }
+
+    @Override
     public boolean isUsed() {
         return isUsed;
     }
diff --git a/src/dr/inference/model/BlockUpperTriangularMatrixParameter.java b/src/dr/inference/model/BlockUpperTriangularMatrixParameter.java
index 6304b62..0fc28cd 100644
--- a/src/dr/inference/model/BlockUpperTriangularMatrixParameter.java
+++ b/src/dr/inference/model/BlockUpperTriangularMatrixParameter.java
@@ -141,11 +141,17 @@ public class BlockUpperTriangularMatrixParameter extends MatrixParameter {
         return PID/getRowDimension();
     }
 
-    public void setParameterValue(int row, int col, double value){
+    public void setParameterValueQuietly(int row, int col, double value){
          if(matrixCondition(row, col)){
-             getParameter(col).setParameterValue(row, value);
+             getParameter(col).setParameterValueQuietly(getInnerDimension(row,col), value);
         }
     }
+
+    public void setParameterValue(int row, int col,double value){
+        setParameterValueQuietly(row, col, value);
+        fireParameterChangedEvent();
+    }
+
     public void setParameterValue(int PID, double value){
 
         int row=getRow(PID);
diff --git a/src/dr/inference/model/CompoundFastMatrixParameter.java b/src/dr/inference/model/CompoundFastMatrixParameter.java
new file mode 100644
index 0000000..b3b1bf6
--- /dev/null
+++ b/src/dr/inference/model/CompoundFastMatrixParameter.java
@@ -0,0 +1,210 @@
+/*
+ * CompoundFastMatrixParameter.java
+ *
+ * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
+ *
+ * This file is part of BEAST.
+ * See the NOTICE file distributed with this work for additional
+ * information regarding copyright ownership and licensing.
+ *
+ * BEAST is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+ *
+ *  BEAST is distributed in the hope that it will be useful,
+ *  but WITHOUT ANY WARRANTY; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ *  GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with BEAST; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA  02110-1301  USA
+ */
+
+package dr.inference.model;
+
+import dr.xml.*;
+import mpi.MPI;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * @author Marc A. Suchard
+ */
+public class CompoundFastMatrixParameter extends CompoundParameter implements MatrixParameterInterface {
+
+    private final int rowDimension;
+    private int colDimension;
+
+    private final List<MatrixParameterInterface> columns = new ArrayList<MatrixParameterInterface>();
+    private final List<Integer> offsets = new ArrayList<Integer>();
+    private final List<MatrixParameterInterface> matrices;
+
+    public CompoundFastMatrixParameter(String name, List<MatrixParameterInterface> matrices) {
+        super(name, compoundMatrices(matrices));
+
+        this.matrices = matrices;
+
+        rowDimension = matrices.get(0).getRowDimension();
+        colDimension = 0;
+
+        for (MatrixParameterInterface matrix : matrices) {
+            if (matrix.getRowDimension() != rowDimension) {
+                throw new IllegalArgumentException("Inconsistent row dimensions");
+            }
+
+            for (int i = 0; i < matrix.getColumnDimension(); ++i) {
+                columns.add(matrix);
+                offsets.add(i);
+            }
+
+            colDimension += matrix.getColumnDimension();
+        }
+    }
+
+    private static Parameter[] compoundMatrices(List<MatrixParameterInterface> matrices) {
+        int length = 0;
+        for (MatrixParameterInterface matrix : matrices) {
+            length += matrix.getUniqueParameterCount();
+        }
+
+        Parameter[] parameters = new Parameter[length];
+        int index = 0;
+
+        for (MatrixParameterInterface matrix : matrices) {
+            for (int i = 0; i < matrix.getUniqueParameterCount(); ++i) {
+                parameters[index] = matrix.getUniqueParameter(i);
+                ++index;
+            }
+        }
+
+        return parameters;
+    }
+
+    @Override
+    public Parameter getParameter(int column) {
+        return columns.get(column).getParameter(offsets.get(column));
+    }
+
+    @Override
+    public double getParameterValue(int row, int col) {
+        return columns.get(col).getParameterValue(row, offsets.get(col));
+    }
+
+    @Override
+    public void setParameterValue(int row, int col, double value) {
+        columns.get(col).setParameterValue(row, offsets.get(col), value);
+    }
+
+    @Override
+    public void setParameterValueQuietly(int row, int col, double value) {
+        columns.get(col).setParameterValueQuietly(row, offsets.get(col), value);
+    }
+
+    @Override
+    public void setParameterValueNotifyChangedAll(int row, int column, double value) {
+        columns.get(column).setParameterValueNotifyChangedAll(row, offsets.get(column), value);
+    }
+
+    @Override
+    public double[] getColumnValues(int col) {
+        throw new RuntimeException("Not yet implemented");
+    }
+
+    @Override
+    public double[][] getParameterAsMatrix() {
+        throw new RuntimeException("Not yet implemented");
+    }
+
+    @Override
+    public int getColumnDimension() {
+        return colDimension;
+    }
+
+    @Override
+    public int getRowDimension() {
+        return rowDimension;
+    }
+
+    @Override
+    public int getUniqueParameterCount() {
+        return getParameterCount();
+    }
+
+    @Override
+    public Parameter getUniqueParameter(int index) {
+        return getParameter(index);
+    }
+
+    @Override
+    public void copyParameterValues(double[] destination, int offset) {
+        for (MatrixParameterInterface matrix : matrices) {
+            matrix.copyParameterValues(destination, offset);
+            offset += matrix.getRowDimension() * matrix.getColumnDimension();
+        }
+    }
+
+    @Override
+    public void setAllParameterValuesQuietly(double[] values, int offset) {
+        for (MatrixParameterInterface matrix : matrices) {
+            matrix.setAllParameterValuesQuietly(values, offset);
+            offset += matrix.getRowDimension() * matrix.getColumnDimension();
+        }
+    }
+
+    @Override
+    public double[] getParameterValues() {
+        int length = 0;
+        for (MatrixParameterInterface matrix : matrices) {
+            length += matrix.getRowDimension() * matrix.getColumnDimension();
+        }
+        double[] rtn = new double[length];
+        copyParameterValues(rtn, 0);
+        return rtn;
+    }
+
+    public final static String COMPOUND_FAST_MATRIX_PARAMETER = "compoundFastMatrixParameter";
+
+    public static XMLObjectParser PARSER = new AbstractXMLObjectParser() {
+
+        public String getParserName() {
+            return COMPOUND_FAST_MATRIX_PARAMETER;
+        }
+
+        public Object parseXMLObject(XMLObject xo) throws XMLParseException {
+
+            List<MatrixParameterInterface> matrices = new ArrayList<MatrixParameterInterface>();
+
+            for (int i = 0; i < xo.getChildCount(); ++i) {
+                matrices.add((MatrixParameterInterface) xo.getChild(i));
+            }
+
+            final String name = xo.hasId() ? xo.getId() : null;
+
+            return new CompoundFastMatrixParameter(name, matrices);
+        }
+
+        //************************************************************************
+        // AbstractXMLObjectParser implementation
+        //************************************************************************
+
+        public String getParserDescription() {
+            return "A compound matrix parameter constructed from its component parameters.";
+        }
+
+        public XMLSyntaxRule[] getSyntaxRules() {
+            return rules;
+        }
+
+        private final XMLSyntaxRule[] rules = {
+                new ElementRule(MatrixParameterInterface.class, 1, Integer.MAX_VALUE),
+        };
+
+        public Class getReturnType() {
+            return CompoundFastMatrixParameter.class;
+        }
+    };
+}
diff --git a/src/dr/inference/model/CompoundLikelihood.java b/src/dr/inference/model/CompoundLikelihood.java
index 9bf1668..ce2bec3 100644
--- a/src/dr/inference/model/CompoundLikelihood.java
+++ b/src/dr/inference/model/CompoundLikelihood.java
@@ -29,9 +29,7 @@ import dr.app.beagle.evomodel.branchmodel.lineagespecific.BeagleBranchLikelihood
 import dr.util.NumberFormatter;
 import dr.xml.Reportable;
 
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.List;
+import java.util.*;
 import java.util.concurrent.*;
 
 /**
@@ -157,6 +155,15 @@ public class CompoundLikelihood implements Likelihood, Reportable {
         
     }//END: addLikelihood
 
+    public Set<Likelihood> getLikelihoodSet() {
+        Set<Likelihood> set = new HashSet<Likelihood>();
+        for (Likelihood l : likelihoods) {
+            set.add(l);
+            set.addAll(l.getLikelihoodSet());
+        }
+        return set;
+    }
+
     public int getLikelihoodCount() {
         return likelihoods.size();
     }
diff --git a/src/dr/inference/model/CompoundParameter.java b/src/dr/inference/model/CompoundParameter.java
index 9490dff..e56bdde 100644
--- a/src/dr/inference/model/CompoundParameter.java
+++ b/src/dr/inference/model/CompoundParameter.java
@@ -206,26 +206,26 @@ public class CompoundParameter extends Parameter.Abstract implements VariableLis
         parameters.get(dim).setParameterValue(pindex.get(dim), value);
     }
 
-    public void setParameterValue(int row, int column, double a)
-    {
-        getParameter(column).setParameterValue(row, a);
-    }
+//    public void setParameterValue(int row, int column, double a)
+//    {
+//        getParameter(column).setParameterValue(row, a);
+//    }
 
     public void setParameterValueQuietly(int dim, double value) {
         parameters.get(dim).setParameterValueQuietly(pindex.get(dim), value);
     }
 
-    public void setParameterValueQuietly(int row, int column, double a){
-        getParameter(column).setParameterValueQuietly(row, a);
-    }
+//    public void setParameterValueQuietly(int row, int column, double a){
+//        getParameter(column).setParameterValueQuietly(row, a);
+//    }
 
     public void setParameterValueNotifyChangedAll(int dim, double value) {
         parameters.get(dim).setParameterValueNotifyChangedAll(pindex.get(dim), value);
     }
 
-    public void setParameterValueNotifyChangedAll(int row, int column, double val){
-        getParameter(column).setParameterValueNotifyChangedAll(row, val);
-    }
+//    public void setParameterValueNotifyChangedAll(int row, int column, double val){
+//        getParameter(column).setParameterValueNotifyChangedAll(row, val);
+//    }
 
     protected void storeValues() {
         for (Parameter parameter : uniqueParameters) {
diff --git a/src/dr/inference/model/DesignMatrix.java b/src/dr/inference/model/DesignMatrix.java
index 1675dd2..cac4a61 100644
--- a/src/dr/inference/model/DesignMatrix.java
+++ b/src/dr/inference/model/DesignMatrix.java
@@ -68,9 +68,9 @@ public class DesignMatrix extends MatrixParameter {
         return value;
     }
 
-    public double getParameterValue(int index) {
-        throw new RuntimeException("Univariate value from a design matrix");
-    }
+//    public double getParameterValue(int index) {
+//        throw new RuntimeException("Univariate value from a design matrix");
+//    }
 
     public void addParameter(Parameter param) {
         super.addParameter(param);
@@ -202,7 +202,8 @@ public class DesignMatrix extends MatrixParameter {
                     if (i == 0)
                         dim = parameter.getDimension();
                     else if (dim != parameter.getDimension())
-                        throw new XMLParseException("All parameters must have the same dimension to construct a rectangular design matrix");
+                        throw new XMLParseException("Parameter " + (i+1) +" has dimension "+ parameter.getDimension()+ " and not "+dim+". "+
+                                "All parameters must have the same dimension to construct a rectangular design matrix");
                 }
             }
 
diff --git a/src/dr/inference/model/DiagonalMatrix.java b/src/dr/inference/model/DiagonalMatrix.java
index 215c8a6..05ef415 100644
--- a/src/dr/inference/model/DiagonalMatrix.java
+++ b/src/dr/inference/model/DiagonalMatrix.java
@@ -47,6 +47,8 @@ public class DiagonalMatrix extends MatrixParameter {
 //		Parameter.Default(name, parameters);
 //	}
 
+    public Parameter getDiagonalParameter() { return diagonalParameter; }
+
     public double getParameterValue(int row, int col) {
         if (row != col)
             return 0.0;
diff --git a/src/dr/inference/model/FastMatrixParameter.java b/src/dr/inference/model/FastMatrixParameter.java
new file mode 100644
index 0000000..af04ed3
--- /dev/null
+++ b/src/dr/inference/model/FastMatrixParameter.java
@@ -0,0 +1,276 @@
+/*
+ * FastMatrixParameter.java
+ *
+ * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
+ *
+ * This file is part of BEAST.
+ * See the NOTICE file distributed with this work for additional
+ * information regarding copyright ownership and licensing.
+ *
+ * BEAST is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+ *
+ *  BEAST is distributed in the hope that it will be useful,
+ *  but WITHOUT ANY WARRANTY; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ *  GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with BEAST; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA  02110-1301  USA
+ */
+
+package dr.inference.model;
+
+import dr.xml.*;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * @author Marc A. Suchard
+ */
+public class FastMatrixParameter extends CompoundParameter implements MatrixParameterInterface {
+
+    public static final String FAST_MATRIX_PARAMETER = "fastMatrixParameter";
+    public static final String ROW_DIMENSION = MatrixParameter.ROW_DIMENSION;
+    public static final String COLUMN_DIMENSION = MatrixParameter.COLUMN_DIMENSION;
+
+    public FastMatrixParameter(String id, int rowDimension, int colDimension) {
+        super(id);
+        singleParameter = new Parameter.Default(rowDimension * colDimension);
+        addParameter(singleParameter);
+
+        this.rowDimension = rowDimension;
+        this.colDimension = colDimension;
+    }
+
+    public Parameter getParameter(int index) {
+        if (proxyList == null) {
+            proxyList = new ArrayList<ParameterProxy>(colDimension);
+            for (int i = 0; i < colDimension; ++i) {
+                proxyList.add(new ParameterProxy(this, i));
+            }
+        }
+        return proxyList.get(index);
+    }
+
+    class ParameterProxy extends Parameter.Abstract {
+
+        private final int column;
+        private final FastMatrixParameter matrix;
+
+        ParameterProxy(FastMatrixParameter matrix, int column) {
+            this.matrix = matrix;
+            this.column = column;
+        }
+
+        @Override
+        protected void storeValues() {
+            throw new RuntimeException("Do not call");
+        }
+
+        @Override
+        protected void restoreValues() {
+            throw new RuntimeException("Do not call");
+        }
+
+        @Override
+        protected void acceptValues() {
+            throw new RuntimeException("Do not call");
+        }
+
+        @Override
+        protected void adoptValues(Parameter source) {
+            throw new RuntimeException("Do not call");
+        }
+
+        @Override
+        public double getParameterValue(int dim) {
+            return matrix.getParameterValue(dim, column);
+        }
+
+        @Override
+        public void setParameterValue(int dim, double value) {
+            matrix.setParameterValue(dim, column, value);
+        }
+
+        @Override
+        public void setParameterValueQuietly(int dim, double value) {
+            matrix.setParameterValueQuietly(dim, column, value);
+        }
+
+        @Override
+        public void setParameterValueNotifyChangedAll(int dim, double value) {
+            throw new RuntimeException("Do not call");
+        }
+
+        @Override
+        public String getParameterName() {
+            return getId();
+        }
+
+        @Override
+        public void addBounds(Bounds<Double> bounds) {
+            matrix.getUniqueParameter(0).addBounds(bounds);
+        }
+
+        @Override
+        public Bounds<Double> getBounds() {
+            return matrix.getUniqueParameter(0).getBounds();
+        }
+
+        @Override
+        public void addDimension(int index, double value) {
+            throw new RuntimeException("Do not call");
+        }
+
+        @Override
+        public double removeDimension(int index) {
+            throw new RuntimeException("Do not call");
+        }
+
+        @Override
+        public int getDimension() {
+            return rowDimension;
+        }
+    }
+
+    private final int index(int row, int col) {
+        // column-major
+        return col * rowDimension + row;
+    }
+
+    @Override
+    public double getParameterValue(int row, int col) {
+        return singleParameter.getParameterValue(index(row, col));
+    }
+
+    @Override
+    public double[] getParameterValues() {
+        double[] destination = new double[getDimension()];
+        copyParameterValues(destination, 0);
+        return destination;
+    }
+
+    @Override
+    public void copyParameterValues(double[] destination, int offset) {
+        final double[] source = ((Parameter.Default) singleParameter).inspectParameterValues();
+        System.arraycopy(source, 0, destination, offset, source.length);
+    }
+
+    @Override
+    public void setAllParameterValuesQuietly(double[] values, int offset) {
+        final double[] destination = ((Parameter.Default) singleParameter).inspectParameterValues();
+        System.arraycopy(values, offset, destination, 0, destination.length);
+    }
+
+    @Override
+    public void setParameterValue(int row, int col, double value) {
+        singleParameter.setParameterValue(index(row, col), value);
+    }
+
+    @Override
+    public void setParameterValueQuietly(int row, int col, double value) {
+        singleParameter.setParameterValueQuietly(index(row, col), value);
+    }
+
+    @Override
+    public void setParameterValueNotifyChangedAll(int row, int col, double value) {
+        singleParameter.setParameterValueNotifyChangedAll(index(row, col), value);
+    }
+
+    @Override
+    public double[] getColumnValues(int col) {
+        double[] rtn = new double[rowDimension];
+        for (int i = 0; i < rowDimension; ++i) {
+            rtn[i] = getParameterValue(i, col);
+        }
+        return rtn;
+    }
+
+    @Override
+    public double[][] getParameterAsMatrix() {
+        double[][] rtn = new double[rowDimension][colDimension];
+        for (int j = 0; j < colDimension; ++j) {
+            for (int i = 0; i < rowDimension; ++i) {
+                rtn[i][j] = getParameterValue(i, j);
+            }
+        }
+        return rtn;
+    }
+
+    @Override
+    public int getColumnDimension() {
+        return colDimension;
+    }
+
+    @Override
+    public int getRowDimension() {
+        return rowDimension;
+    }
+
+    @Override
+    public int getParameterCount() {
+        return getColumnDimension();
+    }
+
+    @Override
+    public int getUniqueParameterCount() {
+        return 1;
+    }
+
+    @Override
+    public Parameter getUniqueParameter(int index) {
+        return super.getParameter(0);
+    }
+
+    private final int rowDimension;
+    private final int colDimension;
+    private final Parameter singleParameter;
+
+    private List<ParameterProxy> proxyList = null;
+
+    public static XMLObjectParser PARSER = new AbstractXMLObjectParser() {
+
+        public String getParserName() {
+            return FAST_MATRIX_PARAMETER;
+        }
+
+        public Object parseXMLObject(XMLObject xo) throws XMLParseException {
+
+            final String name = xo.hasId() ? xo.getId() : null;
+            final int rowDimension = xo.getIntegerAttribute(ROW_DIMENSION);
+            final int colDimension = xo.getIntegerAttribute(COLUMN_DIMENSION);
+
+            FastMatrixParameter matrixParameter = new FastMatrixParameter(name, rowDimension, colDimension);
+
+            return matrixParameter;
+        }
+
+        //************************************************************************
+        // AbstractXMLObjectParser implementation
+        //************************************************************************
+
+        public String getParserDescription() {
+            return "A fast matrix parameter constructed from a single parameter.";
+        }
+
+        public XMLSyntaxRule[] getSyntaxRules() {
+            return rules;
+        }
+
+        private final XMLSyntaxRule[] rules = {
+                new ElementRule(Parameter.class, 0, Integer.MAX_VALUE),
+                AttributeRule.newIntegerRule(ROW_DIMENSION, false),
+                AttributeRule.newIntegerRule(COLUMN_DIMENSION, false),
+        };
+
+        public Class getReturnType() {
+            return FastMatrixParameter.class;
+        }
+    };
+}
diff --git a/src/dr/inference/model/LatentFactorModel.java b/src/dr/inference/model/LatentFactorModel.java
index 0c508e8..7dd994a 100644
--- a/src/dr/inference/model/LatentFactorModel.java
+++ b/src/dr/inference/model/LatentFactorModel.java
@@ -1,7 +1,7 @@
 /*
  * LatentFactorModel.java
  *
- * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
+ * Copyright (c) 2002-2016 Alexei Drummond, Andrew Rambaut and Marc Suchard
  *
  * This file is part of BEAST.
  * See the NOTICE file distributed with this work for additional
@@ -30,6 +30,8 @@ import dr.util.Citable;
 import dr.util.Citation;
 
 import java.util.List;
+import java.util.ListIterator;
+import java.util.Vector;
 
 
 /**
@@ -59,15 +61,17 @@ public class LatentFactorModel extends AbstractModelLikelihood implements Citabl
     private boolean newModel;
 
     private boolean likelihoodKnown = false;
-    private boolean isDataScaled=false;
+    private boolean isDataScaled = false;
     private boolean storedLikelihoodKnown;
-    private boolean residualKnown=false;
-    private boolean LxFKnown=false;
-    private boolean storedResidualKnown=false;
+    private boolean residualKnown = false;
+    private boolean dataKnown = false;
+    private boolean storedDataKnown;
+    private boolean LxFKnown = false;
+    private boolean storedResidualKnown = false;
     private boolean storedLxFKnown;
-    private boolean traceKnown=false;
+    private boolean traceKnown = false;
     private boolean storedTraceKnown;
-    private boolean logDetColKnown=false;
+    private boolean logDetColKnown = false;
     private boolean storedLogDetColKnown;
     private double trace;
     private double storedTrace;
@@ -77,24 +81,41 @@ public class LatentFactorModel extends AbstractModelLikelihood implements Citabl
     private double storedLogDetCol;
     private boolean[][] changed;
     private boolean[][] storedChanged;
+    private boolean RecomputeResiduals;
+    private boolean RecomputeFactors;
+    private boolean RecomputeLoadings;
+    private Vector<Integer> changedValues;
+    private Vector<Integer> storedChangedValues;
+    private boolean factorsKnown = false;
+    private boolean storedFactorsKnown = false;
+    private boolean loadingsKnown = false;
+    private boolean storedLoadingsKnown = false;
 
     private double[] residual;
     private double[] LxF;
     private double[] storedResidual;
     private double[] storedLxF;
 
-    private double pathParameter=1.0;
+    private double pathParameter = 1.0;
 
     public LatentFactorModel(MatrixParameter data, MatrixParameter factors, MatrixParameter loadings,
                              DiagonalMatrix rowPrecision, DiagonalMatrix colPrecision,
-                             boolean scaleData, Parameter continuous, boolean newModel
+                             boolean scaleData, Parameter continuous, boolean newModel, boolean recomputeResiduals, boolean recomputeFactors, boolean recomputeLoadings
     ) {
         super("");
+        this.RecomputeResiduals = recomputeResiduals;
+        this.RecomputeFactors = recomputeFactors;
+        this.RecomputeLoadings = recomputeLoadings;
+        changedValues = new Vector<Integer>();
+        for (int i = 0; i < data.getDimension(); i++) {
+            changedValues.add(i);
+        }
+        storedChangedValues = new Vector<Integer>();
 //        data = new Matrix(dataIn.getParameterAsMatrix());
 //        factors = new Matrix(factorsIn.getParameterAsMatrix());
 //        loadings = new Matrix(loadingsIn.getParameterAsMatrix());
-        this.newModel=newModel;
-        this.scaleData=scaleData;
+        this.newModel = newModel;
+        this.scaleData = scaleData;
         this.data = data;
         this.factors = factors;
         // Put default bounds on factors
@@ -103,7 +124,7 @@ public class LatentFactorModel extends AbstractModelLikelihood implements Citabl
             System.err.println(p.getId() + " " + p.getDimension());
             p.addBounds(new Parameter.DefaultBounds(Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY, p.getDimension()));
         }
-        this.continuous=continuous;
+        this.continuous = continuous;
 
         this.loadings = loadings;
 
@@ -118,12 +139,12 @@ public class LatentFactorModel extends AbstractModelLikelihood implements Citabl
 //        loadings.addBounds();
 
 
-        changed=new boolean[loadings.getRowDimension()][factors.getColumnDimension()];
-        storedChanged=new boolean[loadings.getRowDimension()][factors.getColumnDimension()];
+        changed = new boolean[loadings.getRowDimension()][factors.getColumnDimension()];
+        storedChanged = new boolean[loadings.getRowDimension()][factors.getColumnDimension()];
 
-        for (int i = 0; i <loadings.getRowDimension() ; i++) {
-            for (int j = 0; j <factors.getColumnDimension() ; j++) {
-                changed[i][j]=true;
+        for (int i = 0; i < loadings.getRowDimension(); i++) {
+            for (int j = 0; j < factors.getColumnDimension(); j++) {
+                changed[i][j] = true;
             }
         }
 
@@ -163,21 +184,21 @@ public class LatentFactorModel extends AbstractModelLikelihood implements Citabl
             throw new RuntimeException("MUST HAVE FEWER FACTORS THAN DATA POINTS\n");
         }
 
-        residual=new double[loadings.getRowDimension()*factors.getColumnDimension()];
-        LxF=new double[loadings.getRowDimension()*factors.getColumnDimension()];
-        storedResidual=new double[residual.length];
-        storedLxF=new double[LxF.length];
+        residual = new double[loadings.getRowDimension() * factors.getColumnDimension()];
+        LxF = new double[loadings.getRowDimension() * factors.getColumnDimension()];
+        storedResidual = new double[residual.length];
+        storedLxF = new double[LxF.length];
 
-        if(!isDataScaled & !scaleData){
-            sData=this.data;
-            isDataScaled=true;
+        if (!isDataScaled & !scaleData) {
+            sData = this.data;
+            isDataScaled = true;
         }
-        if(!isDataScaled){
+        if (!isDataScaled) {
             sData = computeScaledData();
-            isDataScaled=true;
-            for (int i = 0; i <sData.getRowDimension() ; i++) {
-                for (int j = 0; j <sData.getColumnDimension() ; j++) {
-                        this.data.setParameterValue(i,j,sData.getParameterValue(i,j));
+            isDataScaled = true;
+            for (int i = 0; i < sData.getRowDimension(); i++) {
+                for (int j = 0; j < sData.getColumnDimension(); j++) {
+                    this.data.setParameterValue(i, j, sData.getParameterValue(i, j));
 //                    System.out.println(this.data.getParameterValue(i,j));
                 }
 
@@ -185,22 +206,23 @@ public class LatentFactorModel extends AbstractModelLikelihood implements Citabl
             data.fireParameterChangedEvent();
         }
 
-        double sum=0;
-        for(int i=0; i<sData.getRowDimension(); i++){
-            for (int j = 0; j <sData.getColumnDimension() ; j++) {
-                if(continuous.getParameterValue(i)==0 && sData.getParameterValue(i,j)!=0)
-                {sum+=-.5*Math.log(2*StrictMath.PI)-.5*sData.getParameterValue(i,j)*sData.getParameterValue(i,j);}
+        double sum = 0;
+        for (int i = 0; i < sData.getRowDimension(); i++) {
+            for (int j = 0; j < sData.getColumnDimension(); j++) {
+                if (continuous.getParameterValue(i) == 0 && sData.getParameterValue(i, j) != 0) {
+                    sum += -.5 * Math.log(2 * StrictMath.PI) - .5 * sData.getParameterValue(i, j) * sData.getParameterValue(i, j);
+                }
             }
         }
-        System.out.println("Constant Value for Path Sampling (normal 0,1): " + -1*sum);
+        System.out.println("Constant Value for Path Sampling (normal 0,1): " + -1 * sum);
 
-       computeResiduals();
+        computeResiduals();
 //        System.out.print(new Matrix(residual.toComponents()));
 //        System.out.print(calculateLogLikelihood());
     }
 
 
-//    public Matrix getData(){
+    //    public Matrix getData(){
 //        Matrix ans=data;
 //        return ans;
 //    }
@@ -219,16 +241,24 @@ public class LatentFactorModel extends AbstractModelLikelihood implements Citabl
 //        Matrix ans=residual;
 //        return ans;
 //    }
-    public MatrixParameter getFactors(){return factors;}
+    public MatrixParameter getFactors() {
+        return factors;
+    }
 
-    public MatrixParameter getColumnPrecision(){return colPrecision;}
+    public MatrixParameter getColumnPrecision() {
+        return colPrecision;
+    }
 
-    public MatrixParameter getLoadings(){return loadings;}
+    public MatrixParameter getLoadings() {
+        return loadings;
+    }
 
-    public MatrixParameter getData(){return data;}
+    public MatrixParameter getData() {
+        return data;
+    }
 
-    public Parameter returnIntermediate(){
-        if(!residualKnown && checkLoadings()){
+    public Parameter returnIntermediate() {
+        if (!residualKnown && checkLoadings()) {
             computeResiduals();
         }
         return data;
@@ -243,72 +273,112 @@ public class LatentFactorModel extends AbstractModelLikelihood implements Citabl
 //    }
 
 
-    public MatrixParameter getScaledData(){return data;}
+    public MatrixParameter getScaledData() {
+        return data;
+    }
 
-    public Parameter getContinuous(){return continuous;}
+    public Parameter getContinuous() {
+        return continuous;
+    }
 
-    public int getFactorDimension(){return factors.getRowDimension();}
+    public int getFactorDimension() {
+        return factors.getRowDimension();
+    }
 
-    private void Multiply(MatrixParameter Left, MatrixParameter Right, double[] answer){
-        int dim=Left.getColumnDimension();
-        int n=Left.getRowDimension();
-        int p=Right.getColumnDimension();
+    public double[] getResidual() {
+        computeResiduals();
+        return residual;
+    }
 
-        for (int i = 0; i < n; i++) {
-            for (int j = 0; j < p; j++) {
-                if((changed[i][j]==true && continuous.getParameterValue(i)!=0) || newModel){
-                double sum = 0;
-                for (int k = 0; k < dim; k++)
-                    sum += Left.getParameterValue(i, k) * Right.getParameterValue(k,j);
-                answer[i*p+j]=sum;
-                //changed[i][j]=false;
+    private void Multiply(MatrixParameter Left, MatrixParameter Right, double[] answer) {
+        int dim = Left.getColumnDimension();
+        int n = Left.getRowDimension();
+        int p = Right.getColumnDimension();
+
+        if ((factorsKnown == false && !RecomputeFactors) || (!dataKnown && !RecomputeResiduals) || (!loadingsKnown && !RecomputeLoadings)) {
+            double sum;
+            ListIterator<Integer> li = changedValues.listIterator();
+            while (li.hasNext()) {
+                int index = li.next();
+                int i = index % n;
+                int j = index / n;
+
+                sum = 0;
+                for (int k = 0; k < dim; k++) {
+//                System.out.println(data.getColumnDimension());
+//                System.out.println(index);
+                    sum += Left.getParameterValue(i, k) *
+                            Right.getParameterValue(k, j);
+                }
+                answer[i * p + j] = sum;
+            }
+        } else {
+            for (int i = 0; i < n; i++) {
+                for (int j = 0; j < p; j++) {
+                    if ((changed[i][j] == true && continuous.getParameterValue(i) != 0) || newModel) {
+                        double sum = 0;
+                        for (int k = 0; k < dim; k++)
+                            sum += Left.getParameterValue(i, k) * Right.getParameterValue(k, j);
+                        answer[i * p + j] = sum;
+                        //changed[i][j]=false;
+                    }
                 }
             }
         }
     }
 
-    private void add(MatrixParameter Left, MatrixParameter Right, double[] answer){
-        int row=Left.getRowDimension();
-        int col=Left.getColumnDimension();
-        for (int i = 0; i <row ; i++) {
+    private void add(MatrixParameter Left, MatrixParameter Right, double[] answer) {
+        int row = Left.getRowDimension();
+        int col = Left.getColumnDimension();
+        for (int i = 0; i < row; i++) {
             for (int j = 0; j < col; j++) {
-                answer[i*col+j]=Left.getParameterValue(i,j)+Right.getParameterValue(i,j);
+                answer[i * col + j] = Left.getParameterValue(i, j) + Right.getParameterValue(i, j);
             }
 
         }
     }
 
-    private void subtract(MatrixParameter Left, double[] Right, double[] answer){
-        int row=Left.getRowDimension();
-        int col=Left.getColumnDimension();
-        for (int i = 0; i <row ; i++) {
-            if(continuous.getParameterValue(i)!=0 ||newModel){
-                for (int j = 0; j < col; j++) {
-                       answer[i*col+j]=Left.getParameterValue(i,j)-Right[i*col+j];
-                }
+    private void subtract(MatrixParameter Left, double[] Right, double[] answer) {
+        int row = Left.getRowDimension();
+        int col = Left.getColumnDimension();
+        if ((!RecomputeResiduals && !dataKnown) || (!RecomputeFactors && !factorsKnown) || (!RecomputeLoadings && !loadingsKnown)) {
+            while (!changedValues.isEmpty()) {
+                int id = changedValues.remove(0);
+                int tcol = id / row;
+                int trow = id % row;
+//                System.out.println(Left.getParameterValue(id)==Left.getParameterValue(tcol,trow));
+                answer[trow * col + tcol] = Left.getParameterValue(id) - Right[trow * col + tcol];
             }
-//            else{
-//                for (int j = 0; j <col; j++) {
-//                    Left.setParameterValueQuietly(i,j, Right[i*col+j]);
+        } else {
+            for (int i = 0; i < row; i++) {
+                if (continuous.getParameterValue(i) != 0 || newModel) {
+                    for (int j = 0; j < col; j++) {
+                        answer[i * col + j] = Left.getParameterValue(i, j) - Right[i * col + j];
+                    }
+                }
+//              else{
+//                  for (int j = 0; j <col; j++) {
+//                        Left.setParameterValueQuietly(i,j, Right[i*col+j]);
+//                  }
+//                    containsDiscrete=true;
 //                }
-//                containsDiscrete=true;
-//            }
 
+            }
         }
 //        if(containsDiscrete){
 //            Left.fireParameterChangedEvent();}
     }
 
-    private double TDTTrace(double[] array, DiagonalMatrix middle){
-        int innerDim=middle.getRowDimension();
-        int outerDim=array.length/innerDim;
-        double sum=0;
-        for (int j = 0; j <innerDim ; j++){
-            if(continuous.getParameterValue(j)!=0 || newModel) {
+    private double TDTTrace(double[] array, DiagonalMatrix middle) {
+        int innerDim = middle.getRowDimension();
+        int outerDim = array.length / innerDim;
+        double sum = 0;
+        for (int j = 0; j < innerDim; j++) {
+            if (continuous.getParameterValue(j) != 0 || newModel) {
                 for (int i = 0; i < outerDim; i++) {
-                        double s1 = array[j * outerDim + i];
-                        double s2 = middle.getParameterValue(j, j);
-                        sum += s1 * s1 * s2;
+                    double s1 = array[j * outerDim + i];
+                    double s2 = middle.getParameterValue(j, j);
+                    sum += s1 * s1 * s2;
                 }
             }
         }
@@ -316,60 +386,59 @@ public class LatentFactorModel extends AbstractModelLikelihood implements Citabl
     }
 
 
-
-    private MatrixParameter computeScaledData(){
-        MatrixParameter answer=new MatrixParameter(data.getParameterName() + ".scaled");
+    private MatrixParameter computeScaledData() {
+        MatrixParameter answer = new MatrixParameter(data.getParameterName() + ".scaled");
         answer.setDimensions(data.getRowDimension(), data.getColumnDimension());
- //       Matrix answer=new Matrix(data.getRowDimension(), data.getColumnDimension());
-        double[][] aData=data.getParameterAsMatrix();
-        double[] meanList=new double[data.getRowDimension()];
-        double[] varList=new double[data.getRowDimension()];
-        double[] count=new double[data.getRowDimension()];
-        for(int i=0; i<data.getColumnDimension(); i++){
-            for (int j=0; j<data.getRowDimension(); j++){
-                if(data.getParameterValue(j,i)!=0) {
+        //       Matrix answer=new Matrix(data.getRowDimension(), data.getColumnDimension());
+        double[][] aData = data.getParameterAsMatrix();
+        double[] meanList = new double[data.getRowDimension()];
+        double[] varList = new double[data.getRowDimension()];
+        double[] count = new double[data.getRowDimension()];
+        for (int i = 0; i < data.getColumnDimension(); i++) {
+            for (int j = 0; j < data.getRowDimension(); j++) {
+                if (data.getParameterValue(j, i) != 0) {
                     meanList[j] += data.getParameterValue(j, i);
                     count[j]++;
                 }
             }
         }
-        for(int i=0; i<data.getRowDimension(); i++){
-            if(continuous.getParameterValue(i)==1)
-                meanList[i]=meanList[i]/count[i];
+        for (int i = 0; i < data.getRowDimension(); i++) {
+            if (continuous.getParameterValue(i) == 1)
+                meanList[i] = meanList[i] / count[i];
             else
-                meanList[i]=0;
+                meanList[i] = 0;
         }
 
-        double[][] answerTemp=new double[data.getRowDimension()][data.getColumnDimension()];
-        for(int i=0; i<data.getColumnDimension(); i++){
-            for(int j=0; j<data.getRowDimension(); j++){
-                if(aData[j][i]!=0) {
+        double[][] answerTemp = new double[data.getRowDimension()][data.getColumnDimension()];
+        for (int i = 0; i < data.getColumnDimension(); i++) {
+            for (int j = 0; j < data.getRowDimension(); j++) {
+                if (aData[j][i] != 0) {
                     answerTemp[j][i] = aData[j][i] - meanList[j];
                 }
             }
         }
 //        System.out.println(new Matrix(answerTemp));
 
-        for(int i=0; i<data.getColumnDimension(); i++){
-            for(int j=0; j<data.getRowDimension(); j++){
-                varList[j]+=answerTemp[j][i]*answerTemp[j][i];
+        for (int i = 0; i < data.getColumnDimension(); i++) {
+            for (int j = 0; j < data.getRowDimension(); j++) {
+                varList[j] += answerTemp[j][i] * answerTemp[j][i];
             }
         }
 
-        for(int i=0; i<data.getRowDimension(); i++){
-            if(continuous.getParameterValue(i)==1){
-            varList[i]=varList[i]/(count[i]-1);
-            varList[i]=StrictMath.sqrt(varList[i]);}
-            else{
-                varList[i]=1;
+        for (int i = 0; i < data.getRowDimension(); i++) {
+            if (continuous.getParameterValue(i) == 1) {
+                varList[i] = varList[i] / (count[i] - 1);
+                varList[i] = StrictMath.sqrt(varList[i]);
+            } else {
+                varList[i] = 1;
             }
         }
 //        System.out.println(data.getColumnDimension());
 //        System.out.println(data.getRowDimension());
 
-        for(int i=0; i<data.getColumnDimension(); i++){
-            for(int j=0; j<data.getRowDimension(); j++){
-                answer.setParameterValue(j,i, answerTemp[j][i]/varList[j]);
+        for (int i = 0; i < data.getColumnDimension(); i++) {
+            for (int j = 0; j < data.getRowDimension(); j++) {
+                answer.setParameterValue(j, i, answerTemp[j][i] / varList[j]);
             }
         }
 //        System.out.println(new Matrix(answerTemp));
@@ -385,14 +454,17 @@ public class LatentFactorModel extends AbstractModelLikelihood implements Citabl
 //    LxFKnown=false;
 
 
-
 //        if(firstTime || (!factorVariablesChanged.empty() && !loadingVariablesChanged.empty())){
-    if(!LxFKnown){
-    Multiply(loadings, factors, LxF);
-        LxFKnown=true;
-    }
+        if (!LxFKnown) {
+            Multiply(loadings, factors, LxF);
+
+        }
         subtract(data, LxF, residual);
-        residualKnown=true;
+        LxFKnown = true;
+        residualKnown = true;
+        factorsKnown = true;
+        loadingsKnown = true;
+        dataKnown = true;
 //        firstTime=false;}
 //        else{
 //            while(!factorVariablesChanged.empty()){
@@ -418,21 +490,28 @@ public class LatentFactorModel extends AbstractModelLikelihood implements Citabl
      */
     @Override
     protected void storeState() {
-        data.storeParameterValues();
-        loadings.storeValues();
-        factors.storeValues();
+//        data.storeParameterValues();
+//        loadings.storeValues();
+//        factors.storeValues();
         storedLogLikelihood = logLikelihood;
         storedLikelihoodKnown = likelihoodKnown;
-        storedLogDetColKnown=logDetColKnown;
-        storedLogDetCol=logDetCol;
-        storedTrace=trace;
-        storedTraceKnown=traceKnown;
-        storedResidualKnown=residualKnown;
-        storedLxFKnown=LxFKnown;
+        storedLogDetColKnown = logDetColKnown;
+        storedLogDetCol = logDetCol;
+        storedTrace = trace;
+        storedTraceKnown = traceKnown;
+        storedResidualKnown = residualKnown;
+        storedLxFKnown = LxFKnown;
+        storedFactorsKnown = factorsKnown;
+        storedLoadingsKnown = loadingsKnown;
+        storedDataKnown = dataKnown;
         System.arraycopy(residual, 0, storedResidual, 0, residual.length);
 
         System.arraycopy(LxF, 0, storedLxF, 0, residual.length);
         System.arraycopy(changed, 0, storedChanged, 0, changed.length);
+//        for (int i = 0; i <changedValues.size() ; i++) {
+//            storedChangedValues.addElement(changedValues.elementAt(i));    ;
+//        }
+
 
     }
 
@@ -443,28 +522,33 @@ public class LatentFactorModel extends AbstractModelLikelihood implements Citabl
      */
     @Override
     protected void restoreState() {
-        changed=storedChanged;
-        data.restoreParameterValues();
-        loadings.restoreValues();
-        factors.restoreValues();
+        changed = storedChanged;
+//        data.restoreParameterValues();
+//        loadings.restoreValues();
+//        factors.restoreValues();
         logLikelihood = storedLogLikelihood;
         likelihoodKnown = storedLikelihoodKnown;
-        trace=storedTrace;
-        traceKnown=storedTraceKnown;
-        residualKnown=storedResidualKnown;
-        LxFKnown=storedLxFKnown;
-        residual=storedResidual;
-        storedResidual=new double[residual.length];
-        LxF=storedLxF;
-        storedLxF=new double[LxF.length];
-        logDetCol=storedLogDetCol;
-        logDetColKnown=storedLogDetColKnown;
+        trace = storedTrace;
+        traceKnown = storedTraceKnown;
+        residualKnown = storedResidualKnown;
+        LxFKnown = storedLxFKnown;
+        double[] temp = residual;
+        residual = storedResidual;
+        storedResidual = temp;
+        temp = LxF;
+        LxF = storedLxF;
+        storedLxF = temp;
+        logDetCol = storedLogDetCol;
+        logDetColKnown = storedLogDetColKnown;
+        factorsKnown = storedFactorsKnown;
+        loadingsKnown = storedLoadingsKnown;
+        dataKnown = storedDataKnown;
+//        changedValues=storedChangedValues;
+//        storedChangedValues=new Vector<Integer>();
 
 //        System.out.println(data.getParameterValue(10, 19));
 
 
-
-
 //        int index=0;
 //        for (int i = 0; i <continuous.getDimension() ; i++) {
 //            if(continuous.getParameterValue(i)==0){
@@ -495,32 +579,52 @@ public class LatentFactorModel extends AbstractModelLikelihood implements Citabl
      */
     @Override
     protected void handleVariableChangedEvent(Variable variable, int index, Parameter.ChangeType type) {
-        if(variable==getScaledData()){
-            residualKnown=false;
-            traceKnown=false;
-            likelihoodKnown=false;
+        if (variable == getScaledData()) {
+            residualKnown = false;
+            traceKnown = false;
+            likelihoodKnown = false;
+            if (!RecomputeResiduals) {
+                if (index != -1)
+                    changedValues.add(index);
+                dataKnown = false;
+            }
         }
-        if(variable==factors){
-
-
+        if (variable == factors) {
 
 
 //            for (int i = 0; i <loadings.getRowDimension() ; i++) {
 //                changed[i][index/factors.getRowDimension()]=true;
 //            }
 
+            if (!RecomputeFactors) {
+                factorsKnown = false;
+                int row = index / factors.getRowDimension();
+                if (index != -1)
+                    for (int i = 0; i < data.getRowDimension(); i++) {
+                        changedValues.add(row * data.getRowDimension() + i);
+                    }
 
+            }
 
 
 //            factorVariablesChanged.push(index);
 
 
-            LxFKnown=false;
-            residualKnown=false;
-            traceKnown=false;
+            LxFKnown = false;
+            residualKnown = false;
+            traceKnown = false;
             likelihoodKnown = false;
         }
-        if(variable==loadings){
+        if (variable == loadings) {
+            if (!RecomputeLoadings) {
+                loadingsKnown = false;
+                int col = index % loadings.getRowDimension();
+                if (index != -1) {
+                    for (int i = 0; i < data.getColumnDimension(); i++) {
+                        changedValues.add(i * data.getRowDimension() + col);
+                    }
+                }
+            }
 //            System.out.println("Loadings Changed");
 //            System.out.println(index);
 //            System.out.println(index/loadings.getRowDimension());
@@ -534,14 +638,14 @@ public class LatentFactorModel extends AbstractModelLikelihood implements Citabl
 //            factorVariablesChanged.push(index);
 
 
-            LxFKnown=false;
-            residualKnown=false;
-            traceKnown=false;
+            LxFKnown = false;
+            residualKnown = false;
+            traceKnown = false;
             likelihoodKnown = false;
         }
-        if(variable==colPrecision){
-            logDetColKnown=false;
-            traceKnown=false;
+        if (variable == colPrecision) {
+            logDetColKnown = false;
+            traceKnown = false;
             likelihoodKnown = false;
         }
 
@@ -572,7 +676,7 @@ public class LatentFactorModel extends AbstractModelLikelihood implements Citabl
      */
     @Override
     public double getLogLikelihood() {
-       likelihoodKnown=false;
+        likelihoodKnown = false;
         if (!likelihoodKnown) {
             logLikelihood = calculateLogLikelihood();
             likelihoodKnown = true;
@@ -588,11 +692,9 @@ public class LatentFactorModel extends AbstractModelLikelihood implements Citabl
         likelihoodKnown = false;
     }
 
-    private boolean checkLoadings(){
-        for(int i=0; i<StrictMath.min(loadings.getRowDimension(),loadings.getColumnDimension()); i++)
-        {
-            if(loadings.getParameterValue(i,i)<0)
-            {
+    private boolean checkLoadings() {
+        for (int i = 0; i < StrictMath.min(loadings.getRowDimension(), loadings.getColumnDimension()); i++) {
+            if (loadings.getParameterValue(i, i) < 0) {
                 return false;
             }
         }
@@ -611,31 +713,30 @@ public class LatentFactorModel extends AbstractModelLikelihood implements Citabl
 
 
 //        residualKnown=false;
-        if(!residualKnown){
+        if (!residualKnown) {
             computeResiduals();
         }
 
 
-
 //        expPart = residual.productInPlace(rowPrecision.productInPlace(residual.transposeThenProductInPlace(colPrecision, TResidualxC), RxTRxC), expPart);
 //            logDetRow=StrictMath.log(rowPrecision.getDeterminant());
- //       logDetColKnown=false;
-        if(!logDetColKnown){
-            logDetColKnown=true;
-            double product=1;
-            for (int i = 0; i <colPrecision.getRowDimension() ; i++) {
-                if (continuous.getParameterValue(i)!=0)
-                    product*=colPrecision.getParameterValue(i,i);
+        //       logDetColKnown=false;
+        if (!logDetColKnown) {
+            logDetColKnown = true;
+            double product = 1;
+            for (int i = 0; i < colPrecision.getRowDimension(); i++) {
+                if (continuous.getParameterValue(i) != 0)
+                    product *= colPrecision.getParameterValue(i, i);
             }
 
-            logDetCol=StrictMath.log(product);
+            logDetCol = StrictMath.log(product);
         }
 //            System.out.println(logDetCol);
 //            System.out.println(logDetRow);
 //        traceKnown=false;
-        if(!traceKnown){
-            traceKnown=true;
-            trace=TDTTrace(residual, colPrecision);
+        if (!traceKnown) {
+            traceKnown = true;
+            trace = TDTTrace(residual, colPrecision);
         }
 //        if(expPart.getRowDimension()!=expPart.getColumnDimension())
 //        {
@@ -651,9 +752,9 @@ public class LatentFactorModel extends AbstractModelLikelihood implements Citabl
 //        System.out.println(expPart);
 
 
-       return -.5*trace + .5*data.getColumnDimension()*logDetCol
+        return -.5 * trace + .5 * data.getColumnDimension() * logDetCol + .5 * data.getRowDimension()
 
-               -.5*data.getRowDimension()*data.getColumnDimension()*Math.log(2.0 * StrictMath.PI);
+                - .5 * data.getRowDimension() * data.getColumnDimension() * Math.log(2.0 * StrictMath.PI);
     }
 
 //    public void setPathParameter(double beta){
diff --git a/src/dr/inference/model/Likelihood.java b/src/dr/inference/model/Likelihood.java
index 4f5d97e..4842bc3 100644
--- a/src/dr/inference/model/Likelihood.java
+++ b/src/dr/inference/model/Likelihood.java
@@ -29,6 +29,7 @@ import dr.inference.loggers.Loggable;
 import dr.util.Identifiable;
 
 import java.io.Serializable;
+import java.util.Arrays;
 import java.util.HashSet;
 import java.util.Set;
 
@@ -65,6 +66,12 @@ public interface Likelihood extends Loggable, Identifiable {
      */
     String prettyName();
 
+	/**
+	 * Get the set of sub-component likelihoods that this likelihood uses
+	 * @return
+	 */
+	Set<Likelihood> getLikelihoodSet();
+
     /**
      * @return is the likelihood used in the MCMC?
      */
@@ -132,6 +139,10 @@ public interface Likelihood extends Loggable, Identifiable {
 
 		protected abstract double calculateLogLikelihood();
 
+		public Set<Likelihood> getLikelihoodSet() {
+			return new HashSet<Likelihood>(Arrays.asList(this));
+		}
+
 		public String toString() {
             // don't call any "recalculating" stuff like getLogLikelihood() in toString -
             // this interferes with the debugger.
@@ -211,5 +222,6 @@ public interface Likelihood extends Loggable, Identifiable {
 
     // set to store all created likelihoods
     final static Set<Likelihood> FULL_LIKELIHOOD_SET = new HashSet<Likelihood>();
+	final static Set<Likelihood> CONNECTED_LIKELIHOOD_SET = new HashSet<Likelihood>();
 
 }
diff --git a/src/dr/inference/model/MatrixParameter.java b/src/dr/inference/model/MatrixParameter.java
index 8432e46..fc6b49f 100644
--- a/src/dr/inference/model/MatrixParameter.java
+++ b/src/dr/inference/model/MatrixParameter.java
@@ -33,7 +33,7 @@ import java.util.StringTokenizer;
  * @author Marc Suchard
  * @author Max Tolkoff
  */
-public class MatrixParameter extends CompoundParameter {
+public class MatrixParameter extends CompoundParameter implements MatrixParameterInterface {
 
     public final static String MATRIX_PARAMETER = "matrixParameter";
 
@@ -58,6 +58,17 @@ public class MatrixParameter extends CompoundParameter {
         setDimensions(row, column, a);
     }
 
+    public void setParameterValue(int row, int column, double a) {
+        getParameter(column).setParameterValue(row, a);
+    }
+
+    public void setParameterValueQuietly(int row, int column, double a){
+        getParameter(column).setParameterValueQuietly(row, a);
+    }
+
+    public void setParameterValueNotifyChangedAll(int row, int column, double val){
+        getParameter(column).setParameterValueNotifyChangedAll(row, val);
+    }
 
     public static MatrixParameter recast(String name, CompoundParameter compoundParameter) {
         final int count = compoundParameter.getParameterCount();
@@ -81,7 +92,7 @@ public class MatrixParameter extends CompoundParameter {
         return rowValues;
     }
 
-    public double[] getColumnValues(int col){
+    public double[] getColumnValues(int col) {
         return this.getParameter(col).getParameterValues();
     }
 
@@ -148,6 +159,31 @@ public class MatrixParameter extends CompoundParameter {
         return getParameter(0).getDimension();
     }
 
+    @Override
+    public int getUniqueParameterCount() {
+        return getParameterCount();
+    }
+
+    @Override
+    public Parameter getUniqueParameter(int index) {
+        return super.getParameter(index);
+    }
+
+    @Override
+    public void copyParameterValues(double[] destination, int offset) {
+        final int length = getDimension();
+        for (int i = 0; i < length; ++i) {
+            destination[offset + i] = getParameterValue(i);
+        }
+    }
+
+    @Override
+    public void setAllParameterValuesQuietly(double[] values, int offset) {
+        for (int i = 0; i < getDimension(); ++i) {
+            setParameterValueQuietly(i, values[offset + i]);
+        }
+    }
+
     public String toSymmetricString() {
         StringBuilder sb = new StringBuilder("{");
         int dim = getRowDimension();
@@ -463,11 +499,11 @@ public class MatrixParameter extends CompoundParameter {
 //        throw new RuntimeException("Not implemented yet!");
 //    }
 
-    private static final String ROW_DIMENSION = "rows";
-    private static final String COLUMN_DIMENSION = "columns";
-    private static final String TRANSPOSE = "transpose";
-    private static final String AS_COMPOUND = "asCompoundParameter";
-    private static final String BEHAVIOR = "test";
+    public static final String ROW_DIMENSION = "rows";
+    public static final String COLUMN_DIMENSION = "columns";
+    public static final String TRANSPOSE = "transpose";
+    public static final String AS_COMPOUND = "asCompoundParameter";
+    public static final String BEHAVIOR = "test";
 
     public static XMLObjectParser PARSER = new AbstractXMLObjectParser() {
 
diff --git a/src/dr/evomodel/coalescent/CoalescentIntervalProvider.java b/src/dr/inference/model/MatrixParameterInterface.java
similarity index 51%
copy from src/dr/evomodel/coalescent/CoalescentIntervalProvider.java
copy to src/dr/inference/model/MatrixParameterInterface.java
index 4de2a8e..ddb4e26 100644
--- a/src/dr/evomodel/coalescent/CoalescentIntervalProvider.java
+++ b/src/dr/inference/model/MatrixParameterInterface.java
@@ -1,5 +1,5 @@
 /*
- * CoalescentIntervalProvider.java
+ * MatrixParameterInterface.java
  *
  * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
  *
@@ -23,21 +23,40 @@
  * Boston, MA  02110-1301  USA
  */
 
-package dr.evomodel.coalescent;
-
-import dr.evolution.coalescent.IntervalType;
+package dr.inference.model;
 
 /**
- * @author Guy Baele
- * @author Marc Suchard
+ * Created by msuchard on 12/30/15.
  */
-public interface CoalescentIntervalProvider {
+public interface MatrixParameterInterface extends Variable<Double> {
+
+    double getParameterValue(int row, int col);
+
+    Parameter getParameter(int column); // Can return a proxy
+
+    void setParameterValue(int row, int col, double value);
+
+    void setParameterValueQuietly(int row, int col, double value);
+
+    void setParameterValueNotifyChangedAll(int row, int col, double value);
+
+    double[] getColumnValues(int col);
+
+    double[][] getParameterAsMatrix();
+
+    int getColumnDimension();
+
+    int getRowDimension();
+
+    double[] getParameterValues();
+
+    int getUniqueParameterCount();
 
-    public int getCoalescentIntervalDimension();
+    Parameter getUniqueParameter(int index);
 
-    public double getCoalescentInterval(int i);
+    void copyParameterValues(double[] destination, int offset);
 
-    public int getCoalescentIntervalLineageCount(int i);
+//    void setAllParameterValuesQuietly(double[] values);
 
-    public IntervalType getCoalescentIntervalType(int i);
+    void setAllParameterValuesQuietly(double[] values, int offset);
 }
diff --git a/src/dr/inference/model/Model.java b/src/dr/inference/model/Model.java
index dad03d6..b67ac53 100644
--- a/src/dr/inference/model/Model.java
+++ b/src/dr/inference/model/Model.java
@@ -170,7 +170,8 @@ public interface Model extends Identifiable, Serializable {
 
 
     // set to store all created models
-    final static Set<Model> FULL_MODEL_SET = new HashSet<Model>(); 
+    final static Set<Model> FULL_MODEL_SET = new HashSet<Model>();
+	final static Set<Model> CONNECTED_MODEL_SET = new HashSet<Model>();
 
 }
 
diff --git a/src/dr/inference/model/Parameter.java b/src/dr/inference/model/Parameter.java
index 0eabce3..26d45bd 100644
--- a/src/dr/inference/model/Parameter.java
+++ b/src/dr/inference/model/Parameter.java
@@ -26,6 +26,7 @@
 package dr.inference.model;
 
 import dr.inference.parallel.MPIServices;
+import dr.xml.Reportable;
 import org.w3c.dom.Document;
 import org.w3c.dom.Element;
 
@@ -163,11 +164,12 @@ public interface Parameter extends Statistic, Variable<Double> {
     boolean isUsed();
 
     public final static Set<Parameter> FULL_PARAMETER_SET = new LinkedHashSet<Parameter>();
+    public final static Set<Parameter> CONNECTED_PARAMETER_SET = new LinkedHashSet<Parameter>();
 
     /**
      * Abstract base class for parameters
      */
-    public abstract class Abstract extends Statistic.Abstract implements Parameter {
+    public abstract class Abstract extends Statistic.Abstract implements Parameter, Reportable {
 
         protected Abstract() {
             FULL_PARAMETER_SET.add(this);
@@ -431,6 +433,34 @@ public interface Parameter extends Statistic, Variable<Double> {
             return buffer.toString();
         }
 
+        public String getReport() {
+            StringBuilder sb = new StringBuilder();
+            Bounds bounds = null;
+            try {
+                bounds = getBounds();
+            } catch (NullPointerException e) {
+                // Do nothing
+            }
+
+            for (int i = 0; i < getDimension(); ++i) {
+                if (getDimensionName(i) != null) {
+                    sb.append(getDimensionName(i)).append("=");
+                }
+                sb.append(String.valueOf(getParameterValue(i)));
+
+                if (bounds != null) {
+                    sb.append("[").append(String.valueOf(bounds.getLowerLimit(i)));
+                    sb.append(", ").append(String.valueOf(bounds.getUpperLimit(i))).append("]");
+                }
+
+                if (i < getDimension() - 1) {
+                    sb.append(", ");
+                }
+            }
+
+            return sb.toString();
+        }
+
         public Element createElement(Document document) {
             throw new IllegalArgumentException();
         }
diff --git a/src/dr/inference/model/PathLikelihood.java b/src/dr/inference/model/PathLikelihood.java
index 4f4e259..7090b02 100644
--- a/src/dr/inference/model/PathLikelihood.java
+++ b/src/dr/inference/model/PathLikelihood.java
@@ -29,6 +29,8 @@ import dr.evomodel.continuous.SoftThresholdLikelihood;
 import dr.xml.*;
 
 import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.Set;
 
 /**
  * A likelihood function which is simply the product of a set of likelihood functions.
@@ -114,6 +116,14 @@ public class PathLikelihood implements Likelihood {
         return destination;
     }
 
+    @Override
+    public Set<Likelihood> getLikelihoodSet() {
+        Set<Likelihood> set = new HashSet<Likelihood>();
+        set.add(source);
+        set.add(destination);
+        return set;
+    }
+
     public void makeDirty() {
         source.makeDirty();
         destination.makeDirty();
diff --git a/src/dr/inference/model/SumParameter.java b/src/dr/inference/model/SumParameter.java
new file mode 100644
index 0000000..05a491b
--- /dev/null
+++ b/src/dr/inference/model/SumParameter.java
@@ -0,0 +1,129 @@
+/*
+ * ProductParameter.java
+ *
+ * Copyright (c) 2002-2012 Alexei Drummond, Andrew Rambaut and Marc Suchard
+ *
+ * This file is part of BEAST.
+ * See the NOTICE file distributed with this work for additional
+ * information regarding copyright ownership and licensing.
+ *
+ * BEAST is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+ *
+ *  BEAST is distributed in the hope that it will be useful,
+ *  but WITHOUT ANY WARRANTY; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ *  GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with BEAST; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA  02110-1301  USA
+ */
+
+package dr.inference.model;
+
+import java.util.List;
+
+/**
+ * @author Marc Suchard
+ */
+public class SumParameter extends Parameter.Abstract implements VariableListener {
+
+    public SumParameter(List<Parameter> parameter) {
+        this.paramList = parameter;
+        for (Parameter p : paramList) {
+            p.addVariableListener(this);
+        }
+    }
+
+    public int getDimension() {
+        return paramList.get(0).getDimension();
+    }
+
+    protected void storeValues() {
+        for (Parameter p : paramList) {
+            p.storeParameterValues();
+        }
+    }
+
+    protected void restoreValues() {
+        for (Parameter p : paramList) {
+            p.restoreParameterValues();
+        }
+    }
+
+    protected void acceptValues() {
+        for (Parameter p : paramList) {
+            p.acceptParameterValues();
+        }
+    }
+
+    protected void adoptValues(Parameter source) {
+        throw new RuntimeException("Not implemented");
+    }
+
+    public double getParameterValue(int dim) {
+        double value = 0;
+        for (int i = 0; i < paramList.size(); i++) {
+            if (i == 0){
+                value = paramList.get(i).getParameterValue(dim);
+            } else {
+                value += paramList.get(i).getParameterValue(dim);
+            }
+        }
+        return value;
+    }
+
+    public void setParameterValue(int dim, double value) {
+        throw new RuntimeException("Not implemented");
+    }
+
+    public void setParameterValueQuietly(int dim, double value) {
+        throw new RuntimeException("Not implemented");
+    }
+
+    public void setParameterValueNotifyChangedAll(int dim, double value){
+        throw new RuntimeException("Not implemented");
+    }
+
+    public String getParameterName() {
+        if (getId() == null) {
+            StringBuilder sb = new StringBuilder("sum");
+            for (Parameter p : paramList) {
+                sb.append(".").append(p.getId());
+            }
+            setId(sb.toString());
+        }
+        return getId();
+    }
+
+    public void addBounds(Bounds bounds) {
+        this.bounds = bounds;
+    }
+
+    public Bounds<Double> getBounds() {
+        if (bounds == null) {
+            return paramList.get(0).getBounds(); // TODO
+        } else {
+            return bounds;
+        }
+    }
+
+    public void addDimension(int index, double value) {
+        throw new RuntimeException("Not yet implemented.");
+    }
+
+    public double removeDimension(int index) {
+        throw new RuntimeException("Not yet implemented.");
+    }
+
+    public void variableChangedEvent(Variable variable, int index, ChangeType type) {
+        fireParameterChangedEvent(index,type);
+    }
+
+    private final List<Parameter> paramList;
+    private Bounds bounds = null;
+}
diff --git a/src/dr/inference/model/TestThreadedCompoundLikelihood.java b/src/dr/inference/model/TestThreadedCompoundLikelihood.java
index df68e38..d3e3571 100644
--- a/src/dr/inference/model/TestThreadedCompoundLikelihood.java
+++ b/src/dr/inference/model/TestThreadedCompoundLikelihood.java
@@ -28,7 +28,9 @@ package dr.inference.model;
 import dr.util.NumberFormatter;
 
 import java.util.ArrayList;
+import java.util.HashSet;
 import java.util.List;
+import java.util.Set;
 import java.util.concurrent.locks.Condition;
 import java.util.concurrent.locks.ReentrantLock;
 
@@ -73,6 +75,17 @@ public class TestThreadedCompoundLikelihood implements Likelihood {
         return likelihoods.get(i);
     }
 
+    @Override
+    public Set<Likelihood> getLikelihoodSet() {
+        Set<Likelihood> set = new HashSet<Likelihood>();
+        for (Likelihood l : likelihoods) {
+            set.add(l);
+            set.addAll(l.getLikelihoodSet());
+        }
+        return set;
+    }
+
+
     // **************************************************************
     // Likelihood IMPLEMENTATION
     // **************************************************************
diff --git a/src/dr/inference/model/ThreadedCompoundLikelihood.java b/src/dr/inference/model/ThreadedCompoundLikelihood.java
index 631f684..07e78c3 100644
--- a/src/dr/inference/model/ThreadedCompoundLikelihood.java
+++ b/src/dr/inference/model/ThreadedCompoundLikelihood.java
@@ -26,7 +26,9 @@
 package dr.inference.model;
 
 import java.util.ArrayList;
+import java.util.HashSet;
 import java.util.List;
+import java.util.Set;
 import java.util.concurrent.locks.Condition;
 import java.util.concurrent.locks.ReentrantLock;
 
@@ -68,6 +70,16 @@ public class ThreadedCompoundLikelihood implements Likelihood {
 		}
 	}
 
+	@Override
+	public Set<Likelihood> getLikelihoodSet() {
+		Set<Likelihood> set = new HashSet<Likelihood>();
+		for (Likelihood l : likelihoods) {
+			set.add(l);
+			set.addAll(l.getLikelihoodSet());
+		}
+		return set;
+	}
+
 	public int getLikelihoodCount() {
 		return likelihoods.size();
 	}
diff --git a/src/dr/inference/operators/AbstractHamiltonianMCOperator.java b/src/dr/inference/operators/AbstractHamiltonianMCOperator.java
new file mode 100644
index 0000000..e92a331
--- /dev/null
+++ b/src/dr/inference/operators/AbstractHamiltonianMCOperator.java
@@ -0,0 +1,30 @@
+package dr.inference.operators;
+
+import dr.math.distributions.NormalDistribution;
+
+/**
+ * Created by max on 12/3/15.
+ */
+public abstract class AbstractHamiltonianMCOperator extends AbstractCoercableOperator{
+    public AbstractHamiltonianMCOperator(CoercionMode mode, double momentumSd) {
+        super(mode);
+        this.momentumSd=momentumSd;
+    }
+
+    protected double getMomentumSd()
+    {return momentumSd;}
+
+    protected void setMomentumSd(double momentum){
+        momentumSd=momentum;
+    }
+
+    private double momentumSd;
+    protected double[] momentum;
+
+    protected void drawMomentum(int size){
+        momentum=new double[size];
+        for (int i = 0; i <size ; i++) {
+            momentum[i]= (Double) (new NormalDistribution(0.0, momentumSd)).nextRandom();
+        }
+    }
+}
diff --git a/src/dr/inference/operators/AdaptableVarianceMultivariateNormalOperator.java b/src/dr/inference/operators/AdaptableVarianceMultivariateNormalOperator.java
index daf508a..bb3b3a5 100644
--- a/src/dr/inference/operators/AdaptableVarianceMultivariateNormalOperator.java
+++ b/src/dr/inference/operators/AdaptableVarianceMultivariateNormalOperator.java
@@ -66,6 +66,7 @@ public class AdaptableVarianceMultivariateNormalOperator extends AbstractCoercab
     private int iterations, updates, initial, burnin, every;
     private final Parameter parameter;
     private final Transform[] transformations;
+    private final int[] transformationSizes;
     private final int dim;
     // private final double constantFactor;
     private double[] oldMeans, newMeans;
@@ -78,13 +79,14 @@ public class AdaptableVarianceMultivariateNormalOperator extends AbstractCoercab
     private double[] epsilon;
     private double[][] proposal;
 
-    public AdaptableVarianceMultivariateNormalOperator(Parameter parameter, Transform[] transformations, double scaleFactor, double[][] inMatrix,
+    public AdaptableVarianceMultivariateNormalOperator(Parameter parameter, Transform[] transformations, int[] transformationSizes, double scaleFactor, double[][] inMatrix,
             double weight, double beta, int initial, int burnin, int every, CoercionMode mode, boolean isVarianceMatrix) {
 
         super(mode);
         this.scaleFactor = scaleFactor;
         this.parameter = parameter;
         this.transformations = transformations;
+        this.transformationSizes = transformationSizes;
         this.beta = beta;
         this.iterations = 0;
         this.updates = 0;
@@ -127,9 +129,9 @@ public class AdaptableVarianceMultivariateNormalOperator extends AbstractCoercab
         }
     }
 
-    public AdaptableVarianceMultivariateNormalOperator(Parameter parameter, Transform[] transformations, double scaleFactor,
+    public AdaptableVarianceMultivariateNormalOperator(Parameter parameter, Transform[] transformations, int[] transformationSizes, double scaleFactor,
             MatrixParameter varMatrix, double weight, double beta, int initial, int burnin, int every, CoercionMode mode, boolean isVariance) {
-        this(parameter, transformations, scaleFactor, varMatrix.getParameterAsMatrix(), weight, beta, initial, burnin, every, mode, isVariance);
+        this(parameter, transformations, transformationSizes, scaleFactor, varMatrix.getParameterAsMatrix(), weight, beta, initial, burnin, every, mode, isVariance);
     }
 
     private double[][] formXtXInverse(double[][] X) {
@@ -187,8 +189,26 @@ public class AdaptableVarianceMultivariateNormalOperator extends AbstractCoercab
 
         //transform to the appropriate scale
         double[] transformedX = new double[dim];
-        for (int i = 0; i < dim; i++) {
+        /*for (int i = 0; i < dim; i++) {
             transformedX[i] = transformations[i].transform(x[i]);
+        }*/
+        //iterate over transformation sizes rather than number of parameters
+        //as a transformation might impact multiple parameters
+        int currentIndex = 0;
+        for (int i = 0; i < transformationSizes.length; i++) {
+            if (DEBUG) {
+                System.err.println("currentIndex = " + currentIndex);
+                System.err.println("transformationSizes[i] = " + transformationSizes[i]);
+            }
+            if (transformationSizes[i] > 1) {
+                System.arraycopy(transformations[i].transform(x, currentIndex, currentIndex + transformationSizes[i] - 1),0,transformedX,currentIndex,transformationSizes[i]);
+            } else {
+                transformedX[currentIndex] = transformations[i].transform(x[currentIndex]);
+                if (DEBUG) {
+                    System.err.println("x[" + currentIndex + "] = " + x[currentIndex] + " -> " + transformedX[currentIndex]);
+                }
+            }
+            currentIndex += transformationSizes[i];
         }
 
         if (DEBUG) {
@@ -281,7 +301,7 @@ public class AdaptableVarianceMultivariateNormalOperator extends AbstractCoercab
         } else if (iterations == 1) {
 
             if (DEBUG) {
-                System.err.println("  iterations == 1");
+                System.err.println("\niterations == 1");
             }
             //System.err.println("Iteration: " + iterations);
 
@@ -342,7 +362,7 @@ public class AdaptableVarianceMultivariateNormalOperator extends AbstractCoercab
             System.err.println("  Drawing new values");
         }
 
-        for (int i = 0; i < dim; i++) {
+        /*for (int i = 0; i < dim; i++) {
             for (int j = i; j < dim; j++) {
                 transformedX[i] += cholesky[j][i] * epsilon[j];
                 // caution: decomposition returns lower triangular
@@ -350,19 +370,75 @@ public class AdaptableVarianceMultivariateNormalOperator extends AbstractCoercab
             if (MULTI) {
                 parameter.setParameterValueQuietly(i, transformations[i].inverse(transformedX[i]));
             } else {
-                parameter.setParameterValue(i, transformations[i].inverse(transformedX[i]));
+                if (transformationSizes[i] > 1) {
+                    throw new RuntimeException("Transformations on more than 1 parameter value should be set quietly");
+                } else {
+                    parameter.setParameterValue(i, transformations[i].inverse(transformedX[i]));
+                }
             }
-
             //this should be correct
             //logJacobian += transformations[i].getLogJacobian(parameter.getParameterValue(i)) - transformations[i].getLogJacobian(x[i]);
             logJacobian += transformations[i].getLogJacobian(x[i]) - transformations[i].getLogJacobian(parameter.getParameterValue(i));
+        }*/
+
+        for (int i = 0; i < dim; i++) {
+            for (int j = i; j < dim; j++) {
+                transformedX[i] += cholesky[j][i] * epsilon[j];
+                // caution: decomposition returns lower triangular
+            }
+        }
 
+        if (DEBUG) {
+            System.err.println("\nTransformed X values:");
+            for (int i = 0; i < dim; i++) {
+                System.err.println(transformedX[i]);
+            }
+            System.err.println();
+        }
+
+        //iterate over transformation sizes rather than number of parameters
+        //as a transformation might impact multiple parameters
+        currentIndex = 0;
+        for (int i = 0; i < transformationSizes.length; i++) {
+            if (DEBUG) {
+                System.err.println("currentIndex = " + currentIndex);
+                System.err.println("transformationSizes[i] = " + transformationSizes[i]);
+            }
+            if (MULTI) {
+                if (transformationSizes[i] > 1) {
+                    double[] temp = transformations[i].inverse(transformedX, currentIndex, currentIndex + transformationSizes[i] - 1);
+                    for (int k = 0; k < temp.length; k++) {
+                        parameter.setParameterValueQuietly(currentIndex + k, temp[k]);
+                    }
+                    logJacobian += transformations[i].getLogJacobian(x, currentIndex, currentIndex + transformationSizes[i] - 1) - transformations[i].getLogJacobian(temp, 0, transformationSizes[i] - 1);
+                } else {
+                    parameter.setParameterValueQuietly(currentIndex, transformations[i].inverse(transformedX[currentIndex]));
+                    logJacobian += transformations[i].getLogJacobian(x[currentIndex]) - transformations[i].getLogJacobian(parameter.getParameterValue(currentIndex));
+                }
+                if (DEBUG) {
+                    System.err.println("Current logJacobian = " + logJacobian);
+                }
+            } else {
+                if (transformationSizes[i] > 1) {
+                    //TODO: figure out if this is really a problem ...
+                    throw new RuntimeException("Transformations on more than 1 parameter value should be set quietly");
+                } else {
+                    parameter.setParameterValue(currentIndex, transformations[i].inverse(transformedX[currentIndex]));
+                    logJacobian += transformations[i].getLogJacobian(x[currentIndex]) - transformations[i].getLogJacobian(parameter.getParameterValue(currentIndex));
+                }
+                if (DEBUG) {
+                    System.err.println("Current logJacobian = " + logJacobian);
+                }
+            }
+            currentIndex += transformationSizes[i];
         }
 
         if (DEBUG) {
+            System.err.println("Proposed parameter values:");
             for (int i = 0; i < dim; i++) {
                 System.err.println(x[i] + " -> " + parameter.getValue(i));
             }
+            System.err.println("LogJacobian: " + logJacobian);
         }
 
         if (MULTI) {
@@ -525,7 +601,7 @@ public class AdaptableVarianceMultivariateNormalOperator extends AbstractCoercab
         public Object parseXMLObject(XMLObject xo) throws XMLParseException {
 
             if (DEBUG) {
-                System.err.println("Parsing AdaptableVarianceMultivariateNormalOperator.");
+                System.err.println("\nParsing AdaptableVarianceMultivariateNormalOperator.");
             }
 
             CoercionMode mode = CoercionMode.parseMode(xo);
@@ -586,31 +662,75 @@ public class AdaptableVarianceMultivariateNormalOperator extends AbstractCoercab
             	System.err.println();
             }*/
 
+            int[] transformationSizes = new int[dim];
+            int transformationSizeCounter = 0;
+
             Transform[] transformations = new Transform[dim];
             for (int i = 0; i < dim; i++) {
                 transformations[i] = Transform.NONE;
             }
 
+            if (DEBUG) {
+                System.err.println("Transformations & transformation sizes:");
+            }
+
+            //TODO: add LOG_CONSTRAINED_SUM transformation to transformations array!
             for (int i = 0; i < xo.getChildCount(); i++) {
                 Object child = xo.getChild(i);
                 if (child instanceof Transform.ParsedTransform) {
                     Transform.ParsedTransform thisObject = (Transform.ParsedTransform) child;
 
                     if (DEBUG) {
-                        System.err.println("Transformations:");
+                        System.err.println(thisObject.transform.getTransformName());
                     }
-                    for (int j = thisObject.start; j < thisObject.end; ++j) {
-                        transformations[j] = thisObject.transform;
+                    if (thisObject.transform.equals(Transform.LOG_CONSTRAINED_SUM)) {
+                        transformations[transformationSizeCounter] = thisObject.transform;
+                        transformationSizes[transformationSizeCounter] = thisObject.end - thisObject.start;
                         if (DEBUG) {
-                            System.err.print(transformations[j].getTransformName() + " ");
+                            System.err.println("Transformation size = " + transformationSizes[transformationSizeCounter]);
+                        }
+                        transformationSizeCounter++;
+                    } else {
+                        for (int j = thisObject.start; j < thisObject.end; ++j) {
+                            transformations[transformationSizeCounter] = thisObject.transform;
+                            transformationSizes[transformationSizeCounter] = 1;
+                            if (DEBUG) {
+                                System.err.println("Transformation size = " + transformationSizes[transformationSizeCounter]);
+                            }
+                            transformationSizeCounter++;
                         }
-                    }
-                    if (DEBUG) {
-                        System.err.println();
                     }
                 }
             }
 
+            //determine array length for transformationSizes = transformationSizeCounter - 1;
+            if (DEBUG) {
+                System.err.println("\nCleaning up transformation and size arrays");
+                System.err.println("transformationSizeCounter = " + transformationSizeCounter);
+            }
+            int temp[] = new int[transformationSizeCounter];
+            Transform tempTransform[] = new Transform[transformationSizeCounter];
+            for (int i = 0; i < temp.length; i++) {
+                temp[i] = transformationSizes[i];
+                tempTransform[i] = transformations[i];
+                if (transformationSizes[i] == 0 || temp[i] == 0) {
+                    throw new XMLParseException("Transformation size 0 encountered");
+                }
+            }
+            transformationSizes = temp;
+            transformations = tempTransform;
+            if (DEBUG) {
+                System.err.println("\nChecking transformation array contents");
+                for (int i = 0; i < transformations.length; i++) {
+                    System.err.println(transformations[i].getTransformName());
+                }
+                System.err.println("\nChecking size array contents");
+                for (int i = 0; i < transformationSizes.length; i++) {
+                    System.err.print(transformationSizes[i] + " ");
+                }
+                System.err.println();
+            }
+
             // Make sure varMatrix is square and dim(varMatrix) = dim(parameter)
 
             if (!formXtXInverse) {
@@ -624,9 +744,9 @@ public class AdaptableVarianceMultivariateNormalOperator extends AbstractCoercab
             /*java.util.logging.Logger.getLogger("dr.inference").info("\nCreating the adaptable variance multivariate normal operator:" +
 					"\n beta = " + beta + "\n initial = " + initial + "\n burnin = " + burnin + "\n every = " + every +
 					"\n If you use this operator, please cite: " + 
-			"   Guy Baele, Philippe Lemey, Marc A. Suchard. 2014. In preparation.");*/
+			"   Guy Baele, Philippe Lemey, Marc A. Suchard. 2016. In preparation.");*/
 
-            return new AdaptableVarianceMultivariateNormalOperator(parameter, transformations, scaleFactor, varMatrix, weight, beta, initial, burnin, every, mode, !formXtXInverse);
+            return new AdaptableVarianceMultivariateNormalOperator(parameter, transformations, transformationSizes, scaleFactor, varMatrix, weight, beta, initial, burnin, every, mode, !formXtXInverse);
         }
 
         //************************************************************************
diff --git a/src/dr/inference/operators/EllipticalSliceOperator.java b/src/dr/inference/operators/EllipticalSliceOperator.java
index 590d5fa..32ce920 100644
--- a/src/dr/inference/operators/EllipticalSliceOperator.java
+++ b/src/dr/inference/operators/EllipticalSliceOperator.java
@@ -33,6 +33,7 @@ import dr.inference.model.*;
 import dr.inference.prior.Prior;
 import dr.inferencexml.operators.EllipticalSliceOperatorParser;
 import dr.math.MathUtils;
+import dr.math.distributions.CompoundGaussianProcess;
 import dr.math.distributions.GaussianProcessRandomGenerator;
 import dr.math.distributions.MultivariateNormalDistribution;
 import dr.util.Attribute;
@@ -76,7 +77,17 @@ public class EllipticalSliceOperator extends SimpleMetropolizedGibbsOperator imp
             throw new IllegalArgumentException("Invalid bracket angle");
         }
 
-        // TODO Must set priorMean if guassianProcess does not have a 0-mean.
+        // Check dimensions of variable and gaussianProcess
+        int dimVariable = variable.getDimension();
+        double[] draw = (double[]) gaussianProcess.nextRandom();
+        int dimDraw = draw.length;
+
+        if (dimVariable != dimDraw) {
+            throw new IllegalArgumentException("Dimension of variable (" + dimVariable +
+                    ") does not match dimension of Gaussian process draw (" + dimDraw + ")" );
+        }
+
+        // TODO Must set priorMean if gaussianProcess does not have a 0-mean.
     }
 
     public Variable<Double> getVariable() {
@@ -89,13 +100,72 @@ public class EllipticalSliceOperator extends SimpleMetropolizedGibbsOperator imp
                 gaussianProcess.getLikelihood().getLogLikelihood();
     }
 
-    public double doOperation(Prior prior, Likelihood likelihood) throws OperatorFailedException {
+    private void unwindCompoundLikelihood(Likelihood likelihood, List<Likelihood> list) {
+        if (likelihood instanceof CompoundLikelihood) {
+            for (Likelihood like : ((CompoundLikelihood) likelihood).getLikelihoods()) {
+                unwindCompoundLikelihood(like, list);
+            }
+        } else {
+            list.add(likelihood);
+        }
+    }
+
+    private List<Likelihood> unwindCompoundLikelihood(Likelihood likelihood) {
+        List<Likelihood> list = new ArrayList<Likelihood>();
+        unwindCompoundLikelihood(likelihood, list);
+        return list;
+    }
+
+    private boolean containsGaussianProcess(Likelihood likelihood) {
+        if (gaussianProcess instanceof CompoundGaussianProcess) {
+            return ((CompoundGaussianProcess) gaussianProcess).contains(likelihood);
+        } else {
+            return gaussianProcess == likelihood;
+        }
+    }
+
+    private double evaluateDensity(Prior prior, Likelihood likelihood, double pathParameter) {
         double logPosterior = evaluate(likelihood, prior, pathParameter);
-        double logGaussianPrior = getLogGaussianPrior();
+        double logGaussianPrior = getLogGaussianPrior() * pathParameter;
+
+        return logPosterior - logGaussianPrior;
+    }
+
+    public double doOperation(Prior prior, Likelihood likelihood) throws OperatorFailedException {
 
-        // Cut-off depends only on non-GP contribution to posterior
-        double cutoffDensity = logPosterior - logGaussianPrior + MathUtils.randomLogDouble();
-        drawFromSlice(prior, likelihood, cutoffDensity);
+//        System.err.println("Likelihood type:" + likelihood.getClass().getName());
+
+        if (MINIMAL_EVALUATION) {
+
+            List<Likelihood> fullList = unwindCompoundLikelihood(likelihood);
+
+//            List<Likelihood> removeList = new ArrayList<Likelihood>();
+
+            List<Likelihood> subList = new ArrayList<Likelihood>();
+            for (Likelihood like : fullList) {
+                if (!containsGaussianProcess(like)) {
+                    subList.add(like);
+                } //else {
+//                    removeList.add(like);
+//                }
+            }
+            CompoundLikelihood cl = new CompoundLikelihood(subList);
+//            CompoundLikelihood removeCl = new CompoundLikelihood(removeList);
+//            CompoundLikelihood fullCl = new CompoundLikelihood(fullList);
+
+            double logDensity = cl.getLogLikelihood();
+            double cutoffDensity = logDensity + MathUtils.randomLogDouble();
+            drawFromSlice(cl, cutoffDensity);
+
+        } else {
+
+            double logPosterior = evaluate(likelihood, prior, pathParameter);
+            double logGaussianPrior = getLogGaussianPrior() * pathParameter;
+
+            // Cut-off depends only on non-GP contribution to posterior
+            double cutoffDensity = logPosterior - logGaussianPrior + MathUtils.randomLogDouble();
+            drawFromSlice(prior, likelihood, cutoffDensity);
+        }
 
         // No need to set variable, as SliceInterval has already done this (and recomputed posterior)
         return 0;
@@ -120,8 +190,7 @@ public class EllipticalSliceOperator extends SimpleMetropolizedGibbsOperator imp
         return r;
     }
 
-    private void setVariable(double[] x) {
-
+    private void transformPoint(double[] x) {
         if (translationInvariant) {
             int dim = 2; // TODO How to determine?
 
@@ -169,14 +238,32 @@ public class EllipticalSliceOperator extends SimpleMetropolizedGibbsOperator imp
 //            System.err.println("");
 //            System.exit(-1);
         }
+    }
 
-//        boolean switchSign = x[0] > 0.0;
-        for (int i = 0; i < x.length; ++i) {
-//            if (switchSign) {
-//                x[i] *= -1;
-//            }
-            variable.setParameterValueQuietly(i, x[i]);
+    private void setAllParameterValues(double[] x) {
+        if (variable instanceof MatrixParameterInterface) {
+            ((MatrixParameterInterface) variable).setAllParameterValuesQuietly(x, 0);
+        } else {
+            for (int i = 0; i < x.length; ++i) {
+                variable.setParameterValueQuietly(i, x[i]);
+            }
         }
+    }
+
+    private void setVariable(double[] x) {
+
+        transformPoint(x);
+
+        setAllParameterValues(x);
+
+////        boolean switchSign = x[0] > 0.0;
+//        for (int i = 0; i < x.length; ++i) {
+////            if (switchSign) {
+////                x[i] *= -1;
+////            }
+//            variable.setParameterValueQuietly(i, x[i]);
+//        }
+
         if (signalConstituentParameters) {
             variable.fireParameterChangedEvent();
         } else {
@@ -263,6 +350,40 @@ public class EllipticalSliceOperator extends SimpleMetropolizedGibbsOperator imp
         }
     }
 
+    private void drawFromSlice(CompoundLikelihood likelihood, double cutoffDensity) {
+        // Do nothing
+        double[] x = variable.getParameterValues();
+        double[] nu = (double[]) gaussianProcess.nextRandom();
+
+        double phi;
+        Interval phiInterval;
+
+        if (bracketAngle == 0.0) {
+            phi = MathUtils.nextDouble() * 2.0 * Math.PI;
+            phiInterval = new Interval(phi - 2.0 * Math.PI, phi);
+        } else {
+            double phi_min = -bracketAngle * MathUtils.nextDouble();
+            double phi_max = phi_min + bracketAngle;
+            phiInterval = new Interval(phi_min, phi_max);
+            phi = phiInterval.draw();
+        }
+
+
+        boolean done = false;
+        while (!done) {
+            double[] xx = pointOnEllipse(x, nu, phi, priorMean);
+            setVariable(xx);
+            double logDensity = likelihood.getLogLikelihood();
+
+            if (logDensity > cutoffDensity) {
+                done = true;
+            } else {
+                phiInterval.adjust(phi);
+                phi = phiInterval.draw();
+            }
+        }
+    }
+
     private class Interval {
         double lower;
         double upper;
@@ -375,8 +496,9 @@ public class EllipticalSliceOperator extends SimpleMetropolizedGibbsOperator imp
         }
     }
 
+    private static final boolean MINIMAL_EVALUATION = true;
 
-    private double pathParameter=1.0;
+    private double pathParameter = 1.0;
     private final Parameter variable;
     private int current;
     private boolean drawByRow;
diff --git a/src/dr/inference/operators/LoadingsGibbsOperator.java b/src/dr/inference/operators/LoadingsGibbsOperator.java
index c8285cf..f3509d2 100644
--- a/src/dr/inference/operators/LoadingsGibbsOperator.java
+++ b/src/dr/inference/operators/LoadingsGibbsOperator.java
@@ -212,7 +212,7 @@ public class LoadingsGibbsOperator extends SimpleMCMCOperator implements GibbsOp
     private void copy(int i, double[] random) {
         TransposedBlockUpperTriangularMatrixParameter changing = (TransposedBlockUpperTriangularMatrixParameter) LFM.getLoadings();
         for (int j = 0; j < random.length; j++) {
-            changing.setParameterValue(i, j, random[j]);
+            changing.setParameterValueQuietly(i, j, random[j]);
         }
     }
 
@@ -312,6 +312,7 @@ public class LoadingsGibbsOperator extends SimpleMCMCOperator implements GibbsOp
                 currentMean = meanArray.listIterator();
             }
             drawI(i, currentPrecision, currentMidMean, currentMean);
+            LFM.getLoadings().fireParameterChangedEvent(i, null);
 //            LFM.getLoadings().fireParameterChangedEvent();
         }
         return 0;
diff --git a/src/dr/inference/operators/OperatorSchedule.java b/src/dr/inference/operators/OperatorSchedule.java
index 04fbc96..4f26f6a 100644
--- a/src/dr/inference/operators/OperatorSchedule.java
+++ b/src/dr/inference/operators/OperatorSchedule.java
@@ -71,11 +71,21 @@ public interface OperatorSchedule extends Serializable {
      */
     int getMinimumAcceptAndRejectCount();
 
-    final int DEFAULT_SCHEDULE = 0;
-    final int LOG_SCHEDULE = 1;
-    final int SQRT_SCHEDULE = 2;
+    public enum OptimizationTransform {
+        DEFAULT("default"),
+        LOG("log"),
+        SQRT("sqrt"),
+        LINEAR("linear");
 
-    final String DEFAULT_STRING = "default";
-    final String LOG_STRING = "log";
-    final String SQRT_STRING = "sqrt";
+        OptimizationTransform(String name) {
+            this.name = name;
+        }
+
+        @Override
+        public String toString() {
+            return name;
+        }
+
+        private final String name;
+    };
 }
diff --git a/src/dr/inference/operators/SimpleOperatorSchedule.java b/src/dr/inference/operators/SimpleOperatorSchedule.java
index ce188c1..5a47fb5 100644
--- a/src/dr/inference/operators/SimpleOperatorSchedule.java
+++ b/src/dr/inference/operators/SimpleOperatorSchedule.java
@@ -46,7 +46,7 @@ public class SimpleOperatorSchedule implements OperatorSchedule, Loggable {
 	double totalWeight = 0;
 	int current = 0;
 	boolean sequential = false;
-	int optimizationSchedule = OperatorSchedule.DEFAULT_SCHEDULE;
+	OptimizationTransform optimizationSchedule = OptimizationTransform.DEFAULT;
 
 	public SimpleOperatorSchedule() {
 		operators = new Vector<MCMCOperator>();
@@ -115,14 +115,17 @@ public class SimpleOperatorSchedule implements OperatorSchedule, Loggable {
 
 	public double getOptimizationTransform(double d) {
         switch( optimizationSchedule ) {
-            case LOG_SCHEDULE:  return Math.log(d);
-            case SQRT_SCHEDULE: return Math.sqrt(d);
+			case DEFAULT:
+            case LOG:  return Math.log(d);
+            case SQRT: return Math.sqrt(d);
+			case LINEAR: return d;
+
+			default: throw new UnsupportedOperationException("Unknown enum value");
         }
-		return d;
 	}
 
-	public void setOptimizationSchedule(int schedule) {
-		optimizationSchedule = schedule;
+	public void setOptimizationSchedule(OptimizationTransform optimizationSchedule) {
+		this.optimizationSchedule = optimizationSchedule;
 	}
 
     public int getMinimumAcceptAndRejectCount() {
diff --git a/src/dr/inferencexml/distribution/CompoundGaussianProcessParser.java b/src/dr/inferencexml/distribution/CompoundGaussianProcessParser.java
new file mode 100644
index 0000000..f648632
--- /dev/null
+++ b/src/dr/inferencexml/distribution/CompoundGaussianProcessParser.java
@@ -0,0 +1,126 @@
+/*
+ * CompoundGaussianProcessParser.java
+ *
+ * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
+ *
+ * This file is part of BEAST.
+ * See the NOTICE file distributed with this work for additional
+ * information regarding copyright ownership and licensing.
+ *
+ * BEAST is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+ *
+ *  BEAST is distributed in the hope that it will be useful,
+ *  but WITHOUT ANY WARRANTY; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ *  GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with BEAST; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA  02110-1301  USA
+ */
+
+package dr.inferencexml.distribution;
+
+import dr.inference.distribution.AbstractDistributionLikelihood;
+import dr.inference.distribution.CachedDistributionLikelihood;
+import dr.inference.distribution.DistributionLikelihood;
+import dr.inference.distribution.MultivariateDistributionLikelihood;
+import dr.inference.model.Likelihood;
+import dr.inference.model.Variable;
+import dr.math.distributions.CompoundGaussianProcess;
+import dr.math.distributions.GaussianProcessRandomGenerator;
+import dr.util.Attribute;
+import dr.xml.*;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.logging.Logger;
+
+/**
+ * @author Marc Suchard
+ */
+public class CompoundGaussianProcessParser extends AbstractXMLObjectParser {
+
+    public static final String NAME = "compoundGaussianProcess";
+
+    public String getParserName() {
+        return NAME;
+    }
+
+    public Object parseXMLObject(XMLObject xo) throws XMLParseException {
+
+        List<GaussianProcessRandomGenerator> gpList = new ArrayList<GaussianProcessRandomGenerator>();
+        List<Likelihood> likelihoodList = new ArrayList<Likelihood>();
+        List<Integer> copyList = new ArrayList<Integer>();
+
+        for (int i = 0; i < xo.getChildCount(); ++i) {
+            Object obj = xo.getChild(i);
+            GaussianProcessRandomGenerator gp = null;
+            Likelihood likelihood = null;
+            int copies = -1;
+            if (obj instanceof DistributionLikelihood) {
+                DistributionLikelihood dl = (DistributionLikelihood) obj;
+                if (!(dl.getDistribution() instanceof GaussianProcessRandomGenerator)) {
+                    throw new XMLParseException("Not a Gaussian process");
+                }
+                likelihood = dl;
+                gp = (GaussianProcessRandomGenerator) dl.getDistribution();
+                copies = 0;
+                for (Attribute<double[]> datum : dl.getDataList()) {
+//                    Double draw = (Double) gp.nextRandom();
+//                    System.err.println("DL: " + datum.getAttributeName() + " " + datum.getAttributeValue().length + " " + "1");
+                    copies += datum.getAttributeValue().length;
+                }
+            } else if (obj instanceof MultivariateDistributionLikelihood) {
+                MultivariateDistributionLikelihood mdl = (MultivariateDistributionLikelihood) obj;
+                if (!(mdl.getDistribution() instanceof GaussianProcessRandomGenerator)) {
+                    throw new XMLParseException("Not a Gaussian process");
+                }
+                likelihood = mdl;
+                gp = (GaussianProcessRandomGenerator) mdl.getDistribution();
+                copies = 0;
+                double[] draw = (double[]) gp.nextRandom();
+                for (Attribute<double[]> datum : mdl.getDataList()) {
+//                    System.err.println("ML: " + datum.getAttributeName() + " " + datum.getAttributeValue().length + " " + draw.length);
+                    copies += datum.getAttributeValue().length / draw.length;
+                }
+            } else if (obj instanceof GaussianProcessRandomGenerator) {
+                gp = (GaussianProcessRandomGenerator) obj;
+                likelihood = gp.getLikelihood();
+                copies = 1;
+            } else {
+                throw new XMLParseException("Not a Gaussian process");
+            }
+            gpList.add(gp);
+            likelihoodList.add(likelihood);
+            copyList.add(copies);
+        }
+
+//        System.exit(-1);
+        return new CompoundGaussianProcess(gpList, likelihoodList, copyList);
+    }
+
+    //************************************************************************
+    // AbstractXMLObjectParser implementation
+    //************************************************************************
+
+    public XMLSyntaxRule[] getSyntaxRules() {
+        return rules;
+    }
+
+    private final XMLSyntaxRule[] rules = {
+            new ElementRule(GaussianProcessRandomGenerator.class, 1, Integer.MAX_VALUE),
+    };
+
+    public String getParserDescription() {
+        return "Returned a Gaussian process formed from an ordered list of independent Gaussian processes";
+    }
+
+    public Class getReturnType() {
+        return GaussianProcessRandomGenerator.class;
+    }
+}
diff --git a/src/dr/inferencexml/distribution/DistributionLikelihoodParser.java b/src/dr/inferencexml/distribution/DistributionLikelihoodParser.java
index 1317cdf..4c1a477 100644
--- a/src/dr/inferencexml/distribution/DistributionLikelihoodParser.java
+++ b/src/dr/inferencexml/distribution/DistributionLikelihoodParser.java
@@ -28,6 +28,7 @@ package dr.inferencexml.distribution;
 import dr.inference.distribution.DistributionLikelihood;
 import dr.inference.distribution.ParametricDistributionModel;
 import dr.inference.model.Statistic;
+import dr.math.distributions.RandomGenerator;
 import dr.xml.*;
 
 /**
diff --git a/src/dr/inferencexml/distribution/DistributionModelParser.java b/src/dr/inferencexml/distribution/DistributionModelParser.java
index 99eb425..848a589 100644
--- a/src/dr/inferencexml/distribution/DistributionModelParser.java
+++ b/src/dr/inferencexml/distribution/DistributionModelParser.java
@@ -37,6 +37,7 @@ public abstract class DistributionModelParser extends AbstractXMLObjectParser {
     public static final String MEAN = "mean";
     public static final String SHAPE = "shape";
     public static final String SCALE = "scale";
+    public static final String RATE = "rate";
 
     /**
      * @param parameters an array of the parsed parameters, in order of the getParameterNames() array.
diff --git a/src/dr/inferencexml/distribution/GammaDistributionModelParser.java b/src/dr/inferencexml/distribution/GammaDistributionModelParser.java
index 9f0bfe9..e44e431 100644
--- a/src/dr/inferencexml/distribution/GammaDistributionModelParser.java
+++ b/src/dr/inferencexml/distribution/GammaDistributionModelParser.java
@@ -26,31 +26,68 @@
 package dr.inferencexml.distribution;
 
 import dr.inference.distribution.GammaDistributionModel;
-import dr.inference.distribution.ParametricDistributionModel;
 import dr.inference.model.Parameter;
+import dr.inference.model.Statistic;
+import dr.xml.*;
 
-/**
- */
-public class GammaDistributionModelParser extends DistributionModelParser {
+public class GammaDistributionModelParser extends AbstractXMLObjectParser {
+
+    public static final String MEAN = "mean";
+    public static final String SHAPE = "shape";
+    public static final String SCALE = "scale";
+    public static final String RATE = "rate";
+    public static final String OFFSET = "offset";
 
     public String getParserName() {
         return GammaDistributionModel.GAMMA_DISTRIBUTION_MODEL;
     }
 
-    ParametricDistributionModel parseDistributionModel(Parameter[] parameters, double offset) {
-        return new GammaDistributionModel(parameters[0], parameters[1]);
-    }
+    public Object parseXMLObject(XMLObject xo) throws XMLParseException {
+
+        double offset = xo.getAttribute(OFFSET, 0.0);
+
+        Parameter shapeParameter = (Parameter) xo.getElementFirstChild(SHAPE);
+
+        Parameter parameter2;
+        GammaDistributionModel.GammaParameterizationType parameterization;
 
-    public String[] getParameterNames() {
-        return new String[]{SHAPE, SCALE};
+        if (xo.hasChildNamed(SCALE)) {
+            parameter2 = (Parameter)xo.getElementFirstChild(SCALE);
+            parameterization = GammaDistributionModel.GammaParameterizationType.ShapeScale;
+        } else if (xo.hasChildNamed(RATE)) {
+            parameter2 = (Parameter)xo.getElementFirstChild(RATE);
+            parameterization = GammaDistributionModel.GammaParameterizationType.ShapeRate;
+        } else if (xo.hasChildNamed(MEAN)) {
+            parameter2 = (Parameter)xo.getElementFirstChild(MEAN);
+            parameterization = GammaDistributionModel.GammaParameterizationType.ShapeMean;
+        } else {
+            parameter2 = null;
+            parameterization = GammaDistributionModel.GammaParameterizationType.OneParameter;
+        }
+
+        return new GammaDistributionModel(parameterization, shapeParameter, parameter2, offset);
     }
 
-    public String getParserDescription() {
-        return "A model of a gamma distribution.";
+    //************************************************************************
+    // AbstractXMLObjectParser implementation
+    //************************************************************************
+
+    public XMLSyntaxRule[] getSyntaxRules() {
+        return rules;
     }
 
-    public boolean allowOffset() {
-        return false;
+    private final XMLSyntaxRule[] rules = {
+            new ElementRule(SHAPE,
+                    new XMLSyntaxRule[]{new ElementRule(Parameter.class)}, "Shape parameter"),
+            new XORRule( new ElementRule[] {
+                    new ElementRule(SCALE,  new XMLSyntaxRule[]{new ElementRule(Parameter.class)}, "Scale parameter"),
+                    new ElementRule(RATE,  new XMLSyntaxRule[]{new ElementRule(Parameter.class)}, "Rate parameter"),
+                    new ElementRule(MEAN,  new XMLSyntaxRule[]{new ElementRule(Parameter.class)}, "Mean parameter") }, true),
+            AttributeRule.newDoubleRule(OFFSET, true)
+    };
+
+    public String getParserDescription() {
+        return "The gamma probability distribution.";
     }
 
     public Class getReturnType() {
diff --git a/src/dr/inferencexml/distribution/GeneralizedLinearModelParser.java b/src/dr/inferencexml/distribution/GeneralizedLinearModelParser.java
index 5acffe3..054c9c8 100644
--- a/src/dr/inferencexml/distribution/GeneralizedLinearModelParser.java
+++ b/src/dr/inferencexml/distribution/GeneralizedLinearModelParser.java
@@ -34,6 +34,7 @@ import dr.inference.distribution.LogisticRegression;
 import dr.inference.model.DesignMatrix;
 import dr.inference.model.Likelihood;
 import dr.inference.model.Parameter;
+import dr.math.matrixAlgebra.Matrix;
 import dr.xml.*;
 
 /**
@@ -56,6 +57,7 @@ public class GeneralizedLinearModelParser extends AbstractXMLObjectParser {
 //    public static final String LOG_TRANSFORM = "logDependentTransform";
     public static final String RANDOM_EFFECTS = "randomEffects";
     public static final String CHECK_IDENTIFIABILITY = "checkIdentifiability";
+    public static final String CHECK_FULL_RANK = "checkFullRank";
 
     public String getParserName() {
         return GLM_LIKELIHOOD;
@@ -63,6 +65,8 @@ public class GeneralizedLinearModelParser extends AbstractXMLObjectParser {
 
     public Object parseXMLObject(XMLObject xo) throws XMLParseException {
 
+
+        System.err.println("PASSED 0");
         XMLObject cxo = xo.getChild(DEPENDENT_VARIABLES);
         Parameter dependentParam = null;
         if (cxo != null)
@@ -99,7 +103,7 @@ public class GeneralizedLinearModelParser extends AbstractXMLObjectParser {
                 scaleDesign = new Parameter.Default(dependentParam.getDimension(), 0.0);
             else {
                 if (scaleDesign.getDimension() != dependentParam.getDimension())
-                    throw new XMLParseException("Scale and scaleDesign parameters must be the same dimension");
+                    throw new XMLParseException("Scale ("+dependentParam.getDimension()+") and scaleDesign parameters ("+scaleDesign.getDimension()+") must be the same dimension");
                 for (int i = 0; i < scaleDesign.getDimension(); i++) {
                     double value = scaleDesign.getParameterValue(i);
                     if (value < 1 || value > scaleParameter.getDimension())
@@ -110,9 +114,11 @@ public class GeneralizedLinearModelParser extends AbstractXMLObjectParser {
 
             glm.addScaleParameter(scaleParameter, scaleDesign);
         }
-
+        System.err.println("START 0");
         addIndependentParameters(xo, glm, dependentParam);
+        System.err.println("START 1");
         addRandomEffects(xo, glm, dependentParam);
+        System.err.println("START 2");
 
         boolean checkIdentifiability = xo.getAttribute(CHECK_IDENTIFIABILITY, true);
         if (checkIdentifiability) {
@@ -120,7 +126,9 @@ public class GeneralizedLinearModelParser extends AbstractXMLObjectParser {
                 throw new XMLParseException("All design matrix predictors are not identifiable in "+  xo.getId());
             }
         }
-
+        System.err.println("PASSED B");
+        checkFullRankOfMatrix = xo.getAttribute(CHECK_FULL_RANK,true);
+        System.err.println("PASSED C");
         return glm;
     }
 
@@ -141,6 +149,7 @@ public class GeneralizedLinearModelParser extends AbstractXMLObjectParser {
     public void addIndependentParameters(XMLObject xo, GeneralizedLinearModel glm,
                                          Parameter dependentParam) throws XMLParseException {
         int totalCount = xo.getChildCount();
+//        System.err.println("number of independent parameters = "+totalCount);
 
         for (int i = 0; i < totalCount; i++) {
             if (xo.getChildName(i).compareTo(INDEPENDENT_VARIABLES) == 0) {
@@ -155,15 +164,26 @@ public class GeneralizedLinearModelParser extends AbstractXMLObjectParser {
                     if (indicator.getDimension() != independentParam.getDimension())
                         throw new XMLParseException("dim(" + independentParam.getId() + ") != dim(" + indicator.getId() + ")");
                 }
-                checkFullRank(designMatrix);
+//                System.err.println("A");
+                if (checkFullRankOfMatrix) {
+                    checkFullRank(designMatrix);
+                }
+//                System.err.println("B");
+
+//                System.err.println(new Matrix(designMatrix.getParameterAsMatrix()));
+//                System.exit(-1);
+
                 glm.addIndependentParameter(independentParam, designMatrix, indicator);
+//                System.err.println("C");
             }
         }
     }
 
+    private boolean checkFullRankOfMatrix;
+
     private void checkFullRank(DesignMatrix designMatrix) throws XMLParseException {
         int fullRank = designMatrix.getColumnDimension();
-
+//        System.err.println("designMatrix getColumnDimension = "+fullRank);
         SingularValueDecomposition svd = new SingularValueDecomposition(
                 new DenseDoubleMatrix2D(designMatrix.getParameterAsMatrix()));
         int realRank = svd.rank();
@@ -200,6 +220,7 @@ public class GeneralizedLinearModelParser extends AbstractXMLObjectParser {
                         "dim(" + independentParam.getId() + ") is incompatible with dim (" + designMatrix.getId() + ")"
                 );
             }
+//            System.err.println(independentParam.getId()+" and "+designMatrix.getId());
         }
     }
 
@@ -214,6 +235,7 @@ public class GeneralizedLinearModelParser extends AbstractXMLObjectParser {
     private final XMLSyntaxRule[] rules = {
             AttributeRule.newStringRule(FAMILY),
             AttributeRule.newBooleanRule(CHECK_IDENTIFIABILITY, true),
+            AttributeRule.newBooleanRule(CHECK_FULL_RANK, true),
             new ElementRule(DEPENDENT_VARIABLES,
                     new XMLSyntaxRule[]{new ElementRule(Parameter.class)}, true),
             new ElementRule(INDEPENDENT_VARIABLES,
diff --git a/src/dr/inferencexml/distribution/PriorParsers.java b/src/dr/inferencexml/distribution/PriorParsers.java
index 50caec8..6929f67 100644
--- a/src/dr/inferencexml/distribution/PriorParsers.java
+++ b/src/dr/inferencexml/distribution/PriorParsers.java
@@ -1,7 +1,7 @@
 /*
  * PriorParsers.java
  *
- * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
+ * Copyright (c) 2002-2016 Alexei Drummond, Andrew Rambaut and Marc Suchard
  *
  * This file is part of BEAST.
  * See the NOTICE file distributed with this work for additional
@@ -26,6 +26,7 @@
 package dr.inferencexml.distribution;
 
 import dr.inference.distribution.DistributionLikelihood;
+import dr.inference.distribution.MultivariateDistributionLikelihood;
 import dr.inference.model.Likelihood;
 import dr.inference.model.Statistic;
 import dr.math.distributions.*;
@@ -60,6 +61,9 @@ public class PriorParsers {
     public static final String OFFSET = "offset";
     public static final String UNINFORMATIVE = "uninformative";
     public static final String HALF_T_PRIOR = "halfTPrior";
+    public static final String DIRICHLET_PRIOR = "dirichletPrior";
+    public static final String COUNTS = "counts";
+
 
     /**
      * A special parser that reads a convenient short form of priors on parameters.
@@ -565,7 +569,7 @@ public class PriorParsers {
 
             final double shape = xo.getDoubleAttribute(SHAPE);
             final double scale = xo.getDoubleAttribute(SCALE);
-            final double offset = xo.getDoubleAttribute(OFFSET);
+            final double offset = xo.getAttribute(OFFSET, 0.0);
 
             DistributionLikelihood likelihood = new DistributionLikelihood(new InverseGammaDistribution(shape, scale), offset);
 
@@ -587,7 +591,7 @@ public class PriorParsers {
         private final XMLSyntaxRule[] rules = {
                 AttributeRule.newDoubleRule(SHAPE),
                 AttributeRule.newDoubleRule(SCALE),
-                AttributeRule.newDoubleRule(OFFSET),
+                AttributeRule.newDoubleRule(OFFSET, true),
                 new ElementRule(Statistic.class, 1, Integer.MAX_VALUE)
         };
 
@@ -690,4 +694,48 @@ public class PriorParsers {
         }
     };
 
+    /**
+     * A special parser that reads a convenient short form of priors on parameters.
+     */
+    public static XMLObjectParser DIRICHLET_PRIOR_PARSER = new AbstractXMLObjectParser() {
+
+        public String getParserName() {
+            return DIRICHLET_PRIOR;
+        }
+
+        public Object parseXMLObject(XMLObject xo) throws XMLParseException {
+
+            double[] counts = xo.getDoubleArrayAttribute(COUNTS);
+
+            MultivariateDistributionLikelihood likelihood = new MultivariateDistributionLikelihood(new DirichletDistribution(counts, false));
+            for (int j = 0; j < xo.getChildCount(); j++) {
+                if (xo.getChild(j) instanceof Statistic) {
+                    likelihood.addData((Statistic) xo.getChild(j));
+                } else {
+                    throw new XMLParseException("illegal element in " + xo.getName() + " element");
+                }
+            }
+
+            return likelihood;
+        }
+
+        public XMLSyntaxRule[] getSyntaxRules() {
+            return rules;
+        }
+
+        private final XMLSyntaxRule[] rules = {
+                AttributeRule.newDoubleArrayRule(COUNTS),
+                new ElementRule(Statistic.class, 1, Integer.MAX_VALUE)
+        };
+
+        public String getParserDescription() {
+            return "Calculates the prior probability of some data under a Dirichlet distribution.";
+        }
+
+        public Class getReturnType() {
+            return MultivariateDistributionLikelihood.class;
+        }
+    };
+
+
 }
diff --git a/src/dr/inferencexml/distribution/WorkingPriorParsers.java b/src/dr/inferencexml/distribution/WorkingPriorParsers.java
index 8ad6de3..6204e2f 100644
--- a/src/dr/inferencexml/distribution/WorkingPriorParsers.java
+++ b/src/dr/inferencexml/distribution/WorkingPriorParsers.java
@@ -46,9 +46,13 @@ public class WorkingPriorParsers {
     public final static boolean DEBUG = true;
 
     public static final String NORMAL_REFERENCE_PRIOR = "normalReferencePrior";
+    public static final String NORMAL_WORKING_PRIOR = "normalWorkingPrior";
     public static final String LOG_TRANSFORMED_NORMAL_REFERENCE_PRIOR = "logTransformedNormalReferencePrior";
+    public static final String LOG_TRANSFORMED_NORMAL_WORKING_PRIOR = "logTransformedNormalWorkingPrior";
     public static final String LOGIT_TRANSFORMED_NORMAL_REFERENCE_PRIOR = "logitTransformedNormalReferencePrior";
+    public static final String LOGIT_TRANSFORMED_NORMAL_WORKING_PRIOR = "logitTransformedNormalWorkingPrior";
     public static final String GAMMA_REFERENCE_PRIOR = "gammaReferencePrior";
+    public static final String GAMMA_WORKING_PRIOR = "gammaWorkingPrior";
     public static final String PARAMETER_COLUMN = "parameterColumn";
     public static final String DIMENSION = "dimension";
 
@@ -61,6 +65,10 @@ public class WorkingPriorParsers {
             return GAMMA_REFERENCE_PRIOR;
         }
 
+        public String[] getParserNames() {
+            return new String[]{getParserName(), GAMMA_WORKING_PRIOR};
+        }
+
         public Object parseXMLObject(XMLObject xo) throws XMLParseException {
 
             String fileName = xo.getStringAttribute(FileHelpers.FILE_NAME);
@@ -78,6 +86,14 @@ public class WorkingPriorParsers {
 
                 String parameterName = xo.getStringAttribute(PARAMETER_COLUMN);
 
+                int dimension = 1;
+                if (xo.hasAttribute(DIMENSION)) {
+                    dimension = xo.getIntegerAttribute(DIMENSION);
+                }
+                if (dimension <= 0) {
+                    throw new XMLParseException("Column '" + parameterName + "' has dimension smaller than 1.");
+                }
+
                 LogFileTraces traces = new LogFileTraces(fileName, file);
                 traces.loadTraces();
                 long maxState = traces.getMaxState();
@@ -90,30 +106,77 @@ public class WorkingPriorParsers {
                 }
                 traces.setBurnIn(burnin);
 
-                int traceIndexParameter = -1;
-                for (int i = 0; i < traces.getTraceCount(); i++) {
-                    String traceName = traces.getTraceName(i);
-                    if (traceName.trim().equals(parameterName)) {
-                        traceIndexParameter = i;
+                if (dimension == 1) {
+
+                    int traceIndexParameter = -1;
+                    for (int i = 0; i < traces.getTraceCount(); i++) {
+                        String traceName = traces.getTraceName(i);
+                        if (traceName.trim().equals(parameterName)) {
+                            traceIndexParameter = i;
+                        }
                     }
-                }
 
-                if (traceIndexParameter == -1) {
-                    throw new XMLParseException("Column '" + parameterName + "' can not be found for " + getParserName() + " element.");
-                }
+                    if (traceIndexParameter == -1) {
+                        throw new XMLParseException("GammaKDEDistribution: Column '" + parameterName + "' can not be found for " + getParserName() + " element.");
+                    }
 
-                Double[] parameterSamples = new Double[traces.getStateCount()];
+                    Double[] parameterSamples = new Double[traces.getStateCount()];
 
-                DistributionLikelihood likelihood = new DistributionLikelihood(new GammaKDEDistribution((Double[]) traces.getValues(traceIndexParameter).toArray(parameterSamples)));
-                for (int j = 0; j < xo.getChildCount(); j++) {
-                    if (xo.getChild(j) instanceof Statistic) {
-                        likelihood.addData((Statistic) xo.getChild(j));
-                    } else {
-                        throw new XMLParseException("illegal element in " + xo.getName() + " element");
+                    DistributionLikelihood likelihood = new DistributionLikelihood(new GammaKDEDistribution((Double[]) traces.getValues(traceIndexParameter).toArray(parameterSamples)));
+                    for (int j = 0; j < xo.getChildCount(); j++) {
+                        if (xo.getChild(j) instanceof Statistic) {
+                            likelihood.addData((Statistic) xo.getChild(j));
+                        } else {
+                            throw new XMLParseException("illegal element in " + xo.getName() + " element");
+                        }
                     }
-                }
 
-                return likelihood;
+                    return likelihood;
+
+                } else {
+
+                    //dimension > 1
+                    GammaKDEDistribution[] arrayKDE = new GammaKDEDistribution[dimension];
+
+                    for (int i = 0; i < dimension; i++) {
+                        //look for parameterName1, parameterName2, ... if necessary
+                        String newParameterName = parameterName + (i+1);
+                        int traceIndexParameter = -1;
+                        for (int j = 0; j < traces.getTraceCount(); j++) {
+                            String traceName = traces.getTraceName(j);
+                            if (traceName.trim().equals(newParameterName)) {
+                                traceIndexParameter = j;
+                            }
+                        }
+
+                        if (traceIndexParameter == -1) {
+                            throw new XMLParseException("GammaKDEDistribution: Column '" + newParameterName + "' can not be found for " + getParserName() + " element.");
+                        }
+
+                        Double[] parameterSamples = new Double[traces.getStateCount()];
+                        traces.getValues(traceIndexParameter).toArray(parameterSamples);
+
+                        arrayKDE[i] = new GammaKDEDistribution(parameterSamples);
+
+                    }
+
+                    MultivariateDistributionLikelihood likelihood = new MultivariateDistributionLikelihood(new MultivariateKDEDistribution(arrayKDE));
+
+                    for (int j = 0; j < xo.getChildCount(); j++) {
+                        if (xo.getChild(j) instanceof Statistic) {
+                            if (DEBUG) {
+                                System.out.println(((Statistic) xo.getChild(j)).toString());
+                                System.out.println(((Statistic) xo.getChild(j)).getDimension());
+                            }
+                            likelihood.addData((Statistic) xo.getChild(j));
+                        } else {
+                            throw new XMLParseException("illegal element in " + xo.getName() + " element");
+                        }
+                    }
+
+                    return likelihood;
+
+                }
 
             } catch (FileNotFoundException fnfe) {
                 throw new XMLParseException("File '" + fileName + "' can not be opened for " + getParserName() + " element.");
@@ -153,6 +216,10 @@ public class WorkingPriorParsers {
             return LOG_TRANSFORMED_NORMAL_REFERENCE_PRIOR;
         }
 
+        public String[] getParserNames() {
+            return new String[]{getParserName(), LOG_TRANSFORMED_NORMAL_WORKING_PRIOR};
+        }
+
         public Object parseXMLObject(XMLObject xo) throws XMLParseException {
 
             String fileName = xo.getStringAttribute(FileHelpers.FILE_NAME);
@@ -201,7 +268,7 @@ public class WorkingPriorParsers {
                     }
 
                     if (traceIndexParameter == -1) {
-                        throw new XMLParseException("Column '" + parameterName + "' can not be found for " + getParserName() + " element.");
+                        throw new XMLParseException("LogTransformedNormalKDEDistribution: Column '" + parameterName + "' can not be found for " + getParserName() + " element.");
                     }
 
                     Double[] parameterSamples = new Double[traces.getStateCount()];
@@ -239,13 +306,13 @@ public class WorkingPriorParsers {
                         }
 
                         if (traceIndexParameter == -1) {
-                            throw new XMLParseException("Column '" + newParameterName + "' can not be found for " + getParserName() + " element.");
+                            throw new XMLParseException("LogTransformedNormalKDEDistribution: Column '" + newParameterName + "' can not be found for " + getParserName() + " element.");
                         }
 
                         Double[] parameterSamples = new Double[traces.getStateCount()];
                         traces.getValues(traceIndexParameter).toArray(parameterSamples);
 
-                        arrayKDE[i] =  new LogTransformedNormalKDEDistribution(parameterSamples);
+                        arrayKDE[i] = new LogTransformedNormalKDEDistribution(parameterSamples);
 
                     }
 
@@ -307,6 +374,10 @@ public class WorkingPriorParsers {
             return LOGIT_TRANSFORMED_NORMAL_REFERENCE_PRIOR;
         }
 
+        public String[] getParserNames() {
+            return new String[]{getParserName(), LOGIT_TRANSFORMED_NORMAL_WORKING_PRIOR};
+        }
+
         public Object parseXMLObject(XMLObject xo) throws XMLParseException {
 
             String fileName = xo.getStringAttribute(FileHelpers.FILE_NAME);
@@ -356,7 +427,7 @@ public class WorkingPriorParsers {
                     }
 
                     if (traceIndexParameter == -1) {
-                        throw new XMLParseException("Column '" + parameterName + "' can not be found for " + getParserName() + " element.");
+                        throw new XMLParseException("LogitTransformedNormalKDEDistribution: Column '" + parameterName + "' can not be found for " + getParserName() + " element.");
                     }
 
                     Double[] parameterSamples = new Double[traces.getStateCount()];
@@ -394,13 +465,13 @@ public class WorkingPriorParsers {
                         }
 
                         if (traceIndexParameter == -1) {
-                            throw new XMLParseException("Column '" + newParameterName + "' can not be found for " + getParserName() + " element.");
+                            throw new XMLParseException("LogitTransformedNormalKDEDistribution: Column '" + newParameterName + "' can not be found for " + getParserName() + " element.");
                         }
 
                         Double[] parameterSamples = new Double[traces.getStateCount()];
                         traces.getValues(traceIndexParameter).toArray(parameterSamples);
 
-                        arrayKDE[i] =  new LogitTransformedNormalKDEDistribution(parameterSamples);
+                        arrayKDE[i] = new LogitTransformedNormalKDEDistribution(parameterSamples);
 
                     }
 
@@ -464,6 +535,10 @@ public class WorkingPriorParsers {
             return NORMAL_REFERENCE_PRIOR;
         }
 
+        public String[] getParserNames() {
+            return new String[]{getParserName(), NORMAL_WORKING_PRIOR};
+        }
+
         public Object parseXMLObject(XMLObject xo) throws XMLParseException {
 
             String fileName = xo.getStringAttribute(FileHelpers.FILE_NAME);
@@ -481,6 +556,14 @@ public class WorkingPriorParsers {
 
                 String parameterName = xo.getStringAttribute(PARAMETER_COLUMN);
 
+                int dimension = 1;
+                if (xo.hasAttribute(DIMENSION)) {
+                    dimension = xo.getIntegerAttribute(DIMENSION);
+                }
+                if (dimension <= 0) {
+                    throw new XMLParseException("Column '" + parameterName + "' has dimension smaller than 1.");
+                }
+
                 LogFileTraces traces = new LogFileTraces(fileName, file);
                 traces.loadTraces();
                 long maxState = traces.getMaxState();
@@ -493,30 +576,77 @@ public class WorkingPriorParsers {
                 }
                 traces.setBurnIn(burnin);
 
-                int traceIndexParameter = -1;
-                for (int i = 0; i < traces.getTraceCount(); i++) {
-                    String traceName = traces.getTraceName(i);
-                    if (traceName.trim().equals(parameterName)) {
-                        traceIndexParameter = i;
+                if (dimension == 1) {
+
+                    int traceIndexParameter = -1;
+                    for (int i = 0; i < traces.getTraceCount(); i++) {
+                        String traceName = traces.getTraceName(i);
+                        if (traceName.trim().equals(parameterName)) {
+                            traceIndexParameter = i;
+                        }
                     }
-                }
 
-                if (traceIndexParameter == -1) {
-                    throw new XMLParseException("Column '" + parameterName + "' can not be found for " + getParserName() + " element.");
-                }
+                    if (traceIndexParameter == -1) {
+                        throw new XMLParseException("NormalKDEDistribution: Column '" + parameterName + "' can not be found for " + getParserName() + " element.");
+                    }
 
-                Double[] parameterSamples = new Double[traces.getStateCount()];
+                    Double[] parameterSamples = new Double[traces.getStateCount()];
 
-                DistributionLikelihood likelihood = new DistributionLikelihood(new NormalKDEDistribution((Double[]) traces.getValues(traceIndexParameter).toArray(parameterSamples)));
-                for (int j = 0; j < xo.getChildCount(); j++) {
-                    if (xo.getChild(j) instanceof Statistic) {
-                        likelihood.addData((Statistic) xo.getChild(j));
-                    } else {
-                        throw new XMLParseException("illegal element in " + xo.getName() + " element");
+                    DistributionLikelihood likelihood = new DistributionLikelihood(new NormalKDEDistribution((Double[]) traces.getValues(traceIndexParameter).toArray(parameterSamples)));
+                    for (int j = 0; j < xo.getChildCount(); j++) {
+                        if (xo.getChild(j) instanceof Statistic) {
+                            likelihood.addData((Statistic) xo.getChild(j));
+                        } else {
+                            throw new XMLParseException("illegal element in " + xo.getName() + " element");
+                        }
                     }
-                }
 
-                return likelihood;
+                    return likelihood;
+
+                } else {
+
+                    //dimension > 1
+                    NormalKDEDistribution[] arrayKDE = new NormalKDEDistribution[dimension];
+
+                    for (int i = 0; i < dimension; i++) {
+                        //look for parameterName1, parameterName2, ... if necessary
+                        String newParameterName = parameterName + (i+1);
+                        int traceIndexParameter = -1;
+                        for (int j = 0; j < traces.getTraceCount(); j++) {
+                            String traceName = traces.getTraceName(j);
+                            if (traceName.trim().equals(newParameterName)) {
+                                traceIndexParameter = j;
+                            }
+                        }
+
+                        if (traceIndexParameter == -1) {
+                            throw new XMLParseException("NormalKDEDistribution: Column '" + newParameterName + "' can not be found for " + getParserName() + " element.");
+                        }
+
+                        Double[] parameterSamples = new Double[traces.getStateCount()];
+                        traces.getValues(traceIndexParameter).toArray(parameterSamples);
+
+                        arrayKDE[i] = new NormalKDEDistribution(parameterSamples);
+
+                    }
+
+                    MultivariateDistributionLikelihood likelihood = new MultivariateDistributionLikelihood(new MultivariateKDEDistribution(arrayKDE));
+
+                    for (int j = 0; j < xo.getChildCount(); j++) {
+                        if (xo.getChild(j) instanceof Statistic) {
+                            if (DEBUG) {
+                                System.out.println(((Statistic) xo.getChild(j)).toString());
+                                System.out.println(((Statistic) xo.getChild(j)).getDimension());
+                            }
+                            likelihood.addData((Statistic) xo.getChild(j));
+                        } else {
+                            throw new XMLParseException("illegal element in " + xo.getName() + " element");
+                        }
+                    }
+
+                    return likelihood;
+
+                }
 
             } catch (FileNotFoundException fnfe) {
                 throw new XMLParseException("File '" + fileName + "' can not be opened for " + getParserName() + " element.");
diff --git a/src/dr/inferencexml/loggers/LoggerParser.java b/src/dr/inferencexml/loggers/LoggerParser.java
index 1e618e9..047fc30 100644
--- a/src/dr/inferencexml/loggers/LoggerParser.java
+++ b/src/dr/inferencexml/loggers/LoggerParser.java
@@ -28,6 +28,7 @@ package dr.inferencexml.loggers;
 import dr.app.beast.BeastVersion;
 import dr.inference.loggers.*;
 import dr.math.MathUtils;
+import dr.math.matrixAlgebra.SymmetricMatrix;
 import dr.util.FileHelpers;
 import dr.util.Identifiable;
 import dr.util.Property;
@@ -104,7 +105,8 @@ public class LoggerParser extends AbstractXMLObjectParser {
 
             title = "BEAST " + version.getVersionString() + "\n" +
                     (header != null ? header + "\n" : "") +
-                    "Generated " + (new Date()).toString() + " [seed=" + MathUtils.getSeed() + "]";
+                    "Generated " + (new Date()).toString() + " [seed=" + MathUtils.getSeed() + "]\n" +
+                    System.getProperty("command_line", "");
         } else {
             if (header != null) {
                 title += "\n" + header;
diff --git a/src/dr/inferencexml/model/CompoundLikelihoodParser.java b/src/dr/inferencexml/model/CompoundLikelihoodParser.java
index 80cccf7..04cf9c1 100644
--- a/src/dr/inferencexml/model/CompoundLikelihoodParser.java
+++ b/src/dr/inferencexml/model/CompoundLikelihoodParser.java
@@ -43,14 +43,15 @@ public class CompoundLikelihoodParser extends AbstractXMLObjectParser {
     public static final String PRIOR = "prior";
     public static final String LIKELIHOOD = "likelihood";
     public static final String PSEUDO_PRIOR = "pseudoPrior";
-    public static final String WORKING_PRIOR = "referencePrior";
+    public static final String REFERENCE_PRIOR = "referencePrior";
+    public static final String WORKING_PRIOR = "workingPrior";
 
     public String getParserName() {
         return COMPOUND_LIKELIHOOD;
     }
 
     public String[] getParserNames() {
-        return new String[]{getParserName(), POSTERIOR, PRIOR, LIKELIHOOD, PSEUDO_PRIOR, WORKING_PRIOR};
+        return new String[]{getParserName(), POSTERIOR, PRIOR, LIKELIHOOD, PSEUDO_PRIOR, REFERENCE_PRIOR, WORKING_PRIOR};
     }
 
     public Object parseXMLObject(XMLObject xo) throws XMLParseException {
diff --git a/src/dr/inferencexml/model/ImmutableParameterParser.java b/src/dr/inferencexml/model/ImmutableParameterParser.java
new file mode 100644
index 0000000..be8026f
--- /dev/null
+++ b/src/dr/inferencexml/model/ImmutableParameterParser.java
@@ -0,0 +1,85 @@
+package dr.inferencexml.model;
+
+import dr.inference.model.Bounds;
+import dr.inference.model.Parameter;
+import dr.inference.model.Statistic;
+import dr.xml.*;
+
+/**
+ * Created by Guy Baele on 18/12/15.
+ */
+public class ImmutableParameterParser extends AbstractXMLObjectParser {
+
+    public static final String IMMUTABLE_PARAMETER = "immutableParameter";
+
+    public Object parseXMLObject(XMLObject xo) throws XMLParseException {
+        final Statistic statistic = (Statistic) xo.getChild(Statistic.class);
+
+        Parameter.Abstract immutableParameter = new Parameter.Abstract() {
+            public void setParameterValueNotifyChangedAll(int dim, double value) {
+                throw new RuntimeException("Forbidden call to ImmutableParameter.");
+            }
+            public void setParameterValueQuietly(int dim, double value) {
+                throw new RuntimeException("Forbidden call to ImmutableParameter.");
+            }
+            public void storeValues() {
+                //do nothing
+            }
+            public void restoreValues() {
+                //do nothing
+            }
+            public void acceptValues() {
+                //do nothing
+            }
+            public void setParameterValue(int dim, double value) {
+                throw new RuntimeException("Forbidden call to ImmutableParameter.");
+            }
+            public double getParameterValue(int dim) {
+                return statistic.getStatisticValue(dim);
+            }
+            public String getParameterName() {
+                if (getId() == null)
+                    return "immutable." + statistic.getStatisticName();
+                return getId();
+            }
+            public void adoptValues(Parameter source) {
+                throw new RuntimeException("Forbidden call to ImmutableParameter.");
+            }
+            public void addDimension(int index, double value) {
+                throw new RuntimeException("Forbidden call to ImmutableParameter.");
+            }
+            public double removeDimension(int index) {
+                throw new RuntimeException("Forbidden call to ImmutableParameter.");
+            }
+            public void addBounds(Bounds<Double> bounds) {
+                throw new RuntimeException("Forbidden call to ImmutableParameter.");
+            }
+            public Bounds<Double> getBounds() {
+                throw new RuntimeException("Forbidden call to ImmutableParameter.");
+            }
+        };
+
+        return immutableParameter;
+    }
+
+    public XMLSyntaxRule[] getSyntaxRules() {
+        return rules;
+    }
+
+    private final XMLSyntaxRule[] rules = {
+            new ElementRule(Statistic.class),
+    };
+
+    public String getParserDescription() {
+        return "An immutable parameter generated from a statistic.";
+    }
+
+    public Class getReturnType() {
+        return Parameter.class;
+    }
+
+    public String getParserName() {
+        return IMMUTABLE_PARAMETER;
+    }
+
+}
diff --git a/src/dr/inferencexml/model/LatentFactorModelParser.java b/src/dr/inferencexml/model/LatentFactorModelParser.java
index 372b21c..cf1a817 100644
--- a/src/dr/inferencexml/model/LatentFactorModelParser.java
+++ b/src/dr/inferencexml/model/LatentFactorModelParser.java
@@ -44,6 +44,9 @@ public class LatentFactorModelParser extends AbstractXMLObjectParser {
     public static final String SCALE_DATA="scaleData";
     public static final String CONTINUOUS="continuous";
     public static final String COMPUTE_RESIDUALS_FOR_DISCRETE="computeResidualsForDiscrete";
+    public static final String RECOMPUTE_RESIDUALS="recomputeResiduals";
+    public static final String RECOMPUTE_FACTORS="recomputeFactors";
+    public static final String RECOMPUTE_LOADINGS="recomputeLoadings";
 
 
     public String getParserName() {
@@ -59,6 +62,9 @@ public class LatentFactorModelParser extends AbstractXMLObjectParser {
         DiagonalMatrix rowPrecision = (DiagonalMatrix) xo.getChild(ROW_PRECISION).getChild(MatrixParameter.class);
         DiagonalMatrix colPrecision = (DiagonalMatrix) xo.getChild(COLUMN_PRECISION).getChild(MatrixParameter.class);
         boolean newModel= xo.getAttribute(COMPUTE_RESIDUALS_FOR_DISCRETE, true);
+        boolean computeResiduals= xo.getAttribute(RECOMPUTE_RESIDUALS, true);
+        boolean computeFactors=xo.getAttribute(RECOMPUTE_FACTORS, true);
+        boolean computeLoadings=xo.getAttribute(RECOMPUTE_LOADINGS, true);
         Parameter continuous=null;
         if(xo.getChild(CONTINUOUS)!=null)
             continuous=(Parameter) xo.getChild(CONTINUOUS).getChild(Parameter.class);
@@ -67,22 +73,25 @@ public class LatentFactorModelParser extends AbstractXMLObjectParser {
         boolean scaleData=xo.getAttribute(SCALE_DATA, true);
  //       int numFactors = xo.getAttribute(NUMBER_OF_FACTORS, 4);
         Parameter temp=null;
-        for(int i=0; i<loadings.getColumnDimension(); i++)
-        {
-            if(loadings.getParameterValue(i,i)<0)
-            {
-               loadings.setParameterValue(i, i, temp.getParameterValue(i));
-            }
-        }
+//        for(int i=0; i<loadings.getColumnDimension(); i++)
+//        {
+//            if(loadings.getParameterValue(i,i)<0)
+//            {
+//               loadings.setParameterValue(i, i, temp.getParameterValue(i));
+//            }
+//        }
 
 
-        return new LatentFactorModel(dataParameter, factors, loadings, rowPrecision, colPrecision, scaleData, continuous, newModel);
+        return new LatentFactorModel(dataParameter, factors, loadings, rowPrecision, colPrecision, scaleData, continuous, newModel,computeResiduals,computeFactors, computeLoadings);
     }
 
     private static final XMLSyntaxRule[] rules = {
             AttributeRule.newIntegerRule(NUMBER_OF_FACTORS),
             AttributeRule.newBooleanRule(SCALE_DATA, true),
             AttributeRule.newBooleanRule(COMPUTE_RESIDUALS_FOR_DISCRETE, true),
+            AttributeRule.newBooleanRule(RECOMPUTE_FACTORS, true),
+            AttributeRule.newBooleanRule(RECOMPUTE_RESIDUALS, true),
+            AttributeRule.newBooleanRule(RECOMPUTE_LOADINGS,true),
             new ElementRule(DATA, new XMLSyntaxRule[]{
                     new ElementRule(MatrixParameter.class),
             }),
diff --git a/src/dr/inferencexml/model/SumParameterParser.java b/src/dr/inferencexml/model/SumParameterParser.java
new file mode 100644
index 0000000..c46ba29
--- /dev/null
+++ b/src/dr/inferencexml/model/SumParameterParser.java
@@ -0,0 +1,79 @@
+/*
+ * ProductParameterParser.java
+ *
+ * Copyright (c) 2002-2012 Alexei Drummond, Andrew Rambaut and Marc Suchard
+ *
+ * This file is part of BEAST.
+ * See the NOTICE file distributed with this work for additional
+ * information regarding copyright ownership and licensing.
+ *
+ * BEAST is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+ *
+ *  BEAST is distributed in the hope that it will be useful,
+ *  but WITHOUT ANY WARRANTY; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ *  GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with BEAST; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA  02110-1301  USA
+ */
+
+package dr.inferencexml.model;
+
+import dr.inference.model.SumParameter;
+import dr.inference.model.Parameter;
+import dr.xml.*;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ */
+public class SumParameterParser extends AbstractXMLObjectParser {
+
+    public static final String SUM_PARAMETER = "sumParameter";
+
+    public Object parseXMLObject(XMLObject xo) throws XMLParseException {
+
+        List<Parameter> paramList = new ArrayList<Parameter>();
+        int dim = -1;
+        for (int i = 0; i < xo.getChildCount(); ++i) {
+            Parameter parameter = (Parameter) xo.getChild(i);
+            if (dim == -1) {
+                dim = parameter.getDimension();
+            } else {
+                if (parameter.getDimension() != dim) {
+                    throw new XMLParseException("All parameters in sum '" + xo.getId() + "' must be the same length");
+                }
+            }
+            paramList.add(parameter);
+        }
+
+        return new SumParameter(paramList);
+    }
+
+    public XMLSyntaxRule[] getSyntaxRules() {
+        return rules;
+    }
+
+    private final XMLSyntaxRule[] rules = {
+            new ElementRule(Parameter.class,1,Integer.MAX_VALUE),
+    };
+
+    public String getParserDescription() {
+        return "A element-wise sum of parameters.";
+    }
+
+    public Class getReturnType() {
+        return Parameter.class;
+    }
+
+    public String getParserName() {
+        return SUM_PARAMETER;
+    }
+}
diff --git a/src/dr/inferencexml/operators/SimpleOperatorScheduleParser.java b/src/dr/inferencexml/operators/SimpleOperatorScheduleParser.java
index 7529355..8b4d592 100644
--- a/src/dr/inferencexml/operators/SimpleOperatorScheduleParser.java
+++ b/src/dr/inferencexml/operators/SimpleOperatorScheduleParser.java
@@ -58,12 +58,11 @@ public class SimpleOperatorScheduleParser extends AbstractXMLObjectParser {
             String type = xo.getStringAttribute(OPTIMIZATION_SCHEDULE);
             Logger.getLogger("dr.inference").info("Optimization Schedule: " + type);
 
-            if (type.equals(OperatorSchedule.LOG_STRING))
-                schedule.setOptimizationSchedule(OperatorSchedule.LOG_SCHEDULE);
-            else if (type.equals(OperatorSchedule.SQRT_STRING))
-                schedule.setOptimizationSchedule(OperatorSchedule.SQRT_SCHEDULE);
-            else if (!type.equals(OperatorSchedule.DEFAULT_STRING))
+            try {
+                schedule.setOptimizationSchedule(OperatorSchedule.OptimizationTransform.valueOf(type.toUpperCase()));
+            } catch (IllegalArgumentException iae) {
                 throw new RuntimeException("Unsupported optimization schedule");
+            }
         }
 
         for (int i = 0; i < xo.getChildCount(); i++) {
diff --git a/src/dr/math/MathUtils.java b/src/dr/math/MathUtils.java
index 23d4b94..f2b4a39 100644
--- a/src/dr/math/MathUtils.java
+++ b/src/dr/math/MathUtils.java
@@ -93,47 +93,47 @@ public class MathUtils {
 			System.out.println(i + "\t" + pdf[i]);
 		}
 		throw new Error("randomChoicePDF falls through -- negative, infinite or NaN components in input " +
-                "distribution, or all zeroes?");
+				"distribution, or all zeroes?");
 	}
 
-    /**
-     * @param logpdf array of unnormalised log probabilities
-     * @return a sample according to an unnormalised probability distribution
-     *
-     * Use this if probabilities are rounding to zero when converted to real space
-     */
-    public static int randomChoiceLogPDF(double[] logpdf) {
+	/**
+	 * @param logpdf array of unnormalised log probabilities
+	 * @return a sample according to an unnormalised probability distribution
+	 * <p/>
+	 * Use this if probabilities are rounding to zero when converted to real space
+	 */
+	public static int randomChoiceLogPDF(double[] logpdf) {
 
-        double scalingFactor=Double.NEGATIVE_INFINITY;
+		double scalingFactor = Double.NEGATIVE_INFINITY;
 
-        for (double aLogpdf : logpdf) {
-            if (aLogpdf > scalingFactor) {
-                scalingFactor = aLogpdf;
-            }
-        }
+		for (double aLogpdf : logpdf) {
+			if (aLogpdf > scalingFactor) {
+				scalingFactor = aLogpdf;
+			}
+		}
 
-        if(scalingFactor == Double.NEGATIVE_INFINITY){
-            throw new Error("randomChoiceLogPDF falls through -- all -INF components in input distribution");
-        }
+		if (scalingFactor == Double.NEGATIVE_INFINITY) {
+			throw new Error("randomChoiceLogPDF falls through -- all -INF components in input distribution");
+		}
 
-        for(int j=0; j<logpdf.length; j++){
-            logpdf[j] = logpdf[j] - scalingFactor;
-        }
+		for (int j = 0; j < logpdf.length; j++) {
+			logpdf[j] = logpdf[j] - scalingFactor;
+		}
 
-        double[] pdf = new double[logpdf.length];
+		double[] pdf = new double[logpdf.length];
 
-        for(int j=0; j<logpdf.length; j++){
-            pdf[j] = Math.exp(logpdf[j]);
-        }
+		for (int j = 0; j < logpdf.length; j++) {
+			pdf[j] = Math.exp(logpdf[j]);
+		}
 
-        return randomChoicePDF(pdf);
+		return randomChoicePDF(pdf);
 
-    }
+	}
 
-    /**
+	/**
 	 * @param array to normalize
 	 * @return a new double array where all the values sum to 1.
-	 *         Relative ratios are preserved.
+	 * Relative ratios are preserved.
 	 */
 	public static double[] getNormalized(double[] array) {
 		double[] newArray = new double[array.length];
@@ -232,7 +232,7 @@ public class MathUtils {
 			return random.nextGaussian();
 		}
 	}
-	
+
 	//Mean = alpha / lambda
 	//Variance = alpha / (lambda*lambda)
 
@@ -242,14 +242,14 @@ public class MathUtils {
 		}
 	}
 
-    //Mean = alpha/(alpha+beta)
-    //Variance = (alpha*beta)/(alpha+beta)^2*(alpha+beta+1)
+	//Mean = alpha/(alpha+beta)
+	//Variance = (alpha*beta)/(alpha+beta)^2*(alpha+beta+1)
 
-    public static double nextBeta(double alpha, double beta){
-        double x = nextGamma(alpha, 1);
-        double y = nextGamma(beta, 1);
-        return x/(x+y);
-    }
+	public static double nextBeta(double alpha, double beta) {
+		double x = nextGamma(alpha, 1);
+		double y = nextGamma(beta, 1);
+		return x / (x + y);
+	}
 
 
 	/**
@@ -279,27 +279,26 @@ public class MathUtils {
 		}
 	}
 
-    /**
+	/**
 	 * Access a default instance of this class, access is synchronized
 	 */
 	public static double nextInverseGaussian(double mu, double lambda) {
 		synchronized (random) {
 			/* CODE TAKEN FROM WIKIPEDIA. TESTING DONE WITH RESULTS GENERATED IN R AND LOOK COMPARABLE */
-            double v = random.nextGaussian();   // sample from a normal distribution with a mean of 0 and 1 standard deviation
-            double y = v * v;
-            double x = mu + (mu * mu * y)/(2 * lambda) - (mu/(2 * lambda)) * Math.sqrt(4 * mu * lambda * y + mu * mu * y * y);
-            double test = MathUtils.nextDouble();  // sample from a uniform distribution between 0 and 1
-            if (test <= (mu) / (mu + x)) {
-                return x;
-            }
-            else {
-                return (mu * mu) / x;
-            }
+			double v = random.nextGaussian();   // sample from a normal distribution with a mean of 0 and 1 standard deviation
+			double y = v * v;
+			double x = mu + (mu * mu * y) / (2 * lambda) - (mu / (2 * lambda)) * Math.sqrt(4 * mu * lambda * y + mu * mu * y * y);
+			double test = MathUtils.nextDouble();  // sample from a uniform distribution between 0 and 1
+			if (test <= (mu) / (mu + x)) {
+				return x;
+			} else {
+				return (mu * mu) / x;
+			}
 		}
 	}
 
 
-    /**
+	/**
 	 * Access a default instance of this class, access is synchronized
 	 */
 	public static float nextFloat() {
@@ -344,15 +343,14 @@ public class MathUtils {
 		}
 	}
 
-    /**
-     *
-     * @param low
-     * @param high
-     * @return  uniform between low and high
-     */
-    public static double uniform(double low, double high) {
-        return low + nextDouble() * (high - low);
-    }
+	/**
+	 * @param low
+	 * @param high
+	 * @return uniform between low and high
+	 */
+	public static double uniform(double low, double high) {
+		return low + nextDouble() * (high - low);
+	}
 
 	/**
 	 * Shuffles an array.
@@ -414,40 +412,53 @@ public class MathUtils {
 	}
 
 
-    public static double logHyperSphereVolume(int dimension, double radius) {
-        return dimension * (0.5723649429247001 + Math.log(radius)) +
-                -GammaFunction.lnGamma(dimension / 2.0 + 1.0);
-    }
+	public static double logHyperSphereVolume(int dimension, double radius) {
+		return dimension * (0.5723649429247001 + Math.log(radius)) +
+				-GammaFunction.lnGamma(dimension / 2.0 + 1.0);
+	}
 
-/**
- * Returns sqrt(a^2 + b^2) without under/overflow.
- */
-    public static double hypot(double a, double b) {
-	double r;
-	if (Math.abs(a) > Math.abs(b)) {
-		r = b/a;
-		r = Math.abs(a)*Math.sqrt(1+r*r);
-	} else if (b != 0) {
-		r = a/b;
-		r = Math.abs(b)*Math.sqrt(1+r*r);
-	} else {
-		r = 0.0;
-	}
-	return r;
-    }
-    
-    /**
-     * return double *.????
-     * @param value
-     * @param sf
-     * @return
-     */
-    public static double round(double value, int sf) {
-        NumberFormatter formatter = new NumberFormatter(sf);
-        try {
-            return NumberFormat.getInstance().parse(formatter.format(value)).doubleValue();
-        } catch (ParseException e) {
-            return value;
-        }
-    }
-}
+	/**
+	 * Returns sqrt(a^2 + b^2) without under/overflow.
+	 */
+	public static double hypot(double a, double b) {
+		double r;
+		if (Math.abs(a) > Math.abs(b)) {
+			r = b / a;
+			r = Math.abs(a) * Math.sqrt(1 + r * r);
+		} else if (b != 0) {
+			r = a / b;
+			r = Math.abs(b) * Math.sqrt(1 + r * r);
+		} else {
+			r = 0.0;
+		}
+		return r;
+	}
+
+	/**
+	 * return double *.????
+	 *
+	 * @param value
+	 * @param sf
+	 * @return
+	 */
+	public static double round(double value, int sf) {
+		NumberFormatter formatter = new NumberFormatter(sf);
+		try {
+			return NumberFormat.getInstance().parse(formatter.format(value)).doubleValue();
+		} catch (ParseException e) {
+			return value;
+		}
+	}
+
+	public static int[] getRandomState() {
+		synchronized (random) {
+			return random.getRandomState();
+		}
+	}
+
+	public static void setRandomState(int[] rngState) {
+		synchronized (random) {
+			random.setRandomState(rngState);
+		}
+	}
+}
\ No newline at end of file
diff --git a/src/dr/math/MersenneTwisterFast.java b/src/dr/math/MersenneTwisterFast.java
index 69424da..323de31 100644
--- a/src/dr/math/MersenneTwisterFast.java
+++ b/src/dr/math/MersenneTwisterFast.java
@@ -86,6 +86,8 @@ class MersenneTwisterFast implements Serializable {
 	private static final int UPPER_MASK = 0x80000000; // most significant w-r bits
 	private static final int LOWER_MASK = 0x7fffffff; // least significant r bits
 
+	// mag01[x] = x * MATRIX_A  for x=0,1
+	private static final int MAG_01[] = { 0x0, MATRIX_A };
 
 	// Tempering parameters
 	private static final int TEMPERING_MASK_B = 0x9d2c5680;
@@ -98,7 +100,6 @@ class MersenneTwisterFast implements Serializable {
 
 	private int mt[]; // the array for the state vector
 	private int mti; // mti==N+1 means mt[N] is not initialized
-	private int mag01[];
 
 	// a good initial seed (of int size, though stored in a long)
 	private static final long GOOD_SEED = 4357;
@@ -169,10 +170,6 @@ class MersenneTwisterFast implements Serializable {
 		for (mti = 1; mti < N; mti++)
 			mt[mti] = (69069 * mt[mti - 1]); //& 0xffffffff;
 
-		// mag01[x] = x * MATRIX_A  for x=0,1
-		mag01 = new int[2];
-		mag01[0] = 0x0;
-		mag01[1] = MATRIX_A;
 	}
 
 	public final long getSeed() {
@@ -188,14 +185,14 @@ class MersenneTwisterFast implements Serializable {
 
 			for (kk = 0; kk < N - M; kk++) {
 				y = (mt[kk] & UPPER_MASK) | (mt[kk + 1] & LOWER_MASK);
-				mt[kk] = mt[kk + M] ^ (y >>> 1) ^ mag01[y & 0x1];
+				mt[kk] = mt[kk + M] ^ (y >>> 1) ^ MAG_01[y & 0x1];
 			}
 			for (; kk < N - 1; kk++) {
 				y = (mt[kk] & UPPER_MASK) | (mt[kk + 1] & LOWER_MASK);
-				mt[kk] = mt[kk + (M - N)] ^ (y >>> 1) ^ mag01[y & 0x1];
+				mt[kk] = mt[kk + (M - N)] ^ (y >>> 1) ^ MAG_01[y & 0x1];
 			}
 			y = (mt[N - 1] & UPPER_MASK) | (mt[0] & LOWER_MASK);
-			mt[N - 1] = mt[M - 1] ^ (y >>> 1) ^ mag01[y & 0x1];
+			mt[N - 1] = mt[M - 1] ^ (y >>> 1) ^ MAG_01[y & 0x1];
 
 			mti = 0;
 		}
@@ -211,156 +208,35 @@ class MersenneTwisterFast implements Serializable {
 
 
 	public final short nextShort() {
-		int y;
-
-		if (mti >= N)   // generate N words at one time
-		{
-			int kk;
-
-			for (kk = 0; kk < N - M; kk++) {
-				y = (mt[kk] & UPPER_MASK) | (mt[kk + 1] & LOWER_MASK);
-				mt[kk] = mt[kk + M] ^ (y >>> 1) ^ mag01[y & 0x1];
-			}
-			for (; kk < N - 1; kk++) {
-				y = (mt[kk] & UPPER_MASK) | (mt[kk + 1] & LOWER_MASK);
-				mt[kk] = mt[kk + (M - N)] ^ (y >>> 1) ^ mag01[y & 0x1];
-			}
-			y = (mt[N - 1] & UPPER_MASK) | (mt[0] & LOWER_MASK);
-			mt[N - 1] = mt[M - 1] ^ (y >>> 1) ^ mag01[y & 0x1];
-
-			mti = 0;
-		}
-
-		y = mt[mti++];
-		y ^= y >>> 11;                          // TEMPERING_SHIFT_U(y)
-		y ^= (y << 7) & TEMPERING_MASK_B;       // TEMPERING_SHIFT_S(y)
-		y ^= (y << 15) & TEMPERING_MASK_C;      // TEMPERING_SHIFT_T(y)
-		y ^= (y >>> 18);                        // TEMPERING_SHIFT_L(y)
-
+		int y = nextInt();
 		return (short) (y >>> 16);
 	}
 
 
 	public final char nextChar() {
-		int y;
-
-		if (mti >= N)   // generate N words at one time
-		{
-			int kk;
-
-			for (kk = 0; kk < N - M; kk++) {
-				y = (mt[kk] & UPPER_MASK) | (mt[kk + 1] & LOWER_MASK);
-				mt[kk] = mt[kk + M] ^ (y >>> 1) ^ mag01[y & 0x1];
-			}
-			for (; kk < N - 1; kk++) {
-				y = (mt[kk] & UPPER_MASK) | (mt[kk + 1] & LOWER_MASK);
-				mt[kk] = mt[kk + (M - N)] ^ (y >>> 1) ^ mag01[y & 0x1];
-			}
-			y = (mt[N - 1] & UPPER_MASK) | (mt[0] & LOWER_MASK);
-			mt[N - 1] = mt[M - 1] ^ (y >>> 1) ^ mag01[y & 0x1];
-
-			mti = 0;
-		}
-
-		y = mt[mti++];
-		y ^= y >>> 11;                          // TEMPERING_SHIFT_U(y)
-		y ^= (y << 7) & TEMPERING_MASK_B;       // TEMPERING_SHIFT_S(y)
-		y ^= (y << 15) & TEMPERING_MASK_C;      // TEMPERING_SHIFT_T(y)
-		y ^= (y >>> 18);                        // TEMPERING_SHIFT_L(y)
+		int y = nextInt();
 
 		return (char) (y >>> 16);
 	}
 
 
 	public final boolean nextBoolean() {
-		int y;
-
-		if (mti >= N)   // generate N words at one time
-		{
-			int kk;
-
-			for (kk = 0; kk < N - M; kk++) {
-				y = (mt[kk] & UPPER_MASK) | (mt[kk + 1] & LOWER_MASK);
-				mt[kk] = mt[kk + M] ^ (y >>> 1) ^ mag01[y & 0x1];
-			}
-			for (; kk < N - 1; kk++) {
-				y = (mt[kk] & UPPER_MASK) | (mt[kk + 1] & LOWER_MASK);
-				mt[kk] = mt[kk + (M - N)] ^ (y >>> 1) ^ mag01[y & 0x1];
-			}
-			y = (mt[N - 1] & UPPER_MASK) | (mt[0] & LOWER_MASK);
-			mt[N - 1] = mt[M - 1] ^ (y >>> 1) ^ mag01[y & 0x1];
-
-			mti = 0;
-		}
-
-		y = mt[mti++];
-		y ^= y >>> 11;                          // TEMPERING_SHIFT_U(y)
-		y ^= (y << 7) & TEMPERING_MASK_B;       // TEMPERING_SHIFT_S(y)
-		y ^= (y << 15) & TEMPERING_MASK_C;      // TEMPERING_SHIFT_T(y)
-		y ^= (y >>> 18);                        // TEMPERING_SHIFT_L(y)
+		int y = nextInt();
 
 		return ((y >>> 31) != 0);
 	}
 
 
 	public final byte nextByte() {
-		int y;
-
-		if (mti >= N)   // generate N words at one time
-		{
-			int kk;
-
-			for (kk = 0; kk < N - M; kk++) {
-				y = (mt[kk] & UPPER_MASK) | (mt[kk + 1] & LOWER_MASK);
-				mt[kk] = mt[kk + M] ^ (y >>> 1) ^ mag01[y & 0x1];
-			}
-			for (; kk < N - 1; kk++) {
-				y = (mt[kk] & UPPER_MASK) | (mt[kk + 1] & LOWER_MASK);
-				mt[kk] = mt[kk + (M - N)] ^ (y >>> 1) ^ mag01[y & 0x1];
-			}
-			y = (mt[N - 1] & UPPER_MASK) | (mt[0] & LOWER_MASK);
-			mt[N - 1] = mt[M - 1] ^ (y >>> 1) ^ mag01[y & 0x1];
-
-			mti = 0;
-		}
-
-		y = mt[mti++];
-		y ^= y >>> 11;                          // TEMPERING_SHIFT_U(y)
-		y ^= (y << 7) & TEMPERING_MASK_B;       // TEMPERING_SHIFT_S(y)
-		y ^= (y << 15) & TEMPERING_MASK_C;      // TEMPERING_SHIFT_T(y)
-		y ^= (y >>> 18);                        // TEMPERING_SHIFT_L(y)
+		int y = nextInt();
 
 		return (byte) (y >>> 24);
 	}
 
 
 	public final void nextBytes(byte[] bytes) {
-		int y;
-
 		for (int x = 0; x < bytes.length; x++) {
-			if (mti >= N)   // generate N words at one time
-			{
-				int kk;
-
-				for (kk = 0; kk < N - M; kk++) {
-					y = (mt[kk] & UPPER_MASK) | (mt[kk + 1] & LOWER_MASK);
-					mt[kk] = mt[kk + M] ^ (y >>> 1) ^ mag01[y & 0x1];
-				}
-				for (; kk < N - 1; kk++) {
-					y = (mt[kk] & UPPER_MASK) | (mt[kk + 1] & LOWER_MASK);
-					mt[kk] = mt[kk + (M - N)] ^ (y >>> 1) ^ mag01[y & 0x1];
-				}
-				y = (mt[N - 1] & UPPER_MASK) | (mt[0] & LOWER_MASK);
-				mt[N - 1] = mt[M - 1] ^ (y >>> 1) ^ mag01[y & 0x1];
-
-				mti = 0;
-			}
-
-			y = mt[mti++];
-			y ^= y >>> 11;                          // TEMPERING_SHIFT_U(y)
-			y ^= (y << 7) & TEMPERING_MASK_B;       // TEMPERING_SHIFT_S(y)
-			y ^= (y << 15) & TEMPERING_MASK_C;      // TEMPERING_SHIFT_T(y)
-			y ^= (y >>> 18);                        // TEMPERING_SHIFT_L(y)
+			int y = nextInt();
 
 			bytes[x] = (byte) (y >>> 24);
 		}
@@ -368,112 +244,16 @@ class MersenneTwisterFast implements Serializable {
 
 
 	public final long nextLong() {
-		int y;
-		int z;
-
-		if (mti >= N)   // generate N words at one time
-		{
-			int kk;
-
-			for (kk = 0; kk < N - M; kk++) {
-				y = (mt[kk] & UPPER_MASK) | (mt[kk + 1] & LOWER_MASK);
-				mt[kk] = mt[kk + M] ^ (y >>> 1) ^ mag01[y & 0x1];
-			}
-			for (; kk < N - 1; kk++) {
-				y = (mt[kk] & UPPER_MASK) | (mt[kk + 1] & LOWER_MASK);
-				mt[kk] = mt[kk + (M - N)] ^ (y >>> 1) ^ mag01[y & 0x1];
-			}
-			y = (mt[N - 1] & UPPER_MASK) | (mt[0] & LOWER_MASK);
-			mt[N - 1] = mt[M - 1] ^ (y >>> 1) ^ mag01[y & 0x1];
-
-			mti = 0;
-		}
-
-		y = mt[mti++];
-		y ^= y >>> 11;                          // TEMPERING_SHIFT_U(y)
-		y ^= (y << 7) & TEMPERING_MASK_B;       // TEMPERING_SHIFT_S(y)
-		y ^= (y << 15) & TEMPERING_MASK_C;      // TEMPERING_SHIFT_T(y)
-		y ^= (y >>> 18);                        // TEMPERING_SHIFT_L(y)
-
-		if (mti >= N)   // generate N words at one time
-		{
-			int kk;
-
-			for (kk = 0; kk < N - M; kk++) {
-				z = (mt[kk] & UPPER_MASK) | (mt[kk + 1] & LOWER_MASK);
-				mt[kk] = mt[kk + M] ^ (z >>> 1) ^ mag01[z & 0x1];
-			}
-			for (; kk < N - 1; kk++) {
-				z = (mt[kk] & UPPER_MASK) | (mt[kk + 1] & LOWER_MASK);
-				mt[kk] = mt[kk + (M - N)] ^ (z >>> 1) ^ mag01[z & 0x1];
-			}
-			z = (mt[N - 1] & UPPER_MASK) | (mt[0] & LOWER_MASK);
-			mt[N - 1] = mt[M - 1] ^ (z >>> 1) ^ mag01[z & 0x1];
-
-			mti = 0;
-		}
-
-		z = mt[mti++];
-		z ^= z >>> 11;                          // TEMPERING_SHIFT_U(z)
-		z ^= (z << 7) & TEMPERING_MASK_B;       // TEMPERING_SHIFT_S(z)
-		z ^= (z << 15) & TEMPERING_MASK_C;      // TEMPERING_SHIFT_T(z)
-		z ^= (z >>> 18);                        // TEMPERING_SHIFT_L(z)
+		int y = nextInt();
+		int z = nextInt();
 
 		return (((long) y) << 32) + (long) z;
 	}
 
 
 	public final double nextDouble() {
-		int y;
-		int z;
-
-		if (mti >= N)   // generate N words at one time
-		{
-			int kk;
-
-			for (kk = 0; kk < N - M; kk++) {
-				y = (mt[kk] & UPPER_MASK) | (mt[kk + 1] & LOWER_MASK);
-				mt[kk] = mt[kk + M] ^ (y >>> 1) ^ mag01[y & 0x1];
-			}
-			for (; kk < N - 1; kk++) {
-				y = (mt[kk] & UPPER_MASK) | (mt[kk + 1] & LOWER_MASK);
-				mt[kk] = mt[kk + (M - N)] ^ (y >>> 1) ^ mag01[y & 0x1];
-			}
-			y = (mt[N - 1] & UPPER_MASK) | (mt[0] & LOWER_MASK);
-			mt[N - 1] = mt[M - 1] ^ (y >>> 1) ^ mag01[y & 0x1];
-
-			mti = 0;
-		}
-
-		y = mt[mti++];
-		y ^= y >>> 11;                          // TEMPERING_SHIFT_U(y)
-		y ^= (y << 7) & TEMPERING_MASK_B;       // TEMPERING_SHIFT_S(y)
-		y ^= (y << 15) & TEMPERING_MASK_C;      // TEMPERING_SHIFT_T(y)
-		y ^= (y >>> 18);                        // TEMPERING_SHIFT_L(y)
-
-		if (mti >= N)   // generate N words at one time
-		{
-			int kk;
-
-			for (kk = 0; kk < N - M; kk++) {
-				z = (mt[kk] & UPPER_MASK) | (mt[kk + 1] & LOWER_MASK);
-				mt[kk] = mt[kk + M] ^ (z >>> 1) ^ mag01[z & 0x1];
-			}
-			for (; kk < N - 1; kk++) {
-				z = (mt[kk] & UPPER_MASK) | (mt[kk + 1] & LOWER_MASK);
-				mt[kk] = mt[kk + (M - N)] ^ (z >>> 1) ^ mag01[z & 0x1];
-			}
-			z = (mt[N - 1] & UPPER_MASK) | (mt[0] & LOWER_MASK);
-			mt[N - 1] = mt[M - 1] ^ (z >>> 1) ^ mag01[z & 0x1];
-
-			mti = 0;
-		}
-
-		z = mt[mti++];
-		z ^= z >>> 11;                          // TEMPERING_SHIFT_U(z)
-		z ^= (z << 7) & TEMPERING_MASK_B;       // TEMPERING_SHIFT_S(z)
-		z ^= (z << 15) & TEMPERING_MASK_C;      // TEMPERING_SHIFT_T(z)
-		z ^= (z >>> 18);                        // TEMPERING_SHIFT_L(z)
+		int y = nextInt();
+		int z = nextInt();
 
 		/* derived from nextDouble documentation in jdk 1.2 docs, see top */
 		return ((((long) (y >>> 6)) << 27) + (z >>> 5)) / (double) (1L << 53);
@@ -486,115 +266,12 @@ class MersenneTwisterFast implements Serializable {
 		} else {
 			double v1, v2, s;
 			do {
-				int y;
-				int z;
-				int a;
-				int b;
-
-				if (mti >= N)   // generate N words at one time
-				{
-					int kk;
-
-					for (kk = 0; kk < N - M; kk++) {
-						y = (mt[kk] & UPPER_MASK) | (mt[kk + 1] & LOWER_MASK);
-						mt[kk] = mt[kk + M] ^ (y >>> 1) ^ mag01[y & 0x1];
-					}
-					for (; kk < N - 1; kk++) {
-						y = (mt[kk] & UPPER_MASK) | (mt[kk + 1] & LOWER_MASK);
-						mt[kk] = mt[kk + (M - N)] ^ (y >>> 1) ^ mag01[y & 0x1];
-					}
-					y = (mt[N - 1] & UPPER_MASK) | (mt[0] & LOWER_MASK);
-					mt[N - 1] = mt[M - 1] ^ (y >>> 1) ^ mag01[y & 0x1];
-
-					mti = 0;
-				}
-
-				y = mt[mti++];
-				y ^= y >>> 11;                          // TEMPERING_SHIFT_U(y)
-				y ^= (y << 7) & TEMPERING_MASK_B;       // TEMPERING_SHIFT_S(y)
-				y ^= (y << 15) & TEMPERING_MASK_C;      // TEMPERING_SHIFT_T(y)
-				y ^= (y >>> 18);                        // TEMPERING_SHIFT_L(y)
-
-				if (mti >= N)   // generate N words at one time
-				{
-					int kk;
-
-					for (kk = 0; kk < N - M; kk++) {
-						z = (mt[kk] & UPPER_MASK) | (mt[kk + 1] & LOWER_MASK);
-						mt[kk] = mt[kk + M] ^ (z >>> 1) ^ mag01[z & 0x1];
-					}
-					for (; kk < N - 1; kk++) {
-						z = (mt[kk] & UPPER_MASK) | (mt[kk + 1] & LOWER_MASK);
-						mt[kk] = mt[kk + (M - N)] ^ (z >>> 1) ^ mag01[z & 0x1];
-					}
-					z = (mt[N - 1] & UPPER_MASK) | (mt[0] & LOWER_MASK);
-					mt[N - 1] = mt[M - 1] ^ (z >>> 1) ^ mag01[z & 0x1];
-
-					mti = 0;
-				}
-
-				z = mt[mti++];
-				z ^= z >>> 11;                          // TEMPERING_SHIFT_U(z)
-				z ^= (z << 7) & TEMPERING_MASK_B;       // TEMPERING_SHIFT_S(z)
-				z ^= (z << 15) & TEMPERING_MASK_C;      // TEMPERING_SHIFT_T(z)
-				z ^= (z >>> 18);                        // TEMPERING_SHIFT_L(z)
-
-				if (mti >= N)   // generate N words at one time
-				{
-					int kk;
-
-					for (kk = 0; kk < N - M; kk++) {
-						a = (mt[kk] & UPPER_MASK) | (mt[kk + 1] & LOWER_MASK);
-						mt[kk] = mt[kk + M] ^ (a >>> 1) ^ mag01[a & 0x1];
-					}
-					for (; kk < N - 1; kk++) {
-						a = (mt[kk] & UPPER_MASK) | (mt[kk + 1] & LOWER_MASK);
-						mt[kk] = mt[kk + (M - N)] ^ (a >>> 1) ^ mag01[a & 0x1];
-					}
-					a = (mt[N - 1] & UPPER_MASK) | (mt[0] & LOWER_MASK);
-					mt[N - 1] = mt[M - 1] ^ (a >>> 1) ^ mag01[a & 0x1];
-
-					mti = 0;
-				}
-
-				a = mt[mti++];
-				a ^= a >>> 11;                          // TEMPERING_SHIFT_U(a)
-				a ^= (a << 7) & TEMPERING_MASK_B;       // TEMPERING_SHIFT_S(a)
-				a ^= (a << 15) & TEMPERING_MASK_C;      // TEMPERING_SHIFT_T(a)
-				a ^= (a >>> 18);                        // TEMPERING_SHIFT_L(a)
-
-				if (mti >= N)   // generate N words at one time
-				{
-					int kk;
-
-					for (kk = 0; kk < N - M; kk++) {
-						b = (mt[kk] & UPPER_MASK) | (mt[kk + 1] & LOWER_MASK);
-						mt[kk] = mt[kk + M] ^ (b >>> 1) ^ mag01[b & 0x1];
-					}
-					for (; kk < N - 1; kk++) {
-						b = (mt[kk] & UPPER_MASK) | (mt[kk + 1] & LOWER_MASK);
-						mt[kk] = mt[kk + (M - N)] ^ (b >>> 1) ^ mag01[b & 0x1];
-					}
-					b = (mt[N - 1] & UPPER_MASK) | (mt[0] & LOWER_MASK);
-					mt[N - 1] = mt[M - 1] ^ (b >>> 1) ^ mag01[b & 0x1];
-
-					mti = 0;
-				}
-
-				b = mt[mti++];
-				b ^= b >>> 11;                          // TEMPERING_SHIFT_U(b)
-				b ^= (b << 7) & TEMPERING_MASK_B;       // TEMPERING_SHIFT_S(b)
-				b ^= (b << 15) & TEMPERING_MASK_C;      // TEMPERING_SHIFT_T(b)
-				b ^= (b >>> 18);                        // TEMPERING_SHIFT_L(b)
-
 				/* derived from nextDouble documentation in jdk 1.2 docs, see top */
-				v1 = 2 *
-						(((((long) (y >>> 6)) << 27) + (z >>> 5)) / (double) (1L << 53))
-						- 1;
-				v2 = 2 * (((((long) (a >>> 6)) << 27) + (b >>> 5)) / (double) (1L << 53))
-						- 1;
+				v1 = 2.0 * nextDouble() - 1;
+				v2 = 2.0 * nextDouble() - 1;
 				s = v1 * v1 + v2 * v2;
 			} while (s >= 1);
+
 			double multiplier = Math.sqrt(-2 * Math.log(s) / s);
 			nextNextGaussian = v2 * multiplier;
 			haveNextNextGaussian = true;
@@ -603,31 +280,7 @@ class MersenneTwisterFast implements Serializable {
 	}
 
 	public final float nextFloat() {
-		int y;
-
-		if (mti >= N)   // generate N words at one time
-		{
-			int kk;
-
-			for (kk = 0; kk < N - M; kk++) {
-				y = (mt[kk] & UPPER_MASK) | (mt[kk + 1] & LOWER_MASK);
-				mt[kk] = mt[kk + M] ^ (y >>> 1) ^ mag01[y & 0x1];
-			}
-			for (; kk < N - 1; kk++) {
-				y = (mt[kk] & UPPER_MASK) | (mt[kk + 1] & LOWER_MASK);
-				mt[kk] = mt[kk + (M - N)] ^ (y >>> 1) ^ mag01[y & 0x1];
-			}
-			y = (mt[N - 1] & UPPER_MASK) | (mt[0] & LOWER_MASK);
-			mt[N - 1] = mt[M - 1] ^ (y >>> 1) ^ mag01[y & 0x1];
-
-			mti = 0;
-		}
-
-		y = mt[mti++];
-		y ^= y >>> 11;                          // TEMPERING_SHIFT_U(y)
-		y ^= (y << 7) & TEMPERING_MASK_B;       // TEMPERING_SHIFT_S(y)
-		y ^= (y << 15) & TEMPERING_MASK_C;      // TEMPERING_SHIFT_T(y)
-		y ^= (y >>> 18);                        // TEMPERING_SHIFT_L(y)
+		int y = nextInt();
 
 		return (y >>> 8) / ((float) (1 << 24));
 	}
@@ -643,62 +296,14 @@ class MersenneTwisterFast implements Serializable {
 
 		if ((n & -n) == n)  // i.e., n is a power of 2
 		{
-			int y;
-
-			if (mti >= N)   // generate N words at one time
-			{
-				int kk;
-
-				for (kk = 0; kk < N - M; kk++) {
-					y = (mt[kk] & UPPER_MASK) | (mt[kk + 1] & LOWER_MASK);
-					mt[kk] = mt[kk + M] ^ (y >>> 1) ^ mag01[y & 0x1];
-				}
-				for (; kk < N - 1; kk++) {
-					y = (mt[kk] & UPPER_MASK) | (mt[kk + 1] & LOWER_MASK);
-					mt[kk] = mt[kk + (M - N)] ^ (y >>> 1) ^ mag01[y & 0x1];
-				}
-				y = (mt[N - 1] & UPPER_MASK) | (mt[0] & LOWER_MASK);
-				mt[N - 1] = mt[M - 1] ^ (y >>> 1) ^ mag01[y & 0x1];
-
-				mti = 0;
-			}
-
-			y = mt[mti++];
-			y ^= y >>> 11;                          // TEMPERING_SHIFT_U(y)
-			y ^= (y << 7) & TEMPERING_MASK_B;       // TEMPERING_SHIFT_S(y)
-			y ^= (y << 15) & TEMPERING_MASK_C;      // TEMPERING_SHIFT_T(y)
-			y ^= (y >>> 18);                        // TEMPERING_SHIFT_L(y)
+			int y = nextInt();
 
 			return (int) ((n * (long) (y >>> 1)) >> 31);
 		}
 
 		int bits, val;
 		do {
-			int y;
-
-			if (mti >= N)   // generate N words at one time
-			{
-				int kk;
-
-				for (kk = 0; kk < N - M; kk++) {
-					y = (mt[kk] & UPPER_MASK) | (mt[kk + 1] & LOWER_MASK);
-					mt[kk] = mt[kk + M] ^ (y >>> 1) ^ mag01[y & 0x1];
-				}
-				for (; kk < N - 1; kk++) {
-					y = (mt[kk] & UPPER_MASK) | (mt[kk + 1] & LOWER_MASK);
-					mt[kk] = mt[kk + (M - N)] ^ (y >>> 1) ^ mag01[y & 0x1];
-				}
-				y = (mt[N - 1] & UPPER_MASK) | (mt[0] & LOWER_MASK);
-				mt[N - 1] = mt[M - 1] ^ (y >>> 1) ^ mag01[y & 0x1];
-
-				mti = 0;
-			}
-
-			y = mt[mti++];
-			y ^= y >>> 11;                          // TEMPERING_SHIFT_U(y)
-			y ^= (y << 7) & TEMPERING_MASK_B;       // TEMPERING_SHIFT_S(y)
-			y ^= (y << 15) & TEMPERING_MASK_C;      // TEMPERING_SHIFT_T(y)
-			y ^= (y >>> 18);                        // TEMPERING_SHIFT_L(y)
+			int y = nextInt();
 
 			bits = (y >>> 1);
 			val = bits % n;
@@ -925,5 +530,17 @@ class MersenneTwisterFast implements Serializable {
 		}
 	}
 
+	public int[] getRandomState() {
+		int[] state = new int[mt.length + 1];
+		state[0] = mti;
+		System.arraycopy(mt, 0, state, 1, mt.length);
+
+		return state;
+	}
+
+	public void setRandomState(int[] rngState) {
+		mti = rngState[0];
+		System.arraycopy(rngState, 1, mt, 0, mt.length);
+	}
 
 }
diff --git a/src/dr/math/distributions/CompoundGaussianProcess.java b/src/dr/math/distributions/CompoundGaussianProcess.java
new file mode 100644
index 0000000..ec94247
--- /dev/null
+++ b/src/dr/math/distributions/CompoundGaussianProcess.java
@@ -0,0 +1,99 @@
+/*
+ * CompoundGaussianProcess.java
+ *
+ * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
+ *
+ * This file is part of BEAST.
+ * See the NOTICE file distributed with this work for additional
+ * information regarding copyright ownership and licensing.
+ *
+ * BEAST is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+ *
+ *  BEAST is distributed in the hope that it will be useful,
+ *  but WITHOUT ANY WARRANTY; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ *  GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with BEAST; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA  02110-1301  USA
+ */
+
+package dr.math.distributions;
+
+import dr.inference.distribution.DistributionLikelihood;
+import dr.inference.model.CompoundLikelihood;
+import dr.inference.model.Likelihood;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * @author Marc A. Suchard
+ */
+
+public class CompoundGaussianProcess implements GaussianProcessRandomGenerator {
+
+    private final List<GaussianProcessRandomGenerator> gpList;
+    private final List<Integer> copyList;
+    private final List<Likelihood> likelihoodList;
+    private final CompoundLikelihood compoundLikelihood;
+
+    public CompoundGaussianProcess(List<GaussianProcessRandomGenerator> gpList, List<Likelihood> likelihoodList,
+                                   List<Integer> copyList) {
+        this.gpList = gpList;
+        this.copyList = copyList;
+        this.likelihoodList = likelihoodList;
+        compoundLikelihood = new CompoundLikelihood(likelihoodList);
+    }
+
+    public boolean contains(Likelihood likelihood) {
+        return likelihoodList.contains(likelihood);
+    }
+
+    @Override
+    public Likelihood getLikelihood() { return compoundLikelihood; }
+
+    @Override
+    public Object nextRandom() {
+
+        int size = 0;
+        List<double[]> randomList = new ArrayList<double[]>();
+        int index = 0;
+        for (GaussianProcessRandomGenerator gp : gpList) {
+            final int copies = copyList.get(index);
+            if (likelihoodList.get(index) instanceof DistributionLikelihood) { // Univariate
+                double[] vector = new double[copies];
+                for (int i = 0; i < copies; ++i) {
+                    vector[i] = (Double) gp.nextRandom();
+                }
+                randomList.add(vector);
+                size += vector.length;
+            } else {
+                for (int i = 0; i < copyList.get(index); ++i) {
+                    double[] vector = (double[]) gp.nextRandom();
+                    randomList.add(vector);
+                    size += vector.length;
+                }
+            }
+            ++index;
+        }
+
+        double[] result = new double[size];
+        int offset = 0;
+        for (double[] vector : randomList) {
+            System.arraycopy(vector, 0, result, offset, vector.length);
+            offset += vector.length;
+        }
+        return result;
+    }
+
+    @Override
+    public double logPdf(Object x) {
+        throw new RuntimeException("Not yet implemented");
+    }
+}
diff --git a/src/dr/math/distributions/DirichletDistribution.java b/src/dr/math/distributions/DirichletDistribution.java
index dfbfe5e..9477e66 100644
--- a/src/dr/math/distributions/DirichletDistribution.java
+++ b/src/dr/math/distributions/DirichletDistribution.java
@@ -1,7 +1,7 @@
 /*
  * DirichletDistribution.java
  *
- * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
+ * Copyright (c) 2002-2016 Alexei Drummond, Andrew Rambaut and Marc Suchard
  *
  * This file is part of BEAST.
  * See the NOTICE file distributed with this work for additional
@@ -29,19 +29,30 @@ import dr.math.GammaFunction;
 
 /**
  * @author Marc A. Suchard
+ * @author Guy Baele
  */
 public class DirichletDistribution implements MultivariateDistribution {
 
     public static final String TYPE = "dirichletDistribution";
+    public static final boolean DEBUG = false;
 
     private double[] counts;
     private double countSum = 0.0;
+    private double countParameterSum;
     private int dim;
 
+    private boolean sumToNumberOfElements;
+
     private double logNormalizingConstant;
 
-    public DirichletDistribution(double[] counts) {
+    public DirichletDistribution(double[] counts, boolean sumToNumberOfElements) {
         this.counts = counts;
+        this.sumToNumberOfElements = sumToNumberOfElements;
+        if (this.sumToNumberOfElements) {
+            countParameterSum = (double)counts.length;
+        } else {
+            countParameterSum = 1.0;
+        }
         dim = counts.length;
         for (int i = 0; i < dim; i++)
             countSum += counts[i];
@@ -51,8 +62,10 @@ public class DirichletDistribution implements MultivariateDistribution {
 
     private void computeNormalizingConstant() {
         logNormalizingConstant = GammaFunction.lnGamma(countSum);
-        for (int i = 0; i < dim; i++)
+        for (int i = 0; i < dim; i++) {
             logNormalizingConstant -= GammaFunction.lnGamma(counts[i]);
+        }
+        logNormalizingConstant -= dim * Math.log(countParameterSum);
     }
 
 
@@ -63,13 +76,28 @@ public class DirichletDistribution implements MultivariateDistribution {
         }
 
         double logPDF = logNormalizingConstant;
+        double parameterSum = 0.0;
         for (int i = 0; i < dim; i++) {
-            logPDF += (counts[i] - 1) * Math.log(x[i]);
-            if (x[i] <= 0.0 || x[i] >= 1.0) {
+            logPDF += (counts[i] - 1) * (Math.log(x[i]) - Math.log(countParameterSum));
+            parameterSum += x[i];
+            if ((!sumToNumberOfElements && x[i] >= 1.0) || x[i] <= 0.0) {
+                if (DEBUG) {
+                    System.err.println("Invalid parameter value");
+                }
                 logPDF = Double.NEGATIVE_INFINITY;
                 break;
             }
-        }       
+        }
+        if (parameterSum != countParameterSum) {
+            if (DEBUG) {
+                System.err.println("Parameters do not sum to " + countParameterSum);
+                for (int i = 0; i < dim; i++) {
+                    System.err.println("x[" + i + "] = " + x[i]);
+                }
+                System.err.println("Current sum = " + parameterSum);
+            }
+            logPDF = Double.NEGATIVE_INFINITY;
+        }
         return logPDF;
     }
 
@@ -87,4 +115,37 @@ public class DirichletDistribution implements MultivariateDistribution {
     public String getType() {
         return TYPE;
     }
+
+    public static void main(String[] args) {
+
+        //Test Dirichlet distribution for the standard n-simplex
+        System.out.println("Test Dirichlet distribution for the standard n-simplex");
+        //R: log(ddirichlet(c(0.5,0.2,0.3),c(1,2,3))) = 0.07696104
+        double[] counts = new double[3];
+        counts[0] = 1.0;
+        counts[1] = 2.0;
+        counts[2] = 3.0;
+        DirichletDistribution dd = new DirichletDistribution(counts, false);
+        double[] parameterValues = new double[3];
+        parameterValues[0] = 0.5;
+        parameterValues[1] = 0.2;
+        parameterValues[2] = 0.3;
+        System.out.println(dd.logPdf(parameterValues));
+
+        //Test Scaled Dirichlet distribution
+        System.out.println("Test Scaled Dirichlet distribution");
+        //R: log(ddirichlet(c(1.5,0.6,0.9)/3,c(1,2,3))/(3^3)) = -3.218876
+        dd = new DirichletDistribution(counts, true);
+        parameterValues[0] = 1.5;
+        parameterValues[1] = 0.6;
+        parameterValues[2] = 0.9;
+        System.out.println(dd.logPdf(parameterValues));
+
+        parameterValues[0] = 1.0;
+        parameterValues[1] = 1.0;
+        parameterValues[2] = 1.0;
+        System.out.println(dd.logPdf(parameterValues));
+
+    }
+
 }
diff --git a/src/dr/math/distributions/GammaDistribution.java b/src/dr/math/distributions/GammaDistribution.java
index c7171fb..12c9486 100644
--- a/src/dr/math/distributions/GammaDistribution.java
+++ b/src/dr/math/distributions/GammaDistribution.java
@@ -161,6 +161,13 @@ public class GammaDistribution implements Distribution {
                 return 0.0;
         }
 
+        if (shape == 0.0)  // uninformative
+            return 1.0 / x;
+
+        if (shape == -0.5) { // Gelman 2008, hierarchical variance, -1 degrees of freedom
+            return Math.sqrt(x);
+        }
+
         final double xs = x / scale;
 
         if (shape == 1.0) {
diff --git a/src/dr/math/distributions/InverseGaussianDistribution.java b/src/dr/math/distributions/InverseGaussianDistribution.java
index 046c26c..75810c4 100644
--- a/src/dr/math/distributions/InverseGaussianDistribution.java
+++ b/src/dr/math/distributions/InverseGaussianDistribution.java
@@ -220,7 +220,8 @@ public class InverseGaussianDistribution implements Distribution {
      */
     public static double quantile(double z, double m, double shape) {
         if(z < 0.01 || z > 0.99) {
-            throw new RuntimeException("Quantile is too low/high to calculate (numerical estimation for extreme values is incomplete");
+	    System.err.print("Quantile is " + z);
+            throw new RuntimeException("Quantile is too low/high to calculate (numerical estimation for extreme values is incomplete)");
         }
 
         /* Approximation method used by Mudholkar GS, Natarajan R (1999)
diff --git a/src/dr/math/distributions/MultivariateKDEDistribution.java b/src/dr/math/distributions/MultivariateKDEDistribution.java
index 018d526..5b07d45 100644
--- a/src/dr/math/distributions/MultivariateKDEDistribution.java
+++ b/src/dr/math/distributions/MultivariateKDEDistribution.java
@@ -31,7 +31,7 @@ package dr.math.distributions;
 public class MultivariateKDEDistribution implements MultivariateDistribution {
 	
 	public static final String TYPE = "multivariateKDE";
-    public static final boolean DEBUG = true;
+    public static final boolean DEBUG = false;
 	
 	private Distribution[] multivariateKDE;
 	private int dimension;
@@ -78,9 +78,10 @@ public class MultivariateKDEDistribution implements MultivariateDistribution {
 		}
 
         if (DEBUG){
+            System.err.println("MultivariateKDEDistribution, dimension = " + dimension);
             for (int i = 0; i < dimension; i++) {
                 System.err.println(i + ", " + "x[i] = " + x[i] + ", logPdf = " + multivariateKDE[i].logPdf(x[i]));
-                System.err.println("    mean = " + multivariateKDE[i].mean() + ", variance = " + multivariateKDE[i].variance());
+                //System.err.println("    mean = " + multivariateKDE[i].mean() + ", variance = " + multivariateKDE[i].variance());
             }
         }
 		
diff --git a/src/dr/math/distributions/MultivariateNormalDistribution.java b/src/dr/math/distributions/MultivariateNormalDistribution.java
index ceefe83..9a06851 100644
--- a/src/dr/math/distributions/MultivariateNormalDistribution.java
+++ b/src/dr/math/distributions/MultivariateNormalDistribution.java
@@ -42,9 +42,26 @@ public class MultivariateNormalDistribution implements MultivariateDistribution,
     private double[][] cholesky = null;
     private Double logDet = null;
 
+    private final boolean hasSinglePrecision;
+    private final double singlePrecision;
+
     public MultivariateNormalDistribution(double[] mean, double[][] precision) {
         this.mean = mean;
         this.precision = precision;
+        this.hasSinglePrecision = false;
+        this.singlePrecision = 1.0;
+    }
+
+    public MultivariateNormalDistribution(double[] mean, double singlePrecision) {
+        this.mean = mean;
+        this.hasSinglePrecision = true;
+        this.singlePrecision = singlePrecision;
+
+        final int dim = mean.length;
+        this.precision = new double[dim][dim];
+        for (int i = 0; i < dim; ++i) {
+            this.precision[i][i] = singlePrecision;
+        }
     }
 
     public String getType() {
@@ -133,7 +150,11 @@ public class MultivariateNormalDistribution implements MultivariateDistribution,
     }
 
     public double logPdf(double[] x) {
-        return logPdf(x, mean, precision, getLogDet(), 1.0);
+        if (hasSinglePrecision) {
+            return logPdf(x, mean, singlePrecision, 1.0);
+        } else {
+            return logPdf(x, mean, precision, getLogDet(), 1.0);
+        }
     }
 
     // scale only modifies precision
@@ -219,7 +240,6 @@ public class MultivariateNormalDistribution implements MultivariateDistribution,
         return result;
     }
 
-
     public static void nextMultivariateNormalCholesky(double[] mean, double[][] cholesky, double sqrtScale, double[] result) {
 
         final int dim = mean.length;
@@ -238,6 +258,25 @@ public class MultivariateNormalDistribution implements MultivariateDistribution,
         }
     }
 
+    public static void nextMultivariateNormalCholesky(final double[] mean, final int meanOffset, final double[][] cholesky,
+                                                      final double sqrtScale, final double[] result, final int resultOffset,
+                                                      final double[] epsilon) {
+
+        final int dim = epsilon.length;
+
+        System.arraycopy(mean, meanOffset, result, resultOffset, dim);
+
+        for (int i = 0; i < dim; i++)
+            epsilon[i] = MathUtils.nextGaussian() * sqrtScale;
+
+        for (int i = 0; i < dim; i++) {
+            for (int j = 0; j <= i; j++) {
+                result[resultOffset + i] += cholesky[i][j] * epsilon[j];
+                // caution: decomposition returns lower triangular
+            }
+        }
+    }
+
     // TODO should be a junit test
     public static void main(String[] args) {
         testPdf();
diff --git a/src/dr/math/distributions/MultivariatePolyaDistributionLikelihood.java b/src/dr/math/distributions/MultivariatePolyaDistributionLikelihood.java
index 481d776..b2cbbde 100644
--- a/src/dr/math/distributions/MultivariatePolyaDistributionLikelihood.java
+++ b/src/dr/math/distributions/MultivariatePolyaDistributionLikelihood.java
@@ -31,6 +31,10 @@ import dr.inference.model.*;
 import dr.math.GammaFunction;
 import dr.xml.*;
 
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.Set;
+
 /**
  * Package: MultivariatePolyaDistributionLikelihood
  * Description:
@@ -238,6 +242,11 @@ public class MultivariatePolyaDistributionLikelihood extends AbstractModel imple
         return false;
     }
 
+    @Override
+    public Set<Likelihood> getLikelihoodSet() {
+        return new HashSet<Likelihood>(Arrays.asList(this));
+    }
+
     public void setUsed() {
     }
 
diff --git a/src/dr/math/distributions/RandomGenerator.java b/src/dr/math/distributions/RandomGenerator.java
index 0844fb0..c302ca4 100644
--- a/src/dr/math/distributions/RandomGenerator.java
+++ b/src/dr/math/distributions/RandomGenerator.java
@@ -30,8 +30,8 @@ package dr.math.distributions;
  */
 public interface RandomGenerator {
 
-    public Object nextRandom();
+    Object nextRandom();
 
-    public double logPdf(Object x);
+    double logPdf(Object x);
 
 }
diff --git a/src/dr/util/NumberFormatter.java b/src/dr/util/NumberFormatter.java
index f7037e6..1152cb7 100644
--- a/src/dr/util/NumberFormatter.java
+++ b/src/dr/util/NumberFormatter.java
@@ -63,7 +63,7 @@ public class NumberFormatter implements Serializable {
 
     public void setSignificantFigures(int sf) {
         this.sf = sf;
-        upperCutoff = Math.pow(10,sf-1);
+        upperCutoff = Math.pow(10, Math.max(sf-1, 0));
         cutoffTable = new double[sf];
         long num = 10;
         for (int i =0; i < cutoffTable.length; i++) {
@@ -71,8 +71,8 @@ public class NumberFormatter implements Serializable {
             num *= 10;
         }
         decimalFormat.setMinimumIntegerDigits(1);
-        decimalFormat.setMaximumFractionDigits(sf-1);
-        decimalFormat.setMinimumFractionDigits(sf-1);
+        decimalFormat.setMaximumFractionDigits(Math.max(sf-1, 0));
+        decimalFormat.setMinimumFractionDigits(Math.max(sf-1, 0));
         decimalFormat.setGroupingUsed(false);
         scientificFormat = new DecimalFormat(getScientificPattern(sf));
         fieldWidth = sf;
@@ -123,7 +123,7 @@ public class NumberFormatter implements Serializable {
 
         double absValue = Math.abs(value);
 
-        if ((absValue > upperCutoff) || (absValue < 0.1 && absValue != 0.0)) {
+        if ((absValue > upperCutoff) || (absValue < (1.0 / upperCutoff) && absValue != 0.0)) {
             buffer.append(scientificFormat.format(value));
         } else {
             int numFractionDigits = 0;
diff --git a/src/dr/util/Transform.java b/src/dr/util/Transform.java
index 8357b53..e9cecd1 100644
--- a/src/dr/util/Transform.java
+++ b/src/dr/util/Transform.java
@@ -1,7 +1,7 @@
 /*
  * Transform.java
  *
- * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
+ * Copyright (c) 2002-2016 Alexei Drummond, Andrew Rambaut and Marc Suchard
  *
  * This file is part of BEAST.
  * See the NOTICE file distributed with this work for additional
@@ -47,12 +47,30 @@ public interface Transform {
     double transform(double value);
 
     /**
+     * overloaded transformation that takes and returns an array of doubles
+     * @param values evaluation points
+     * @param from start transformation at this index
+     * @param to end transformation at this index
+     * @return the transformed values
+     */
+    double[] transform(double[] values, int from, int to);
+
+    /**
      * @param value evaluation point
      * @return the inverse transformed value
      */
     double inverse(double value);
 
     /**
+     * overloaded transformation that takes and returns an array of doubles
+     * @param values evaluation points
+     * @param from start transformation at this index
+     * @param to end transformation at this index
+     * @return the transformed values
+     */
+    double[] inverse(double[] values, int from, int to);
+
+    /**
      * @return the transform's name
      */
     String getTransformName();
@@ -61,9 +79,16 @@ public interface Transform {
      * @param value evaluation point
      * @return the log of the transform's jacobian
      */
-
     public double getLogJacobian(double value);
 
+    /**
+     * @param values evaluation points
+     * @param from start calculation at this index
+     * @param to end calculation at this index
+     * @return the log of the transform's jacobian
+     */
+    public double getLogJacobian(double[] values, int from, int to);
+
 
     public static class LogTransform implements Transform, Citable {
 
@@ -74,10 +99,18 @@ public interface Transform {
             return Math.log(value);
         }
 
+        public double[] transform(double[] values, int from, int to) {
+            throw new RuntimeException("Transformation not permitted for this type of parameter, exiting ...");
+        }
+
         public double inverse(double value) {
             return Math.exp(value);
         }
 
+        public double[] inverse(double[] values, int from, int to) {
+            throw new RuntimeException("Transformation not permitted for this type of parameter, exiting ...");
+        }
+
         public String getTransformName() {
             return "log";
         }
@@ -86,6 +119,10 @@ public interface Transform {
             return -Math.log(value);
         }
 
+        public double getLogJacobian(double[] values, int from, int to) {
+            throw new RuntimeException("Transformation not permitted for this type of parameter, exiting ...");
+        }
+
         public List<Citation> getCitations() {
             List<Citation> citations = new ArrayList<Citation>();
             citations.add(new Citation(
@@ -100,6 +137,118 @@ public interface Transform {
         }
     }
 
+    public static class LogConstrainedSumTransform implements Transform, Citable {
+
+        public LogConstrainedSumTransform() {
+        }
+
+        public double transform(double value) {
+            throw new RuntimeException("Transformation not permitted for this type of parameter, exiting ...");
+        }
+
+        public double[] transform(double[] values, int from, int to) {
+            double[] transformedValues = new double[to - from + 1];
+            int counter = 0;
+            for (int i = from; i <= to; i++) {
+                transformedValues[counter] = Math.log(values[i]);
+                counter++;
+            }
+            return transformedValues;
+        }
+
+        public double inverse(double value) {
+            throw new RuntimeException("Transformation not permitted for this type of parameter, exiting ...");
+        }
+
+        //inverse transformation assumes a sum of elements equal to the number of elements
+        public double[] inverse(double[] values, int from, int to) {
+            double sum = (double)(to - from + 1);
+            double[] transformedValues = new double[to - from + 1];
+            int counter = 0;
+            double newSum = 0.0;
+            for (int i = from; i <= to; i++) {
+                transformedValues[counter] = Math.exp(values[i]);
+                newSum += transformedValues[counter];
+                counter++;
+            }
+            for (int i = 0; i < sum; i++) {
+                transformedValues[i] = (transformedValues[i] / newSum) * sum;
+            }
+            return transformedValues;
+        }
+
+        public String getTransformName() {
+            return "logConstrainedSum";
+        }
+
+        public double getLogJacobian(double value) {
+            throw new RuntimeException("Transformation not permitted for this type of parameter, exiting ...");
+        }
+
+        public double getLogJacobian(double[] values, int from, int to) {
+            double sum = 0.0;
+            for (int i = from; i <= to; i++) {
+                sum -= Math.log(values[i]);
+            }
+            return sum;
+        }
+
+        public List<Citation> getCitations() {
+            List<Citation> citations = new ArrayList<Citation>();
+            citations.add(new Citation(
+                    new Author[]{
+                            new Author("MA", "Suchard"),
+                            new Author("G", "Baele"),
+                            new Author("P", "Lemey"),
+                    },
+                    Citation.Status.IN_PREPARATION
+            ));
+            return citations;
+        }
+
+        public static void main(String[] args) {
+
+            //specify starting values
+            double[] startValues = {1.5, 0.6, 0.9};
+            System.err.print("Starting values: ");
+            double startSum = 0.0;
+            for (int i = 0; i < startValues.length; i++) {
+                System.err.print(startValues[i] + " ");
+                startSum += startValues[i];
+            }
+            System.err.println("\nSum = " + startSum);
+
+            //perform transformation
+            double[] transformedValues = LOG_CONSTRAINED_SUM.transform(startValues, 0, startValues.length-1);
+            System.err.print("Transformed values: ");
+            for (int i = 0; i < transformedValues.length; i++) {
+                System.err.print(transformedValues[i] + " ");
+            }
+            System.err.println();
+
+            //add draw for normal distribution to transformed elements
+            for (int i = 0; i < transformedValues.length; i++) {
+                transformedValues[i] += 0.20 * Math.random();
+            }
+
+            //perform inverse transformation
+            transformedValues = LOG_CONSTRAINED_SUM.inverse(transformedValues, 0, transformedValues.length-1);
+            System.err.print("New values: ");
+            double endSum = 0.0;
+            for (int i = 0; i < transformedValues.length; i++) {
+                System.err.print(transformedValues[i] + " ");
+                endSum += transformedValues[i];
+            }
+            System.err.println("\nSum = " + endSum);
+
+            if (startSum != endSum) {
+                System.err.println("Starting and ending constraints differ!");
+            }
+
+        }
+
+    }
+
     public static class LogitTransform implements Transform {
 
         public LogitTransform() {
@@ -109,10 +258,18 @@ public interface Transform {
             return Math.log(value / (1.0 - value));
         }
 
+        public double[] transform(double[] values, int from, int to) {
+            throw new RuntimeException("Transformation not permitted for this type of parameter, exiting ...");
+        }
+
         public double inverse(double value) {
             return 1.0 / (1.0 + Math.exp(-value));
         }
 
+        public double[] inverse(double[] values, int from, int to) {
+            throw new RuntimeException("Transformation not permitted for this type of parameter, exiting ...");
+        }
+
         public String getTransformName() {
             return "logit";
         }
@@ -120,6 +277,11 @@ public interface Transform {
         public double getLogJacobian(double value) {
             return -Math.log(1.0 - value) - Math.log(value);
         }
+
+        public double getLogJacobian(double[] values, int from, int to) {
+            throw new RuntimeException("Transformation not permitted for this type of parameter, exiting ...");
+        }
+
     }
 
     public static class FisherZTransform implements Transform {
@@ -131,10 +293,18 @@ public interface Transform {
             return 0.5 * (Math.log(1.0 + value) - Math.log(1.0 - value));
         }
 
+        public double[] transform(double[] values, int from, int to) {
+            throw new RuntimeException("Transformation not permitted for this type of parameter, exiting ...");
+        }
+
         public double inverse(double value) {
             return (Math.exp(2 * value) - 1) / (Math.exp(2 * value) + 1);
         }
 
+        public double[] inverse(double[] values, int from, int to) {
+            throw new RuntimeException("Transformation not permitted for this type of parameter, exiting ...");
+        }
+
         public String getTransformName() {
             return "fisherz";
         }
@@ -142,6 +312,11 @@ public interface Transform {
         public double getLogJacobian(double value) {
             return -Math.log(1 - value) - Math.log(1 + value);
         }
+
+        public double getLogJacobian(double[] values, int from, int to) {
+            throw new RuntimeException("Transformation not permitted for this type of parameter, exiting ...");
+        }
+
     }
 
     public static class NoTransform implements Transform {
@@ -153,10 +328,18 @@ public interface Transform {
             return value;
         }
 
+        public double[] transform(double[] values, int from, int to) {
+            throw new RuntimeException("Transformation not permitted for this type of parameter, exiting ...");
+        }
+
         public double inverse(double value) {
             return value;
         }
 
+        public double[] inverse(double[] values, int from, int to) {
+            throw new RuntimeException("Transformation not permitted for this type of parameter, exiting ...");
+        }
+
         public String getTransformName() {
             return "none";
         }
@@ -164,6 +347,11 @@ public interface Transform {
         public double getLogJacobian(double value) {
             return 0.0;
         }
+
+        public double getLogJacobian(double[] values, int from, int to) {
+            throw new RuntimeException("Transformation not permitted for this type of parameter, exiting ...");
+        }
+
     }
 
     public class ParsedTransform {
@@ -179,8 +367,10 @@ public interface Transform {
 
             Transform thisTransform = Transform.NONE;
             String name = (String) xo.getAttribute(TYPE);
+            System.err.println("name: " + name);
             for (Transform type : Transform.transformList) {
                 if (name.equals(type.getTransformName())) {
+                    System.err.println(name + " --- " + type.getTransformName());
                     thisTransform = type;
                     break;
                 }
@@ -232,12 +422,14 @@ public interface Transform {
     public static final LogitTransform LOGIT = new LogitTransform();
     public static final NoTransform NONE = new NoTransform();
     public static final FisherZTransform FISHER_Z = new FisherZTransform();
+    public static final LogConstrainedSumTransform LOG_CONSTRAINED_SUM = new LogConstrainedSumTransform();
 
-    public static final Transform[] transformList = {LOG, LOGIT, NONE, FISHER_Z};
+    public static final Transform[] transformList = {LOG, LOG_CONSTRAINED_SUM, LOGIT, NONE, FISHER_Z};
 
     public static final String TRANSFORM = "transform";
     public static final String TYPE = "type";
     public static final String START = "start";
     public static final String END = "end";
     public static final String EVERY = "every";
+
 }
diff --git a/src/dr/xml/Report.java b/src/dr/xml/Report.java
index 4257fd5..b3f67c8 100644
--- a/src/dr/xml/Report.java
+++ b/src/dr/xml/Report.java
@@ -61,7 +61,7 @@ public class Report {
                 item = object.toString();
             }
             writer.print(item.trim());
-            writer.print(" ");
+            writer.print("\n");
         }
         writer.println();
         writer.flush();
diff --git a/src/test/dr/distibutions/InvGammaTest.java b/src/test/dr/distibutions/InvGammaTest.java
index a9de6bd..b4fabee 100644
--- a/src/test/dr/distibutions/InvGammaTest.java
+++ b/src/test/dr/distibutions/InvGammaTest.java
@@ -106,19 +106,19 @@ public class InvGammaTest extends TestCase {
                 assertEquals(d.cdf(cdf[k]), cdf[k + 1], 1e-10);
             }
 
-            int count[] = new int[cdf.length];
-            final int N = 100000;
-            for(int k = 0; k < N; ++k) {
-                double x = d.nextInverseGamma();
-                for(int l = 0; l < cdf.length; l += 2) {
-                    if( x < cdf[l] ) {
-                        count[l / 2] += 1;
-                    }
-                }
-            }
-            for(int l = 0; l < cdf.length; l += 2) {
-                assertEquals(count[l / 2] / (double) N, cdf[l + 1], 5e-3);
-            }
+//            int count[] = new int[cdf.length];
+//            final int N = 100000;
+//            for(int k = 0; k < N; ++k) {
+//                double x = d.nextInverseGamma();
+//                for(int l = 0; l < cdf.length; l += 2) {
+//                    if( x < cdf[l] ) {
+//                        count[l / 2] += 1;
+//                    }
+//                }
+//            }
+//            for(int l = 0; l < cdf.length; l += 2) {
+//                assertEquals(count[l / 2] / (double) N, cdf[l + 1], 5e-3);
+//            }
         }
     }
 }
diff --git a/src/test/dr/evomodel/branchratemodel/UncorrelatedRelaxedClockTest.java b/src/test/dr/evomodel/branchratemodel/UncorrelatedRelaxedClockTest.java
index 439799b..b442ee7 100644
--- a/src/test/dr/evomodel/branchratemodel/UncorrelatedRelaxedClockTest.java
+++ b/src/test/dr/evomodel/branchratemodel/UncorrelatedRelaxedClockTest.java
@@ -59,278 +59,278 @@ public class UncorrelatedRelaxedClockTest extends TraceCorrelationAssert {
         super(name);
     }
 
-    public void setUp() throws Exception {
-        super.setUp();
-
-        MathUtils.setSeed(666);
-
-        createAlignment(DENGUE4_TAXON_SEQUENCE, Nucleotides.INSTANCE);
-    }
-
-    public void testLogNormal() throws Exception {
-        meanParam = new Parameter.Default(LogNormalDistributionModelParser.MEAN, 2.3E-5, 0, 100.0);
-        stdevParam = new Parameter.Default(LogNormalDistributionModelParser.STDEV, 0.1, 0, 10.0);
-        ParametricDistributionModel distributionModel = new LogNormalDistributionModel(meanParam, stdevParam, 0.0, true, false); // meanInRealSpace="true"
-
-        ArrayTraceList traceList = UncorrelatedRelaxedClock(distributionModel);
-
-//        <expectation name="posterior" value="-3927.81"/>
-//        <expectation name="ucld.mean" value="8.28472E-4"/>
-//        <expectation name="ucld.stdev" value="0.17435"/>
-//        <expectation name="meanRate" value="8.09909E-4"/>
-//        <expectation name="coefficientOfVariation" value="0.15982"/>
-//        <expectation name="covariance" value="-3.81803E-2"/>
-//        <expectation name="constant.popSize" value="37.3524"/>
-//        <expectation name="hky.kappa" value="18.3053"/>
-//        <expectation name="treeModel.rootHeight" value="69.2953"/>
-//        <expectation name="treeLikelihood" value="-3855.78"/>
-//        <expectation name="skyline" value="-72.0313"/>   ???
-
-        TraceCorrelation likelihoodStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(CompoundLikelihoodParser.POSTERIOR));
-        assertExpectation(CompoundLikelihoodParser.POSTERIOR, likelihoodStats, -3927.81);
-
-        likelihoodStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(TreeLikelihoodParser.TREE_LIKELIHOOD));
-        assertExpectation(TreeLikelihoodParser.TREE_LIKELIHOOD, likelihoodStats, -3855.78);
-
-        TraceCorrelation treeHeightStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(TREE_HEIGHT));
-        assertExpectation(TREE_HEIGHT, treeHeightStats, 69.2953);
-
-        TraceCorrelation kappaStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(HKYParser.KAPPA));
-        assertExpectation(HKYParser.KAPPA, kappaStats, 18.06518);
-
-        TraceCorrelation ucldStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(LogNormalDistributionModelParser.MEAN));
-        assertExpectation(LogNormalDistributionModelParser.MEAN, ucldStats, 8.0591451486E-4);
-
-        ucldStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(LogNormalDistributionModelParser.STDEV));
-        assertExpectation(LogNormalDistributionModelParser.STDEV, ucldStats, 0.16846023066431434);
-
-        TraceCorrelation rateStats = traceList.getCorrelationStatistics(traceList.getTraceIndex("meanRate"));
-        assertExpectation("meanRate", rateStats, 8.010906E-4);
-
-        TraceCorrelation coefficientOfVariationStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(RateStatisticParser.COEFFICIENT_OF_VARIATION));
-        assertExpectation(RateStatisticParser.COEFFICIENT_OF_VARIATION, coefficientOfVariationStats, 0.15982);
-
-        TraceCorrelation covarianceStats = traceList.getCorrelationStatistics(traceList.getTraceIndex("covariance"));
-        assertExpectation("covariance", covarianceStats, -0.0260333026);
-
-        TraceCorrelation popStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(ConstantPopulationModelParser.POPULATION_SIZE));
-        assertExpectation(ConstantPopulationModelParser.POPULATION_SIZE, popStats, 37.3524);
-
-        TraceCorrelation coalescentStats = traceList.getCorrelationStatistics(traceList.getTraceIndex("coalescent"));
-        assertExpectation("coalescent", coalescentStats, -72.0313);
-    }
-
-    public void testExponential() throws Exception {
-        meanParam = new Parameter.Default(1.0);
-        meanParam.setId(DistributionModelParser.MEAN); 
-        stdevParam = null;
-        ParametricDistributionModel distributionModel = new ExponentialDistributionModel(meanParam); // offset = 0
-
-        ArrayTraceList traceList = UncorrelatedRelaxedClock(distributionModel);
-
-        TraceCorrelation likelihoodStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(CompoundLikelihoodParser.POSTERIOR));
-        assertExpectation(CompoundLikelihoodParser.POSTERIOR, likelihoodStats, -3958.7409);
-//        System.out.println("likelihoodStats = " + likelihoodStats.getMean());
-
-        likelihoodStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(TreeLikelihoodParser.TREE_LIKELIHOOD));
-        assertExpectation(TreeLikelihoodParser.TREE_LIKELIHOOD, likelihoodStats, -3885.26939);
-//        System.out.println("treelikelihoodStats = " + likelihoodStats.getMean());
-
-        TraceCorrelation treeHeightStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(TREE_HEIGHT));
-        assertExpectation(TREE_HEIGHT, treeHeightStats, 84.3529526);
-
-        TraceCorrelation kappaStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(HKYParser.KAPPA));
-        assertExpectation(HKYParser.KAPPA, kappaStats, 18.38065);
-
-        TraceCorrelation ucedStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(DistributionModelParser.MEAN));
-        assertExpectation(DistributionModelParser.MEAN, ucedStats, 0.0019344134887784579);
-//        System.out.println("ucedStats = " + ucedStats.getMean());
-
-        TraceCorrelation rateStats = traceList.getCorrelationStatistics(traceList.getTraceIndex("meanRate"));
-        assertExpectation("meanRate", rateStats, 0.0020538802366337084);
-//        System.out.println("rateStats = " + rateStats.getMean());
-
-        TraceCorrelation coefficientOfVariationStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(RateStatisticParser.COEFFICIENT_OF_VARIATION));
-        assertExpectation(RateStatisticParser.COEFFICIENT_OF_VARIATION, coefficientOfVariationStats, 0.773609960455);
-//        System.out.println("coefficientOfVariationStats = " + coefficientOfVariationStats.getMean());
-
-        TraceCorrelation covarianceStats = traceList.getCorrelationStatistics(traceList.getTraceIndex("covariance"));
-        assertExpectation("covariance", covarianceStats, -0.07042030641301375);
-//        System.out.println("covarianceStats = " + covarianceStats.getMean());
-
-        TraceCorrelation popStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(ConstantPopulationModelParser.POPULATION_SIZE));
-        assertExpectation(ConstantPopulationModelParser.POPULATION_SIZE, popStats, 43.4478);
-//        System.out.println("popStats = " + popStats.getMean());
-
-        TraceCorrelation coalescentStats = traceList.getCorrelationStatistics(traceList.getTraceIndex("coalescent"));
-        assertExpectation("coalescent", coalescentStats, -73.4715);
-//        System.out.println("coalescentStats = " + coalescentStats.getMean());
-    }
-
-    private ArrayTraceList UncorrelatedRelaxedClock(ParametricDistributionModel distributionModel) throws Exception {
-        Parameter popSize = new Parameter.Default(ConstantPopulationModelParser.POPULATION_SIZE, 380.0, 0, 38000.0);
-        ConstantPopulationModel constantModel = createRandomInitialTree(popSize);
-
-        CoalescentLikelihood coalescent = new CoalescentLikelihood(treeModel, null, new ArrayList<TaxonList>(), constantModel);
-        coalescent.setId("coalescent");
-
-        // clock model        
-        Parameter rateCategoryParameter = new Parameter.Default(32);
-        rateCategoryParameter.setId(DiscretizedBranchRates.BRANCH_RATES); 
-
-        DiscretizedBranchRates branchRateModel = new DiscretizedBranchRates(treeModel, rateCategoryParameter, 
-                distributionModel, 1, false, Double.NaN, false, false);
-
-        RateStatistic meanRate = new RateStatistic("meanRate", treeModel, branchRateModel, true, true, RateStatisticParser.MEAN);
-        RateStatistic coefficientOfVariation = new RateStatistic(RateStatisticParser.COEFFICIENT_OF_VARIATION, treeModel, branchRateModel,
-                true, true, RateStatisticParser.COEFFICIENT_OF_VARIATION);
-        RateCovarianceStatistic covariance = new RateCovarianceStatistic("covariance", treeModel, branchRateModel);
-
-        // Sub model
-        Parameter freqs = new Parameter.Default(alignment.getStateFrequencies());
-        Parameter kappa = new Parameter.Default(HKYParser.KAPPA, 1.0, 0, 100.0);
-
-        FrequencyModel f = new FrequencyModel(Nucleotides.INSTANCE, freqs);
-        HKY hky = new HKY(kappa, f);
-
-        //siteModel
-        GammaSiteModel siteModel = new GammaSiteModel(hky);
-        Parameter mu = new Parameter.Default(GammaSiteModelParser.MUTATION_RATE, 1.0, 0, Double.POSITIVE_INFINITY);
-        siteModel.setMutationRateParameter(mu);
-
-        //treeLikelihood
-        SitePatterns patterns = new SitePatterns(alignment, null, 0, -1, 1, true);
-
-        TreeLikelihood treeLikelihood = new TreeLikelihood(patterns, treeModel, siteModel, branchRateModel, null,
-                false, false, true, false, false);
-        treeLikelihood.setId(TreeLikelihoodParser.TREE_LIKELIHOOD);
-
-        // Operators
-        OperatorSchedule schedule = new SimpleOperatorSchedule();
-
-        MCMCOperator operator = new ScaleOperator(kappa, 0.75);
-        operator.setWeight(1.0);
-        schedule.addOperator(operator);
-
-        operator = new ScaleOperator(meanParam, 0.75);
-        operator.setWeight(3.0);
-        schedule.addOperator(operator);
-
-        if (stdevParam != null) {
-            operator = new ScaleOperator(stdevParam, 0.75);
-            operator.setWeight(3.0);
-            schedule.addOperator(operator);
-        }
-
-        Parameter allInternalHeights = treeModel.createNodeHeightsParameter(true, true, false);
-        operator = new UpDownOperator(new Scalable[]{new Scalable.Default(meanParam)},
-                new Scalable[] {new Scalable.Default(allInternalHeights)}, 0.75, 3.0, CoercionMode.COERCION_ON);
-        schedule.addOperator(operator);
-
-        operator = new SwapOperator(rateCategoryParameter, 10);
-        operator.setWeight(1.0);
-        schedule.addOperator(operator);
-
-        operator = new RandomWalkIntegerOperator(rateCategoryParameter, 1, 10.0);
-        schedule.addOperator(operator);
-
-        operator = new UniformIntegerOperator(rateCategoryParameter, (int) (double)rateCategoryParameter.getBounds().getLowerLimit(0),
-                (int) (double)rateCategoryParameter.getBounds().getUpperLimit(0), 10.0);
-        schedule.addOperator(operator);
-
-        operator = new ScaleOperator(popSize, 0.75);
-        operator.setWeight(3.0);
-        schedule.addOperator(operator);
-
-        Parameter rootHeight = treeModel.getRootHeightParameter();
-        rootHeight.setId(TREE_HEIGHT);
-        operator = new ScaleOperator(rootHeight, 0.75);
-        operator.setWeight(3.0);
-        schedule.addOperator(operator);
-
-        Parameter internalHeights = treeModel.createNodeHeightsParameter(false, true, false);
-        operator = new UniformOperator(internalHeights, 30.0);
-        schedule.addOperator(operator);
-
-        operator = new SubtreeSlideOperator(treeModel, 15.0, 38.0, true, false, false, false, CoercionMode.COERCION_ON);
-        schedule.addOperator(operator);
-
-        operator = new ExchangeOperator(ExchangeOperator.NARROW, treeModel, 15.0);
-//        operator.doOperation();
-        schedule.addOperator(operator);
-
-        operator = new ExchangeOperator(ExchangeOperator.WIDE, treeModel, 3.0);
-//        operator.doOperation();
-        schedule.addOperator(operator);
-
-        operator = new WilsonBalding(treeModel, 3.0);
-//        operator.doOperation();
-        schedule.addOperator(operator);
-
-        //CompoundLikelihood
-        List<Likelihood> likelihoods = new ArrayList<Likelihood>();
-        likelihoods.add(coalescent);
-        Likelihood prior = new CompoundLikelihood(0, likelihoods);
-        prior.setId(CompoundLikelihoodParser.PRIOR);
-
-        likelihoods.clear();
-        likelihoods.add(treeLikelihood);
-        Likelihood likelihood = new CompoundLikelihood(-1, likelihoods);
-
-        likelihoods.clear();
-        likelihoods.add(prior);
-        likelihoods.add(likelihood);
-        Likelihood posterior = new CompoundLikelihood(0, likelihoods);
-        posterior.setId(CompoundLikelihoodParser.POSTERIOR);
-
-        // Log
-        ArrayLogFormatter formatter = new ArrayLogFormatter(false);
-
-        MCLogger[] loggers = new MCLogger[2];
-        loggers[0] = new MCLogger(formatter, 10000, false);
-        loggers[0].add(posterior);
-        loggers[0].add(treeLikelihood);
-        loggers[0].add(rootHeight);
-        loggers[0].add(meanParam);
-        if (stdevParam != null) loggers[0].add(stdevParam);
-        loggers[0].add(meanRate);
-        loggers[0].add(coefficientOfVariation);
-        loggers[0].add(covariance);
-        loggers[0].add(popSize);
-        loggers[0].add(kappa);
-        loggers[0].add(coalescent);
-
-        loggers[1] = new MCLogger(new TabDelimitedFormatter(System.out), 100000, false);
-        loggers[1].add(posterior);
-        loggers[1].add(treeLikelihood);
-        loggers[1].add(rootHeight);
-        loggers[1].add(meanRate);
-        loggers[1].add(coalescent);
-
-        // MCMC
-        MCMC mcmc = new MCMC("mcmc1");
-        MCMCOptions options = new MCMCOptions(10000000);
-
-        mcmc.setShowOperatorAnalysis(true);
-        mcmc.init(options, posterior, schedule, loggers);
-        mcmc.run();
-
-        // time
-        System.out.println(mcmc.getTimer().toString());
-
-        // Tracer
-        List<Trace> traces = formatter.getTraces();
-        ArrayTraceList traceList = new ArrayTraceList("RandomLocalClockTest", traces, 0);
-
-        for (int i = 1; i < traces.size(); i++) {
-            traceList.analyseTrace(i);
-        }
-
-        return traceList;
-    }
-
-    public static Test suite() {
-        return new TestSuite(UncorrelatedRelaxedClockTest.class);
-    }
+//    public void setUp() throws Exception {
+//        super.setUp();
+//
+//        MathUtils.setSeed(666);
+//
+//        createAlignment(DENGUE4_TAXON_SEQUENCE, Nucleotides.INSTANCE);
+//    }
+//
+//    public void testLogNormal() throws Exception {
+//        meanParam = new Parameter.Default(LogNormalDistributionModelParser.MEAN, 2.3E-5, 0, 100.0);
+//        stdevParam = new Parameter.Default(LogNormalDistributionModelParser.STDEV, 0.1, 0, 10.0);
+//        ParametricDistributionModel distributionModel = new LogNormalDistributionModel(meanParam, stdevParam, 0.0, true, false); // meanInRealSpace="true"
+//
+//        ArrayTraceList traceList = UncorrelatedRelaxedClock(distributionModel);
+//
+////        <expectation name="posterior" value="-3927.81"/>
+////        <expectation name="ucld.mean" value="8.28472E-4"/>
+////        <expectation name="ucld.stdev" value="0.17435"/>
+////        <expectation name="meanRate" value="8.09909E-4"/>
+////        <expectation name="coefficientOfVariation" value="0.15982"/>
+////        <expectation name="covariance" value="-3.81803E-2"/>
+////        <expectation name="constant.popSize" value="37.3524"/>
+////        <expectation name="hky.kappa" value="18.3053"/>
+////        <expectation name="treeModel.rootHeight" value="69.2953"/>
+////        <expectation name="treeLikelihood" value="-3855.78"/>
+////        <expectation name="skyline" value="-72.0313"/>   ???
+//
+//        TraceCorrelation likelihoodStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(CompoundLikelihoodParser.POSTERIOR));
+//        assertExpectation(CompoundLikelihoodParser.POSTERIOR, likelihoodStats, -3927.81);
+//
+//        likelihoodStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(TreeLikelihoodParser.TREE_LIKELIHOOD));
+//        assertExpectation(TreeLikelihoodParser.TREE_LIKELIHOOD, likelihoodStats, -3855.78);
+//
+//        TraceCorrelation treeHeightStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(TREE_HEIGHT));
+//        assertExpectation(TREE_HEIGHT, treeHeightStats, 69.2953);
+//
+//        TraceCorrelation kappaStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(HKYParser.KAPPA));
+//        assertExpectation(HKYParser.KAPPA, kappaStats, 18.06518);
+//
+//        TraceCorrelation ucldStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(LogNormalDistributionModelParser.MEAN));
+//        assertExpectation(LogNormalDistributionModelParser.MEAN, ucldStats, 8.0591451486E-4);
+//
+//        ucldStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(LogNormalDistributionModelParser.STDEV));
+//        assertExpectation(LogNormalDistributionModelParser.STDEV, ucldStats, 0.16846023066431434);
+//
+//        TraceCorrelation rateStats = traceList.getCorrelationStatistics(traceList.getTraceIndex("meanRate"));
+//        assertExpectation("meanRate", rateStats, 8.010906E-4);
+//
+//        TraceCorrelation coefficientOfVariationStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(RateStatisticParser.COEFFICIENT_OF_VARIATION));
+//        assertExpectation(RateStatisticParser.COEFFICIENT_OF_VARIATION, coefficientOfVariationStats, 0.15982);
+//
+//        TraceCorrelation covarianceStats = traceList.getCorrelationStatistics(traceList.getTraceIndex("covariance"));
+//        assertExpectation("covariance", covarianceStats, -0.0260333026);
+//
+//        TraceCorrelation popStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(ConstantPopulationModelParser.POPULATION_SIZE));
+//        assertExpectation(ConstantPopulationModelParser.POPULATION_SIZE, popStats, 37.3524);
+//
+//        TraceCorrelation coalescentStats = traceList.getCorrelationStatistics(traceList.getTraceIndex("coalescent"));
+//        assertExpectation("coalescent", coalescentStats, -72.0313);
+//    }
+//
+//    public void testExponential() throws Exception {
+//        meanParam = new Parameter.Default(1.0);
+//        meanParam.setId(DistributionModelParser.MEAN);
+//        stdevParam = null;
+//        ParametricDistributionModel distributionModel = new ExponentialDistributionModel(meanParam); // offset = 0
+//
+//        ArrayTraceList traceList = UncorrelatedRelaxedClock(distributionModel);
+//
+//        TraceCorrelation likelihoodStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(CompoundLikelihoodParser.POSTERIOR));
+//        assertExpectation(CompoundLikelihoodParser.POSTERIOR, likelihoodStats, -3958.7409);
+////        System.out.println("likelihoodStats = " + likelihoodStats.getMean());
+//
+//        likelihoodStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(TreeLikelihoodParser.TREE_LIKELIHOOD));
+//        assertExpectation(TreeLikelihoodParser.TREE_LIKELIHOOD, likelihoodStats, -3885.26939);
+////        System.out.println("treelikelihoodStats = " + likelihoodStats.getMean());
+//
+//        TraceCorrelation treeHeightStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(TREE_HEIGHT));
+//        assertExpectation(TREE_HEIGHT, treeHeightStats, 84.3529526);
+//
+//        TraceCorrelation kappaStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(HKYParser.KAPPA));
+//        assertExpectation(HKYParser.KAPPA, kappaStats, 18.38065);
+//
+//        TraceCorrelation ucedStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(DistributionModelParser.MEAN));
+//        assertExpectation(DistributionModelParser.MEAN, ucedStats, 0.0019344134887784579);
+////        System.out.println("ucedStats = " + ucedStats.getMean());
+//
+//        TraceCorrelation rateStats = traceList.getCorrelationStatistics(traceList.getTraceIndex("meanRate"));
+//        assertExpectation("meanRate", rateStats, 0.0020538802366337084);
+////        System.out.println("rateStats = " + rateStats.getMean());
+//
+//        TraceCorrelation coefficientOfVariationStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(RateStatisticParser.COEFFICIENT_OF_VARIATION));
+//        assertExpectation(RateStatisticParser.COEFFICIENT_OF_VARIATION, coefficientOfVariationStats, 0.773609960455);
+////        System.out.println("coefficientOfVariationStats = " + coefficientOfVariationStats.getMean());
+//
+//        TraceCorrelation covarianceStats = traceList.getCorrelationStatistics(traceList.getTraceIndex("covariance"));
+//        assertExpectation("covariance", covarianceStats, -0.07042030641301375);
+////        System.out.println("covarianceStats = " + covarianceStats.getMean());
+//
+//        TraceCorrelation popStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(ConstantPopulationModelParser.POPULATION_SIZE));
+//        assertExpectation(ConstantPopulationModelParser.POPULATION_SIZE, popStats, 43.4478);
+////        System.out.println("popStats = " + popStats.getMean());
+//
+//        TraceCorrelation coalescentStats = traceList.getCorrelationStatistics(traceList.getTraceIndex("coalescent"));
+//        assertExpectation("coalescent", coalescentStats, -73.4715);
+////        System.out.println("coalescentStats = " + coalescentStats.getMean());
+//    }
+//
+//    private ArrayTraceList UncorrelatedRelaxedClock(ParametricDistributionModel distributionModel) throws Exception {
+//        Parameter popSize = new Parameter.Default(ConstantPopulationModelParser.POPULATION_SIZE, 380.0, 0, 38000.0);
+//        ConstantPopulationModel constantModel = createRandomInitialTree(popSize);
+//
+//        CoalescentLikelihood coalescent = new CoalescentLikelihood(treeModel, null, new ArrayList<TaxonList>(), constantModel);
+//        coalescent.setId("coalescent");
+//
+//        // clock model
+//        Parameter rateCategoryParameter = new Parameter.Default(32);
+//        rateCategoryParameter.setId(DiscretizedBranchRates.BRANCH_RATES);
+//
+//        DiscretizedBranchRates branchRateModel = new DiscretizedBranchRates(treeModel, rateCategoryParameter,
+//                distributionModel, 1, false, Double.NaN, false, false);
+//
+//        RateStatistic meanRate = new RateStatistic("meanRate", treeModel, branchRateModel, true, true, RateStatisticParser.MEAN);
+//        RateStatistic coefficientOfVariation = new RateStatistic(RateStatisticParser.COEFFICIENT_OF_VARIATION, treeModel, branchRateModel,
+//                true, true, RateStatisticParser.COEFFICIENT_OF_VARIATION);
+//        RateCovarianceStatistic covariance = new RateCovarianceStatistic("covariance", treeModel, branchRateModel);
+//
+//        // Sub model
+//        Parameter freqs = new Parameter.Default(alignment.getStateFrequencies());
+//        Parameter kappa = new Parameter.Default(HKYParser.KAPPA, 1.0, 0, 100.0);
+//
+//        FrequencyModel f = new FrequencyModel(Nucleotides.INSTANCE, freqs);
+//        HKY hky = new HKY(kappa, f);
+//
+//        //siteModel
+//        GammaSiteModel siteModel = new GammaSiteModel(hky);
+//        Parameter mu = new Parameter.Default(GammaSiteModelParser.MUTATION_RATE, 1.0, 0, Double.POSITIVE_INFINITY);
+//        siteModel.setMutationRateParameter(mu);
+//
+//        //treeLikelihood
+//        SitePatterns patterns = new SitePatterns(alignment, null, 0, -1, 1, true);
+//
+//        TreeLikelihood treeLikelihood = new TreeLikelihood(patterns, treeModel, siteModel, branchRateModel, null,
+//                false, false, true, false, false);
+//        treeLikelihood.setId(TreeLikelihoodParser.TREE_LIKELIHOOD);
+//
+//        // Operators
+//        OperatorSchedule schedule = new SimpleOperatorSchedule();
+//
+//        MCMCOperator operator = new ScaleOperator(kappa, 0.75);
+//        operator.setWeight(1.0);
+//        schedule.addOperator(operator);
+//
+//        operator = new ScaleOperator(meanParam, 0.75);
+//        operator.setWeight(3.0);
+//        schedule.addOperator(operator);
+//
+//        if (stdevParam != null) {
+//            operator = new ScaleOperator(stdevParam, 0.75);
+//            operator.setWeight(3.0);
+//            schedule.addOperator(operator);
+//        }
+//
+//        Parameter allInternalHeights = treeModel.createNodeHeightsParameter(true, true, false);
+//        operator = new UpDownOperator(new Scalable[]{new Scalable.Default(meanParam)},
+//                new Scalable[] {new Scalable.Default(allInternalHeights)}, 0.75, 3.0, CoercionMode.COERCION_ON);
+//        schedule.addOperator(operator);
+//
+//        operator = new SwapOperator(rateCategoryParameter, 10);
+//        operator.setWeight(1.0);
+//        schedule.addOperator(operator);
+//
+//        operator = new RandomWalkIntegerOperator(rateCategoryParameter, 1, 10.0);
+//        schedule.addOperator(operator);
+//
+//        operator = new UniformIntegerOperator(rateCategoryParameter, (int) (double)rateCategoryParameter.getBounds().getLowerLimit(0),
+//                (int) (double)rateCategoryParameter.getBounds().getUpperLimit(0), 10.0);
+//        schedule.addOperator(operator);
+//
+//        operator = new ScaleOperator(popSize, 0.75);
+//        operator.setWeight(3.0);
+//        schedule.addOperator(operator);
+//
+//        Parameter rootHeight = treeModel.getRootHeightParameter();
+//        rootHeight.setId(TREE_HEIGHT);
+//        operator = new ScaleOperator(rootHeight, 0.75);
+//        operator.setWeight(3.0);
+//        schedule.addOperator(operator);
+//
+//        Parameter internalHeights = treeModel.createNodeHeightsParameter(false, true, false);
+//        operator = new UniformOperator(internalHeights, 30.0);
+//        schedule.addOperator(operator);
+//
+//        operator = new SubtreeSlideOperator(treeModel, 15.0, 38.0, true, false, false, false, CoercionMode.COERCION_ON);
+//        schedule.addOperator(operator);
+//
+//        operator = new ExchangeOperator(ExchangeOperator.NARROW, treeModel, 15.0);
+////        operator.doOperation();
+//        schedule.addOperator(operator);
+//
+//        operator = new ExchangeOperator(ExchangeOperator.WIDE, treeModel, 3.0);
+////        operator.doOperation();
+//        schedule.addOperator(operator);
+//
+//        operator = new WilsonBalding(treeModel, 3.0);
+////        operator.doOperation();
+//        schedule.addOperator(operator);
+//
+//        //CompoundLikelihood
+//        List<Likelihood> likelihoods = new ArrayList<Likelihood>();
+//        likelihoods.add(coalescent);
+//        Likelihood prior = new CompoundLikelihood(0, likelihoods);
+//        prior.setId(CompoundLikelihoodParser.PRIOR);
+//
+//        likelihoods.clear();
+//        likelihoods.add(treeLikelihood);
+//        Likelihood likelihood = new CompoundLikelihood(-1, likelihoods);
+//
+//        likelihoods.clear();
+//        likelihoods.add(prior);
+//        likelihoods.add(likelihood);
+//        Likelihood posterior = new CompoundLikelihood(0, likelihoods);
+//        posterior.setId(CompoundLikelihoodParser.POSTERIOR);
+//
+//        // Log
+//        ArrayLogFormatter formatter = new ArrayLogFormatter(false);
+//
+//        MCLogger[] loggers = new MCLogger[2];
+//        loggers[0] = new MCLogger(formatter, 10000, false);
+//        loggers[0].add(posterior);
+//        loggers[0].add(treeLikelihood);
+//        loggers[0].add(rootHeight);
+//        loggers[0].add(meanParam);
+//        if (stdevParam != null) loggers[0].add(stdevParam);
+//        loggers[0].add(meanRate);
+//        loggers[0].add(coefficientOfVariation);
+//        loggers[0].add(covariance);
+//        loggers[0].add(popSize);
+//        loggers[0].add(kappa);
+//        loggers[0].add(coalescent);
+//
+//        loggers[1] = new MCLogger(new TabDelimitedFormatter(System.out), 100000, false);
+//        loggers[1].add(posterior);
+//        loggers[1].add(treeLikelihood);
+//        loggers[1].add(rootHeight);
+//        loggers[1].add(meanRate);
+//        loggers[1].add(coalescent);
+//
+//        // MCMC
+//        MCMC mcmc = new MCMC("mcmc1");
+//        MCMCOptions options = new MCMCOptions(10000000);
+//
+//        mcmc.setShowOperatorAnalysis(true);
+//        mcmc.init(options, posterior, schedule, loggers);
+//        mcmc.run();
+//
+//        // time
+//        System.out.println(mcmc.getTimer().toString());
+//
+//        // Tracer
+//        List<Trace> traces = formatter.getTraces();
+//        ArrayTraceList traceList = new ArrayTraceList("RandomLocalClockTest", traces, 0);
+//
+//        for (int i = 1; i < traces.size(); i++) {
+//            traceList.analyseTrace(i);
+//        }
+//
+//        return traceList;
+//    }
+//
+//    public static Test suite() {
+//        return new TestSuite(UncorrelatedRelaxedClockTest.class);
+//    }
 }
 
diff --git a/src/test/dr/evomodel/coalescent/MultiEpochExponentialTest.java b/src/test/dr/evomodel/coalescent/MultiEpochExponentialTest.java
new file mode 100644
index 0000000..b640f5c
--- /dev/null
+++ b/src/test/dr/evomodel/coalescent/MultiEpochExponentialTest.java
@@ -0,0 +1,171 @@
+/*
+ * MultiEpochExponentialTest.java
+ *
+ * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
+ *
+ * This file is part of BEAST.
+ * See the NOTICE file distributed with this work for additional
+ * information regarding copyright ownership and licensing.
+ *
+ * BEAST is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+ *
+ *  BEAST is distributed in the hope that it will be useful,
+ *  but WITHOUT ANY WARRANTY; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ *  GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with BEAST; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA  02110-1301  USA
+ */
+
+package test.dr.evomodel.coalescent;
+
+import dr.evolution.coalescent.ExponentialExponential;
+import dr.evolution.coalescent.ExponentialGrowth;
+import dr.evolution.coalescent.MultiEpochExponential;
+import dr.evolution.util.Units;
+import dr.evomodel.coalescent.VDdemographicFunction;
+import dr.evomodel.coalescent.VariableDemographicModel;
+import junit.framework.TestCase;
+
+/**
+ * @author Marc A. Suchard
+ */
+public class MultiEpochExponentialTest extends TestCase {
+
+    public void testExponentialExponential() {
+
+        Units.Type units = Units.Type.YEARS;
+
+        ExponentialExponential ee = new ExponentialExponential(units);
+        ee.setN0(N0);
+        ee.setGrowthRate(rates[0]);
+        ee.setAncestralGrowthRate(rates[1]);
+        ee.setTransitionTime(transitionTimes[0]);
+
+        MultiEpochExponential mee = new MultiEpochExponential(units, 2);
+        mee.setN0(N0);
+        for (int i = 0;i < rates.length; ++i) {
+            mee.setGrowthRate(i, rates[i]);
+        }
+        for (int i = 0; i < transitionTimes.length; ++i) {
+            mee.setTransitionTime(i, transitionTimes[i]);
+        }
+
+        for (double time = 0; time < 20; time += 1.0) {
+            double eeDemo = ee.getDemographic(time);
+            double meeDemo = mee.getDemographic(time);
+            assertEquals(eeDemo, meeDemo, tolerance1);
+        }
+
+        double start = 0.0;
+        double finish = 1.0;
+
+        for (; finish < 20.0; finish += 1.0) {
+            double eeInt = ee.getIntegral(start, finish);
+            double meeIntN = mee.getNumericalIntegral(start, finish);
+            double meeIntA = mee.getAnalyticIntegral(start, finish);
+//            System.err.println(finish + ": " + eeInt + " " + meeIntN + " " + meeIntA);
+            assertEquals(eeInt, meeIntN, tolerance1);
+            assertEquals(meeIntN, meeIntA, tolerance2);
+        }
+
+
+        start = 0.5;
+        finish = 1.0;
+
+        for (; finish < 20.0; finish += 1.0) {
+            double eeInt = ee.getIntegral(start, finish);
+            double meeIntN = mee.getNumericalIntegral(start, finish);
+            double meeIntA = mee.getAnalyticIntegral(start, finish);
+//            System.err.println(finish + ": " + eeInt + " " + meeIntN + " " + meeIntA);
+            assertEquals(eeInt, meeIntN, tolerance1);
+            assertEquals(meeIntN, meeIntA, tolerance2);
+        }
+
+        start = 11.0;
+        finish = 11.0;
+
+        for (; finish < 20.0; finish += 1.0) {
+            double eeInt = ee.getIntegral(start, finish);
+            double meeIntN = mee.getNumericalIntegral(start, finish);
+            double meeIntA = mee.getAnalyticIntegral(start, finish);
+//            System.err.println(finish + ": " + eeInt + " " + meeIntN + " " + meeIntA);
+            assertEquals(eeInt, meeIntN, tolerance1);
+            assertEquals(meeIntN, meeIntA, tolerance2);
+        }
+    }
+
+
+    public void testThreeExponential() {
+
+        Units.Type units = Units.Type.YEARS;
+
+        ExponentialGrowth e = new ExponentialGrowth(units);
+        e.setN0(N0);
+        e.setGrowthRate(rates3[0]);
+
+        MultiEpochExponential mee = new MultiEpochExponential(units, 3);
+        mee.setN0(N0);
+        for (int i = 0; i < rates3.length; ++i) {
+            mee.setGrowthRate(i, rates3[i]);
+        }
+        for (int i = 0; i < transitionTimes3.length; ++i) {
+            mee.setTransitionTime(i, transitionTimes3[i]);
+        }
+
+        double start = 0.0;
+        double finish = 1.0;
+
+        for (; finish < 20.0; finish += 1.0) {
+            double eeInt = e.getIntegral(start, finish);
+            double meeIntN = mee.getNumericalIntegral(start, finish);
+            double meeIntA = mee.getAnalyticIntegral(start, finish);
+//            System.err.println(finish + ": " + eeInt + " " + meeIntN + " " + meeIntA);
+            assertEquals(eeInt, meeIntN, tolerance1);
+            assertEquals(meeIntN, meeIntA, tolerance2);
+        }
+
+
+        start = 0.5;
+        finish = 1.0;
+
+        for (; finish < 20.0; finish += 1.0) {
+            double eeInt = e.getIntegral(start, finish);
+            double meeIntN = mee.getNumericalIntegral(start, finish);
+            double meeIntA = mee.getAnalyticIntegral(start, finish);
+//            System.err.println(finish + ": " + eeInt + " " + meeIntN + " " + meeIntA);
+            assertEquals(eeInt, meeIntN, tolerance1);
+            assertEquals(meeIntN, meeIntA, tolerance2);
+        }
+
+        start = 11.0;
+        finish = 11.0;
+
+        for (; finish < 20.0; finish += 1.0) {
+            double eeInt = e.getIntegral(start, finish);
+            double meeIntN = mee.getNumericalIntegral(start, finish);
+            double meeIntA = mee.getAnalyticIntegral(start, finish);
+//            System.err.println(finish + ": " + eeInt + " " + meeIntN + " " + meeIntA);
+            assertEquals(eeInt, meeIntN, tolerance1);
+            assertEquals(meeIntN, meeIntA, tolerance2);
+        }
+    }
+
+
+    double N0 = 100;
+
+    double[] rates = new double[] { 0.2, -0.2 };
+    double[] transitionTimes = new double[] { 10.0 };
+
+    double[] rates3 = new double[] { 0.1, 0.1, 0.1 };
+    double[] transitionTimes3 = new double[] { 10.0, 15.0 };
+
+    private final static double tolerance1 = 1E-10;
+    private final static double tolerance2 = 1E-5;
+}
diff --git a/src/test/dr/evomodel/substmodel/TwoStateOccupancyMarkovRewardsTest.java b/src/test/dr/evomodel/substmodel/TwoStateOccupancyMarkovRewardsTest.java
index 1e4e1b2..8e59390 100644
--- a/src/test/dr/evomodel/substmodel/TwoStateOccupancyMarkovRewardsTest.java
+++ b/src/test/dr/evomodel/substmodel/TwoStateOccupancyMarkovRewardsTest.java
@@ -30,6 +30,8 @@ import dr.inference.markovjumps.MarkovReward;
 import dr.inference.markovjumps.SericolaSeriesMarkovReward;
 import dr.inference.markovjumps.TwoStateOccupancyMarkovReward;
 import dr.inference.model.Parameter;
+import dr.math.matrixAlgebra.Matrix;
+import dr.math.matrixAlgebra.Vector;
 import org.apache.commons.math.FunctionEvaluationException;
 import org.apache.commons.math.MaxIterationsExceededException;
 import org.apache.commons.math.analysis.UnivariateRealFunction;
@@ -44,49 +46,126 @@ public class TwoStateOccupancyMarkovRewardsTest extends MathTestCase {
 
     private static final double tolerance = 10E-3;
 
-    public void testTwoStateSericolaRewards1() {
-        final double rate = 0.0015;
-//        final double prop = 0.5;
-        final double prop = 0.66666;
+    private double sum(double[] v) {
+        double sum = 0.0;
+        for (double x : v) {
+            sum += x;
+        }
+        return sum;
+    }
 
-        final double branchLength = 2000.0;
-        final boolean print = false;
+    public void testNew() {
 
-//        MarkovReward markovReward = createMarkovReward(rate, prop);
-        MarkovReward markovReward = createSericolaMarkovReward(rate, prop);
+        // Equal rates
+        double rate = 2.1;
+        double prop = 0.5;
+        double eps = 0.3;
+        double branchLength = 2.4;
 
-        run(markovReward, rate, prop, branchLength, print, 1000);
-    }
+        MarkovReward markovReward1 = createMarkovReward(rate, prop);
+        MarkovReward markovReward2 = createSericolaMarkovReward(rate, prop);
 
-    public void testTwoStateSericolaRewards2() {
-        final double rate = 0.0015;
-        final double prop = 0.5;
-//        final double prop = 0.66666;
-        final double branchLength = 1000.0;
-        final boolean print = false;
+        double r1 = markovReward1.computePdf(eps * branchLength, branchLength, 0, 0);
+        double r2 = markovReward2.computePdf(eps * branchLength, branchLength, 0, 0);
 
-        MarkovReward markovReward = createMarkovReward(rate, prop);
-//        MarkovReward markovReward = createSericolaMarkovReward(rate, prop);
+        assertEquals(r1, r2, tolerance);
 
-        run(markovReward, rate, prop, branchLength, print, 1000);
-    }
+        // Unequal rates
+        prop = 0.501;
+        MarkovReward markovReward3 = createMarkovReward(rate, prop);
+        MarkovReward markovReward4 = createSericolaMarkovReward(rate, prop);
 
-    public void testLatentStateBranchRateModel() throws FunctionEvaluationException, MaxIterationsExceededException {
+        double r3 = markovReward3.computePdf(eps * branchLength, branchLength, 0, 0);
+        double r4 = markovReward4.computePdf(eps * branchLength, branchLength, 0, 0);
 
-        LatentStateBranchRateModel model = new LatentStateBranchRateModel(
-                new Parameter.Default(0.001), new Parameter.Default(0.5));
+        assertEquals(r3, r4, tolerance);
 
-        TrapezoidIntegrator integator = new TrapezoidIntegrator();
 
-        final double branchLength = 2000;
-        double integral = integator.integrate(new LatentStateDensityFunction(model, branchLength), 0.0, 1.0);
 
-        System.out.println("testLatentStateBeanchRateModel");
-        System.out.println("Integral = " + integral);
+        System.exit(-1);
+    }
+
+    public void testTwoStateSericolaRewards1() {
+//        final double rate = 0.0015;
+//        final double prop = 0.5;
+//        final double eps = 0.01;
+//        final double branchLength = 2000.0;
+
+        final double rate = 1;
+        final double prop = 0.5;
+        final double eps = 0.1;
+        final double branchLength = 1.2;
+
+        final boolean print = false;
+
+//
+//        TwoStateOccupancyMarkovReward two = (TwoStateOccupancyMarkovReward) markovReward;
+//
+////        run(markovReward, rate, prop, branchLength, print, 1000);
+//
+//        System.err.println(markovReward.computePdf(0.5 * branchLength, branchLength, 0, 0));
+//
+//        System.err.println(new Vector(two.getJumpProbabilities()));
+//
+//        MarkovReward markovReward2 = createMarkovReward(rate, prop + eps);
+//        TwoStateOccupancyMarkovReward two2 = (TwoStateOccupancyMarkovReward) markovReward2;
+//
+//        System.err.println(markovReward2.computePdf(0.5 * branchLength, branchLength, 0, 0));
+//        System.err.println(new Vector(two2.getJumpProbabilities()));
+//        System.err.println("");
+
+//        double[][] C = two2.getC();
+//        double[][] D = two2.getD();
+//
+//        System.err.println("C:\n" + new Matrix(C));
+//
+//        System.err.println("D:\n" + new Matrix(D));
 
-        assertEquals(integral, 1.0, tolerance);
     }
 
+//    public void testTwoStateSericolaRewards1() {
+//        final double rate = 0.0015;
+////        final double prop = 0.5;
+//        final double prop = 0.66666;
+//
+//        final double branchLength = 2000.0;
+//        final boolean print = false;
+//
+////        MarkovReward markovReward = createMarkovReward(rate, prop);
+//        MarkovReward markovReward = createSericolaMarkovReward(rate, prop);
+//
+//        run(markovReward, rate, prop, branchLength, print, 1000);
+//    }
+
+//    public void testTwoStateSericolaRewards2() {
+//        final double rate = 0.0015;
+//        final double prop = 0.5;
+////        final double prop = 0.66666;
+//        final double branchLength = 1000.0;
+//        final boolean print = false;
+//
+//        MarkovReward markovReward = createMarkovReward(rate, prop);
+////        MarkovReward markovReward = createSericolaMarkovReward(rate, prop);
+//
+//        run(markovReward, rate, prop, branchLength, print, 1000);
+//    }
+
+//    public void testLatentStateBranchRateModel() throws FunctionEvaluationException, MaxIterationsExceededException {
+//
+//        LatentStateBranchRateModel model = new LatentStateBranchRateModel(
+//                new Parameter.Default(0.001), new Parameter.Default(0.5));
+//
+//        TrapezoidIntegrator integator = new TrapezoidIntegrator();
+//
+//        final double branchLength = 2000;
+//        double integral = integator.integrate(new LatentStateDensityFunction(model, branchLength), 0.0, 1.0);
+//
+//        System.out.println("testLatentStateBeanchRateModel");
+//        System.out.println("Integral = " + integral);
+//
+//        assertEquals(integral, 1.0, tolerance);
+//    }
+
     private void run(MarkovReward markovReward, double rate, double prop, double branchLength,
                      boolean print, int length) {
         DensityFunction densityFunction = new DensityFunction(markovReward, branchLength, rate, prop);

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-med/beast-mcmc.git



More information about the debian-med-commit mailing list